| column | type | length / values |
|---|---|---|
| commit | string | 40 |
| old_file | string | 4–118 |
| new_file | string | 4–118 |
| old_contents | string | 0–2.94k |
| new_contents | string | 1–4.43k |
| subject | string | 15–444 |
| message | string | 16–3.45k |
| lang | string | 1 distinct value |
| license | string | 13 distinct values |
| repos | string | 5–43.2k |
| prompt | string | 17–4.58k |
| response | string | 1–4.43k |
| prompt_tagged | string | 58–4.62k |
| response_tagged | string | 1–4.43k |
| text | string | 132–7.29k |
| text_tagged | string | 173–7.33k |
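The six columns after `repos` are derived from the raw fields: `prompt` concatenates the old file contents with the commit message, `response` is the new contents verbatim, the `_tagged` variants wrap the same fields in `<commit_before>`, `<commit_msg>`, and `<commit_after>` markers, and `text`/`text_tagged` join each prompt with its response. A minimal sketch of that construction, assuming plain separator-free concatenation; the dataset's actual build script is not shown here, so treat the exact formatting as an assumption:

```python
# Inferred reconstruction of the derived columns from the raw ones.
# The separator-free concatenation is an assumption, not the dataset's
# documented build recipe.
def derive(row: dict) -> dict:
    prompt = row["old_contents"] + row["message"]      # old file, then commit message
    response = row["new_contents"]                     # new file, verbatim
    prompt_tagged = (
        "<commit_before>" + row["old_contents"]
        + "<commit_msg>" + row["message"]
        + "<commit_after>"
    )
    return {
        "prompt": prompt,
        "response": response,
        "prompt_tagged": prompt_tagged,
        "response_tagged": response,
        "text": prompt + response,
        "text_tagged": prompt_tagged + response,
    }
```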
bb80ef40356be4384b0ddf0e4510865d4d33c654
|
appengine_config.py
|
appengine_config.py
|
"""
`appengine_config` gets loaded when starting a new application instance.
"""
import site
import os.path
# add `lib` subdirectory as a site packages directory, so our `main` module can load
# third-party libraries.
site.addsitedir(os.path.join(os.path.dirname(__file__), 'lib'))
|
"""
`appengine_config` gets loaded when starting a new application instance.
"""
from google.appengine.ext import vendor
vendor.add('lib')
|
Use a newer method for specifying the vendored packages directory.
|
Use a newer method for specifying the vendored packages directory.
|
Python
|
mit
|
boulder-python/boulderpython.org,boulder-python/boulderpython.org,boulder-python/boulderpython.org,boulder-python/boulderpython.org
|
"""
`appengine_config` gets loaded when starting a new application instance.
"""
import site
import os.path
# add `lib` subdirectory as a site packages directory, so our `main` module can load
# third-party libraries.
site.addsitedir(os.path.join(os.path.dirname(__file__), 'lib'))
Use a newer method for specifying the vendored packages directory.
|
"""
`appengine_config` gets loaded when starting a new application instance.
"""
from google.appengine.ext import vendor
vendor.add('lib')
|
<commit_before>"""
`appengine_config` gets loaded when starting a new application instance.
"""
import site
import os.path
# add `lib` subdirectory as a site packages directory, so our `main` module can load
# third-party libraries.
site.addsitedir(os.path.join(os.path.dirname(__file__), 'lib'))
<commit_msg>Use a newer method for specifying the vendored packages directory.<commit_after>
|
"""
`appengine_config` gets loaded when starting a new application instance.
"""
from google.appengine.ext import vendor
vendor.add('lib')
|
"""
`appengine_config` gets loaded when starting a new application instance.
"""
import site
import os.path
# add `lib` subdirectory as a site packages directory, so our `main` module can load
# third-party libraries.
site.addsitedir(os.path.join(os.path.dirname(__file__), 'lib'))
Use a newer method for specifying the vendored packages directory."""
`appengine_config` gets loaded when starting a new application instance.
"""
from google.appengine.ext import vendor
vendor.add('lib')
|
<commit_before>"""
`appengine_config` gets loaded when starting a new application instance.
"""
import site
import os.path
# add `lib` subdirectory as a site packages directory, so our `main` module can load
# third-party libraries.
site.addsitedir(os.path.join(os.path.dirname(__file__), 'lib'))
<commit_msg>Use a newer method for specifying the vendored packages directory.<commit_after>"""
`appengine_config` gets loaded when starting a new application instance.
"""
from google.appengine.ext import vendor
vendor.add('lib')
|
2c4823d7a1acbfc048e67a58c0cd581c5699129e
|
biwako/bin/fields/util.py
|
biwako/bin/fields/util.py
|
import sys

from .base import Field


class Reserved(Field):
    def __init__(self, *args, **kwargs):
        super(Reserved, self).__init__(*args, **kwargs)

        # Hack to add the reserved field to the class without
        # having to explicitly give it a (likely useless) name
        frame = sys._getframe(2)
        locals = frame.f_locals
        locals[self.get_available_name(locals.keys())] = self

    def get_available_name(self, locals):
        i = 0
        while True:
            name = '_reserved_%s' % i
            if name not in locals:
                return name
            i += 1

    def set_name(self, name):
        if self.name:
            raise TypeError('Reserved fields must not be given an attribute name')
        super(Reserved, self).set_name(name)

    def encode(self, value):
        return b'\x00' * self.size

    def decode(self, value):
        return None
|
import sys

from .base import Field


class Reserved(Field):
    def __init__(self, *args, **kwargs):
        super(Reserved, self).__init__(*args, **kwargs)

        # Hack to add the reserved field to the class without
        # having to explicitly give it a (likely useless) name
        frame = sys._getframe(2)
        locals = frame.f_locals
        locals[self.get_available_name(locals.keys())] = self

    def get_available_name(self, locals):
        i = 0
        while True:
            name = '_reserved_%s' % i
            if name not in locals:
                return name
            i += 1

    def set_name(self, name):
        if hasattr(self, 'name'):
            raise TypeError('Reserved fields must not be given an attribute name')
        super(Reserved, self).set_name(name)

    def encode(self, value):
        return b'\x00' * self.size

    def decode(self, value):
        return None
|
Fix reserved field name setting
|
Fix reserved field name setting
|
Python
|
bsd-3-clause
|
gulopine/steel
|
3e202c0dd4fa4c99ebee758a13ee5f6e205ef336
|
tests/functional/test_front_page.py
|
tests/functional/test_front_page.py
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals

from webpages import *


@pytest.fixture
def page(browser, server_url, access_token):
    return FrontPage(browser, server_url, access_token)


class TestFrontPage(object):

    def test_should_find_page_div(self, page):
        page.open()
        div = page.find_element_by_id('front')
        assert div is not None
        header = page.find_element_by_tag_name('h1')
        # time.sleep(5)
        # print(header.text)
        assert 'EAvatar ME' in header.text

    def test_can_login_and_logout(self, page):
        page.open()
        page.assert_front_page()
        token_input = page.find_element_by_xpath("//input[@name='token']")
        token_input.send_keys(page.access_token)
        login_btn = page.find_element_by_id('loginBtn')
        # page.sleep(3)
        login_btn.click()
        header2 = page.find_element_by_tag_name('h1')
        assert header2 is not None
        # print("Header:", header2.text)
        assert 'EAvatar' in header2.text
        page.logout()
        # page.sleep(120)
        # page.assert_front_page()
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function, unicode_literals

from webpages import *


@pytest.fixture
def page(browser, server_url, access_token):
    return FrontPage(browser, server_url, access_token)


class TestFrontPage(object):

    def test_should_find_page_div(self, page):
        page.open()
        div = page.find_element_by_id('front')
        assert div is not None
        header = page.find_element_by_tag_name('h1')
        # time.sleep(5)
        # print(header.text)
        assert 'EAvatar ME' in header.text

    def test_can_login(self, page):
        page.open()
        page.assert_front_page()
        token_input = page.find_element_by_xpath("//input[@name='token']")
        token_input.send_keys(page.access_token)
        login_btn = page.find_element_by_id('loginBtn')
        # page.sleep(3)
        login_btn.click()
        header2 = page.find_element_by_tag_name('h1')
        assert header2 is not None
        # print("Header:", header2.text)
        assert 'EAvatar' in header2.text
|
Change failed test case for front page to make build pass
|
Change failed test case for front page to make build pass
|
Python
|
apache-2.0
|
eavatar/eavatar-me,eavatar/eavatar-me,eavatar/eavatar-me,eavatar/eavatar-me
|
9aacf80789d6d540fe9260ecf22d7d489cd330a0
|
bills/urls.py
|
bills/urls.py
|
from . import views
from django.conf.urls import url

urlpatterns = [
    url(r'^by_topic/(?P<topic_selected>(.*))/$', views.bill_list_by_topic, name='by_topic_selected'),
    url(r'^by_topic/', views.bill_list_by_topic, name='by_topic'),
    url(r'^by_location/(?P<location_selected>(.*))/', views.bill_list_by_location, name='by_location_selected'),
    url(r'^by_location/', views.bill_list_by_location, name='by_location'),
    url(r'^by_legislator/', views.bill_list_by_legislator, name='by_legislator'),
    url(r'^current_session/', views.bill_list_current_session, name='current_session'),
    url(r'^detail/(?P<bill_session>(.*))/(?P<bill_identifier>(.*))/$', views.bill_detail, name='bill_detail'),
]
|
from . import views
from django.conf.urls import url

urlpatterns = [
    url(r'^by_topic/', views.bill_list_by_topic, name='by_topic'),
    url(r'^by_location/', views.bill_list_by_location, name='by_location'),
    url(r'^by_legislator/', views.bill_list_by_legislator, name='by_legislator'),
    url(r'^current_session/', views.bill_list_current_session, name='current_session'),
    url(r'^detail/(?P<bill_session>(.*))/(?P<bill_identifier>(.*))/$', views.bill_detail, name='bill_detail'),
]
|
Remove special topics and locations from URLs
|
Remove special topics and locations from URLs
|
Python
|
mit
|
jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot,jamesturk/tot
|
9fa95b373c2b43c6e0852aff82ec4c31821a7742
|
scss/tests/test_files.py
|
scss/tests/test_files.py
|
"""Evaluates all the tests that live in `scss/tests/files`.
A test is any file with a `.scss` extension. It'll be compiled, and the output
will be compared to the contents of a file named `foo.css`.
Currently, test files must be nested exactly one directory below `files/`.
This limitation is completely arbitrary.
"""
from __future__ import absolute_import
import glob
import os.path
import logging
import pytest
from scss import Scss
console = logging.StreamHandler()
logger = logging.getLogger('scss')
logger.setLevel(logging.ERROR)
logger.addHandler(console)
def test_pair_programmatic(scss_file_pair):
scss_fn, css_fn, pytest_trigger = scss_file_pair
if pytest_trigger:
pytest_trigger()
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
directory, _ = os.path.split(scss_fn)
include_dir = os.path.join(directory, 'include')
compiler = Scss(scss_opts=dict(compress=0), search_paths=[include_dir])
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert expected == actual
|
"""Evaluates all the tests that live in `scss/tests/files`.
A test is any file with a `.scss` extension. It'll be compiled, and the output
will be compared to the contents of a file named `foo.css`.
Currently, test files must be nested exactly one directory below `files/`.
This limitation is completely arbitrary.
"""
from __future__ import absolute_import
import glob
import os.path
import logging
import pytest
import scss
console = logging.StreamHandler()
logger = logging.getLogger('scss')
logger.setLevel(logging.ERROR)
logger.addHandler(console)
def test_pair_programmatic(scss_file_pair):
scss_fn, css_fn, pytest_trigger = scss_file_pair
if pytest_trigger:
pytest_trigger()
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
directory, _ = os.path.split(scss_fn)
include_dir = os.path.join(directory, 'include')
scss.config.STATIC_ROOT = os.path.join(directory, 'static')
compiler = scss.Scss(scss_opts=dict(compress=0), search_paths=[include_dir])
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert expected == actual
|
Add static root path to tests
|
Add static root path to tests
|
Python
|
mit
|
Kronuz/pyScss,hashamali/pyScss,cpfair/pyScss,cpfair/pyScss,cpfair/pyScss,Kronuz/pyScss,hashamali/pyScss,hashamali/pyScss,Kronuz/pyScss,Kronuz/pyScss
|
"""Evaluates all the tests that live in `scss/tests/files`.
A test is any file with a `.scss` extension. It'll be compiled, and the output
will be compared to the contents of a file named `foo.css`.
Currently, test files must be nested exactly one directory below `files/`.
This limitation is completely arbitrary.
"""
from __future__ import absolute_import
import glob
import os.path
import logging
import pytest
from scss import Scss
console = logging.StreamHandler()
logger = logging.getLogger('scss')
logger.setLevel(logging.ERROR)
logger.addHandler(console)
def test_pair_programmatic(scss_file_pair):
scss_fn, css_fn, pytest_trigger = scss_file_pair
if pytest_trigger:
pytest_trigger()
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
directory, _ = os.path.split(scss_fn)
include_dir = os.path.join(directory, 'include')
compiler = Scss(scss_opts=dict(compress=0), search_paths=[include_dir])
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert expected == actual
Add static root path to tests
|
"""Evaluates all the tests that live in `scss/tests/files`.
A test is any file with a `.scss` extension. It'll be compiled, and the output
will be compared to the contents of a file named `foo.css`.
Currently, test files must be nested exactly one directory below `files/`.
This limitation is completely arbitrary.
"""
from __future__ import absolute_import
import glob
import os.path
import logging
import pytest
import scss
console = logging.StreamHandler()
logger = logging.getLogger('scss')
logger.setLevel(logging.ERROR)
logger.addHandler(console)
def test_pair_programmatic(scss_file_pair):
scss_fn, css_fn, pytest_trigger = scss_file_pair
if pytest_trigger:
pytest_trigger()
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
directory, _ = os.path.split(scss_fn)
include_dir = os.path.join(directory, 'include')
scss.config.STATIC_ROOT = os.path.join(directory, 'static')
compiler = scss.Scss(scss_opts=dict(compress=0), search_paths=[include_dir])
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert expected == actual
|
<commit_before>"""Evaluates all the tests that live in `scss/tests/files`.
A test is any file with a `.scss` extension. It'll be compiled, and the output
will be compared to the contents of a file named `foo.css`.
Currently, test files must be nested exactly one directory below `files/`.
This limitation is completely arbitrary.
"""
from __future__ import absolute_import
import glob
import os.path
import logging
import pytest
from scss import Scss
console = logging.StreamHandler()
logger = logging.getLogger('scss')
logger.setLevel(logging.ERROR)
logger.addHandler(console)
def test_pair_programmatic(scss_file_pair):
scss_fn, css_fn, pytest_trigger = scss_file_pair
if pytest_trigger:
pytest_trigger()
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
directory, _ = os.path.split(scss_fn)
include_dir = os.path.join(directory, 'include')
compiler = Scss(scss_opts=dict(compress=0), search_paths=[include_dir])
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert expected == actual
<commit_msg>Add static root path to tests<commit_after>
|
"""Evaluates all the tests that live in `scss/tests/files`.
A test is any file with a `.scss` extension. It'll be compiled, and the output
will be compared to the contents of a file named `foo.css`.
Currently, test files must be nested exactly one directory below `files/`.
This limitation is completely arbitrary.
"""
from __future__ import absolute_import
import glob
import os.path
import logging
import pytest
import scss
console = logging.StreamHandler()
logger = logging.getLogger('scss')
logger.setLevel(logging.ERROR)
logger.addHandler(console)
def test_pair_programmatic(scss_file_pair):
scss_fn, css_fn, pytest_trigger = scss_file_pair
if pytest_trigger:
pytest_trigger()
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
directory, _ = os.path.split(scss_fn)
include_dir = os.path.join(directory, 'include')
scss.config.STATIC_ROOT = os.path.join(directory, 'static')
compiler = scss.Scss(scss_opts=dict(compress=0), search_paths=[include_dir])
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert expected == actual
|
"""Evaluates all the tests that live in `scss/tests/files`.
A test is any file with a `.scss` extension. It'll be compiled, and the output
will be compared to the contents of a file named `foo.css`.
Currently, test files must be nested exactly one directory below `files/`.
This limitation is completely arbitrary.
"""
from __future__ import absolute_import
import glob
import os.path
import logging
import pytest
from scss import Scss
console = logging.StreamHandler()
logger = logging.getLogger('scss')
logger.setLevel(logging.ERROR)
logger.addHandler(console)
def test_pair_programmatic(scss_file_pair):
scss_fn, css_fn, pytest_trigger = scss_file_pair
if pytest_trigger:
pytest_trigger()
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
directory, _ = os.path.split(scss_fn)
include_dir = os.path.join(directory, 'include')
compiler = Scss(scss_opts=dict(compress=0), search_paths=[include_dir])
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert expected == actual
Add static root path to tests"""Evaluates all the tests that live in `scss/tests/files`.
A test is any file with a `.scss` extension. It'll be compiled, and the output
will be compared to the contents of a file named `foo.css`.
Currently, test files must be nested exactly one directory below `files/`.
This limitation is completely arbitrary.
"""
from __future__ import absolute_import
import glob
import os.path
import logging
import pytest
import scss
console = logging.StreamHandler()
logger = logging.getLogger('scss')
logger.setLevel(logging.ERROR)
logger.addHandler(console)
def test_pair_programmatic(scss_file_pair):
scss_fn, css_fn, pytest_trigger = scss_file_pair
if pytest_trigger:
pytest_trigger()
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
directory, _ = os.path.split(scss_fn)
include_dir = os.path.join(directory, 'include')
scss.config.STATIC_ROOT = os.path.join(directory, 'static')
compiler = scss.Scss(scss_opts=dict(compress=0), search_paths=[include_dir])
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert expected == actual
|
<commit_before>"""Evaluates all the tests that live in `scss/tests/files`.
A test is any file with a `.scss` extension. It'll be compiled, and the output
will be compared to the contents of a file named `foo.css`.
Currently, test files must be nested exactly one directory below `files/`.
This limitation is completely arbitrary.
"""
from __future__ import absolute_import
import glob
import os.path
import logging
import pytest
from scss import Scss
console = logging.StreamHandler()
logger = logging.getLogger('scss')
logger.setLevel(logging.ERROR)
logger.addHandler(console)
def test_pair_programmatic(scss_file_pair):
scss_fn, css_fn, pytest_trigger = scss_file_pair
if pytest_trigger:
pytest_trigger()
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
directory, _ = os.path.split(scss_fn)
include_dir = os.path.join(directory, 'include')
compiler = Scss(scss_opts=dict(compress=0), search_paths=[include_dir])
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert expected == actual
<commit_msg>Add static root path to tests<commit_after>"""Evaluates all the tests that live in `scss/tests/files`.
A test is any file with a `.scss` extension. It'll be compiled, and the output
will be compared to the contents of a file named `foo.css`.
Currently, test files must be nested exactly one directory below `files/`.
This limitation is completely arbitrary.
"""
from __future__ import absolute_import
import glob
import os.path
import logging
import pytest
import scss
console = logging.StreamHandler()
logger = logging.getLogger('scss')
logger.setLevel(logging.ERROR)
logger.addHandler(console)
def test_pair_programmatic(scss_file_pair):
scss_fn, css_fn, pytest_trigger = scss_file_pair
if pytest_trigger:
pytest_trigger()
with open(scss_fn) as fh:
source = fh.read()
with open(css_fn) as fh:
expected = fh.read()
directory, _ = os.path.split(scss_fn)
include_dir = os.path.join(directory, 'include')
scss.config.STATIC_ROOT = os.path.join(directory, 'static')
compiler = scss.Scss(scss_opts=dict(compress=0), search_paths=[include_dir])
actual = compiler.compile(source)
# Normalize leading and trailing newlines
actual = actual.strip('\n')
expected = expected.strip('\n')
assert expected == actual
|
efb59846dfc577eb937cb9adc411a7d3c26dd6d9
|
stylo/testing/strategies.py
|
stylo/testing/strategies.py
|
"""Specific hypothesis strategies that are useful for testing.
It defines the following strategies:
- :code:`dimension`: Represents a size or dimension e.g. for numpy arrays image
sizes etc.
- :code:`real`: Represents a real number in the range +/-1 million
"""
from math import pi
from hypothesis.strategies import integers, floats
real = floats(min_value=-1e6, max_value=1e6)
angle = floats(min_value=-pi, max_value=pi)
dimension = integers(min_value=2, max_value=1024)
|
"""Specific hypothesis strategies that are useful for testing.
It defines the following strategies:
- :code:`dimension`: Represents a size or dimension e.g. for numpy arrays image
sizes etc.
- :code:`real`: Represents a real number in the range +/-1 million
"""
from math import pi
from hypothesis.strategies import integers, floats
real = floats(min_value=-1e6, max_value=1e6)
angle = floats(min_value=-pi, max_value=pi)
dimension = integers(min_value=4, max_value=1024)
|
Increase the minimum dimension used in the tests
|
Increase the minimum dimension used in the tests
|
Python
|
mit
|
alcarney/stylo,alcarney/stylo
|
"""Specific hypothesis strategies that are useful for testing.
It defines the following strategies:
- :code:`dimension`: Represents a size or dimension e.g. for numpy arrays image
sizes etc.
- :code:`real`: Represents a real number in the range +/-1 million
"""
from math import pi
from hypothesis.strategies import integers, floats
real = floats(min_value=-1e6, max_value=1e6)
angle = floats(min_value=-pi, max_value=pi)
dimension = integers(min_value=2, max_value=1024)
Increase the minimum dimension used in the tests
|
"""Specific hypothesis strategies that are useful for testing.
It defines the following strategies:
- :code:`dimension`: Represents a size or dimension e.g. for numpy arrays image
sizes etc.
- :code:`real`: Represents a real number in the range +/-1 million
"""
from math import pi
from hypothesis.strategies import integers, floats
real = floats(min_value=-1e6, max_value=1e6)
angle = floats(min_value=-pi, max_value=pi)
dimension = integers(min_value=4, max_value=1024)
|
<commit_before>"""Specific hypothesis strategies that are useful for testing.
It defines the following strategies:
- :code:`dimension`: Represents a size or dimension e.g. for numpy arrays image
sizes etc.
- :code:`real`: Represents a real number in the range +/-1 million
"""
from math import pi
from hypothesis.strategies import integers, floats
real = floats(min_value=-1e6, max_value=1e6)
angle = floats(min_value=-pi, max_value=pi)
dimension = integers(min_value=2, max_value=1024)
<commit_msg>Increase the minimum dimension used in the tests<commit_after>
|
"""Specific hypothesis strategies that are useful for testing.
It defines the following strategies:
- :code:`dimension`: Represents a size or dimension e.g. for numpy arrays image
sizes etc.
- :code:`real`: Represents a real number in the range +/-1 million
"""
from math import pi
from hypothesis.strategies import integers, floats
real = floats(min_value=-1e6, max_value=1e6)
angle = floats(min_value=-pi, max_value=pi)
dimension = integers(min_value=4, max_value=1024)
|
"""Specific hypothesis strategies that are useful for testing.
It defines the following strategies:
- :code:`dimension`: Represents a size or dimension e.g. for numpy arrays image
sizes etc.
- :code:`real`: Represents a real number in the range +/-1 million
"""
from math import pi
from hypothesis.strategies import integers, floats
real = floats(min_value=-1e6, max_value=1e6)
angle = floats(min_value=-pi, max_value=pi)
dimension = integers(min_value=2, max_value=1024)
Increase the minimum dimension used in the tests"""Specific hypothesis strategies that are useful for testing.
It defines the following strategies:
- :code:`dimension`: Represents a size or dimension e.g. for numpy arrays image
sizes etc.
- :code:`real`: Represents a real number in the range +/-1 million
"""
from math import pi
from hypothesis.strategies import integers, floats
real = floats(min_value=-1e6, max_value=1e6)
angle = floats(min_value=-pi, max_value=pi)
dimension = integers(min_value=4, max_value=1024)
|
<commit_before>"""Specific hypothesis strategies that are useful for testing.
It defines the following strategies:
- :code:`dimension`: Represents a size or dimension e.g. for numpy arrays image
sizes etc.
- :code:`real`: Represents a real number in the range +/-1 million
"""
from math import pi
from hypothesis.strategies import integers, floats
real = floats(min_value=-1e6, max_value=1e6)
angle = floats(min_value=-pi, max_value=pi)
dimension = integers(min_value=2, max_value=1024)
<commit_msg>Increase the minimum dimension used in the tests<commit_after>"""Specific hypothesis strategies that are useful for testing.
It defines the following strategies:
- :code:`dimension`: Represents a size or dimension e.g. for numpy arrays image
sizes etc.
- :code:`real`: Represents a real number in the range +/-1 million
"""
from math import pi
from hypothesis.strategies import integers, floats
real = floats(min_value=-1e6, max_value=1e6)
angle = floats(min_value=-pi, max_value=pi)
dimension = integers(min_value=4, max_value=1024)
|
de958b9fc68ad6209749edbfe2bdde0ef68cf3c8
|
experiments/middleware.py
|
experiments/middleware.py
|
from experiments.utils import participant


class ExperimentsRetentionMiddleware(object):
    def process_response(self, request, response):
        # We detect widgets by relying on the fact that they are flagged as being embedable, and don't include these in visit tracking
        if getattr(response, 'xframe_options_exempt', False):
            return response

        experiment_user = participant(request)
        experiment_user.visit()

        return response
|
from experiments.utils import participant


class ExperimentsRetentionMiddleware(object):
    def process_response(self, request, response):
        # Don't track, failed pages, ajax requests, logged out users or widget impressions.
        # We detect widgets by relying on the fact that they are flagged as being embedable
        if response.status_code != 200 or request.is_ajax() or getattr(response, 'xframe_options_exempt', False):
            return response

        experiment_user = participant(request)
        experiment_user.visit()

        return response
|
Revert "tidy up ajax page loads so they count towards experiments"
|
Revert "tidy up ajax page loads so they count towards experiments"
This reverts commit a37cacb96c4021fcc2f9e23e024d8947bb4e644f.
|
Python
|
mit
|
mixcloud/django-experiments,bjarnoldus/django-experiments,bjarnoldus/django-experiments,robertobarreda/django-experiments,mixcloud/django-experiments,robertobarreda/django-experiments,squamous/django-experiments,squamous/django-experiments,uhuramedia/django-experiments,mixcloud/django-experiments,bjarnoldus/django-experiments,uhuramedia/django-experiments,squamous/django-experiments,uhuramedia/django-experiments,robertobarreda/django-experiments
|
88fc0f980f0efa403ab5ce7d6775bce008b284fc
|
_setup_database.py
|
_setup_database.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import argparse

from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from utils import prepare_logging

prepare_logging(log_types=['file', 'screen'])

if __name__ == '__main__':

    parser = argparse.ArgumentParser(
        description='Setup script for NHL database creation.')
    parser.add_argument(
        'steps', metavar='setup_steps', help='Setup steps to execute.',
        choices=['a', 't', 'd', 'p', 'ps', 'pd'])

    args = parser.parse_args()
    setup_steps = args.steps

    # migrating teams from json file to database
    if setup_steps in ['t', 'a']:
        migrate_teams(simulation=True)

    # creating divisions from division configuration file
    if setup_steps in ['d', 'a']:
        create_divisions(simulation=True)

    # migrating players from json file to database
    if setup_steps in ['p', 'a']:
        migrate_players(simulation=True)

    # retrieving player season statistics for all players in database
    if setup_steps in ['ps', 'a']:
        create_player_seasons(simulation=False)

    # retrieving individual player data for all players in database
    if setup_steps in ['pd', 'a']:
        create_player_data(simulation=False)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-

import argparse

from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from setup.create_player_seasons import create_player_contracts
from utils import prepare_logging

prepare_logging(log_types=['file', 'screen'])

if __name__ == '__main__':

    parser = argparse.ArgumentParser(
        description='Setup script for NHL database creation.')
    parser.add_argument(
        'steps', metavar='setup_steps', help='Setup steps to execute.',
        choices=['a', 'c', 't', 'd', 'p', 'ps', 'pd'])

    args = parser.parse_args()
    setup_steps = args.steps

    # migrating teams from json file to database
    if setup_steps in ['t', 'a']:
        migrate_teams(simulation=True)

    # creating divisions from division configuration file
    if setup_steps in ['d', 'a']:
        create_divisions(simulation=True)

    # migrating players from json file to database
    if setup_steps in ['p', 'a']:
        migrate_players(simulation=True)

    # retrieving player season statistics for all players in database
    if setup_steps in ['ps', 'a']:
        create_player_seasons(simulation=False)

    # retrieving individual player data for all players in database
    if setup_steps in ['pd', 'a']:
        create_player_data(simulation=False)

    if setup_steps in ['c']:
        create_player_contracts(simulation=False)
|
Add contract retrieval option to database setup script
|
Add contract retrieval option to database setup script
|
Python
|
mit
|
leaffan/pynhldb
|
create_player_data(simulation=False)
Add contract retrieval option to database setup script#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from setup.create_player_seasons import create_player_contracts
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Setup script for NHL database creation.')
parser.add_argument(
'steps', metavar='setup_steps', help='Setup steps to execute.',
choices=['a', 'c', 't', 'd', 'p', 'ps', 'pd'])
args = parser.parse_args()
setup_steps = args.steps
# migrating teams from json file to database
if setup_steps in ['t', 'a']:
migrate_teams(simulation=True)
# creating divisions from division configuration file
if setup_steps in ['d', 'a']:
create_divisions(simulation=True)
# migrating players from json file to database
if setup_steps in ['p', 'a']:
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
if setup_steps in ['ps', 'a']:
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
if setup_steps in ['pd', 'a']:
create_player_data(simulation=False)
if setup_steps in ['c']:
create_player_contracts(simulation=False)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Setup script for NHL database creation.')
parser.add_argument(
'steps', metavar='setup_steps', help='Setup steps to execute.',
choices=['a', 't', 'd', 'p', 'ps', 'pd'])
args = parser.parse_args()
setup_steps = args.steps
# migrating teams from json file to database
if setup_steps in ['t', 'a']:
migrate_teams(simulation=True)
# creating divisions from division configuration file
if setup_steps in ['d', 'a']:
create_divisions(simulation=True)
# migrating players from json file to database
if setup_steps in ['p', 'a']:
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
if setup_steps in ['ps', 'a']:
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
if setup_steps in ['pd', 'a']:
create_player_data(simulation=False)
<commit_msg>Add contract retrieval option to database setup script<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from setup.create_teams import migrate_teams
from setup.create_divisions import create_divisions
from setup.create_players import migrate_players
from setup.create_player_seasons import create_player_seasons
from setup.create_player_seasons import create_player_data
from setup.create_player_seasons import create_player_contracts
from utils import prepare_logging
prepare_logging(log_types=['file', 'screen'])
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description='Setup script for NHL database creation.')
parser.add_argument(
'steps', metavar='setup_steps', help='Setup steps to execute.',
choices=['a', 'c', 't', 'd', 'p', 'ps', 'pd'])
args = parser.parse_args()
setup_steps = args.steps
# migrating teams from json file to database
if setup_steps in ['t', 'a']:
migrate_teams(simulation=True)
# creating divisions from division configuration file
if setup_steps in ['d', 'a']:
create_divisions(simulation=True)
# migrating players from json file to database
if setup_steps in ['p', 'a']:
migrate_players(simulation=True)
# retrieving player season statistics for all players in database
if setup_steps in ['ps', 'a']:
create_player_seasons(simulation=False)
# retrieving individual player data for all players in database
if setup_steps in ['pd', 'a']:
create_player_data(simulation=False)
if setup_steps in ['c']:
create_player_contracts(simulation=False)
|
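A quick sketch of how the new option is exercised from the command line; the script's file name is not given in the record, so "setup_db.py" below is an assumption. Note that 'c' is deliberately excluded from the 'a' (all) branch, so contract retrieval only runs when requested explicitly.

# Hypothetical invocation (the real file name is not in the record):
#   $ python setup_db.py c    # retrieve player contracts only
#   $ python setup_db.py a    # all other steps; contracts are NOT included
import argparse

parser = argparse.ArgumentParser(
    description='Setup script for NHL database creation.')
parser.add_argument(
    'steps', metavar='setup_steps', help='Setup steps to execute.',
    choices=['a', 'c', 't', 'd', 'p', 'ps', 'pd'])

args = parser.parse_args(['c'])  # simulates "python setup_db.py c"
assert args.steps == 'c'         # argparse rejected 'c' with exit code 2 before this change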
7a7d3159f774c29748b8236468dfe31729077d5c
|
test/test_ev3_legosensor.py
|
test/test_ev3_legosensor.py
|
from ev3.ev3dev import LegoSensor
import unittest
from util import get_input
import glob
class TestLegoSensor(unittest.TestCase):
def test_LegoSensor(self):
get_input('Attach a Msensor on port 1 then continue')
d = LegoSensor(port=1)
print(d.mode)
print(d.port)
if (len(glob.glob('/sys/class/lego-sensor/sensor*/name')) >0):
name = d.name
print(name)
d = LegoSensor(name=name)
print(d.mode)
print(d.port)
if __name__ == '__main__':
unittest.main()
|
from ev3.ev3dev import LegoSensor
import unittest
from util import get_input
import glob
class TestLegoSensor(unittest.TestCase):
def test_LegoSensor(self):
get_input('Attach a Lego on port 1 then continue')
d = LegoSensor(port=1)
print(d.mode)
print(d.port)
if (len(glob.glob('/sys/class/lego-sensor/sensor*/name')) >0):
name = d.name
print(name)
d = LegoSensor(name=name)
print(d.mode)
print(d.port)
if __name__ == '__main__':
unittest.main()
|
Fix a typo in test
|
Fix a typo in test
|
Python
|
apache-2.0
|
topikachu/python-ev3,topikachu/python-ev3,evz/python-ev3,evz/python-ev3,MaxNoe/python-ev3,MaxNoe/python-ev3
|
from ev3.ev3dev import LegoSensor
import unittest
from util import get_input
import glob
class TestLegoSensor(unittest.TestCase):
def test_LegoSensor(self):
get_input('Attach a Msensor on port 1 then continue')
d = LegoSensor(port=1)
print(d.mode)
print(d.port)
if (len(glob.glob('/sys/class/lego-sensor/sensor*/name')) >0):
name = d.name
print(name)
d = LegoSensor(name=name)
print(d.mode)
print(d.port)
if __name__ == '__main__':
unittest.main()
Fix a typo in test
|
from ev3.ev3dev import LegoSensor
import unittest
from util import get_input
import glob
class TestLegoSensor(unittest.TestCase):
def test_LegoSensor(self):
get_input('Attach a Lego on port 1 then continue')
d = LegoSensor(port=1)
print(d.mode)
print(d.port)
if (len(glob.glob('/sys/class/lego-sensor/sensor*/name')) >0):
name = d.name
print(name)
d = LegoSensor(name=name)
print(d.mode)
print(d.port)
if __name__ == '__main__':
unittest.main()
|
<commit_before>from ev3.ev3dev import LegoSensor
import unittest
from util import get_input
import glob
class TestLegoSensor(unittest.TestCase):
def test_LegoSensor(self):
get_input('Attach a Msensor on port 1 then continue')
d = LegoSensor(port=1)
print(d.mode)
print(d.port)
if (len(glob.glob('/sys/class/lego-sensor/sensor*/name')) >0):
name = d.name
print(name)
d = LegoSensor(name=name)
print(d.mode)
print(d.port)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix a typo in test<commit_after>
|
from ev3.ev3dev import LegoSensor
import unittest
from util import get_input
import glob
class TestLegoSensor(unittest.TestCase):
def test_LegoSensor(self):
get_input('Attach a Lego on port 1 then continue')
d = LegoSensor(port=1)
print(d.mode)
print(d.port)
if (len(glob.glob('/sys/class/lego-sensor/sensor*/name')) >0):
name = d.name
print(name)
d = LegoSensor(name=name)
print(d.mode)
print(d.port)
if __name__ == '__main__':
unittest.main()
|
from ev3.ev3dev import LegoSensor
import unittest
from util import get_input
import glob
class TestLegoSensor(unittest.TestCase):
def test_LegoSensor(self):
get_input('Attach a Msensor on port 1 then continue')
d = LegoSensor(port=1)
print(d.mode)
print(d.port)
if (len(glob.glob('/sys/class/lego-sensor/sensor*/name')) >0):
name = d.name
print(name)
d = LegoSensor(name=name)
print(d.mode)
print(d.port)
if __name__ == '__main__':
unittest.main()
Fix a typo in testfrom ev3.ev3dev import LegoSensor
import unittest
from util import get_input
import glob
class TestLegoSensor(unittest.TestCase):
def test_LegoSensor(self):
get_input('Attach a Lego on port 1 then continue')
d = LegoSensor(port=1)
print(d.mode)
print(d.port)
if (len(glob.glob('/sys/class/lego-sensor/sensor*/name')) >0):
name = d.name
print(name)
d = LegoSensor(name=name)
print(d.mode)
print(d.port)
if __name__ == '__main__':
unittest.main()
|
<commit_before>from ev3.ev3dev import LegoSensor
import unittest
from util import get_input
import glob
class TestLegoSensor(unittest.TestCase):
def test_LegoSensor(self):
get_input('Attach a Msensor on port 1 then continue')
d = LegoSensor(port=1)
print(d.mode)
print(d.port)
if (len(glob.glob('/sys/class/lego-sensor/sensor*/name')) >0):
name = d.name
print(name)
d = LegoSensor(name=name)
print(d.mode)
print(d.port)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix an typo error in test<commit_after>from ev3.ev3dev import LegoSensor
import unittest
from util import get_input
import glob
class TestLegoSensor(unittest.TestCase):
def test_LegoSensor(self):
get_input('Attach a Lego on port 1 then continue')
d = LegoSensor(port=1)
print(d.mode)
print(d.port)
if (len(glob.glob('/sys/class/lego-sensor/sensor*/name')) >0):
name = d.name
print(name)
d = LegoSensor(name=name)
print(d.mode)
print(d.port)
if __name__ == '__main__':
unittest.main()
|
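For context, the glob guard in the test checks whether any sensor nodes are present before exercising name-based construction. A minimal sketch of reading those sysfs name attributes directly, using the same path layout the test globs on:

# Reads the name attribute of every attached lego-sensor node; the path
# pattern matches the glob used in the test above. No output means no
# sensor is attached.
import glob

for node in glob.glob('/sys/class/lego-sensor/sensor*/name'):
    with open(node) as f:
        print(node, '->', f.read().strip())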
0dd6ec1c66b0873cc8f508ad4dffc2aa8fa6ad0d
|
testing/test_need_update.py
|
testing/test_need_update.py
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import os
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
isnew=launch.check_new()
assert isnew
assert os.path.isfile("version.txt")
assert os.path.isfile("version.txt.old")
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import os
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
isnew=launch.check_new()
assert isnew
assert os.path.isfile("version.txt")
assert os.path.isfile("version.txt.old")
# Make sure existing .old can be removed
launch.check_new()
|
Make sure .old in need_update is removed properly
|
Make sure .old in need_update is removed properly
|
Python
|
lgpl-2.1
|
rlee287/pyautoupdate,rlee287/pyautoupdate
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import os
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
isnew=launch.check_new()
assert isnew
assert os.path.isfile("version.txt")
assert os.path.isfile("version.txt.old")
Make sure .old in need_update is removed properly
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import os
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
isnew=launch.check_new()
assert isnew
assert os.path.isfile("version.txt")
assert os.path.isfile("version.txt.old")
# Make sure existing .old can be removed
launch.check_new()
|
<commit_before>from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import os
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
isnew=launch.check_new()
assert isnew
assert os.path.isfile("version.txt")
assert os.path.isfile("version.txt.old")
<commit_msg>Make sure .old in need_update is removed properly<commit_after>
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import os
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
isnew=launch.check_new()
assert isnew
assert os.path.isfile("version.txt")
assert os.path.isfile("version.txt.old")
# Make sure existing .old can be removed
launch.check_new()
|
from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import os
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
isnew=launch.check_new()
assert isnew
assert os.path.isfile("version.txt")
assert os.path.isfile("version.txt.old")
Make sure .old in need_update is removed properlyfrom __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import os
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
isnew=launch.check_new()
assert isnew
assert os.path.isfile("version.txt")
assert os.path.isfile("version.txt.old")
# Make sure existing .old can be removed
launch.check_new()
|
<commit_before>from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import os
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
isnew=launch.check_new()
assert isnew
assert os.path.isfile("version.txt")
assert os.path.isfile("version.txt.old")
<commit_msg>Make sure .old in need_update is removed properly<commit_after>from __future__ import absolute_import, print_function
from ..pyautoupdate.launcher import Launcher
from .pytest_skipif import needinternet
from .pytest_makevers import fixture_update_dir
import os
@needinternet
def test_check_vers_update(fixture_update_dir):
package=fixture_update_dir("0.0.1")
launch = Launcher('',r'http://rlee287.github.io/pyautoupdate/testing/')
isnew=launch.check_new()
assert isnew
assert os.path.isfile("version.txt")
assert os.path.isfile("version.txt.old")
# Make sure existing .old can be removed
launch.check_new()
|
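The second check_new() call in the test only passes if the launcher tolerates a pre-existing version.txt.old. Launcher's internals are not shown in the record, so the rotation logic below is an assumption about what that cleanup looks like:

import os

def rotate_version_file(path='version.txt'):
    old = path + '.old'
    if os.path.isfile(old):
        os.remove(old)        # a leftover .old from an earlier check must not break rotation
    if os.path.isfile(path):
        os.rename(path, old)  # keep the previous version around for comparison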
e4645dd7ecf97a026ced01535086f8fc9efc8ba3
|
src/python/fsqio/pants/buildgen/core/buildgen_base.py
|
src/python/fsqio/pants/buildgen/core/buildgen_base.py
|
# coding=utf-8
# Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
from __future__ import (
absolute_import,
division,
generators,
nested_scopes,
print_function,
unicode_literals,
with_statement,
)
from pants.task.task import Task
from pants.util.memo import memoized_property
from fsqio.pants.buildgen.core.subsystems.buildgen_subsystem import BuildgenSubsystem
class BuildgenBase(Task):
""""A base task that provides the buildgen subsystem to its implementers."""
@classmethod
def global_subsystems(cls):
return super(BuildgenBase, cls).global_subsystems() + (BuildgenSubsystem.Factory,)
@memoized_property
def buildgen_subsystem(self):
# TODO(pl): When pants is a proper library dep, remove this ignore.
# pylint: disable=no-member
return BuildgenSubsystem.Factory.global_instance().create()
|
# coding=utf-8
# Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
from __future__ import (
absolute_import,
division,
generators,
nested_scopes,
print_function,
unicode_literals,
with_statement,
)
from pants.task.task import Task
from pants.util.memo import memoized_property
from fsqio.pants.buildgen.core.subsystems.buildgen_subsystem import BuildgenSubsystem
class BuildgenBase(Task):
""""A base task that provides the buildgen subsystem to its implementers."""
@classmethod
def global_subsystems(cls):
return super(BuildgenBase, cls).global_subsystems() + (BuildgenSubsystem.Factory,)
@classmethod
def implementation_version(cls):
return super(BuildgenBase, cls).implementation_version() + [('BuildgenBase', 1)]
@memoized_property
def buildgen_subsystem(self):
# TODO(pl): When pants is a proper library dep, remove this ignore.
# pylint: disable=no-member
return BuildgenSubsystem.Factory.global_instance().create()
|
Add task version to BuildgenBase.
|
Add task version to BuildgenBase.
Debugging a break after a refactor, I wanted to invalidate
just the buildgen caches.
(sapling split of c73dc81f2af0cdc87d21519b032ae3f6213c932c)
|
Python
|
apache-2.0
|
foursquare/fsqio,foursquare/fsqio,foursquare/fsqio,foursquare/fsqio,foursquare/fsqio
|
# coding=utf-8
# Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
from __future__ import (
absolute_import,
division,
generators,
nested_scopes,
print_function,
unicode_literals,
with_statement,
)
from pants.task.task import Task
from pants.util.memo import memoized_property
from fsqio.pants.buildgen.core.subsystems.buildgen_subsystem import BuildgenSubsystem
class BuildgenBase(Task):
""""A base task that provides the buildgen subsystem to its implementers."""
@classmethod
def global_subsystems(cls):
return super(BuildgenBase, cls).global_subsystems() + (BuildgenSubsystem.Factory,)
@memoized_property
def buildgen_subsystem(self):
# TODO(pl): When pants is a proper library dep, remove this ignore.
# pylint: disable=no-member
return BuildgenSubsystem.Factory.global_instance().create()
Add task version to BuildgenBase.
Debugging a break after a refactor, I wanted to invalidate
just the buildgen caches.
(sapling split of c73dc81f2af0cdc87d21519b032ae3f6213c932c)
|
# coding=utf-8
# Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
from __future__ import (
absolute_import,
division,
generators,
nested_scopes,
print_function,
unicode_literals,
with_statement,
)
from pants.task.task import Task
from pants.util.memo import memoized_property
from fsqio.pants.buildgen.core.subsystems.buildgen_subsystem import BuildgenSubsystem
class BuildgenBase(Task):
""""A base task that provides the buildgen subsystem to its implementers."""
@classmethod
def global_subsystems(cls):
return super(BuildgenBase, cls).global_subsystems() + (BuildgenSubsystem.Factory,)
@classmethod
def implementation_version(cls):
return super(BuildgenBase, cls).implementation_version() + [('BuildgenBase', 1)]
@memoized_property
def buildgen_subsystem(self):
# TODO(pl): When pants is a proper library dep, remove this ignore.
# pylint: disable=no-member
return BuildgenSubsystem.Factory.global_instance().create()
|
<commit_before># coding=utf-8
# Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
from __future__ import (
absolute_import,
division,
generators,
nested_scopes,
print_function,
unicode_literals,
with_statement,
)
from pants.task.task import Task
from pants.util.memo import memoized_property
from fsqio.pants.buildgen.core.subsystems.buildgen_subsystem import BuildgenSubsystem
class BuildgenBase(Task):
""""A base task that provides the buildgen subsystem to its implementers."""
@classmethod
def global_subsystems(cls):
return super(BuildgenBase, cls).global_subsystems() + (BuildgenSubsystem.Factory,)
@memoized_property
def buildgen_subsystem(self):
# TODO(pl): When pants is a proper library dep, remove this ignore.
# pylint: disable=no-member
return BuildgenSubsystem.Factory.global_instance().create()
<commit_msg>Add task version to BuildgenBase.
Debugging a break after a refactor, I wanted to invalidate
just the buildgen caches.
(sapling split of c73dc81f2af0cdc87d21519b032ae3f6213c932c)<commit_after>
|
# coding=utf-8
# Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
from __future__ import (
absolute_import,
division,
generators,
nested_scopes,
print_function,
unicode_literals,
with_statement,
)
from pants.task.task import Task
from pants.util.memo import memoized_property
from fsqio.pants.buildgen.core.subsystems.buildgen_subsystem import BuildgenSubsystem
class BuildgenBase(Task):
""""A base task that provides the buildgen subsystem to its implementers."""
@classmethod
def global_subsystems(cls):
return super(BuildgenBase, cls).global_subsystems() + (BuildgenSubsystem.Factory,)
@classmethod
def implementation_version(cls):
return super(BuildgenBase, cls).implementation_version() + [('BuildgenBase', 1)]
@memoized_property
def buildgen_subsystem(self):
# TODO(pl): When pants is a proper library dep, remove this ignore.
# pylint: disable=no-member
return BuildgenSubsystem.Factory.global_instance().create()
|
# coding=utf-8
# Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
from __future__ import (
absolute_import,
division,
generators,
nested_scopes,
print_function,
unicode_literals,
with_statement,
)
from pants.task.task import Task
from pants.util.memo import memoized_property
from fsqio.pants.buildgen.core.subsystems.buildgen_subsystem import BuildgenSubsystem
class BuildgenBase(Task):
""""A base task that provides the buildgen subsystem to its implementers."""
@classmethod
def global_subsystems(cls):
return super(BuildgenBase, cls).global_subsystems() + (BuildgenSubsystem.Factory,)
@memoized_property
def buildgen_subsystem(self):
# TODO(pl): When pants is a proper library dep, remove this ignore.
# pylint: disable=no-member
return BuildgenSubsystem.Factory.global_instance().create()
Add task version to BuildgenBase.
Debugging a break after a refactor, I wanted to invalidate
just the buildgen caches.
(sapling split of c73dc81f2af0cdc87d21519b032ae3f6213c932c)# coding=utf-8
# Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
from __future__ import (
absolute_import,
division,
generators,
nested_scopes,
print_function,
unicode_literals,
with_statement,
)
from pants.task.task import Task
from pants.util.memo import memoized_property
from fsqio.pants.buildgen.core.subsystems.buildgen_subsystem import BuildgenSubsystem
class BuildgenBase(Task):
""""A base task that provides the buildgen subsystem to its implementers."""
@classmethod
def global_subsystems(cls):
return super(BuildgenBase, cls).global_subsystems() + (BuildgenSubsystem.Factory,)
@classmethod
def implementation_version(cls):
return super(BuildgenBase, cls).implementation_version() + [('BuildgenBase', 1)]
@memoized_property
def buildgen_subsystem(self):
# TODO(pl): When pants is a proper library dep, remove this ignore.
# pylint: disable=no-member
return BuildgenSubsystem.Factory.global_instance().create()
|
<commit_before># coding=utf-8
# Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
from __future__ import (
absolute_import,
division,
generators,
nested_scopes,
print_function,
unicode_literals,
with_statement,
)
from pants.task.task import Task
from pants.util.memo import memoized_property
from fsqio.pants.buildgen.core.subsystems.buildgen_subsystem import BuildgenSubsystem
class BuildgenBase(Task):
""""A base task that provides the buildgen subsystem to its implementers."""
@classmethod
def global_subsystems(cls):
return super(BuildgenBase, cls).global_subsystems() + (BuildgenSubsystem.Factory,)
@memoized_property
def buildgen_subsystem(self):
# TODO(pl): When pants is a proper library dep, remove this ignore.
# pylint: disable=no-member
return BuildgenSubsystem.Factory.global_instance().create()
<commit_msg>Add task version to BuildgenBase.
Debugging a break after a refactor, I wanted to invalidate
just the buildgen caches.
(sapling split of c73dc81f2af0cdc87d21519b032ae3f6213c932c)<commit_after># coding=utf-8
# Copyright 2016 Foursquare Labs Inc. All Rights Reserved.
from __future__ import (
absolute_import,
division,
generators,
nested_scopes,
print_function,
unicode_literals,
with_statement,
)
from pants.task.task import Task
from pants.util.memo import memoized_property
from fsqio.pants.buildgen.core.subsystems.buildgen_subsystem import BuildgenSubsystem
class BuildgenBase(Task):
""""A base task that provides the buildgen subsystem to its implementers."""
@classmethod
def global_subsystems(cls):
return super(BuildgenBase, cls).global_subsystems() + (BuildgenSubsystem.Factory,)
@classmethod
def implementation_version(cls):
return super(BuildgenBase, cls).implementation_version() + [('BuildgenBase', 1)]
@memoized_property
def buildgen_subsystem(self):
# TODO(pl): When pants is a proper library dep, remove this ignore.
# pylint: disable=no-member
return BuildgenSubsystem.Factory.global_instance().create()
|
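implementation_version() follows the Pants convention for cache invalidation: each task class appends its own (name, version) pair, and bumping any integer in the list invalidates that task's cached results. The subclass below is hypothetical and only illustrates the pattern:

from fsqio.pants.buildgen.core.buildgen_base import BuildgenBase  # module path as in the record

class BuildgenMapSources(BuildgenBase):
    @classmethod
    def implementation_version(cls):
        # Bump 1 -> 2 to force this task to re-run against warm caches.
        return super(BuildgenMapSources, cls).implementation_version() + [('BuildgenMapSources', 1)]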
bf1fa1f284860229b1601e10306830cdc6ba2992
|
logbot/irc_client.py
|
logbot/irc_client.py
|
import irc.client
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name, logger):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
self.logger = logger
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.log)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def log(self, connection, event):
self.logger.write(event.source.nick, event.arguments[0])
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.\n".format(self.bot_name))
self.logger.close()
os._exit(0)
def process_forever(self):
self._client.process_forever()
|
import irc.client
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name, logger):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
self.logger = logger
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.log)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def log(self, connection, event):
self.logger.write(event.source.nick, event.arguments[0])
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.".format(self.bot_name))
self.logger.close()
os._exit(0)
def process_forever(self):
self._client.process_forever()
|
Remove newline character from disconnect message
|
Remove newline character from disconnect message
|
Python
|
mit
|
mlopes/LogBot
|
import irc.client
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name, logger):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
self.logger = logger
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.log)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def log(self, connection, event):
self.logger.write(event.source.nick, event.arguments[0])
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.\n".format(self.bot_name))
self.logger.close()
os._exit(0)
def process_forever(self):
self._client.process_forever()
Remove newline character from disconnect message
|
import irc.client
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name, logger):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
self.logger = logger
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.log)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def log(self, connection, event):
self.logger.write(event.source.nick, event.arguments[0])
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.".format(self.bot_name))
self.logger.close()
os._exit(0)
def process_forever(self):
self._client.process_forever()
|
<commit_before>import irc.client
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name, logger):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
self.logger = logger
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.log)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def log(self, connection, event):
self.logger.write(event.source.nick, event.arguments[0])
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.\n".format(self.bot_name))
self.logger.close()
os._exit(0)
def process_forever(self):
self._client.process_forever()
<commit_msg>Remove newline character from disconnect message<commit_after>
|
import irc.client
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name, logger):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
self.logger = logger
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.log)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def log(self, connection, event):
self.logger.write(event.source.nick, event.arguments[0])
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.".format(self.bot_name))
self.logger.close()
os._exit(0)
def process_forever(self):
self._client.process_forever()
|
import irc.client
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name, logger):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
self.logger = logger
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.log)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def log(self, connection, event):
self.logger.write(event.source.nick, event.arguments[0])
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.\n".format(self.bot_name))
self.logger.close()
os._exit(0)
def process_forever(self):
self._client.process_forever()
Remove newline character from disconnect messageimport irc.client
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name, logger):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
self.logger = logger
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.log)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def log(self, connection, event):
self.logger.write(event.source.nick, event.arguments[0])
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.".format(self.bot_name))
self.logger.close()
os._exit(0)
def process_forever(self):
self._client.process_forever()
|
<commit_before>import irc.client
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name, logger):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
self.logger = logger
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.log)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def log(self, connection, event):
self.logger.write(event.source.nick, event.arguments[0])
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.\n".format(self.bot_name))
self.logger.close()
os._exit(0)
def process_forever(self):
self._client.process_forever()
<commit_msg>Remove newline character from disconnect message<commit_after>import irc.client
import os
class IrcClient(object):
def __init__(self, server, port, channel, bot_name, logger):
self.server = server
self.port = port
self.channel = channel
self.bot_name = bot_name
self.logger = logger
def start(self):
self._client = irc.client.IRC()
self._client_connection = self._client.server().connect(self.server, self.port, self.bot_name)
self._add_handlers()
def _add_handlers(self):
self._client_connection.add_global_handler('pubmsg', self.log)
self._client_connection.add_global_handler('welcome', self.joinner)
def joinner(self, connection, event):
connection.join(self.channel)
def log(self, connection, event):
self.logger.write(event.source.nick, event.arguments[0])
def graceful_stop(self, signum, frame):
self._client.disconnect_all("{0} is going home now.".format(self.bot_name))
self.logger.close()
os._exit(0)
def process_forever(self):
self._client.process_forever()
|
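graceful_stop(signum, frame) already matches the signal-handler signature, so wiring it up is one line per signal. The server, channel, and logger below are illustrative stand-ins, not part of the record:

import signal

from logbot.irc_client import IrcClient  # module path as in the record

class StdoutLogger(object):
    """Hypothetical stand-in for the logger dependency."""
    def write(self, nick, message):
        print('<{0}> {1}'.format(nick, message))
    def close(self):
        pass

client = IrcClient('irc.example.org', 6667, '#example', 'logbot', StdoutLogger())
signal.signal(signal.SIGINT, client.graceful_stop)   # Ctrl-C
signal.signal(signal.SIGTERM, client.graceful_stop)  # kill
client.start()
client.process_forever()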
4bfa555e7b71e3c5176a39ec8bbe4a2071c09bb3
|
blockbuster/bb_logging.py
|
blockbuster/bb_logging.py
|
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
Change file logHandler to use configured path for log files
|
Change file logHandler to use configured path for log files
|
Python
|
mit
|
mattstibbs/blockbuster-server,mattstibbs/blockbuster-server
|
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)Change file logHandler to use configured path for log files
|
import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
<commit_before>import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)<commit_msg>Change file logHandler to use configured path for log files<commit_after>
|
import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)Change file logHandler to use configured path for log filesimport config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
<commit_before>import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler('./logs/app.log', when='midnight', delay=False, encoding=None,
backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)<commit_msg>Change file logHandler to use configured path for log files<commit_after>import config
import logging
import logging.handlers
# ######### Set up logging ##########
# log.basicConfig(format="%(asctime)s - %(levelname)s: %(message)s", level=log.DEBUG)
logger = logging.getLogger('bb_log')
logger.setLevel(logging.DEBUG)
# create file handler which logs even debug messages
tfh = logging.handlers.TimedRotatingFileHandler(str.format('{0}/app.log', config.log_directory),
when='midnight', delay=False, encoding=None, backupCount=7)
tfh.setLevel(logging.DEBUG)
# create console handler with a higher log level
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatterch = logging.Formatter('%(asctime)s [%(levelname)s] %(message)s')
formattertfh = logging.Formatter('%(asctime)s [%(levelname)s] [%(name)s] %(message)s')
ch.setFormatter(formatterch)
tfh.setFormatter(formattertfh)
# add the handlers to logger
logger.addHandler(ch)
logger.addHandler(tfh)
|
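The change assumes a config module exposing log_directory; its real contents are not in the record, so the sketch below is a guess. Creating the directory up front matters because TimedRotatingFileHandler raises if the target directory does not exist:

# Hypothetical config.py providing the path consumed by bb_logging.
import os

log_directory = os.environ.get('BB_LOG_DIR', './logs')  # default is illustrative
if not os.path.isdir(log_directory):
    os.makedirs(log_directory)  # TimedRotatingFileHandler does not create directories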
ebd9842569201ce0e87827c2031c28c51159c472
|
tests/test_pathutils.py
|
tests/test_pathutils.py
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils, 'os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils.os, 'walk')
def test_grep_r(self, mock_walk):
mock_walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
Change mocking scope to take effect
|
Change mocking scope to take effect
|
Python
|
mit
|
blitzrk/sublime_libsass,blitzrk/sublime_libsass
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils, 'os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
Change mocking scope to take effect
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils.os, 'walk')
def test_grep_r(self, mock_walk):
mock_walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
<commit_before>from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils, 'os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
<commit_msg>Change mocking scope to take effect<commit_after>
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils.os, 'walk')
def test_grep_r(self, mock_walk):
mock_walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils, 'os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
Change mocking scope to take effectfrom os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils.os, 'walk')
def test_grep_r(self, mock_walk):
mock_walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
<commit_before>from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils, 'os')
def test_grep_r(self, mock_os):
mock_os.walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
<commit_msg>Change mocking scope to take effect<commit_after>from os.path import join
import sublime
import sys
from unittest import TestCase
from unittest.mock import patch
version = sublime.version()
try:
from libsass import pathutils
except ImportError:
from sublime_libsass.libsass import pathutils
class TestPathutils(TestCase):
def test_subpaths(self):
path = join('/foo','bar','baz')
exprmt = pathutils.subpaths(path)
expect = [ join('/foo','bar','baz'), join('/foo','bar'), join('/foo'), join('/') ]
self.assertEqual(exprmt, expect)
@patch.object(pathutils.os, 'walk')
def test_grep_r(self, mock_walk):
mock_walk = lambda x: [('/tmp','',['file.scss'])]
self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', ['.scss', '.sass']), ['/tmp'])
self.assertEqual(pathutils.find_type_dirs('anything', '.sass'), [])
self.assertEqual(pathutils.find_type_dirs('anything', ['.txt', '.csv']), [])
|
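A note on the pattern in the record above: narrowing the patch target from the whole pathutils.os module to pathutils.os.walk is what lets the patch take effect inside find_type_dirs, but the injected mock then has to be configured rather than rebound. A minimal sketch, assuming the same pathutils module and TestCase class as in the record (unittest.mock), of a variant in which the asserted values really come from the mock:

from unittest.mock import patch

@patch.object(pathutils.os, 'walk')
def test_grep_r(self, mock_walk):
    # Configure the injected MagicMock; rebinding the local name
    # (mock_walk = lambda x: ...) leaves the actual replacement unconfigured.
    mock_walk.return_value = [('/tmp', '', ['file.scss'])]
    self.assertEqual(pathutils.find_type_dirs('anything', '.scss'), ['/tmp'])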
b11ef81b180cc18acb44988f3e269af6b54f4c89
|
timewreport/interval.py
|
timewreport/interval.py
|
import dateutil.parser
from datetime import datetime
from dateutil.tz import tz
class TimeWarriorInterval(object):
def __init__(self, start, end, tags):
self.__start = self.__get_local_datetime(start)
self.__end = self.__get_local_datetime(end) if end is not None else None
self.__tags = tags
def __eq__(self, other):
return self.__start == other.get_start() \
and self.__end == other.get_end() \
and self.__tags == other.get_tags()
def get_start(self):
return self.__start
def get_end(self):
return self.__end
def get_tags(self):
return self.__tags
def is_open(self):
return self.__end is None
def get_duration(self):
if self.is_open():
return datetime.now(tz=tz.tzlocal()) - self.__start
else:
return self.__end - self.__start
def get_date(self):
return datetime(self.__start.year, self.__start.month, self.__start.day)
def __get_local_datetime(self, datetime_string):
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
date = dateutil.parser.parse(datetime_string)
date.replace(tzinfo=from_zone)
return date.astimezone(to_zone)
|
import dateutil.parser
from datetime import datetime, date
from dateutil.tz import tz
class TimeWarriorInterval(object):
def __init__(self, start, end, tags):
self.__start = self.__get_local_datetime(start)
self.__end = self.__get_local_datetime(end) if end is not None else None
self.__tags = tags
def __eq__(self, other):
return self.__start == other.get_start() \
and self.__end == other.get_end() \
and self.__tags == other.get_tags()
def get_start(self):
return self.__start
def get_end(self):
return self.__end
def get_tags(self):
return self.__tags
def is_open(self):
return self.__end is None
def get_duration(self):
if self.is_open():
return datetime.now(tz=tz.tzlocal()) - self.__start
else:
return self.__end - self.__start
def get_date(self):
return date(self.__start.year, self.__start.month, self.__start.day)
def __get_local_datetime(self, datetime_string):
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
date = dateutil.parser.parse(datetime_string)
date.replace(tzinfo=from_zone)
return date.astimezone(to_zone)
|
Make get_date() return date object instead of datetime
|
Make get_date() return date object instead of datetime
|
Python
|
mit
|
lauft/timew-report
|
import dateutil.parser
from datetime import datetime
from dateutil.tz import tz
class TimeWarriorInterval(object):
def __init__(self, start, end, tags):
self.__start = self.__get_local_datetime(start)
self.__end = self.__get_local_datetime(end) if end is not None else None
self.__tags = tags
def __eq__(self, other):
return self.__start == other.get_start() \
and self.__end == other.get_end() \
and self.__tags == other.get_tags()
def get_start(self):
return self.__start
def get_end(self):
return self.__end
def get_tags(self):
return self.__tags
def is_open(self):
return self.__end is None
def get_duration(self):
if self.is_open():
return datetime.now(tz=tz.tzlocal()) - self.__start
else:
return self.__end - self.__start
def get_date(self):
return datetime(self.__start.year, self.__start.month, self.__start.day)
def __get_local_datetime(self, datetime_string):
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
date = dateutil.parser.parse(datetime_string)
date.replace(tzinfo=from_zone)
return date.astimezone(to_zone)
Make get_date() return date object instead of datetime
|
import dateutil.parser
from datetime import datetime, date
from dateutil.tz import tz
class TimeWarriorInterval(object):
def __init__(self, start, end, tags):
self.__start = self.__get_local_datetime(start)
self.__end = self.__get_local_datetime(end) if end is not None else None
self.__tags = tags
def __eq__(self, other):
return self.__start == other.get_start() \
and self.__end == other.get_end() \
and self.__tags == other.get_tags()
def get_start(self):
return self.__start
def get_end(self):
return self.__end
def get_tags(self):
return self.__tags
def is_open(self):
return self.__end is None
def get_duration(self):
if self.is_open():
return datetime.now(tz=tz.tzlocal()) - self.__start
else:
return self.__end - self.__start
def get_date(self):
return date(self.__start.year, self.__start.month, self.__start.day)
def __get_local_datetime(self, datetime_string):
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
date = dateutil.parser.parse(datetime_string)
date.replace(tzinfo=from_zone)
return date.astimezone(to_zone)
|
<commit_before>import dateutil.parser
from datetime import datetime
from dateutil.tz import tz
class TimeWarriorInterval(object):
def __init__(self, start, end, tags):
self.__start = self.__get_local_datetime(start)
self.__end = self.__get_local_datetime(end) if end is not None else None
self.__tags = tags
def __eq__(self, other):
return self.__start == other.get_start() \
and self.__end == other.get_end() \
and self.__tags == other.get_tags()
def get_start(self):
return self.__start
def get_end(self):
return self.__end
def get_tags(self):
return self.__tags
def is_open(self):
return self.__end is None
def get_duration(self):
if self.is_open():
return datetime.now(tz=tz.tzlocal()) - self.__start
else:
return self.__end - self.__start
def get_date(self):
return datetime(self.__start.year, self.__start.month, self.__start.day)
def __get_local_datetime(self, datetime_string):
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
date = dateutil.parser.parse(datetime_string)
date.replace(tzinfo=from_zone)
return date.astimezone(to_zone)
<commit_msg>Make get_date() return date object instead of datetime<commit_after>
|
import dateutil.parser
from datetime import datetime, date
from dateutil.tz import tz
class TimeWarriorInterval(object):
def __init__(self, start, end, tags):
self.__start = self.__get_local_datetime(start)
self.__end = self.__get_local_datetime(end) if end is not None else None
self.__tags = tags
def __eq__(self, other):
return self.__start == other.get_start() \
and self.__end == other.get_end() \
and self.__tags == other.get_tags()
def get_start(self):
return self.__start
def get_end(self):
return self.__end
def get_tags(self):
return self.__tags
def is_open(self):
return self.__end is None
def get_duration(self):
if self.is_open():
return datetime.now(tz=tz.tzlocal()) - self.__start
else:
return self.__end - self.__start
def get_date(self):
return date(self.__start.year, self.__start.month, self.__start.day)
def __get_local_datetime(self, datetime_string):
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
date = dateutil.parser.parse(datetime_string)
date.replace(tzinfo=from_zone)
return date.astimezone(to_zone)
|
import dateutil.parser
from datetime import datetime
from dateutil.tz import tz
class TimeWarriorInterval(object):
def __init__(self, start, end, tags):
self.__start = self.__get_local_datetime(start)
self.__end = self.__get_local_datetime(end) if end is not None else None
self.__tags = tags
def __eq__(self, other):
return self.__start == other.get_start() \
and self.__end == other.get_end() \
and self.__tags == other.get_tags()
def get_start(self):
return self.__start
def get_end(self):
return self.__end
def get_tags(self):
return self.__tags
def is_open(self):
return self.__end is None
def get_duration(self):
if self.is_open():
return datetime.now(tz=tz.tzlocal()) - self.__start
else:
return self.__end - self.__start
def get_date(self):
return datetime(self.__start.year, self.__start.month, self.__start.day)
def __get_local_datetime(self, datetime_string):
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
date = dateutil.parser.parse(datetime_string)
date.replace(tzinfo=from_zone)
return date.astimezone(to_zone)
Make get_date() return date object instead of datetime
import dateutil.parser
from datetime import datetime, date
from dateutil.tz import tz
class TimeWarriorInterval(object):
def __init__(self, start, end, tags):
self.__start = self.__get_local_datetime(start)
self.__end = self.__get_local_datetime(end) if end is not None else None
self.__tags = tags
def __eq__(self, other):
return self.__start == other.get_start() \
and self.__end == other.get_end() \
and self.__tags == other.get_tags()
def get_start(self):
return self.__start
def get_end(self):
return self.__end
def get_tags(self):
return self.__tags
def is_open(self):
return self.__end is None
def get_duration(self):
if self.is_open():
return datetime.now(tz=tz.tzlocal()) - self.__start
else:
return self.__end - self.__start
def get_date(self):
return date(self.__start.year, self.__start.month, self.__start.day)
def __get_local_datetime(self, datetime_string):
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
date = dateutil.parser.parse(datetime_string)
date.replace(tzinfo=from_zone)
return date.astimezone(to_zone)
|
<commit_before>import dateutil.parser
from datetime import datetime
from dateutil.tz import tz
class TimeWarriorInterval(object):
def __init__(self, start, end, tags):
self.__start = self.__get_local_datetime(start)
self.__end = self.__get_local_datetime(end) if end is not None else None
self.__tags = tags
def __eq__(self, other):
return self.__start == other.get_start() \
and self.__end == other.get_end() \
and self.__tags == other.get_tags()
def get_start(self):
return self.__start
def get_end(self):
return self.__end
def get_tags(self):
return self.__tags
def is_open(self):
return self.__end is None
def get_duration(self):
if self.is_open():
return datetime.now(tz=tz.tzlocal()) - self.__start
else:
return self.__end - self.__start
def get_date(self):
return datetime(self.__start.year, self.__start.month, self.__start.day)
def __get_local_datetime(self, datetime_string):
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
date = dateutil.parser.parse(datetime_string)
date.replace(tzinfo=from_zone)
return date.astimezone(to_zone)
<commit_msg>Make get_date() return date object instead of datetime<commit_after>import dateutil.parser
from datetime import datetime, date
from dateutil.tz import tz
class TimeWarriorInterval(object):
def __init__(self, start, end, tags):
self.__start = self.__get_local_datetime(start)
self.__end = self.__get_local_datetime(end) if end is not None else None
self.__tags = tags
def __eq__(self, other):
return self.__start == other.get_start() \
and self.__end == other.get_end() \
and self.__tags == other.get_tags()
def get_start(self):
return self.__start
def get_end(self):
return self.__end
def get_tags(self):
return self.__tags
def is_open(self):
return self.__end is None
def get_duration(self):
if self.is_open():
return datetime.now(tz=tz.tzlocal()) - self.__start
else:
return self.__end - self.__start
def get_date(self):
return date(self.__start.year, self.__start.month, self.__start.day)
def __get_local_datetime(self, datetime_string):
from_zone = tz.tzutc()
to_zone = tz.tzlocal()
date = dateutil.parser.parse(datetime_string)
date.replace(tzinfo=from_zone)
return date.astimezone(to_zone)
|
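The change above swaps a midnight datetime for a plain date object; a quick standard-library sketch of the difference:

from datetime import datetime, date

print(datetime(2018, 3, 1))   # 2018-03-01 00:00:00  (old return type)
print(date(2018, 3, 1))       # 2018-03-01           (new return type)
print(date(2018, 3, 1) == datetime(2018, 3, 1))  # False: distinct types

One observation about the snippet itself (in both the before and after versions, not part of the commit): __get_local_datetime calls date.replace(tzinfo=from_zone) without using the result; replace() returns a new object, so that line as written has no effect.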
b400be73feba0b571ac6453841426db9a78dfa00
|
flowerconfig.py
|
flowerconfig.py
|
import os
AMPQ_ADMIN_USERNAME = os.getenv('AMQP_ADMIN_USERNAME', 'guest')
AMPQ_ADMIN_PASSWORD = os.getenv('AMQP_ADMIN_PASSWORD', 'guest')
AMQP_ADMIN_HOST = os.getenv('AMQP_ADMIN_HOST', '172.17.42.1')
AMQP_ADMIN_PORT = int(os.getenv('AMQP_ADMIN_PORT', '15672'))
DEFAULT_BROKER_API = 'http://%s:%s@%s:%d/api/' \
% (AMPQ_ADMIN_USERNAME, AMPQ_ADMIN_PASSWORD,
AMQP_ADMIN_HOST, AMQP_ADMIN_PORT)
FLOWER_USERNAME = os.getenv('FLOWER_USERNAME', 'root')
FLOWER_PASSWORD = os.getenv('FLOWER_PASSWORD', 'changeit')
port = int(os.getenv('FLOWER_PORT', '5555'))
broker_api = os.getenv('FLOWER_BROKER_API', DEFAULT_BROKER_API)
max_tasks = int(os.getenv('FLOWER_MAX_TASKS', '3600'))
basic_auth = [os.getenv('FLOWER_BASIC_AUTH', '%s:%s'
% (FLOWER_USERNAME, FLOWER_PASSWORD))]
|
import os
AMPQ_ADMIN_USERNAME = os.getenv('AMQP_ADMIN_USERNAME', 'guest')
AMPQ_ADMIN_PASSWORD = os.getenv('AMQP_ADMIN_PASSWORD', 'guest')
AMQP_ADMIN_HOST = os.getenv('AMQP_ADMIN_HOST', '172.17.42.1')
AMQP_ADMIN_PORT = int(os.getenv('AMQP_ADMIN_PORT', '15672'))
DEFAULT_BROKER_API = 'http://%s:%s@%s:%d/api/' \
% (AMPQ_ADMIN_USERNAME, AMPQ_ADMIN_PASSWORD,
AMQP_ADMIN_HOST, AMQP_ADMIN_PORT)
USERNAME = os.getenv('USERNAME', 'root')
PASSWORD = os.getenv('PASSWORD', 'changeit')
port = int(os.getenv('FLOWER_PORT', '5555'))
broker_api = os.getenv('FLOWER_BROKER_API', DEFAULT_BROKER_API)
max_tasks = int(os.getenv('FLOWER_MAX_TASKS', '3600'))
basic_auth = [os.getenv('FLOWER_BASIC_AUTH', '%s:%s'
% (USERNAME, PASSWORD))]
|
Remove FLOWER_ prefix for non flower based vars
|
Remove FLOWER_ prefix for non flower based vars
|
Python
|
mit
|
totem/celery-flower-docker,totem/celery-flower-docker
|
import os
AMPQ_ADMIN_USERNAME = os.getenv('AMQP_ADMIN_USERNAME', 'guest')
AMPQ_ADMIN_PASSWORD = os.getenv('AMQP_ADMIN_PASSWORD', 'guest')
AMQP_ADMIN_HOST = os.getenv('AMQP_ADMIN_HOST', '172.17.42.1')
AMQP_ADMIN_PORT = int(os.getenv('AMQP_ADMIN_PORT', '15672'))
DEFAULT_BROKER_API = 'http://%s:%s@%s:%d/api/' \
% (AMPQ_ADMIN_USERNAME, AMPQ_ADMIN_PASSWORD,
AMQP_ADMIN_HOST, AMQP_ADMIN_PORT)
FLOWER_USERNAME = os.getenv('FLOWER_USERNAME', 'root')
FLOWER_PASSWORD = os.getenv('FLOWER_PASSWORD', 'changeit')
port = int(os.getenv('FLOWER_PORT', '5555'))
broker_api = os.getenv('FLOWER_BROKER_API', DEFAULT_BROKER_API)
max_tasks = int(os.getenv('FLOWER_MAX_TASKS', '3600'))
basic_auth = [os.getenv('FLOWER_BASIC_AUTH', '%s:%s'
% (FLOWER_USERNAME, FLOWER_PASSWORD))]
Remove FLOWER_ prefix for non flower based vars
|
import os
AMPQ_ADMIN_USERNAME = os.getenv('AMQP_ADMIN_USERNAME', 'guest')
AMPQ_ADMIN_PASSWORD = os.getenv('AMQP_ADMIN_PASSWORD', 'guest')
AMQP_ADMIN_HOST = os.getenv('AMQP_ADMIN_HOST', '172.17.42.1')
AMQP_ADMIN_PORT = int(os.getenv('AMQP_ADMIN_PORT', '15672'))
DEFAULT_BROKER_API = 'http://%s:%s@%s:%d/api/' \
% (AMPQ_ADMIN_USERNAME, AMPQ_ADMIN_PASSWORD,
AMQP_ADMIN_HOST, AMQP_ADMIN_PORT)
USERNAME = os.getenv('USERNAME', 'root')
PASSWORD = os.getenv('PASSWORD', 'changeit')
port = int(os.getenv('FLOWER_PORT', '5555'))
broker_api = os.getenv('FLOWER_BROKER_API', DEFAULT_BROKER_API)
max_tasks = int(os.getenv('FLOWER_MAX_TASKS', '3600'))
basic_auth = [os.getenv('FLOWER_BASIC_AUTH', '%s:%s'
% (USERNAME, PASSWORD))]
|
<commit_before>import os
AMPQ_ADMIN_USERNAME = os.getenv('AMQP_ADMIN_USERNAME', 'guest')
AMPQ_ADMIN_PASSWORD = os.getenv('AMQP_ADMIN_PASSWORD', 'guest')
AMQP_ADMIN_HOST = os.getenv('AMQP_ADMIN_HOST', '172.17.42.1')
AMQP_ADMIN_PORT = int(os.getenv('AMQP_ADMIN_PORT', '15672'))
DEFAULT_BROKER_API = 'http://%s:%s@%s:%d/api/' \
% (AMPQ_ADMIN_USERNAME, AMPQ_ADMIN_PASSWORD,
AMQP_ADMIN_HOST, AMQP_ADMIN_PORT)
FLOWER_USERNAME = os.getenv('FLOWER_USERNAME', 'root')
FLOWER_PASSWORD = os.getenv('FLOWER_PASSWORD', 'changeit')
port = int(os.getenv('FLOWER_PORT', '5555'))
broker_api = os.getenv('FLOWER_BROKER_API', DEFAULT_BROKER_API)
max_tasks = int(os.getenv('FLOWER_MAX_TASKS', '3600'))
basic_auth = [os.getenv('FLOWER_BASIC_AUTH', '%s:%s'
% (FLOWER_USERNAME, FLOWER_PASSWORD))]
<commit_msg>Remove FLOWER_ prefix for non flower based vars<commit_after>
|
import os
AMPQ_ADMIN_USERNAME = os.getenv('AMQP_ADMIN_USERNAME', 'guest')
AMPQ_ADMIN_PASSWORD = os.getenv('AMQP_ADMIN_PASSWORD', 'guest')
AMQP_ADMIN_HOST = os.getenv('AMQP_ADMIN_HOST', '172.17.42.1')
AMQP_ADMIN_PORT = int(os.getenv('AMQP_ADMIN_PORT', '15672'))
DEFAULT_BROKER_API = 'http://%s:%s@%s:%d/api/' \
% (AMPQ_ADMIN_USERNAME, AMPQ_ADMIN_PASSWORD,
AMQP_ADMIN_HOST, AMQP_ADMIN_PORT)
USERNAME = os.getenv('USERNAME', 'root')
PASSWORD = os.getenv('PASSWORD', 'changeit')
port = int(os.getenv('FLOWER_PORT', '5555'))
broker_api = os.getenv('FLOWER_BROKER_API', DEFAULT_BROKER_API)
max_tasks = int(os.getenv('FLOWER_MAX_TASKS', '3600'))
basic_auth = [os.getenv('FLOWER_BASIC_AUTH', '%s:%s'
% (USERNAME, PASSWORD))]
|
import os
AMPQ_ADMIN_USERNAME = os.getenv('AMQP_ADMIN_USERNAME', 'guest')
AMPQ_ADMIN_PASSWORD = os.getenv('AMQP_ADMIN_PASSWORD', 'guest')
AMQP_ADMIN_HOST = os.getenv('AMQP_ADMIN_HOST', '172.17.42.1')
AMQP_ADMIN_PORT = int(os.getenv('AMQP_ADMIN_PORT', '15672'))
DEFAULT_BROKER_API = 'http://%s:%s@%s:%d/api/' \
% (AMPQ_ADMIN_USERNAME, AMPQ_ADMIN_PASSWORD,
AMQP_ADMIN_HOST, AMQP_ADMIN_PORT)
FLOWER_USERNAME = os.getenv('FLOWER_USERNAME', 'root')
FLOWER_PASSWORD = os.getenv('FLOWER_PASSWORD', 'changeit')
port = int(os.getenv('FLOWER_PORT', '5555'))
broker_api = os.getenv('FLOWER_BROKER_API', DEFAULT_BROKER_API)
max_tasks = int(os.getenv('FLOWER_MAX_TASKS', '3600'))
basic_auth = [os.getenv('FLOWER_BASIC_AUTH', '%s:%s'
% (FLOWER_USERNAME, FLOWER_PASSWORD))]
Remove FLOWER_ prefix for non flower based vars
import os
AMPQ_ADMIN_USERNAME = os.getenv('AMQP_ADMIN_USERNAME', 'guest')
AMPQ_ADMIN_PASSWORD = os.getenv('AMQP_ADMIN_PASSWORD', 'guest')
AMQP_ADMIN_HOST = os.getenv('AMQP_ADMIN_HOST', '172.17.42.1')
AMQP_ADMIN_PORT = int(os.getenv('AMQP_ADMIN_PORT', '15672'))
DEFAULT_BROKER_API = 'http://%s:%s@%s:%d/api/' \
% (AMPQ_ADMIN_USERNAME, AMPQ_ADMIN_PASSWORD,
AMQP_ADMIN_HOST, AMQP_ADMIN_PORT)
USERNAME = os.getenv('USERNAME', 'root')
PASSWORD = os.getenv('PASSWORD', 'changeit')
port = int(os.getenv('FLOWER_PORT', '5555'))
broker_api = os.getenv('FLOWER_BROKER_API', DEFAULT_BROKER_API)
max_tasks = int(os.getenv('FLOWER_MAX_TASKS', '3600'))
basic_auth = [os.getenv('FLOWER_BASIC_AUTH', '%s:%s'
% (USERNAME, PASSWORD))]
|
<commit_before>import os
AMPQ_ADMIN_USERNAME = os.getenv('AMQP_ADMIN_USERNAME', 'guest')
AMPQ_ADMIN_PASSWORD = os.getenv('AMQP_ADMIN_PASSWORD', 'guest')
AMQP_ADMIN_HOST = os.getenv('AMQP_ADMIN_HOST', '172.17.42.1')
AMQP_ADMIN_PORT = int(os.getenv('AMQP_ADMIN_PORT', '15672'))
DEFAULT_BROKER_API = 'http://%s:%s@%s:%d/api/' \
% (AMPQ_ADMIN_USERNAME, AMPQ_ADMIN_PASSWORD,
AMQP_ADMIN_HOST, AMQP_ADMIN_PORT)
FLOWER_USERNAME = os.getenv('FLOWER_USERNAME', 'root')
FLOWER_PASSWORD = os.getenv('FLOWER_PASSWORD', 'changeit')
port = int(os.getenv('FLOWER_PORT', '5555'))
broker_api = os.getenv('FLOWER_BROKER_API', DEFAULT_BROKER_API)
max_tasks = int(os.getenv('FLOWER_MAX_TASKS', '3600'))
basic_auth = [os.getenv('FLOWER_BASIC_AUTH', '%s:%s'
% (FLOWER_USERNAME, FLOWER_PASSWORD))]
<commit_msg>Remove FLOWER_ prefix for non flower based vars<commit_after>import os
AMPQ_ADMIN_USERNAME = os.getenv('AMQP_ADMIN_USERNAME', 'guest')
AMPQ_ADMIN_PASSWORD = os.getenv('AMQP_ADMIN_PASSWORD', 'guest')
AMQP_ADMIN_HOST = os.getenv('AMQP_ADMIN_HOST', '172.17.42.1')
AMQP_ADMIN_PORT = int(os.getenv('AMQP_ADMIN_PORT', '15672'))
DEFAULT_BROKER_API = 'http://%s:%s@%s:%d/api/' \
% (AMPQ_ADMIN_USERNAME, AMPQ_ADMIN_PASSWORD,
AMQP_ADMIN_HOST, AMQP_ADMIN_PORT)
USERNAME = os.getenv('USERNAME', 'root')
PASSWORD = os.getenv('PASSWORD', 'changeit')
port = int(os.getenv('FLOWER_PORT', '5555'))
broker_api = os.getenv('FLOWER_BROKER_API', DEFAULT_BROKER_API)
max_tasks = int(os.getenv('FLOWER_MAX_TASKS', '3600'))
basic_auth = [os.getenv('FLOWER_BASIC_AUTH', '%s:%s'
% (USERNAME, PASSWORD))]
|
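One caveat worth flagging with the rename above: USERNAME is frequently present in the ambient environment (login shells commonly export it), so the unprefixed names can pick up values the operator never set for Flower. A minimal sketch of how the defaults resolve when the variables are absent:

import os

USERNAME = os.getenv('USERNAME', 'root')      # may inherit the shell's USERNAME
PASSWORD = os.getenv('PASSWORD', 'changeit')
print('%s:%s' % (USERNAME, PASSWORD))         # 'root:changeit' in a clean env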
423dd5ea414fe1857b44eef00a94f4dbb6d0c798
|
import_test_data.py
|
import_test_data.py
|
#!/usr/bin/env python
# Script to import some test data into the db. Usually we should get a warning
# due to the bad formatting of the date, which is missing the time zone flag.
# Copy and execute this directly into the django shell.
from sest.models import *
from datetime import datetime
u = User.objects.create(username="test", email="test@example.com")
# c = Channel(title="test", user=u, 12345678, datetime.now())
c = Channel.objects.create(user=u, number_fields=2)
c.fieldmetadata_set.create(field_no=1, encoding='float')
c.fieldmetadata_set.create(field_no=2, encoding='float')
with open("sample_without_header.csv") as fo:
for line in fo:
line = line.strip().split(',')
dt, _id, t, h = line
dt = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S UTC')
t, h = float(t), float(h)
r = Record.objects.create(channel=c, insertion_time=dt, id=_id)
r.field_set.create(field_no=1, val=t)
r.field_set.create(field_no=2, val=h)
|
#!/usr/bin/env python
# Script to import some test data into the db. Usually we should get a warning
# due to the bad formatting of the date, which is missing the time zone flag.
# Copy and execute this directly into the django shell.
from sest.models import *
# from datetime import datetime
u = User.objects.create(username="test", email="test@example.com")
# c = Channel(title="test", user=u, 12345678, datetime.now())
c = Channel.objects.create(user=u, number_fields=2)
c.fieldmetadata_set.create(field_no=1, encoding='float')
c.fieldmetadata_set.create(field_no=2, encoding='float')
with open("sample_without_header.csv") as fo:
for line in fo:
line = line.strip().split(',')
dt, _id, t, h = line
# dt = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S UTC')
t, h = float(t), float(h)
# r = Record.objects.create(channel=c, insertion_time=dt, id=_id)
r = Record.objects.create(channel=c, id=_id)
r.field_set.create(field_no=1, val=t)
r.field_set.create(field_no=2, val=h)
|
Update test data importer script.
|
Update test data importer script.
|
Python
|
mit
|
bebosudo/sest,bebosudo/sest,bebosudo/sest
|
#!/usr/bin/env python
# Script to import some test data into the db. Usually we should get a warning
# due to the bad formatting of the date, which is missing the time zone flag.
# Copy and execute this directly into the django shell.
from sest.models import *
from datetime import datetime
u = User.objects.create(username="test", email="test@example.com")
# c = Channel(title="test", user=u, 12345678, datetime.now())
c = Channel.objects.create(user=u, number_fields=2)
c.fieldmetadata_set.create(field_no=1, encoding='float')
c.fieldmetadata_set.create(field_no=2, encoding='float')
with open("sample_without_header.csv") as fo:
for line in fo:
line = line.strip().split(',')
dt, _id, t, h = line
dt = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S UTC')
t, h = float(t), float(h)
r = Record.objects.create(channel=c, insertion_time=dt, id=_id)
r.field_set.create(field_no=1, val=t)
r.field_set.create(field_no=2, val=h)
Update test data importer script.
|
#!/usr/bin/env python
# Script to import some test data into the db. Usually we should get a warning
# due to the bad formatting of the date, which is missing the time zone flag.
# Copy and execute this directly into the django shell.
from sest.models import *
# from datetime import datetime
u = User.objects.create(username="test", email="test@example.com")
# c = Channel(title="test", user=u, 12345678, datetime.now())
c = Channel.objects.create(user=u, number_fields=2)
c.fieldmetadata_set.create(field_no=1, encoding='float')
c.fieldmetadata_set.create(field_no=2, encoding='float')
with open("sample_without_header.csv") as fo:
for line in fo:
line = line.strip().split(',')
dt, _id, t, h = line
# dt = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S UTC')
t, h = float(t), float(h)
# r = Record.objects.create(channel=c, insertion_time=dt, id=_id)
r = Record.objects.create(channel=c, id=_id)
r.field_set.create(field_no=1, val=t)
r.field_set.create(field_no=2, val=h)
|
<commit_before>#!/usr/bin/env python
# Script to import some test data into the db. Usually we should get a warning
# due to the bad formatting of the date, which is missing the time zone flag.
# Copy and execute this directly into the django shell.
from sest.models import *
from datetime import datetime
u = User.objects.create(username="test", email="test@example.com")
# c = Channel(title="test", user=u, 12345678, datetime.now())
c = Channel.objects.create(user=u, number_fields=2)
c.fieldmetadata_set.create(field_no=1, encoding='float')
c.fieldmetadata_set.create(field_no=2, encoding='float')
with open("sample_without_header.csv") as fo:
for line in fo:
line = line.strip().split(',')
dt, _id, t, h = line
dt = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S UTC')
t, h = float(t), float(h)
r = Record.objects.create(channel=c, insertion_time=dt, id=_id)
r.field_set.create(field_no=1, val=t)
r.field_set.create(field_no=2, val=h)
<commit_msg>Update test data importer script.<commit_after>
|
#!/usr/bin/env python
# Script to import some test data into the db. Usually we should get a warning
# due to the bad formatting of the date, which is missing the time zone flag.
# Copy and execute this directly into the django shell.
from sest.models import *
# from datetime import datetime
u = User.objects.create(username="test", email="test@example.com")
# c = Channel(title="test", user=u, 12345678, datetime.now())
c = Channel.objects.create(user=u, number_fields=2)
c.fieldmetadata_set.create(field_no=1, encoding='float')
c.fieldmetadata_set.create(field_no=2, encoding='float')
with open("sample_without_header.csv") as fo:
for line in fo:
line = line.strip().split(',')
dt, _id, t, h = line
# dt = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S UTC')
t, h = float(t), float(h)
# r = Record.objects.create(channel=c, insertion_time=dt, id=_id)
r = Record.objects.create(channel=c, id=_id)
r.field_set.create(field_no=1, val=t)
r.field_set.create(field_no=2, val=h)
|
#!/usr/bin/env python
# Script to import some test data into the db. Usually we should get a warning
# due to the bad formatting of the date, which is missing the time zone flag.
# Copy and execute this directly into the django shell.
from sest.models import *
from datetime import datetime
u = User.objects.create(username="test", email="test@example.com")
# c = Channel(title="test", user=u, 12345678, datetime.now())
c = Channel.objects.create(user=u, number_fields=2)
c.fieldmetadata_set.create(field_no=1, encoding='float')
c.fieldmetadata_set.create(field_no=2, encoding='float')
with open("sample_without_header.csv") as fo:
for line in fo:
line = line.strip().split(',')
dt, _id, t, h = line
dt = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S UTC')
t, h = float(t), float(h)
r = Record.objects.create(channel=c, insertion_time=dt, id=_id)
r.field_set.create(field_no=1, val=t)
r.field_set.create(field_no=2, val=h)
Update test data importer script.
#!/usr/bin/env python
# Script to import some test data into the db. Usually we should get a warning
# due to the bad formatting of the date, which is missing the time zone flag.
# Copy and execute this directly into the django shell.
from sest.models import *
# from datetime import datetime
u = User.objects.create(username="test", email="test@example.com")
# c = Channel(title="test", user=u, 12345678, datetime.now())
c = Channel.objects.create(user=u, number_fields=2)
c.fieldmetadata_set.create(field_no=1, encoding='float')
c.fieldmetadata_set.create(field_no=2, encoding='float')
with open("sample_without_header.csv") as fo:
for line in fo:
line = line.strip().split(',')
dt, _id, t, h = line
# dt = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S UTC')
t, h = float(t), float(h)
# r = Record.objects.create(channel=c, insertion_time=dt, id=_id)
r = Record.objects.create(channel=c, id=_id)
r.field_set.create(field_no=1, val=t)
r.field_set.create(field_no=2, val=h)
|
<commit_before>#!/usr/bin/env python
# Script to import some test data into the db. Usually we should get a warning
# due to the bad formatting of the date, which is missing the time zone flag.
# Copy and execute this directly into the django shell.
from sest.models import *
from datetime import datetime
u = User.objects.create(username="test", email="test@example.com")
# c = Channel(title="test", user=u, 12345678, datetime.now())
c = Channel.objects.create(user=u, number_fields=2)
c.fieldmetadata_set.create(field_no=1, encoding='float')
c.fieldmetadata_set.create(field_no=2, encoding='float')
with open("sample_without_header.csv") as fo:
for line in fo:
line = line.strip().split(',')
dt, _id, t, h = line
dt = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S UTC')
t, h = float(t), float(h)
r = Record.objects.create(channel=c, insertion_time=dt, id=_id)
r.field_set.create(field_no=1, val=t)
r.field_set.create(field_no=2, val=h)
<commit_msg>Update test data importer script.<commit_after>#!/usr/bin/env python
# Script to import some test data into the db. Usually we should get a warning
# due to the bad formatting of the date, which is missing the time zone flag.
# Copy and execute this directly into the django shell.
from sest.models import *
# from datetime import datetime
u = User.objects.create(username="test", email="test@example.com")
# c = Channel(title="test", user=u, 12345678, datetime.now())
c = Channel.objects.create(user=u, number_fields=2)
c.fieldmetadata_set.create(field_no=1, encoding='float')
c.fieldmetadata_set.create(field_no=2, encoding='float')
with open("sample_without_header.csv") as fo:
for line in fo:
line = line.strip().split(',')
dt, _id, t, h = line
# dt = datetime.strptime(dt, '%Y-%m-%d %H:%M:%S UTC')
t, h = float(t), float(h)
# r = Record.objects.create(channel=c, insertion_time=dt, id=_id)
r = Record.objects.create(channel=c, id=_id)
r.field_set.create(field_no=1, val=t)
r.field_set.create(field_no=2, val=h)
|
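The commit above drops the insertion_time parsing because the CSV timestamps are naive (no zone), which is what triggers the warning the header comment mentions. A hedged sketch, standard library only and not what the commit does, of keeping the parse while producing an aware datetime:

from datetime import datetime, timezone

dt = datetime.strptime('2017-06-01 12:30:00 UTC', '%Y-%m-%d %H:%M:%S %Z')
dt = dt.replace(tzinfo=timezone.utc)  # attach the UTC zone explicitly
print(dt.isoformat())                 # 2017-06-01T12:30:00+00:00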
2eae88ca423a60579e9b8572b0d4bcecbe2d8631
|
utils/HTTPResponseParser.py
|
utils/HTTPResponseParser.py
|
# Utility to parse HTTP responses
# http://pythonwise.blogspot.com/2010/02/parse-http-response.html
from StringIO import StringIO
from httplib import HTTPResponse
class FakeSocket(StringIO):
def makefile(self, *args, **kw):
return self
def parse_http_response(sock):
try:
# H4ck to standardize the API between sockets and SSLConnection objects
# sock is a Python socket
sock.read = sock.recv
except AttributeError:
# sock is an SSLConnection
pass
response = sock.read(4096)
if 'HTTP/' not in response:
# Try to get the rest of the response
response += sock.read(4096)
fake_sock = FakeSocket(response)
response = HTTPResponse(fake_sock)
response.begin()
return response
|
# Utility to parse HTTP responses
# http://pythonwise.blogspot.com/2010/02/parse-http-response.html
from StringIO import StringIO
from httplib import HTTPResponse
class FakeSocket(StringIO):
def makefile(self, *args, **kw):
return self
def parse_http_response(sock):
try:
# H4ck to standardize the API between sockets and SSLConnection objects
response = sock.read(4096)
except AttributeError:
response = sock.recv(4096)
if 'HTTP/' not in response:
# Try to get the rest of the response
try:
response += sock.read(4096)
except AttributeError:
response += sock.recv(4096)
fake_sock = FakeSocket(response)
response = HTTPResponse(fake_sock)
response.begin()
return response
|
Tweak the hack to fix bug when scanning through a proxy
|
Tweak the hack to fix bug when scanning through a proxy
|
Python
|
agpl-3.0
|
nabla-c0d3/sslyze
|
# Utility to parse HTTP responses
# http://pythonwise.blogspot.com/2010/02/parse-http-response.html
from StringIO import StringIO
from httplib import HTTPResponse
class FakeSocket(StringIO):
def makefile(self, *args, **kw):
return self
def parse_http_response(sock):
try:
# H4ck to standardize the API between sockets and SSLConnection objects
# sock is a Python socket
sock.read = sock.recv
except AttributeError:
# sock is an SSLConnection
pass
response = sock.read(4096)
if 'HTTP/' not in response:
# Try to get the rest of the response
response += sock.read(4096)
fake_sock = FakeSocket(response)
response = HTTPResponse(fake_sock)
response.begin()
return response
Tweak the hack to fix bug when scanning through a proxy
|
# Utility to parse HTTP responses
# http://pythonwise.blogspot.com/2010/02/parse-http-response.html
from StringIO import StringIO
from httplib import HTTPResponse
class FakeSocket(StringIO):
def makefile(self, *args, **kw):
return self
def parse_http_response(sock):
try:
# H4ck to standardize the API between sockets and SSLConnection objects
response = sock.read(4096)
except AttributeError:
response = sock.recv(4096)
if 'HTTP/' not in response:
# Try to get the rest of the response
try:
response += sock.read(4096)
except AttributeError:
response += sock.recv(4096)
fake_sock = FakeSocket(response)
response = HTTPResponse(fake_sock)
response.begin()
return response
|
<commit_before>
# Utility to parse HTTP responses
# http://pythonwise.blogspot.com/2010/02/parse-http-response.html
from StringIO import StringIO
from httplib import HTTPResponse
class FakeSocket(StringIO):
def makefile(self, *args, **kw):
return self
def parse_http_response(sock):
try:
# H4ck to standardize the API between sockets and SSLConnection objects
# sock is a Python socket
sock.read = sock.recv
except AttributeError:
# sock is an SSLConnection
pass
response = sock.read(4096)
if 'HTTP/' not in response:
# Try to get the rest of the response
response += sock.read(4096)
fake_sock = FakeSocket(response)
response = HTTPResponse(fake_sock)
response.begin()
return response
<commit_msg>Tweak the hack to fix bug when scanning through a proxy<commit_after>
|
# Utility to parse HTTP responses
# http://pythonwise.blogspot.com/2010/02/parse-http-response.html
from StringIO import StringIO
from httplib import HTTPResponse
class FakeSocket(StringIO):
def makefile(self, *args, **kw):
return self
def parse_http_response(sock):
try:
# H4ck to standardize the API between sockets and SSLConnection objects
response = sock.read(4096)
except AttributeError:
response = sock.recv(4096)
if 'HTTP/' not in response:
# Try to get the rest of the response
try:
response += sock.read(4096)
except AttributeError:
response += sock.recv(4096)
fake_sock = FakeSocket(response)
response = HTTPResponse(fake_sock)
response.begin()
return response
|
# Utility to parse HTTP responses
# http://pythonwise.blogspot.com/2010/02/parse-http-response.html
from StringIO import StringIO
from httplib import HTTPResponse
class FakeSocket(StringIO):
def makefile(self, *args, **kw):
return self
def parse_http_response(sock):
try:
# H4ck to standardize the API between sockets and SSLConnection objects
# sock is a Python socket
sock.read = sock.recv
except AttributeError:
# sock is an SSLConnection
pass
response = sock.read(4096)
if 'HTTP/' not in response:
# Try to get the rest of the response
response += sock.read(4096)
fake_sock = FakeSocket(response)
response = HTTPResponse(fake_sock)
response.begin()
return response
Tweak the hack to fix bug when scanning through a proxy
# Utility to parse HTTP responses
# http://pythonwise.blogspot.com/2010/02/parse-http-response.html
from StringIO import StringIO
from httplib import HTTPResponse
class FakeSocket(StringIO):
def makefile(self, *args, **kw):
return self
def parse_http_response(sock):
try:
# H4ck to standardize the API between sockets and SSLConnection objects
response = sock.read(4096)
except AttributeError:
response = sock.recv(4096)
if 'HTTP/' not in response:
# Try to get the rest of the response
try:
response += sock.read(4096)
except AttributeError:
response += sock.recv(4096)
fake_sock = FakeSocket(response)
response = HTTPResponse(fake_sock)
response.begin()
return response
|
<commit_before>
# Utility to parse HTTP responses
# http://pythonwise.blogspot.com/2010/02/parse-http-response.html
from StringIO import StringIO
from httplib import HTTPResponse
class FakeSocket(StringIO):
def makefile(self, *args, **kw):
return self
def parse_http_response(sock):
try:
# H4ck to standardize the API between sockets and SSLConnection objects
# sock is a Python socket
sock.read = sock.recv
except AttributeError:
# sock is an SSLConnection
pass
response = sock.read(4096)
if 'HTTP/' not in response:
# Try to get the rest of the response
response += sock.read(4096)
fake_sock = FakeSocket(response)
response = HTTPResponse(fake_sock)
response.begin()
return response
<commit_msg>Tweak the hack to fix bug when scanning through a proxy<commit_after>
# Utility to parse HTTP responses
# http://pythonwise.blogspot.com/2010/02/parse-http-response.html
from StringIO import StringIO
from httplib import HTTPResponse
class FakeSocket(StringIO):
def makefile(self, *args, **kw):
return self
def parse_http_response(sock):
try:
# H4ck to standardize the API between sockets and SSLConnection objects
response = sock.read(4096)
except AttributeError:
response = sock.recv(4096)
if 'HTTP/' not in response:
# Try to get the rest of the response
try:
response += sock.read(4096)
except AttributeError:
response += sock.recv(4096)
fake_sock = FakeSocket(response)
response = HTTPResponse(fake_sock)
response.begin()
return response
|
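The rewrite above is a straight EAFP duck-typing move: try the SSLConnection-style read() first and fall back to the socket's recv() on AttributeError, instead of monkey-patching recv onto the object (the approach the commit message ties to failures when scanning through a proxy). Reduced to its core, with a hypothetical helper name:

def read_chunk(sock, size=4096):
    # Works for any object exposing either read() or recv().
    try:
        return sock.read(size)
    except AttributeError:
        return sock.recv(size)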
e395d32770c2a4f7a2e4cab98d0a459e690ffeba
|
zeus/api/schemas/job.py
|
zeus/api/schemas/job.py
|
from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute="date_created", dump_only=True)
started_at = fields.DateTime(attribute="date_started", allow_none=True)
finished_at = fields.DateTime(attribute="date_finished", allow_none=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
|
from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute='date_created', dump_only=True)
started_at = fields.DateTime(attribute='date_started', allow_none=True)
finished_at = fields.DateTime(attribute='date_finished', allow_none=True)
updated_at = fields.DateTime(attribute='date_updated', dump_only=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
|
Add updated_at to Job schema
|
feat: Add updated_at to Job schema
|
Python
|
apache-2.0
|
getsentry/zeus,getsentry/zeus,getsentry/zeus,getsentry/zeus
|
from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute="date_created", dump_only=True)
started_at = fields.DateTime(attribute="date_started", allow_none=True)
finished_at = fields.DateTime(attribute="date_finished", allow_none=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
feat: Add updated_at to Job schema
|
from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute='date_created', dump_only=True)
started_at = fields.DateTime(attribute='date_started', allow_none=True)
finished_at = fields.DateTime(attribute='date_finished', allow_none=True)
updated_at = fields.DateTime(attribute='date_updated', dump_only=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
|
<commit_before>from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute="date_created", dump_only=True)
started_at = fields.DateTime(attribute="date_started", allow_none=True)
finished_at = fields.DateTime(attribute="date_finished", allow_none=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
<commit_msg>feat: Add updated_at to Job schema<commit_after>
|
from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute='date_created', dump_only=True)
started_at = fields.DateTime(attribute='date_started', allow_none=True)
finished_at = fields.DateTime(attribute='date_finished', allow_none=True)
updated_at = fields.DateTime(attribute='date_updated', dump_only=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
|
from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute="date_created", dump_only=True)
started_at = fields.DateTime(attribute="date_started", allow_none=True)
finished_at = fields.DateTime(attribute="date_finished", allow_none=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
feat: Add updated_at to Job schema
from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute='date_created', dump_only=True)
started_at = fields.DateTime(attribute='date_started', allow_none=True)
finished_at = fields.DateTime(attribute='date_finished', allow_none=True)
updated_at = fields.DateTime(attribute='date_updated', dump_only=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
|
<commit_before>from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute="date_created", dump_only=True)
started_at = fields.DateTime(attribute="date_started", allow_none=True)
finished_at = fields.DateTime(attribute="date_finished", allow_none=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
<commit_msg>feat: Add updated_at to Job schema<commit_after>from marshmallow import Schema, fields
from .failurereason import FailureReasonSchema
from .fields import ResultField, StatusField
from .stats import StatsSchema
class JobSchema(Schema):
id = fields.UUID(dump_only=True)
number = fields.Integer(dump_only=True)
created_at = fields.DateTime(attribute='date_created', dump_only=True)
started_at = fields.DateTime(attribute='date_started', allow_none=True)
finished_at = fields.DateTime(attribute='date_finished', allow_none=True)
updated_at = fields.DateTime(attribute='date_updated', dump_only=True)
label = fields.Str()
status = StatusField()
result = ResultField()
stats = fields.Nested(StatsSchema(), dump_only=True)
# XXX(dcramer): these should be dump_only in normal cases, but not via hooks
provider = fields.Str()
external_id = fields.Str()
url = fields.Str(allow_none=True)
failures = fields.List(fields.Nested(FailureReasonSchema), dump_only=True)
allow_failure = fields.Bool(default=False)
|
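Two things the diff above illustrates: attribute='date_updated' maps the model attribute onto a differently named key in the serialized payload, and dump_only=True keeps the field serialization-only, so it is ignored on load. A minimal marshmallow sketch with a hypothetical schema name:

from marshmallow import Schema, fields

class ExampleSchema(Schema):
    updated_at = fields.DateTime(attribute='date_updated', dump_only=True)

(The commit also happens to flip the surrounding string literals from double to single quotes; behaviour is unchanged.)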
ec9e401cd083c095c916055d04fc049a6dbc8ab1
|
ui/tcmui/core/management/commands/create_company.py
|
ui/tcmui/core/management/commands/create_company.py
|
from django.core.management.base import BaseCommand, CommandError
from ...api import admin
from ...models import Company, CompanyList
from ....users.models import Role, RoleList, PermissionList
DEFAULT_NEW_USER_ROLE_PERMISSIONS = set([
"PERMISSION_COMPANY_INFO_VIEW",
"PERMISSION_PRODUCT_VIEW",
"PERMISSION_TEST_CASE_VIEW",
"PERMISSION_TEST_CASE_EDIT",
"PERMISSION_TEST_CYCLE_VIEW",
"PERMISSION_TEST_RUN_VIEW",
"PERMISSION_TEST_RUN_EDIT",
"PERMISSION_TEST_RUN_ASSIGNMENT_EXECUTE",
"PERMISSION_ENVIRONMENT_VIEW",
])
class Command(BaseCommand):
help = ("Create a company resource and associated default new user role "
"with appropriate tester permissions.")
args = '"Company Name"'
def handle(self, *args, **options):
if not args:
raise CommandError("Company name is required.")
name = " ".join(args)
# @@@ U.S. is country ID 239, un-hardcode this
company = Company(name=name, country=239)
CompanyList.get(auth=admin).post(company)
role = Role(name="%s Tester" % name, company=company)
RoleList.get(auth=admin).post(role)
permissions = [p for p in PermissionList.get(auth=admin)
if p.permissionCode in DEFAULT_NEW_USER_ROLE_PERMISSIONS]
role.permissions = permissions
print "Created company id %s and role id %s." % (company.id, role.id)
|
from django.core.management.base import BaseCommand, CommandError
from ...api import admin
from ...models import Company, CompanyList
from ....users.models import Role, RoleList, PermissionList
DEFAULT_NEW_USER_ROLE_PERMISSIONS = set([
"PERMISSION_COMPANY_INFO_VIEW",
"PERMISSION_PRODUCT_VIEW",
"PERMISSION_TEST_CASE_VIEW",
"PERMISSION_TEST_CASE_EDIT",
"PERMISSION_TEST_CYCLE_VIEW",
"PERMISSION_TEST_RUN_VIEW",
"PERMISSION_TEST_RUN_ASSIGNMENT_EXECUTE",
"PERMISSION_ENVIRONMENT_VIEW",
])
class Command(BaseCommand):
help = ("Create a company resource and associated default new user role "
"with appropriate tester permissions.")
args = '"Company Name"'
def handle(self, *args, **options):
if not args:
raise CommandError("Company name is required.")
name = " ".join(args)
# @@@ U.S. is country ID 239, un-hardcode this
company = Company(name=name, country=239)
CompanyList.get(auth=admin).post(company)
role = Role(name="%s Tester" % name, company=company)
RoleList.get(auth=admin).post(role)
permissions = [p for p in PermissionList.get(auth=admin)
if p.permissionCode in DEFAULT_NEW_USER_ROLE_PERMISSIONS]
role.permissions = permissions
print "Created company id %s and role id %s." % (company.id, role.id)
|
Remove unneeded TEST_RUN_EDIT permission from default new user role.
|
Remove unneeded TEST_RUN_EDIT permission from default new user role.
|
Python
|
bsd-2-clause
|
mozilla/moztrap,mozilla/moztrap,mozilla/moztrap,shinglyu/moztrap,mozilla/moztrap,bobsilverberg/moztrap,mccarrmb/moztrap,bobsilverberg/moztrap,shinglyu/moztrap,shinglyu/moztrap,bobsilverberg/moztrap,shinglyu/moztrap,shinglyu/moztrap,bobsilverberg/moztrap,mccarrmb/moztrap,mozilla/moztrap,mccarrmb/moztrap,mccarrmb/moztrap,mccarrmb/moztrap
|
from django.core.management.base import BaseCommand, CommandError
from ...api import admin
from ...models import Company, CompanyList
from ....users.models import Role, RoleList, PermissionList
DEFAULT_NEW_USER_ROLE_PERMISSIONS = set([
"PERMISSION_COMPANY_INFO_VIEW",
"PERMISSION_PRODUCT_VIEW",
"PERMISSION_TEST_CASE_VIEW",
"PERMISSION_TEST_CASE_EDIT",
"PERMISSION_TEST_CYCLE_VIEW",
"PERMISSION_TEST_RUN_VIEW",
"PERMISSION_TEST_RUN_EDIT",
"PERMISSION_TEST_RUN_ASSIGNMENT_EXECUTE",
"PERMISSION_ENVIRONMENT_VIEW",
])
class Command(BaseCommand):
help = ("Create a company resource and associated default new user role "
"with appropriate tester permissions.")
args = '"Company Name"'
def handle(self, *args, **options):
if not args:
raise CommandError("Company name is required.")
name = " ".join(args)
# @@@ U.S. is country ID 239, un-hardcode this
company = Company(name=name, country=239)
CompanyList.get(auth=admin).post(company)
role = Role(name="%s Tester" % name, company=company)
RoleList.get(auth=admin).post(role)
permissions = [p for p in PermissionList.get(auth=admin)
if p.permissionCode in DEFAULT_NEW_USER_ROLE_PERMISSIONS]
role.permissions = permissions
print "Created company id %s and role id %s." % (company.id, role.id)
Remove unneeded TEST_RUN_EDIT permission from default new user role.
|
from django.core.management.base import BaseCommand, CommandError
from ...api import admin
from ...models import Company, CompanyList
from ....users.models import Role, RoleList, PermissionList
DEFAULT_NEW_USER_ROLE_PERMISSIONS = set([
"PERMISSION_COMPANY_INFO_VIEW",
"PERMISSION_PRODUCT_VIEW",
"PERMISSION_TEST_CASE_VIEW",
"PERMISSION_TEST_CASE_EDIT",
"PERMISSION_TEST_CYCLE_VIEW",
"PERMISSION_TEST_RUN_VIEW",
"PERMISSION_TEST_RUN_ASSIGNMENT_EXECUTE",
"PERMISSION_ENVIRONMENT_VIEW",
])
class Command(BaseCommand):
help = ("Create a company resource and associated default new user role "
"with appropriate tester permissions.")
args = '"Company Name"'
def handle(self, *args, **options):
if not args:
raise CommandError("Company name is required.")
name = " ".join(args)
# @@@ U.S. is country ID 239, un-hardcode this
company = Company(name=name, country=239)
CompanyList.get(auth=admin).post(company)
role = Role(name="%s Tester" % name, company=company)
RoleList.get(auth=admin).post(role)
permissions = [p for p in PermissionList.get(auth=admin)
if p.permissionCode in DEFAULT_NEW_USER_ROLE_PERMISSIONS]
role.permissions = permissions
print "Created company id %s and role id %s." % (company.id, role.id)
|
<commit_before>from django.core.management.base import BaseCommand, CommandError
from ...api import admin
from ...models import Company, CompanyList
from ....users.models import Role, RoleList, PermissionList
DEFAULT_NEW_USER_ROLE_PERMISSIONS = set([
"PERMISSION_COMPANY_INFO_VIEW",
"PERMISSION_PRODUCT_VIEW",
"PERMISSION_TEST_CASE_VIEW",
"PERMISSION_TEST_CASE_EDIT",
"PERMISSION_TEST_CYCLE_VIEW",
"PERMISSION_TEST_RUN_VIEW",
"PERMISSION_TEST_RUN_EDIT",
"PERMISSION_TEST_RUN_ASSIGNMENT_EXECUTE",
"PERMISSION_ENVIRONMENT_VIEW",
])
class Command(BaseCommand):
help = ("Create a company resource and associated default new user role "
"with appropriate tester permissions.")
args = '"Company Name"'
def handle(self, *args, **options):
if not args:
raise CommandError("Company name is required.")
name = " ".join(args)
# @@@ U.S. is country ID 239, un-hardcode this
company = Company(name=name, country=239)
CompanyList.get(auth=admin).post(company)
role = Role(name="%s Tester" % name, company=company)
RoleList.get(auth=admin).post(role)
permissions = [p for p in PermissionList.get(auth=admin)
if p.permissionCode in DEFAULT_NEW_USER_ROLE_PERMISSIONS]
role.permissions = permissions
print "Created company id %s and role id %s." % (company.id, role.id)
<commit_msg>Remove unneeded TEST_RUN_EDIT permission from default new user role.<commit_after>
|
from django.core.management.base import BaseCommand, CommandError
from ...api import admin
from ...models import Company, CompanyList
from ....users.models import Role, RoleList, PermissionList
DEFAULT_NEW_USER_ROLE_PERMISSIONS = set([
"PERMISSION_COMPANY_INFO_VIEW",
"PERMISSION_PRODUCT_VIEW",
"PERMISSION_TEST_CASE_VIEW",
"PERMISSION_TEST_CASE_EDIT",
"PERMISSION_TEST_CYCLE_VIEW",
"PERMISSION_TEST_RUN_VIEW",
"PERMISSION_TEST_RUN_ASSIGNMENT_EXECUTE",
"PERMISSION_ENVIRONMENT_VIEW",
])
class Command(BaseCommand):
help = ("Create a company resource and associated default new user role "
"with appropriate tester permissions.")
args = '"Company Name"'
def handle(self, *args, **options):
if not args:
raise CommandError("Company name is required.")
name = " ".join(args)
# @@@ U.S. is country ID 239, un-hardcode this
company = Company(name=name, country=239)
CompanyList.get(auth=admin).post(company)
role = Role(name="%s Tester" % name, company=company)
RoleList.get(auth=admin).post(role)
permissions = [p for p in PermissionList.get(auth=admin)
if p.permissionCode in DEFAULT_NEW_USER_ROLE_PERMISSIONS]
role.permissions = permissions
print "Created company id %s and role id %s." % (company.id, role.id)
|
from django.core.management.base import BaseCommand, CommandError
from ...api import admin
from ...models import Company, CompanyList
from ....users.models import Role, RoleList, PermissionList
DEFAULT_NEW_USER_ROLE_PERMISSIONS = set([
"PERMISSION_COMPANY_INFO_VIEW",
"PERMISSION_PRODUCT_VIEW",
"PERMISSION_TEST_CASE_VIEW",
"PERMISSION_TEST_CASE_EDIT",
"PERMISSION_TEST_CYCLE_VIEW",
"PERMISSION_TEST_RUN_VIEW",
"PERMISSION_TEST_RUN_EDIT",
"PERMISSION_TEST_RUN_ASSIGNMENT_EXECUTE",
"PERMISSION_ENVIRONMENT_VIEW",
])
class Command(BaseCommand):
help = ("Create a company resource and associated default new user role "
"with appropriate tester permissions.")
args = '"Company Name"'
def handle(self, *args, **options):
if not args:
raise CommandError("Company name is required.")
name = " ".join(args)
# @@@ U.S. is country ID 239, un-hardcode this
company = Company(name=name, country=239)
CompanyList.get(auth=admin).post(company)
role = Role(name="%s Tester" % name, company=company)
RoleList.get(auth=admin).post(role)
permissions = [p for p in PermissionList.get(auth=admin)
if p.permissionCode in DEFAULT_NEW_USER_ROLE_PERMISSIONS]
role.permissions = permissions
print "Created company id %s and role id %s." % (company.id, role.id)
Remove unneeded TEST_RUN_EDIT permission from default new user role.
from django.core.management.base import BaseCommand, CommandError
from ...api import admin
from ...models import Company, CompanyList
from ....users.models import Role, RoleList, PermissionList
DEFAULT_NEW_USER_ROLE_PERMISSIONS = set([
"PERMISSION_COMPANY_INFO_VIEW",
"PERMISSION_PRODUCT_VIEW",
"PERMISSION_TEST_CASE_VIEW",
"PERMISSION_TEST_CASE_EDIT",
"PERMISSION_TEST_CYCLE_VIEW",
"PERMISSION_TEST_RUN_VIEW",
"PERMISSION_TEST_RUN_ASSIGNMENT_EXECUTE",
"PERMISSION_ENVIRONMENT_VIEW",
])
class Command(BaseCommand):
help = ("Create a company resource and associated default new user role "
"with appropriate tester permissions.")
args = '"Company Name"'
def handle(self, *args, **options):
if not args:
raise CommandError("Company name is required.")
name = " ".join(args)
# @@@ U.S. is country ID 239, un-hardcode this
company = Company(name=name, country=239)
CompanyList.get(auth=admin).post(company)
role = Role(name="%s Tester" % name, company=company)
RoleList.get(auth=admin).post(role)
permissions = [p for p in PermissionList.get(auth=admin)
if p.permissionCode in DEFAULT_NEW_USER_ROLE_PERMISSIONS]
role.permissions = permissions
print "Created company id %s and role id %s." % (company.id, role.id)
|
<commit_before>from django.core.management.base import BaseCommand, CommandError
from ...api import admin
from ...models import Company, CompanyList
from ....users.models import Role, RoleList, PermissionList
DEFAULT_NEW_USER_ROLE_PERMISSIONS = set([
"PERMISSION_COMPANY_INFO_VIEW",
"PERMISSION_PRODUCT_VIEW",
"PERMISSION_TEST_CASE_VIEW",
"PERMISSION_TEST_CASE_EDIT",
"PERMISSION_TEST_CYCLE_VIEW",
"PERMISSION_TEST_RUN_VIEW",
"PERMISSION_TEST_RUN_EDIT",
"PERMISSION_TEST_RUN_ASSIGNMENT_EXECUTE",
"PERMISSION_ENVIRONMENT_VIEW",
])
class Command(BaseCommand):
help = ("Create a company resource and associated default new user role "
"with appropriate tester permissions.")
args = '"Company Name"'
def handle(self, *args, **options):
if not args:
raise CommandError("Company name is required.")
name = " ".join(args)
# @@@ U.S. is country ID 239, un-hardcode this
company = Company(name=name, country=239)
CompanyList.get(auth=admin).post(company)
role = Role(name="%s Tester" % name, company=company)
RoleList.get(auth=admin).post(role)
permissions = [p for p in PermissionList.get(auth=admin)
if p.permissionCode in DEFAULT_NEW_USER_ROLE_PERMISSIONS]
role.permissions = permissions
print "Created company id %s and role id %s." % (company.id, role.id)
<commit_msg>Remove unneeded TEST_RUN_EDIT permission from default new user role.<commit_after>from django.core.management.base import BaseCommand, CommandError
from ...api import admin
from ...models import Company, CompanyList
from ....users.models import Role, RoleList, PermissionList
DEFAULT_NEW_USER_ROLE_PERMISSIONS = set([
"PERMISSION_COMPANY_INFO_VIEW",
"PERMISSION_PRODUCT_VIEW",
"PERMISSION_TEST_CASE_VIEW",
"PERMISSION_TEST_CASE_EDIT",
"PERMISSION_TEST_CYCLE_VIEW",
"PERMISSION_TEST_RUN_VIEW",
"PERMISSION_TEST_RUN_ASSIGNMENT_EXECUTE",
"PERMISSION_ENVIRONMENT_VIEW",
])
class Command(BaseCommand):
help = ("Create a company resource and associated default new user role "
"with appropriate tester permissions.")
args = '"Company Name"'
def handle(self, *args, **options):
if not args:
raise CommandError("Company name is required.")
name = " ".join(args)
# @@@ U.S. is country ID 239, un-hardcode this
company = Company(name=name, country=239)
CompanyList.get(auth=admin).post(company)
role = Role(name="%s Tester" % name, company=company)
RoleList.get(auth=admin).post(role)
permissions = [p for p in PermissionList.get(auth=admin)
if p.permissionCode in DEFAULT_NEW_USER_ROLE_PERMISSIONS]
role.permissions = permissions
print "Created company id %s and role id %s." % (company.id, role.id)
|
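Aside on the record above (a sketch, not part of the dataset): the command uses the legacy `args` attribute and hand-parses `*args`, which Django deprecated in 1.8 in favour of `add_arguments()`. A minimal equivalent in the newer style, with the company/role creation calls elided because they depend on the project's `api` and model helpers:
# Hypothetical rewrite for Django >= 1.8; only the argument handling is shown.
from django.core.management.base import BaseCommand, CommandError


class Command(BaseCommand):
    help = ("Create a company resource and associated default new user role "
            "with appropriate tester permissions.")

    def add_arguments(self, parser):
        # One required positional argument replaces the old `args` attribute.
        parser.add_argument("name", help='Company name, e.g. "Acme Inc."')

    def handle(self, *args, **options):
        name = options["name"].strip()
        if not name:
            raise CommandError("Company name is required.")
        # ... create the Company and Role here, as in the record above ...
        self.stdout.write("Created company %r" % name)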
ad0e14561a4fe0cfa659bd99678b0d82de892dc5
|
helpers/text.py
|
helpers/text.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')\
.replace('!','').replace('?', '').replace('"', '')\
.replace('#', '').replace('%', '').replace('%', '')\
.replace('(', '').replace(')', '').replace('*', '')\
.replace("'", '').replace(',', '').replace('.', '')\
.replace('/', '').replace(':', '').replace(';', '')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
|
Add symbols to the list of symbols to replace in the slugify function
|
Add symbols to the list of symbols to replace in the slugify function
|
Python
|
mit
|
finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is,finnurtorfa/aflafrettir.is
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
Add symbols to the list of symbols to replace in the slugify function
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')\
.replace('!','').replace('?', '').replace('"', '')\
.replace('#', '').replace('%', '').replace('%', '')\
.replace('(', '').replace(')', '').replace('*', '')\
.replace("'", '').replace(',', '').replace('.', '')\
.replace('/', '').replace(':', '').replace(';', '')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
<commit_msg>Add symbols to the list of symbols to replace in the slugify function<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')\
.replace('!','').replace('?', '').replace('"', '')\
.replace('#', '').replace('%', '').replace('%', '')\
.replace('(', '').replace(')', '').replace('*', '')\
.replace("'", '').replace(',', '').replace('.', '')\
.replace('/', '').replace(':', '').replace(';', '')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
Add symbols to the list of symbols to replace in the slugify function
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')\
.replace('!','').replace('?', '').replace('"', '')\
.replace('#', '').replace('%', '').replace('%', '')\
.replace('(', '').replace(')', '').replace('*', '')\
.replace("'", '').replace(',', '').replace('.', '')\
.replace('/', '').replace(':', '').replace(';', '')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
<commit_msg>Add symbols to the list of symbols to replace in the slugify function<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import unicodedata
from html.parser import HTMLParser
class HTMLStripper(HTMLParser):
def __init__(self):
super(HTMLStripper, self).__init__()
self.reset()
self.fed = []
def handle_starttag(self, tag, attrs):
pass
def handle_endtag(self, tag):
pass
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ''.join(self.fed)
def slugify(string):
string = string.replace('æ', 'ae').replace('ð','d').replace('þ','th')\
.replace('!','').replace('?', '').replace('"', '')\
.replace('#', '').replace('%', '').replace('%', '')\
.replace('(', '').replace(')', '').replace('*', '')\
.replace("'", '').replace(',', '').replace('.', '')\
.replace('/', '').replace(':', '').replace(';', '')
return unicodedata.normalize('NFKD', string)\
.lower().replace(' ', '-').encode('ascii', 'ignore')
def remove_html_tags(string):
s = HTMLStripper()
s.feed(string)
return s.get_data()
def truncate(string, length=250, suffix='...'):
if len(string) <= length:
return string
else:
return ' '.join(string[:length+1].split(' ')[0:-1]) + suffix
|
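Aside (sketch only, not from the repository): the new contents above chain a dozen `.replace()` calls, including `.replace('%', '')` twice, to strip punctuation one character at a time. After transliterating the Icelandic letters, a single regular expression handles every non-alphanumeric character at once; unlike the original, this variant returns `str` rather than ASCII bytes:
import re
import unicodedata

# Transliteration table for the Icelandic letters handled in the record above.
ICELANDIC = {ord('æ'): 'ae', ord('ð'): 'd', ord('þ'): 'th'}


def slugify(string):
    string = string.lower().translate(ICELANDIC)
    string = unicodedata.normalize('NFKD', string)
    string = string.encode('ascii', 'ignore').decode('ascii')
    # Collapse every run of non-alphanumeric characters into one hyphen.
    return re.sub(r'[^a-z0-9]+', '-', string).strip('-')


print(slugify('Þorskur & ýsa, 100%!'))  # -> 'thorskur-ysa-100'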
243cf3c18228b0c50b6b48a69c420922576ed723
|
grano/logic/plugins.py
|
grano/logic/plugins.py
|
import logging
from grano.model import Entity, Relation, Project, Schema
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in Schema.all():
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
for i, entity in enumerate(Entity.all().filter_by(same_as=None)):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
for i, relation in enumerate(Relation.all()):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
|
import logging
from grano.model import Entity, Relation, Project
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in project.schemata:
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
eq = Entity.all().filter_by(same_as=None)
eq = eq.filter_by(project=project)
for i, entity in enumerate(eq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
rq = Relation.all().filter_by(project=project)
for i, relation in enumerate(rq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
|
Rebuild by project, not by type.
|
Rebuild by project, not by type.
|
Python
|
mit
|
4bic-attic/grano,granoproject/grano,CodeForAfrica/grano,4bic/grano
|
import logging
from grano.model import Entity, Relation, Project, Schema
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in Schema.all():
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
for i, entity in enumerate(Entity.all().filter_by(same_as=None)):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
for i, relation in enumerate(Relation.all()):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
Rebuild by project, not by type.
|
import logging
from grano.model import Entity, Relation, Project
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in project.schemata:
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
eq = Entity.all().filter_by(same_as=None)
eq = eq.filter_by(project=project)
for i, entity in enumerate(eq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
rq = Relation.all().filter_by(project=project)
for i, relation in enumerate(rq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
|
<commit_before>import logging
from grano.model import Entity, Relation, Project, Schema
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in Schema.all():
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
for i, entity in enumerate(Entity.all().filter_by(same_as=None)):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
for i, relation in enumerate(Relation.all()):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
<commit_msg>Rebuild by project, not by type. <commit_after>
|
import logging
from grano.model import Entity, Relation, Project
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in project.schemata:
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
eq = Entity.all().filter_by(same_as=None)
eq = eq.filter_by(project=project)
for i, entity in enumerate(eq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
rq = Relation.all().filter_by(project=project)
for i, relation in enumerate(rq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
|
import logging
from grano.model import Entity, Relation, Project, Schema
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in Schema.all():
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
for i, entity in enumerate(Entity.all().filter_by(same_as=None)):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
for i, relation in enumerate(Relation.all()):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
Rebuild by project, not by type.
import logging
from grano.model import Entity, Relation, Project
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in project.schemata:
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
eq = Entity.all().filter_by(same_as=None)
eq = eq.filter_by(project=project)
for i, entity in enumerate(eq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
rq = Relation.all().filter_by(project=project)
for i, relation in enumerate(rq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
|
<commit_before>import logging
from grano.model import Entity, Relation, Project, Schema
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in Schema.all():
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
for i, entity in enumerate(Entity.all().filter_by(same_as=None)):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
for i, relation in enumerate(Relation.all()):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
<commit_msg>Rebuild by project, not by type. <commit_after>import logging
from grano.model import Entity, Relation, Project
from grano.logic.entities import _entity_changed
from grano.logic.relations import _relation_changed
from grano.logic.projects import _project_changed
from grano.logic.schemata import _schema_changed
log = logging.getLogger(__name__)
def rebuild():
""" Execute the change processing handlers for all entities and
relations currently loaded. This can be used as a housekeeping
function. """
for project in Project.all():
_project_changed(project.slug, 'delete')
_project_changed(project.slug, 'create')
for schema in project.schemata:
_schema_changed(schema.project.slug, schema.name, 'delete')
_schema_changed(schema.project.slug, schema.name, 'create')
eq = Entity.all().filter_by(same_as=None)
eq = eq.filter_by(project=project)
for i, entity in enumerate(eq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s entities", i)
_entity_changed(entity.id, 'delete')
_entity_changed(entity.id, 'create')
rq = Relation.all().filter_by(project=project)
for i, relation in enumerate(rq):
if i > 0 and i % 1000 == 0:
log.info("Rebuilt: %s relation", i)
_relation_changed(relation.id, 'delete')
_relation_changed(relation.id, 'create')
|
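Aside (sketch only): the two per-project loops in the new contents above differ only in their query and log label, so a small helper can keep the delete/create cadence and the every-1000 progress logging in one place. `_entity_changed` and `_relation_changed` are the handlers imported in the record:
import logging

log = logging.getLogger(__name__)


def rebuild_batch(items, label, changed, every=1000):
    """Run changed(item.id, 'delete') then changed(item.id, 'create') for
    each item, logging progress every `every` items."""
    for i, item in enumerate(items):
        if i > 0 and i % every == 0:
            log.info("Rebuilt: %s %s", i, label)
        changed(item.id, 'delete')
        changed(item.id, 'create')
Inside the project loop this collapses to rebuild_batch(eq, 'entities', _entity_changed) and rebuild_batch(rq, 'relations', _relation_changed).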
486978630261bddf1bccdb7f1817c6aa26f78c57
|
docs/conf.py
|
docs/conf.py
|
# -*- coding: utf-8 -*-
# Standard Libs
import datetime
import os
import sys
# Add flask_hal to the Path
root = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'..',
)
)
sys.path.append(os.path.join(root, 'flask_hal'))
# First Party Libs
import flask_hal # noqa
# Project details
project = u'Flask-HAL'
copyright = u'{0}, SOON_ London Ltd'.format(datetime.datetime.utcnow().year)
version = '2015.10.8'
release = version
# Sphinx Config
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.todo',
'sphinx.ext.napoleon']
exclude_patterns = []
# Theme
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes', ]
html_static_path = ['_static', ]
html_theme = 'kr'
html_sidebars = {
'index': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html'],
'**': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html']
}
|
# -*- coding: utf-8 -*-
# Standard Libs
import datetime
import os
import sys
# Add flask_hal to the Path
root = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'..',
)
)
sys.path.append(os.path.join(root, 'flask_hal'))
# First Party Libs
import flask_hal # noqa
# Project details
project = u'Flask-HAL'
copyright = u'{0}, SOON_ London Ltd'.format(datetime.datetime.utcnow().year)
version = open('./../VERSION.txt').read().strip()
release = version
# Sphinx Config
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.todo',
'sphinx.ext.napoleon']
exclude_patterns = []
# Theme
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes', ]
html_static_path = ['_static', ]
html_theme = 'kr'
html_sidebars = {
'index': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html'],
'**': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html']
}
|
Load version from version file
|
Load version from version file
|
Python
|
unlicense
|
thisissoon/Flask-HAL,thisissoon/Flask-HAL
|
# -*- coding: utf-8 -*-
# Standard Libs
import datetime
import os
import sys
# Add flask_hal to the Path
root = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'..',
)
)
sys.path.append(os.path.join(root, 'flask_hal'))
# First Party Libs
import flask_hal # noqa
# Project details
project = u'Flask-HAL'
copyright = u'{0}, SOON_ London Ltd'.format(datetime.datetime.utcnow().year)
version = '2015.10.8'
release = version
# Sphinx Config
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.todo',
'sphinx.ext.napoleon']
exclude_patterns = []
# Theme
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes', ]
html_static_path = ['_static', ]
html_theme = 'kr'
html_sidebars = {
'index': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html'],
'**': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html']
}
Load version from version file
|
# -*- coding: utf-8 -*-
# Standard Libs
import datetime
import os
import sys
# Add flask_hal to the Path
root = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'..',
)
)
sys.path.append(os.path.join(root, 'flask_hal'))
# First Party Libs
import flask_hal # noqa
# Project details
project = u'Flask-HAL'
copyright = u'{0}, SOON_ London Ltd'.format(datetime.datetime.utcnow().year)
version = open('./../VERSION.txt').read().strip()
release = version
# Sphinx Config
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.todo',
'sphinx.ext.napoleon']
exclude_patterns = []
# Theme
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes', ]
html_static_path = ['_static', ]
html_theme = 'kr'
html_sidebars = {
'index': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html'],
'**': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html']
}
|
<commit_before># -*- coding: utf-8 -*-
# Standard Libs
import datetime
import os
import sys
# Add flask_hal to the Path
root = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'..',
)
)
sys.path.append(os.path.join(root, 'flask_hal'))
# First Party Libs
import flask_hal # noqa
# Project details
project = u'Flask-HAL'
copyright = u'{0}, SOON_ London Ltd'.format(datetime.datetime.utcnow().year)
version = '2015.10.8'
release = version
# Sphinx Config
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.todo',
'sphinx.ext.napoleon']
exclude_patterns = []
# Theme
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes', ]
html_static_path = ['_static', ]
html_theme = 'kr'
html_sidebars = {
'index': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html'],
'**': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html']
}
<commit_msg>Load version from version file<commit_after>
|
# -*- coding: utf-8 -*-
# Standard Libs
import datetime
import os
import sys
# Add flask_hal to the Path
root = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'..',
)
)
sys.path.append(os.path.join(root, 'flask_hal'))
# First Party Libs
import flask_hal # noqa
# Project details
project = u'Flask-HAL'
copyright = u'{0}, SOON_ London Ltd'.format(datetime.datetime.utcnow().year)
version = open('./../VERSION.txt').read().strip()
release = version
# Sphinx Config
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.todo',
'sphinx.ext.napoleon']
exclude_patterns = []
# Theme
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes', ]
html_static_path = ['_static', ]
html_theme = 'kr'
html_sidebars = {
'index': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html'],
'**': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html']
}
|
# -*- coding: utf-8 -*-
# Standard Libs
import datetime
import os
import sys
# Add flask_hal to the Path
root = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'..',
)
)
sys.path.append(os.path.join(root, 'flask_hal'))
# First Party Libs
import flask_hal # noqa
# Project details
project = u'Flask-HAL'
copyright = u'{0}, SOON_ London Ltd'.format(datetime.datetime.utcnow().year)
version = '2015.10.8'
release = version
# Sphinx Config
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.todo',
'sphinx.ext.napoleon']
exclude_patterns = []
# Theme
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes', ]
html_static_path = ['_static', ]
html_theme = 'kr'
html_sidebars = {
'index': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html'],
'**': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html']
}
Load version from version file
# -*- coding: utf-8 -*-
# Standard Libs
import datetime
import os
import sys
# Add flask_hal to the Path
root = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'..',
)
)
sys.path.append(os.path.join(root, 'flask_hal'))
# First Party Libs
import flask_hal # noqa
# Project details
project = u'Flask-HAL'
copyright = u'{0}, SOON_ London Ltd'.format(datetime.datetime.utcnow().year)
version = open('./../VERSION.txt').read().strip()
release = version
# Sphinx Config
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.todo',
'sphinx.ext.napoleon']
exclude_patterns = []
# Theme
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes', ]
html_static_path = ['_static', ]
html_theme = 'kr'
html_sidebars = {
'index': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html'],
'**': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html']
}
|
<commit_before># -*- coding: utf-8 -*-
# Standard Libs
import datetime
import os
import sys
# Add flask_hal to the Path
root = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'..',
)
)
sys.path.append(os.path.join(root, 'flask_hal'))
# First Party Libs
import flask_hal # noqa
# Project details
project = u'Flask-HAL'
copyright = u'{0}, SOON_ London Ltd'.format(datetime.datetime.utcnow().year)
version = '2015.10.8'
release = version
# Sphinx Config
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.todo',
'sphinx.ext.napoleon']
exclude_patterns = []
# Theme
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes', ]
html_static_path = ['_static', ]
html_theme = 'kr'
html_sidebars = {
'index': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html'],
'**': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html']
}
<commit_msg>Load version from version file<commit_after># -*- coding: utf-8 -*-
# Standard Libs
import datetime
import os
import sys
# Add flask_hal to the Path
root = os.path.abspath(
os.path.join(
os.path.dirname(__file__),
'..',
)
)
sys.path.append(os.path.join(root, 'flask_hal'))
# First Party Libs
import flask_hal # noqa
# Project details
project = u'Flask-HAL'
copyright = u'{0}, SOON_ London Ltd'.format(datetime.datetime.utcnow().year)
version = open('./../VERSION.txt').read().strip()
release = version
# Sphinx Config
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.todo',
'sphinx.ext.napoleon']
exclude_patterns = []
# Theme
sys.path.append(os.path.abspath('_themes'))
html_theme_path = ['_themes', ]
html_static_path = ['_static', ]
html_theme = 'kr'
html_sidebars = {
'index': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html'],
'**': ['sidebar_intro.html', 'localtoc.html', 'relations.html',
'sourcelink.html', 'searchbox.html']
}
|
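Aside (sketch only): `open('./../VERSION.txt')` in the new contents resolves against the current working directory, so the docs build only finds the file when invoked from inside `docs/`. Anchoring the path to `conf.py` itself removes that dependency; this assumes `VERSION.txt` lives in the repository root, one level above `docs/`:
import os

# Resolve VERSION.txt relative to this conf.py, not to the working directory.
here = os.path.dirname(os.path.abspath(__file__))
with open(os.path.join(here, '..', 'VERSION.txt')) as fh:
    version = fh.read().strip()
release = version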
30a3764b84ec14762ebfb521820d1be9ec765952
|
htpcfrontend.py
|
htpcfrontend.py
|
from flask import Flask, render_template
from settings import *
import jsonrpclib
app = Flask(__name__)
@app.route('/')
def index():
xbmc = jsonrpclib.Server('http://%s:%s@%s:%s/jsonrpc' % (SERVER['username'], SERVER['password'], SERVER['hostname'], SERVER['port']))
episodes = xbmc.VideoLibrary.GetRecentlyAddedEpisodes()
recently_added_episodes = []
# tidy up filenames of recently added episodes
for episode in episodes['episodes'][:NUM_RECENT_EPISODES]:
filename = episode['file'].split('/').pop().replace('.', ' ')
recently_added_episodes.append(filename)
return render_template('index.html',
recently_added_episodes = recently_added_episodes,
applications = APPLICATIONS,
server = SERVER
)
if __name__ == '__main__':
app.run(debug=True)
|
from flask import Flask, render_template
from settings import *
import jsonrpclib
app = Flask(__name__)
@app.route('/')
def index():
xbmc = jsonrpclib.Server('http://%s:%s@%s:%s/jsonrpc' % (SERVER['username'], SERVER['password'], SERVER['hostname'], SERVER['port']))
episodes = xbmc.VideoLibrary.GetRecentlyAddedEpisodes()
recently_added_episodes = []
# tidy up filenames of recently added episodes
for episode in episodes['episodes'][:NUM_RECENT_EPISODES]:
filename = episode['file'].split('/').pop().replace('.', ' ')
recently_added_episodes.append(filename)
# currently playing
#currently_playing = xbmc.VideoPlaylist.GetItems(id=1)
#time = xbmc.VideoPlayer.GetTime()
return render_template('index.html',
recently_added_episodes = recently_added_episodes,
applications = APPLICATIONS,
server = SERVER
)
if __name__ == '__main__':
app.run(debug=True)
|
Revert "Revert "retrieve currently playing info (commented out)""
|
Revert "Revert "retrieve currently playing info (commented out)""
This reverts commit e14ee15116ee2137d528d298ca38e26e4f02f09f.
|
Python
|
mit
|
robweber/maraschino,awagnon/maraschino,mrkipling/maraschino,robweber/maraschino,runjmc/maraschino,awagnon/maraschino,awagnon/maraschino,mrkipling/maraschino,insertnamehere1/maraschino,mboeru/maraschino,insertnamehere1/maraschino,robweber/maraschino,insertnamehere1/maraschino,runjmc/maraschino,mboeru/maraschino,runjmc/maraschino,mboeru/maraschino,awagnon/maraschino,insertnamehere1/maraschino,mboeru/maraschino,mrkipling/maraschino,insertnamehere1/maraschino,gugahoi/maraschino,gugahoi/maraschino,mrkipling/maraschino,gugahoi/maraschino,runjmc/maraschino,mrkipling/maraschino,robweber/maraschino,mboeru/maraschino,gugahoi/maraschino,robweber/maraschino,awagnon/maraschino,runjmc/maraschino
|
from flask import Flask, render_template
from settings import *
import jsonrpclib
app = Flask(__name__)
@app.route('/')
def index():
xbmc = jsonrpclib.Server('http://%s:%s@%s:%s/jsonrpc' % (SERVER['username'], SERVER['password'], SERVER['hostname'], SERVER['port']))
episodes = xbmc.VideoLibrary.GetRecentlyAddedEpisodes()
recently_added_episodes = []
# tidy up filenames of recently added episodes
for episode in episodes['episodes'][:NUM_RECENT_EPISODES]:
filename = episode['file'].split('/').pop().replace('.', ' ')
recently_added_episodes.append(filename)
return render_template('index.html',
recently_added_episodes = recently_added_episodes,
applications = APPLICATIONS,
server = SERVER
)
if __name__ == '__main__':
app.run(debug=True)
Revert "Revert "retrieve currently playing info (commented out)""
This reverts commit e14ee15116ee2137d528d298ca38e26e4f02f09f.
|
from flask import Flask, render_template
from settings import *
import jsonrpclib
app = Flask(__name__)
@app.route('/')
def index():
xbmc = jsonrpclib.Server('http://%s:%s@%s:%s/jsonrpc' % (SERVER['username'], SERVER['password'], SERVER['hostname'], SERVER['port']))
episodes = xbmc.VideoLibrary.GetRecentlyAddedEpisodes()
recently_added_episodes = []
# tidy up filenames of recently added episodes
for episode in episodes['episodes'][:NUM_RECENT_EPISODES]:
filename = episode['file'].split('/').pop().replace('.', ' ')
recently_added_episodes.append(filename)
# currently playing
#currently_playing = xbmc.VideoPlaylist.GetItems(id=1)
#time = xbmc.VideoPlayer.GetTime()
return render_template('index.html',
recently_added_episodes = recently_added_episodes,
applications = APPLICATIONS,
server = SERVER
)
if __name__ == '__main__':
app.run(debug=True)
|
<commit_before>from flask import Flask, render_template
from settings import *
import jsonrpclib
app = Flask(__name__)
@app.route('/')
def index():
xbmc = jsonrpclib.Server('http://%s:%s@%s:%s/jsonrpc' % (SERVER['username'], SERVER['password'], SERVER['hostname'], SERVER['port']))
episodes = xbmc.VideoLibrary.GetRecentlyAddedEpisodes()
recently_added_episodes = []
# tidy up filenames of recently added episodes
for episode in episodes['episodes'][:NUM_RECENT_EPISODES]:
filename = episode['file'].split('/').pop().replace('.', ' ')
recently_added_episodes.append(filename)
return render_template('index.html',
recently_added_episodes = recently_added_episodes,
applications = APPLICATIONS,
server = SERVER
)
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Revert "Revert "retrieve currently playing info (commented out)""
This reverts commit e14ee15116ee2137d528d298ca38e26e4f02f09f.<commit_after>
|
from flask import Flask, render_template
from settings import *
import jsonrpclib
app = Flask(__name__)
@app.route('/')
def index():
xbmc = jsonrpclib.Server('http://%s:%s@%s:%s/jsonrpc' % (SERVER['username'], SERVER['password'], SERVER['hostname'], SERVER['port']))
episodes = xbmc.VideoLibrary.GetRecentlyAddedEpisodes()
recently_added_episodes = []
# tidy up filenames of recently added episodes
for episode in episodes['episodes'][:NUM_RECENT_EPISODES]:
filename = episode['file'].split('/').pop().replace('.', ' ')
recently_added_episodes.append(filename)
# currently playing
#currently_playing = xbmc.VideoPlaylist.GetItems(id=1)
#time = xbmc.VideoPlayer.GetTime()
return render_template('index.html',
recently_added_episodes = recently_added_episodes,
applications = APPLICATIONS,
server = SERVER
)
if __name__ == '__main__':
app.run(debug=True)
|
from flask import Flask, render_template
from settings import *
import jsonrpclib
app = Flask(__name__)
@app.route('/')
def index():
xbmc = jsonrpclib.Server('http://%s:%s@%s:%s/jsonrpc' % (SERVER['username'], SERVER['password'], SERVER['hostname'], SERVER['port']))
episodes = xbmc.VideoLibrary.GetRecentlyAddedEpisodes()
recently_added_episodes = []
# tidy up filenames of recently added episodes
for episode in episodes['episodes'][:NUM_RECENT_EPISODES]:
filename = episode['file'].split('/').pop().replace('.', ' ')
recently_added_episodes.append(filename)
return render_template('index.html',
recently_added_episodes = recently_added_episodes,
applications = APPLICATIONS,
server = SERVER
)
if __name__ == '__main__':
app.run(debug=True)
Revert "Revert "retrieve currently playing info (commented out)""
This reverts commit e14ee15116ee2137d528d298ca38e26e4f02f09f.
from flask import Flask, render_template
from settings import *
import jsonrpclib
app = Flask(__name__)
@app.route('/')
def index():
xbmc = jsonrpclib.Server('http://%s:%s@%s:%s/jsonrpc' % (SERVER['username'], SERVER['password'], SERVER['hostname'], SERVER['port']))
episodes = xbmc.VideoLibrary.GetRecentlyAddedEpisodes()
recently_added_episodes = []
# tidy up filenames of recently added episodes
for episode in episodes['episodes'][:NUM_RECENT_EPISODES]:
filename = episode['file'].split('/').pop().replace('.', ' ')
recently_added_episodes.append(filename)
# currently playing
#currently_playing = xbmc.VideoPlaylist.GetItems(id=1)
#time = xbmc.VideoPlayer.GetTime()
return render_template('index.html',
recently_added_episodes = recently_added_episodes,
applications = APPLICATIONS,
server = SERVER
)
if __name__ == '__main__':
app.run(debug=True)
|
<commit_before>from flask import Flask, render_template
from settings import *
import jsonrpclib
app = Flask(__name__)
@app.route('/')
def index():
xbmc = jsonrpclib.Server('http://%s:%s@%s:%s/jsonrpc' % (SERVER['username'], SERVER['password'], SERVER['hostname'], SERVER['port']))
episodes = xbmc.VideoLibrary.GetRecentlyAddedEpisodes()
recently_added_episodes = []
# tidy up filenames of recently added episodes
for episode in episodes['episodes'][:NUM_RECENT_EPISODES]:
filename = episode['file'].split('/').pop().replace('.', ' ')
recently_added_episodes.append(filename)
return render_template('index.html',
recently_added_episodes = recently_added_episodes,
applications = APPLICATIONS,
server = SERVER
)
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Revert "Revert "retrieve currently playing info (commented out)""
This reverts commit e14ee15116ee2137d528d298ca38e26e4f02f09f.<commit_after>from flask import Flask, render_template
from settings import *
import jsonrpclib
app = Flask(__name__)
@app.route('/')
def index():
xbmc = jsonrpclib.Server('http://%s:%s@%s:%s/jsonrpc' % (SERVER['username'], SERVER['password'], SERVER['hostname'], SERVER['port']))
episodes = xbmc.VideoLibrary.GetRecentlyAddedEpisodes()
recently_added_episodes = []
# tidy up filenames of recently added episodes
for episode in episodes['episodes'][:NUM_RECENT_EPISODES]:
filename = episode['file'].split('/').pop().replace('.', ' ')
recently_added_episodes.append(filename)
# currently playing
#currently_playing = xbmc.VideoPlaylist.GetItems(id=1)
#time = xbmc.VideoPlayer.GetTime()
return render_template('index.html',
recently_added_episodes = recently_added_episodes,
applications = APPLICATIONS,
server = SERVER
)
if __name__ == '__main__':
app.run(debug=True)
|
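Aside (sketch only, reusing the method names from the commented-out lines in the record): if the "currently playing" calls were enabled, wrapping them in a guard would keep the page rendering when no video is active, since jsonrpclib surfaces server-side errors as exceptions:
def get_currently_playing(xbmc):
    """Return (playlist_items, player_time), or (None, None) when idle.

    `xbmc` is the jsonrpclib.Server instance built in the record above;
    the two method names are taken verbatim from its commented-out lines.
    """
    try:
        items = xbmc.VideoPlaylist.GetItems(id=1)
        time = xbmc.VideoPlayer.GetTime()
        return items, time
    except Exception:
        return None, None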
27423205b06b031572b675ee29a487f4b900fe56
|
cura_app.py
|
cura_app.py
|
#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
|
#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(hook_type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(hook_type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
|
Rename type into hook_type "type" itself if a built-in function. Using this name could be unsave.
|
Rename type into hook_type
"type" itself if a built-in function. Using this name could be unsave.
|
Python
|
agpl-3.0
|
hmflash/Cura,Curahelper/Cura,ynotstartups/Wanhao,senttech/Cura,totalretribution/Cura,fieldOfView/Cura,hmflash/Cura,Curahelper/Cura,ynotstartups/Wanhao,totalretribution/Cura,senttech/Cura,fieldOfView/Cura
|
#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
Rename type into hook_type
"type" itself if a built-in function. Using this name could be unsave.
|
#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(hook_type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(hook_type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
|
<commit_before>#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
<commit_msg>Rename type into hook_type
"type" itself if a built-in function. Using this name could be unsave.<commit_after>
|
#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(hook_type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(hook_type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
|
#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
Rename type into hook_type
"type" itself if a built-in function. Using this name could be unsave.#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(hook_type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(hook_type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
|
<commit_before>#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
<commit_msg>Rename type into hook_type
"type" itself if a built-in function. Using this name could be unsave.<commit_after>#!/usr/bin/env python3
# Copyright (c) 2015 Ultimaker B.V.
# Cura is released under the terms of the AGPLv3 or higher.
import sys
def exceptHook(hook_type, value, traceback):
import cura.CrashHandler
cura.CrashHandler.show(hook_type, value, traceback)
sys.excepthook = exceptHook
# Workaround for a race condition on certain systems where there
# is a race condition between Arcus and PyQt. Importing Arcus
# first seems to prevent Sip from going into a state where it
# tries to create PyQt objects on a non-main thread.
import Arcus
import cura.CuraApplication
if sys.platform == "win32" and hasattr(sys, "frozen"):
import os
dirpath = os.path.expanduser("~/AppData/Local/cura/")
os.makedirs(dirpath, exist_ok = True)
sys.stdout = open(os.path.join(dirpath, "stdout.log"), "w")
sys.stderr = open(os.path.join(dirpath, "stderr.log"), "w")
app = cura.CuraApplication.CuraApplication.getInstance()
app.run()
|
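Aside (sketch only, not Cura's actual code): a common hardening of the excepthook pattern above is to fall back to the interpreter's default hook when the crash dialog itself cannot be imported or shown, so the original traceback is never swallowed:
import sys


def except_hook(hook_type, value, traceback):
    try:
        import cura.CrashHandler  # assumed importable, as in the record
        cura.CrashHandler.show(hook_type, value, traceback)
    except Exception:
        # If the crash handler itself fails, keep the default behaviour.
        sys.__excepthook__(hook_type, value, traceback)


sys.excepthook = except_hook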
03cbb450fa54cf048bea5c4e3c9c0e44ea74131c
|
search/index_settings.py
|
search/index_settings.py
|
INDEX_SETTINGS = {
"settings": {
"analysis": {
"analyzer": {
"default": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"standard",
"lowercase",
"stop",
"kstem",
"ngram"
]
}
},
"filter": {
"ngram": {
"type": "ngram",
"min_gram": 3,
"max_gram": 15
}
}
}
}
}
|
INDEX_SETTINGS = {
"settings": {
"analysis": {
"analyzer": {
"default": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"standard",
"lowercase",
"stop",
"kstem",
"ngram"
]
}
},
"filter": {
"ngram": {
"type": "ngram",
"min_gram": 4,
"max_gram": 15
}
}
}
}
}
|
Increase ngram size to four
|
Increase ngram size to four
|
Python
|
mit
|
MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api,MasterFacilityList/mfl_api
|
INDEX_SETTINGS = {
"settings": {
"analysis": {
"analyzer": {
"default": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"standard",
"lowercase",
"stop",
"kstem",
"ngram"
]
}
},
"filter": {
"ngram": {
"type": "ngram",
"min_gram": 3,
"max_gram": 15
}
}
}
}
}
Increase ngram size to four
|
INDEX_SETTINGS = {
"settings": {
"analysis": {
"analyzer": {
"default": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"standard",
"lowercase",
"stop",
"kstem",
"ngram"
]
}
},
"filter": {
"ngram": {
"type": "ngram",
"min_gram": 4,
"max_gram": 15
}
}
}
}
}
|
<commit_before>INDEX_SETTINGS = {
"settings": {
"analysis": {
"analyzer": {
"default": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"standard",
"lowercase",
"stop",
"kstem",
"ngram"
]
}
},
"filter": {
"ngram": {
"type": "ngram",
"min_gram": 3,
"max_gram": 15
}
}
}
}
}
<commit_msg>Increase ngram size to four<commit_after>
|
INDEX_SETTINGS = {
"settings": {
"analysis": {
"analyzer": {
"default": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"standard",
"lowercase",
"stop",
"kstem",
"ngram"
]
}
},
"filter": {
"ngram": {
"type": "ngram",
"min_gram": 4,
"max_gram": 15
}
}
}
}
}
|
INDEX_SETTINGS = {
"settings": {
"analysis": {
"analyzer": {
"default": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"standard",
"lowercase",
"stop",
"kstem",
"ngram"
]
}
},
"filter": {
"ngram": {
"type": "ngram",
"min_gram": 3,
"max_gram": 15
}
}
}
}
}
Increase ngram size to fourINDEX_SETTINGS = {
"settings": {
"analysis": {
"analyzer": {
"default": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"standard",
"lowercase",
"stop",
"kstem",
"ngram"
]
}
},
"filter": {
"ngram": {
"type": "ngram",
"min_gram": 4,
"max_gram": 15
}
}
}
}
}
|
<commit_before>INDEX_SETTINGS = {
"settings": {
"analysis": {
"analyzer": {
"default": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"standard",
"lowercase",
"stop",
"kstem",
"ngram"
]
}
},
"filter": {
"ngram": {
"type": "ngram",
"min_gram": 3,
"max_gram": 15
}
}
}
}
}
<commit_msg>Increase ngram size to four<commit_after>INDEX_SETTINGS = {
"settings": {
"analysis": {
"analyzer": {
"default": {
"type": "custom",
"tokenizer": "standard",
"filter": [
"standard",
"lowercase",
"stop",
"kstem",
"ngram"
]
}
},
"filter": {
"ngram": {
"type": "ngram",
"min_gram": 4,
"max_gram": 15
}
}
}
}
}
|
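For context on the change: an ngram filter emits every substring between
min_gram and max_gram characters long, so raising min_gram from 3 to 4 drops
the shortest, noisiest fragments. A rough plain-Python illustration of the
token sets involved (this is not Elasticsearch itself, just the same idea):

def ngrams(word, min_gram, max_gram):
    return [word[i:i + n]
            for n in range(min_gram, max_gram + 1)
            for i in range(len(word) - n + 1)]

print(ngrams("nairobi", 3, 15))  # includes 3-letter grams such as 'nai', 'iro'
print(ngrams("nairobi", 4, 15))  # starts at 'nair'; fewer spurious matches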
4d1d2e12d8882084ce8deb80c3b3e162cc71b20b
|
osmaxx-py/osmaxx/excerptexport/forms/new_excerpt_form.py
|
osmaxx-py/osmaxx/excerptexport/forms/new_excerpt_form.py
|
from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'))
|
from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'), required=False)
|
Allow private excerpts (form validation)
|
Bugfix: Allow private excerpts (form validation)
|
Python
|
mit
|
geometalab/osmaxx,geometalab/drf-utm-zone-info,geometalab/osmaxx-frontend,geometalab/osmaxx,geometalab/osmaxx,geometalab/osmaxx,geometalab/osmaxx-frontend,geometalab/drf-utm-zone-info,geometalab/osmaxx-frontend,geometalab/osmaxx-frontend
|
from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'))
Bugfix: Allow private excerpts (form validation)
|
from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'), required=False)
|
<commit_before>from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'))
<commit_msg>Bugfix: Allow private excerpts (form validation)<commit_after>
|
from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'), required=False)
|
from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'))
Bugfix: Allow private excerpts (form validation)from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'), required=False)
|
<commit_before>from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'))
<commit_msg>Bugfix: Allow private excerpts (form validation)<commit_after>from django import forms
from django.utils.translation import gettext_lazy
class NewExcerptForm(forms.Form):
new_excerpt_name = forms.CharField(label=gettext_lazy('Excerpt name'))
new_excerpt_bounding_box_north = forms.CharField(label=gettext_lazy('North'))
new_excerpt_bounding_box_west = forms.CharField(label=gettext_lazy('West'))
new_excerpt_bounding_box_east = forms.CharField(label=gettext_lazy('East'))
new_excerpt_bounding_box_south = forms.CharField(label=gettext_lazy('South'))
new_excerpt_is_public = forms.BooleanField(label=gettext_lazy('Public'), required=False)
|
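The one-word fix above hinges on a Django quirk: a BooleanField defaults to
required=True, and an unchecked checkbox submits no value at all, so a
"private" (unchecked) excerpt could never pass validation. A standalone
sketch of the behaviour (the settings.configure() call is only there so the
snippet runs outside a project):

import django
from django.conf import settings

settings.configure()
django.setup()

from django import forms

class Demo(forms.Form):
    is_public = forms.BooleanField(required=False)

form = Demo(data={})     # checkbox left unchecked
print(form.is_valid())   # True; with required=True this would be False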
276c3904843417649fe71a81a30ce9b8f29d3d29
|
ipywidgets/_version.py
|
ipywidgets/_version.py
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
version_info = (6, 0, 0)
__version__ = '.'.join(map(str, version_info))
__frontend_version__ = '~2.1.4'
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
version_info = (6, 0, 0)
__version__ = '.'.join(map(str, version_info))
__frontend_version__ = '~2.2.0'
|
Update ipywidgets to expect jupyter-js-widgets 2.2.0
|
Update ipywidgets to expect jupyter-js-widgets 2.2.0
|
Python
|
bsd-3-clause
|
ipython/ipywidgets,jupyter-widgets/ipywidgets,ipython/ipywidgets,SylvainCorlay/ipywidgets,ipython/ipywidgets,cornhundred/ipywidgets,ipython/ipywidgets,cornhundred/ipywidgets,cornhundred/ipywidgets,ipython/ipywidgets,jupyter-widgets/ipywidgets,jupyter-widgets/ipywidgets,SylvainCorlay/ipywidgets,SylvainCorlay/ipywidgets,cornhundred/ipywidgets,jupyter-widgets/ipywidgets,cornhundred/ipywidgets,SylvainCorlay/ipywidgets
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
version_info = (6, 0, 0)
__version__ = '.'.join(map(str, version_info))
__frontend_version__ = '~2.1.4'
Update ipywidgets to expect jupyter-js-widgets 2.2.0
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
version_info = (6, 0, 0)
__version__ = '.'.join(map(str, version_info))
__frontend_version__ = '~2.2.0'
|
<commit_before># Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
version_info = (6, 0, 0)
__version__ = '.'.join(map(str, version_info))
__frontend_version__ = '~2.1.4'
<commit_msg>Update ipywidgets to expect jupyter-js-widgets 2.2.0<commit_after>
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
version_info = (6, 0, 0)
__version__ = '.'.join(map(str, version_info))
__frontend_version__ = '~2.2.0'
|
# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
version_info = (6, 0, 0)
__version__ = '.'.join(map(str, version_info))
__frontend_version__ = '~2.1.4'
Update ipywidgets to expect jupyter-js-widgets 2.2.0# Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
version_info = (6, 0, 0)
__version__ = '.'.join(map(str, version_info))
__frontend_version__ = '~2.2.0'
|
<commit_before># Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
version_info = (6, 0, 0)
__version__ = '.'.join(map(str, version_info))
__frontend_version__ = '~2.1.4'
<commit_msg>Update ipywidgets to expect jupyter-js-widgets 2.2.0<commit_after># Copyright (c) Jupyter Development Team.
# Distributed under the terms of the Modified BSD License.
version_info = (6, 0, 0)
__version__ = '.'.join(map(str, version_info))
__frontend_version__ = '~2.2.0'
|
b4ef31e6fa195480f8de1e516606aa32fecfdd15
|
future/builtins/backports/newround.py
|
future/builtins/backports/newround.py
|
"""
``python-future``: pure Python implementation of Python 3 round().
"""
def newround(number, ndigits=None):
"""
See Python 3 documentation: uses Banker's Rounding.
Delegates to the __round__ method if for some reason this exists.
If not, rounds a number to a given precision in decimal digits (default
0 digits). This returns an int when called with one argument,
otherwise the same type as the number. ndigits may be negative.
See the test_round method in future/tests/test_builtins.py for
examples.
"""
return_int = False
if ndigits is None:
return_int = True
ndigits = 0
if hasattr(number, '__round__'):
return number.__round__(ndigits)
# Use the decimal module for simplicity of implementation (and
# hopefully correctness).
from decimal import Decimal, ROUND_HALF_EVEN
if ndigits < 0:
raise NotImplementedError('negative ndigits not supported yet')
exponent = Decimal('10') ** (-ndigits)
d = Decimal.from_float(number).quantize(exponent,
rounding=ROUND_HALF_EVEN)
if return_int:
return int(d)
else:
return float(d)
__all__ = ['newround']
|
"""
``python-future``: pure Python implementation of Python 3 round().
"""
from future.utils import PYPY
def newround(number, ndigits=None):
"""
See Python 3 documentation: uses Banker's Rounding.
Delegates to the __round__ method if for some reason this exists.
If not, rounds a number to a given precision in decimal digits (default
0 digits). This returns an int when called with one argument,
otherwise the same type as the number. ndigits may be negative.
See the test_round method in future/tests/test_builtins.py for
examples.
"""
return_int = False
if ndigits is None:
return_int = True
ndigits = 0
if hasattr(number, '__round__'):
return number.__round__(ndigits)
# Use the decimal module for simplicity of implementation (and
# hopefully correctness).
from decimal import Decimal, ROUND_HALF_EVEN
if ndigits < 0:
raise NotImplementedError('negative ndigits not supported yet')
exponent = Decimal('10') ** (-ndigits)
if PYPY:
# Work around issue #24: round() breaks on PyPy with NumPy's types
if 'numpy' in repr(type(number)):
number = float(number)
d = Decimal.from_float(number).quantize(exponent,
rounding=ROUND_HALF_EVEN)
if return_int:
return int(d)
else:
return float(d)
__all__ = ['newround']
|
Add workaround for PyPy round() bug with NumPy data types
|
Add workaround for PyPy round() bug with NumPy data types
|
Python
|
mit
|
krischer/python-future,QuLogic/python-future,PythonCharmers/python-future,michaelpacer/python-future,QuLogic/python-future,krischer/python-future,michaelpacer/python-future,PythonCharmers/python-future
|
"""
``python-future``: pure Python implementation of Python 3 round().
"""
def newround(number, ndigits=None):
"""
See Python 3 documentation: uses Banker's Rounding.
Delegates to the __round__ method if for some reason this exists.
If not, rounds a number to a given precision in decimal digits (default
0 digits). This returns an int when called with one argument,
otherwise the same type as the number. ndigits may be negative.
See the test_round method in future/tests/test_builtins.py for
examples.
"""
return_int = False
if ndigits is None:
return_int = True
ndigits = 0
if hasattr(number, '__round__'):
return number.__round__(ndigits)
# Use the decimal module for simplicity of implementation (and
# hopefully correctness).
from decimal import Decimal, ROUND_HALF_EVEN
if ndigits < 0:
raise NotImplementedError('negative ndigits not supported yet')
exponent = Decimal('10') ** (-ndigits)
d = Decimal.from_float(number).quantize(exponent,
rounding=ROUND_HALF_EVEN)
if return_int:
return int(d)
else:
return float(d)
__all__ = ['newround']
Add workaround for PyPy round() bug with NumPy data types
|
"""
``python-future``: pure Python implementation of Python 3 round().
"""
from future.utils import PYPY
def newround(number, ndigits=None):
"""
See Python 3 documentation: uses Banker's Rounding.
Delegates to the __round__ method if for some reason this exists.
If not, rounds a number to a given precision in decimal digits (default
0 digits). This returns an int when called with one argument,
otherwise the same type as the number. ndigits may be negative.
See the test_round method in future/tests/test_builtins.py for
examples.
"""
return_int = False
if ndigits is None:
return_int = True
ndigits = 0
if hasattr(number, '__round__'):
return number.__round__(ndigits)
# Use the decimal module for simplicity of implementation (and
# hopefully correctness).
from decimal import Decimal, ROUND_HALF_EVEN
if ndigits < 0:
raise NotImplementedError('negative ndigits not supported yet')
exponent = Decimal('10') ** (-ndigits)
if PYPY:
# Work around issue #24: round() breaks on PyPy with NumPy's types
if 'numpy' in repr(type(number)):
number = float(number)
d = Decimal.from_float(number).quantize(exponent,
rounding=ROUND_HALF_EVEN)
if return_int:
return int(d)
else:
return float(d)
__all__ = ['newround']
|
<commit_before>"""
``python-future``: pure Python implementation of Python 3 round().
"""
def newround(number, ndigits=None):
"""
See Python 3 documentation: uses Banker's Rounding.
Delegates to the __round__ method if for some reason this exists.
If not, rounds a number to a given precision in decimal digits (default
0 digits). This returns an int when called with one argument,
otherwise the same type as the number. ndigits may be negative.
See the test_round method in future/tests/test_builtins.py for
examples.
"""
return_int = False
if ndigits is None:
return_int = True
ndigits = 0
if hasattr(number, '__round__'):
return number.__round__(ndigits)
# Use the decimal module for simplicity of implementation (and
# hopefully correctness).
from decimal import Decimal, ROUND_HALF_EVEN
if ndigits < 0:
raise NotImplementedError('negative ndigits not supported yet')
exponent = Decimal('10') ** (-ndigits)
d = Decimal.from_float(number).quantize(exponent,
rounding=ROUND_HALF_EVEN)
if return_int:
return int(d)
else:
return float(d)
__all__ = ['newround']
<commit_msg>Add workaround for PyPy round() bug with NumPy data types<commit_after>
|
"""
``python-future``: pure Python implementation of Python 3 round().
"""
from future.utils import PYPY
def newround(number, ndigits=None):
"""
See Python 3 documentation: uses Banker's Rounding.
Delegates to the __round__ method if for some reason this exists.
If not, rounds a number to a given precision in decimal digits (default
0 digits). This returns an int when called with one argument,
otherwise the same type as the number. ndigits may be negative.
See the test_round method in future/tests/test_builtins.py for
examples.
"""
return_int = False
if ndigits is None:
return_int = True
ndigits = 0
if hasattr(number, '__round__'):
return number.__round__(ndigits)
# Use the decimal module for simplicity of implementation (and
# hopefully correctness).
from decimal import Decimal, ROUND_HALF_EVEN
if ndigits < 0:
raise NotImplementedError('negative ndigits not supported yet')
exponent = Decimal('10') ** (-ndigits)
if PYPY:
# Work around issue #24: round() breaks on PyPy with NumPy's types
if 'numpy' in repr(type(number)):
number = float(number)
d = Decimal.from_float(number).quantize(exponent,
rounding=ROUND_HALF_EVEN)
if return_int:
return int(d)
else:
return float(d)
__all__ = ['newround']
|
"""
``python-future``: pure Python implementation of Python 3 round().
"""
def newround(number, ndigits=None):
"""
See Python 3 documentation: uses Banker's Rounding.
Delegates to the __round__ method if for some reason this exists.
If not, rounds a number to a given precision in decimal digits (default
0 digits). This returns an int when called with one argument,
otherwise the same type as the number. ndigits may be negative.
See the test_round method in future/tests/test_builtins.py for
examples.
"""
return_int = False
if ndigits is None:
return_int = True
ndigits = 0
if hasattr(number, '__round__'):
return number.__round__(ndigits)
# Use the decimal module for simplicity of implementation (and
# hopefully correctness).
from decimal import Decimal, ROUND_HALF_EVEN
if ndigits < 0:
raise NotImplementedError('negative ndigits not supported yet')
exponent = Decimal('10') ** (-ndigits)
d = Decimal.from_float(number).quantize(exponent,
rounding=ROUND_HALF_EVEN)
if return_int:
return int(d)
else:
return float(d)
__all__ = ['newround']
Add workaround for PyPy round() bug with NumPy data types"""
``python-future``: pure Python implementation of Python 3 round().
"""
from future.utils import PYPY
def newround(number, ndigits=None):
"""
See Python 3 documentation: uses Banker's Rounding.
Delegates to the __round__ method if for some reason this exists.
If not, rounds a number to a given precision in decimal digits (default
0 digits). This returns an int when called with one argument,
otherwise the same type as the number. ndigits may be negative.
See the test_round method in future/tests/test_builtins.py for
examples.
"""
return_int = False
if ndigits is None:
return_int = True
ndigits = 0
if hasattr(number, '__round__'):
return number.__round__(ndigits)
# Use the decimal module for simplicity of implementation (and
# hopefully correctness).
from decimal import Decimal, ROUND_HALF_EVEN
if ndigits < 0:
raise NotImplementedError('negative ndigits not supported yet')
exponent = Decimal('10') ** (-ndigits)
if PYPY:
# Work around issue #24: round() breaks on PyPy with NumPy's types
if 'numpy' in repr(type(number)):
number = float(number)
d = Decimal.from_float(number).quantize(exponent,
rounding=ROUND_HALF_EVEN)
if return_int:
return int(d)
else:
return float(d)
__all__ = ['newround']
|
<commit_before>"""
``python-future``: pure Python implementation of Python 3 round().
"""
def newround(number, ndigits=None):
"""
See Python 3 documentation: uses Banker's Rounding.
Delegates to the __round__ method if for some reason this exists.
If not, rounds a number to a given precision in decimal digits (default
0 digits). This returns an int when called with one argument,
otherwise the same type as the number. ndigits may be negative.
See the test_round method in future/tests/test_builtins.py for
examples.
"""
return_int = False
if ndigits is None:
return_int = True
ndigits = 0
if hasattr(number, '__round__'):
return number.__round__(ndigits)
# Use the decimal module for simplicity of implementation (and
# hopefully correctness).
from decimal import Decimal, ROUND_HALF_EVEN
if ndigits < 0:
raise NotImplementedError('negative ndigits not supported yet')
exponent = Decimal('10') ** (-ndigits)
d = Decimal.from_float(number).quantize(exponent,
rounding=ROUND_HALF_EVEN)
if return_int:
return int(d)
else:
return float(d)
__all__ = ['newround']
<commit_msg>Add workaround for PyPy round() bug with NumPy data types<commit_after>"""
``python-future``: pure Python implementation of Python 3 round().
"""
from future.utils import PYPY
def newround(number, ndigits=None):
"""
See Python 3 documentation: uses Banker's Rounding.
Delegates to the __round__ method if for some reason this exists.
If not, rounds a number to a given precision in decimal digits (default
0 digits). This returns an int when called with one argument,
otherwise the same type as the number. ndigits may be negative.
See the test_round method in future/tests/test_builtins.py for
examples.
"""
return_int = False
if ndigits is None:
return_int = True
ndigits = 0
if hasattr(number, '__round__'):
return number.__round__(ndigits)
# Use the decimal module for simplicity of implementation (and
# hopefully correctness).
from decimal import Decimal, ROUND_HALF_EVEN
if ndigits < 0:
raise NotImplementedError('negative ndigits not supported yet')
exponent = Decimal('10') ** (-ndigits)
if PYPY:
# Work around issue #24: round() breaks on PyPy with NumPy's types
if 'numpy' in repr(type(number)):
number = float(number)
d = Decimal.from_float(number).quantize(exponent,
rounding=ROUND_HALF_EVEN)
if return_int:
return int(d)
else:
return float(d)
__all__ = ['newround']
|
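A quick standalone check of the Decimal machinery this backport relies on:
ROUND_HALF_EVEN sends ties to the even neighbour, matching Python 3's round().

from decimal import Decimal, ROUND_HALF_EVEN

for x in (0.5, 1.5, 2.5, 3.5):
    d = Decimal.from_float(x).quantize(Decimal('1'), rounding=ROUND_HALF_EVEN)
    print(x, int(d))   # 0.5 -> 0, 1.5 -> 2, 2.5 -> 2, 3.5 -> 4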
82e0987375ff99e0d94068c1ec6078d3920249f2
|
nc/data/__init__.py
|
nc/data/__init__.py
|
DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_04_13T13.38.34.887.zip" # noqa
|
DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_06_22T09.52.20.780.zip" # noqa
|
Add this fix from the master branch
|
Add this fix from the master branch
(It was in the import_nc management command file, but newer
code places it here.)
|
Python
|
mit
|
OpenDataPolicingNC/Traffic-Stops,OpenDataPolicingNC/Traffic-Stops,OpenDataPolicingNC/Traffic-Stops,OpenDataPolicingNC/Traffic-Stops
|
DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_04_13T13.38.34.887.zip" # noqa
Add this fix from the master branch
(It was in the import_nc management command file, but newer
code places it here.)
|
DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_06_22T09.52.20.780.zip" # noqa
|
<commit_before>DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_04_13T13.38.34.887.zip" # noqa
<commit_msg>Add this fix from the master branch
(It was in the import_nc management command file, but newer
code places it here.)<commit_after>
|
DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_06_22T09.52.20.780.zip" # noqa
|
DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_04_13T13.38.34.887.zip" # noqa
Add this fix from the master branch
(It was in the import_nc management command file, but newer
code places it here.)DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_06_22T09.52.20.780.zip" # noqa
|
<commit_before>DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_04_13T13.38.34.887.zip" # noqa
<commit_msg>Add this fix from the master branch
(It was in the import_nc management command file, but newer
code places it here.)<commit_after>DEFAULT_URL = "https://s3-us-west-2.amazonaws.com/openpolicingdata/TS_2016_06_22T09.52.20.780.zip" # noqa
|
7b90d75f260e76baf8b57840d96bb36b62e2c56c
|
__init__.py
|
__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
#bikeshed.update.update(path=dataPath)
#bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
print subprocess.check_output("git add .", shell=True)
print subprocess.check_output("git push", shell=True)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
bikeshed.update.update(path=dataPath)
bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
subprocess.check_call("git add data", shell=True)
subprocess.check_call("git commit -m 'update data'", shell=True)
subprocess.check_call("git push", shell=True)
if __name__ == "__main__":
main()
|
Update script with proper git-ing.
|
Update script with proper git-ing.
|
Python
|
mit
|
tabatkins/bikeshed-data
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
#bikeshed.update.update(path=dataPath)
#bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
print subprocess.check_output("git add .", shell=True)
print subprocess.check_output("git push", shell=True)
if __name__ == "__main__":
main()Update script with proper git-ing.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
bikeshed.update.update(path=dataPath)
bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
subprocess.check_call("git add data", shell=True)
subprocess.check_call("git commit -m 'update data'", shell=True)
subprocess.check_call("git push", shell=True)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
#bikeshed.update.update(path=dataPath)
#bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
print subprocess.check_output("git add .", shell=True)
print subprocess.check_output("git push", shell=True)
if __name__ == "__main__":
main()<commit_msg>Update script with proper git-ing.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
bikeshed.update.update(path=dataPath)
bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
subprocess.check_call("git add data", shell=True)
subprocess.check_call("git commit -m 'update data'", shell=True)
subprocess.check_call("git push", shell=True)
if __name__ == "__main__":
main()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
#bikeshed.update.update(path=dataPath)
#bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
print subprocess.check_output("git add .", shell=True)
print subprocess.check_output("git push", shell=True)
if __name__ == "__main__":
main()Update script with proper git-ing.#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
bikeshed.update.update(path=dataPath)
bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
subprocess.check_call("git add data", shell=True)
subprocess.check_call("git commit -m 'update data'", shell=True)
subprocess.check_call("git push", shell=True)
if __name__ == "__main__":
main()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
#bikeshed.update.update(path=dataPath)
#bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
print subprocess.check_output("git add .", shell=True)
print subprocess.check_output("git push", shell=True)
if __name__ == "__main__":
main()<commit_msg>Update script with proper git-ing.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import division, unicode_literals
import bikeshed
import os
import subprocess
def main():
scriptPath = os.path.dirname(os.path.realpath(__file__))
dataPath = os.path.join(scriptPath, "data")
bikeshed.config.quiet = False
bikeshed.update.update(path=dataPath)
bikeshed.update.createManifest(path=dataPath)
os.chdir(scriptPath)
subprocess.check_call("git add data", shell=True)
subprocess.check_call("git commit -m 'update data'", shell=True)
subprocess.check_call("git push", shell=True)
if __name__ == "__main__":
main()
|
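One detail of that script change is easy to miss: both helpers raise
CalledProcessError on a non-zero exit status, but check_call streams the
command's output straight to the console instead of capturing it, which suits
a commit-and-push script. A small sketch of the failure behaviour (the
command is deliberately one that fails):

import subprocess

try:
    subprocess.check_call("exit 1", shell=True)
except subprocess.CalledProcessError as exc:
    print("command failed with exit code", exc.returncode)   # exit code 1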
6d2f6df3543bc287e59151e823b7a62c245c27b0
|
.gitlab/linters/check-cpp.py
|
.gitlab/linters/check-cpp.py
|
#!/usr/bin/env python3
# A linter to warn for ASSERT macros which are separated from their argument
# list by a space, which Clang's CPP barfs on
from linter import run_linters, RegexpLinter
linters = [
RegexpLinter(r'ASSERT\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT2\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'#ifdef\s+',
message='`#if defined(x)` is preferred to `#ifdef x`'),
RegexpLinter(r'#if\s+defined\s+',
message='`#if defined(x)` is preferred to `#if defined x`'),
RegexpLinter(r'#ifndef\s+',
message='`#if !defined(x)` is preferred to `#ifndef x`'),
]
if __name__ == '__main__':
run_linters(linters)
|
#!/usr/bin/env python3
# A linter to warn for ASSERT macros which are separated from their argument
# list by a space, which Clang's CPP barfs on
from linter import run_linters, RegexpLinter
linters = [
RegexpLinter(r'WARN\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT2\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'#ifdef\s+',
message='`#if defined(x)` is preferred to `#ifdef x`'),
RegexpLinter(r'#if\s+defined\s+',
message='`#if defined(x)` is preferred to `#if defined x`'),
RegexpLinter(r'#ifndef\s+',
message='`#if !defined(x)` is preferred to `#ifndef x`'),
]
if __name__ == '__main__':
run_linters(linters)
|
Check for WARN macro with space separating it from its paren
|
linters: Check for WARN macro with space separating it from its paren
|
Python
|
bsd-3-clause
|
sdiehl/ghc,sdiehl/ghc,sdiehl/ghc,sdiehl/ghc,sdiehl/ghc,sdiehl/ghc,sdiehl/ghc
|
#!/usr/bin/env python3
# A linter to warn for ASSERT macros which are separated from their argument
# list by a space, which Clang's CPP barfs on
from linter import run_linters, RegexpLinter
linters = [
RegexpLinter(r'ASSERT\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT2\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'#ifdef\s+',
message='`#if defined(x)` is preferred to `#ifdef x`'),
RegexpLinter(r'#if\s+defined\s+',
message='`#if defined(x)` is preferred to `#if defined x`'),
RegexpLinter(r'#ifndef\s+',
message='`#if !defined(x)` is preferred to `#ifndef x`'),
]
if __name__ == '__main__':
run_linters(linters)
linters: Check for WARN macro with space separating it from its paren
|
#!/usr/bin/env python3
# A linter to warn for ASSERT macros which are separated from their argument
# list by a space, which Clang's CPP barfs on
from linter import run_linters, RegexpLinter
linters = [
RegexpLinter(r'WARN\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT2\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'#ifdef\s+',
message='`#if defined(x)` is preferred to `#ifdef x`'),
RegexpLinter(r'#if\s+defined\s+',
message='`#if defined(x)` is preferred to `#if defined x`'),
RegexpLinter(r'#ifndef\s+',
message='`#if !defined(x)` is preferred to `#ifndef x`'),
]
if __name__ == '__main__':
run_linters(linters)
|
<commit_before>#!/usr/bin/env python3
# A linter to warn for ASSERT macros which are separated from their argument
# list by a space, which Clang's CPP barfs on
from linter import run_linters, RegexpLinter
linters = [
RegexpLinter(r'ASSERT\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT2\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'#ifdef\s+',
message='`#if defined(x)` is preferred to `#ifdef x`'),
RegexpLinter(r'#if\s+defined\s+',
message='`#if defined(x)` is preferred to `#if defined x`'),
RegexpLinter(r'#ifndef\s+',
message='`#if !defined(x)` is preferred to `#ifndef x`'),
]
if __name__ == '__main__':
run_linters(linters)
<commit_msg>linters: Check for WARN macro with space separating it from its paren<commit_after>
|
#!/usr/bin/env python3
# A linter to warn for ASSERT macros which are separated from their argument
# list by a space, which Clang's CPP barfs on
from linter import run_linters, RegexpLinter
linters = [
RegexpLinter(r'WARN\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT2\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'#ifdef\s+',
message='`#if defined(x)` is preferred to `#ifdef x`'),
RegexpLinter(r'#if\s+defined\s+',
message='`#if defined(x)` is preferred to `#if defined x`'),
RegexpLinter(r'#ifndef\s+',
message='`#if !defined(x)` is preferred to `#ifndef x`'),
]
if __name__ == '__main__':
run_linters(linters)
|
#!/usr/bin/env python3
# A linter to warn for ASSERT macros which are separated from their argument
# list by a space, which Clang's CPP barfs on
from linter import run_linters, RegexpLinter
linters = [
RegexpLinter(r'ASSERT\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT2\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'#ifdef\s+',
message='`#if defined(x)` is preferred to `#ifdef x`'),
RegexpLinter(r'#if\s+defined\s+',
message='`#if defined(x)` is preferred to `#if defined x`'),
RegexpLinter(r'#ifndef\s+',
message='`#if !defined(x)` is preferred to `#ifndef x`'),
]
if __name__ == '__main__':
run_linters(linters)
linters: Check for WARN macro with space separating it from its paren#!/usr/bin/env python3
# A linter to warn for ASSERT macros which are separated from their argument
# list by a space, which Clang's CPP barfs on
from linter import run_linters, RegexpLinter
linters = [
RegexpLinter(r'WARN\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT2\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'#ifdef\s+',
message='`#if defined(x)` is preferred to `#ifdef x`'),
RegexpLinter(r'#if\s+defined\s+',
message='`#if defined(x)` is preferred to `#if defined x`'),
RegexpLinter(r'#ifndef\s+',
message='`#if !defined(x)` is preferred to `#ifndef x`'),
]
if __name__ == '__main__':
run_linters(linters)
|
<commit_before>#!/usr/bin/env python3
# A linter to warn for ASSERT macros which are separated from their argument
# list by a space, which Clang's CPP barfs on
from linter import run_linters, RegexpLinter
linters = [
RegexpLinter(r'ASSERT\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT2\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'#ifdef\s+',
message='`#if defined(x)` is preferred to `#ifdef x`'),
RegexpLinter(r'#if\s+defined\s+',
message='`#if defined(x)` is preferred to `#if defined x`'),
RegexpLinter(r'#ifndef\s+',
message='`#if !defined(x)` is preferred to `#ifndef x`'),
]
if __name__ == '__main__':
run_linters(linters)
<commit_msg>linters: Check for WARN macro with space separating it from its paren<commit_after>#!/usr/bin/env python3
# A linter to warn for ASSERT macros which are separated from their argument
# list by a space, which Clang's CPP barfs on
from linter import run_linters, RegexpLinter
linters = [
RegexpLinter(r'WARN\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'ASSERT2\s+\(',
message='CPP macros should not have a space between the macro name and their argument list'),
RegexpLinter(r'#ifdef\s+',
message='`#if defined(x)` is preferred to `#ifdef x`'),
RegexpLinter(r'#if\s+defined\s+',
message='`#if defined(x)` is preferred to `#if defined x`'),
RegexpLinter(r'#ifndef\s+',
message='`#if !defined(x)` is preferred to `#ifndef x`'),
]
if __name__ == '__main__':
run_linters(linters)
|
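The new rule mirrors the existing ASSERT ones: it flags a gap between the
macro name and its opening parenthesis, which Clang's preprocessor rejects.
A two-line check of the pattern itself:

import re

pattern = re.compile(r'WARN\s+\(')
print(bool(pattern.search('WARN (x > 0, "bad");')))   # True, lint fires
print(bool(pattern.search('WARN(x > 0, "bad");')))    # False, accepted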
ed09a3ded286cc4d5623c17e65b2d40ef55ccee7
|
valohai_yaml/parsing.py
|
valohai_yaml/parsing.py
|
from typing import IO, Union
from valohai_yaml.objs import Config
from .utils import read_yaml
def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config:
"""
Parse the given YAML data into a `Config` object, optionally validating it first.
:param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list)
:param validate: Whether to validate the data before attempting to parse it.
:return: Config object
"""
data = read_yaml(yaml)
if validate: # pragma: no branch
from .validation import validate as do_validate
do_validate(data, raise_exc=True)
return Config.parse(data)
|
from typing import IO, Union
from valohai_yaml.objs import Config
from .utils import read_yaml
def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config:
"""
Parse the given YAML data into a `Config` object, optionally validating it first.
:param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list)
:param validate: Whether to validate the data before attempting to parse it.
:return: Config object
"""
data = read_yaml(yaml)
if data is None: # empty file
return Config()
if validate: # pragma: no branch
from .validation import validate as do_validate
do_validate(data, raise_exc=True)
return Config.parse(data)
|
Handle empty YAML files in parse()
|
Handle empty YAML files in parse()
Refs valohai/valohai-cli#170
|
Python
|
mit
|
valohai/valohai-yaml
|
from typing import IO, Union
from valohai_yaml.objs import Config
from .utils import read_yaml
def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config:
"""
Parse the given YAML data into a `Config` object, optionally validating it first.
:param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list)
:param validate: Whether to validate the data before attempting to parse it.
:return: Config object
"""
data = read_yaml(yaml)
if validate: # pragma: no branch
from .validation import validate as do_validate
do_validate(data, raise_exc=True)
return Config.parse(data)
Handle empty YAML files in parse()
Refs valohai/valohai-cli#170
|
from typing import IO, Union
from valohai_yaml.objs import Config
from .utils import read_yaml
def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config:
"""
Parse the given YAML data into a `Config` object, optionally validating it first.
:param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list)
:param validate: Whether to validate the data before attempting to parse it.
:return: Config object
"""
data = read_yaml(yaml)
if data is None: # empty file
return Config()
if validate: # pragma: no branch
from .validation import validate as do_validate
do_validate(data, raise_exc=True)
return Config.parse(data)
|
<commit_before>from typing import IO, Union
from valohai_yaml.objs import Config
from .utils import read_yaml
def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config:
"""
Parse the given YAML data into a `Config` object, optionally validating it first.
:param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list)
:param validate: Whether to validate the data before attempting to parse it.
:return: Config object
"""
data = read_yaml(yaml)
if validate: # pragma: no branch
from .validation import validate as do_validate
do_validate(data, raise_exc=True)
return Config.parse(data)
<commit_msg>Handle empty YAML files in parse()
Refs valohai/valohai-cli#170<commit_after>
|
from typing import IO, Union
from valohai_yaml.objs import Config
from .utils import read_yaml
def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config:
"""
Parse the given YAML data into a `Config` object, optionally validating it first.
:param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list)
:param validate: Whether to validate the data before attempting to parse it.
:return: Config object
"""
data = read_yaml(yaml)
if data is None: # empty file
return Config()
if validate: # pragma: no branch
from .validation import validate as do_validate
do_validate(data, raise_exc=True)
return Config.parse(data)
|
from typing import IO, Union
from valohai_yaml.objs import Config
from .utils import read_yaml
def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config:
"""
Parse the given YAML data into a `Config` object, optionally validating it first.
:param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list)
:param validate: Whether to validate the data before attempting to parse it.
:return: Config object
"""
data = read_yaml(yaml)
if validate: # pragma: no branch
from .validation import validate as do_validate
do_validate(data, raise_exc=True)
return Config.parse(data)
Handle empty YAML files in parse()
Refs valohai/valohai-cli#170from typing import IO, Union
from valohai_yaml.objs import Config
from .utils import read_yaml
def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config:
"""
Parse the given YAML data into a `Config` object, optionally validating it first.
:param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list)
:param validate: Whether to validate the data before attempting to parse it.
:return: Config object
"""
data = read_yaml(yaml)
if data is None: # empty file
return Config()
if validate: # pragma: no branch
from .validation import validate as do_validate
do_validate(data, raise_exc=True)
return Config.parse(data)
|
<commit_before>from typing import IO, Union
from valohai_yaml.objs import Config
from .utils import read_yaml
def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config:
"""
Parse the given YAML data into a `Config` object, optionally validating it first.
:param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list)
:param validate: Whether to validate the data before attempting to parse it.
:return: Config object
"""
data = read_yaml(yaml)
if validate: # pragma: no branch
from .validation import validate as do_validate
do_validate(data, raise_exc=True)
return Config.parse(data)
<commit_msg>Handle empty YAML files in parse()
Refs valohai/valohai-cli#170<commit_after>from typing import IO, Union
from valohai_yaml.objs import Config
from .utils import read_yaml
def parse(yaml: Union[dict, list, bytes, str, IO], validate: bool = True) -> Config:
"""
Parse the given YAML data into a `Config` object, optionally validating it first.
:param yaml: YAML data (either a string, a stream, or pre-parsed Python dict/list)
:param validate: Whether to validate the data before attempting to parse it.
:return: Config object
"""
data = read_yaml(yaml)
if data is None: # empty file
return Config()
if validate: # pragma: no branch
from .validation import validate as do_validate
do_validate(data, raise_exc=True)
return Config.parse(data)
|
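The guard works because an empty document parses to None in YAML, so the old
code handed None to the validator, which could then fail. Assuming read_yaml
wraps a standard loader such as PyYAML, the behaviour it defends against
looks like this:

import yaml

print(yaml.safe_load(""))        # None: an empty file has no document
print(yaml.safe_load("a: 1"))    # {'a': 1}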
dc6f82bce52419c7c2153a33be15f3d811161d1d
|
flask_app.py
|
flask_app.py
|
from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def nbis_list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def nbis_api_list_restaurants():
return jsonify({'restaurants': main.list_restaurants()})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def nbis_api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
|
from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def list_restaurants():
return jsonify({'restaurants': [entry['identifier'] for entry in main.list_restaurants()]})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
|
Return a list of identifiers instead of almost all info
|
Return a list of identifiers instead of almost all info
|
Python
|
bsd-3-clause
|
talavis/kimenu
|
from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def nbis_list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def nbis_api_list_restaurants():
return jsonify({'restaurants': main.list_restaurants()})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def nbis_api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
Return a list of identifiers instead of almost all info
|
from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def list_restaurants():
return jsonify({'restaurants': [entry['identifier'] for entry in main.list_restaurants()]})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
|
<commit_before>from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def nbis_list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def nbis_api_list_restaurants():
return jsonify({'restaurants': main.list_restaurants()})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def nbis_api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
<commit_msg>Return a list of identifiers instead of almost all info<commit_after>
|
from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def list_restaurants():
return jsonify({'restaurants': [entry['identifier'] for entry in main.list_restaurants()]})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
|
from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def nbis_list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def nbis_api_list_restaurants():
return jsonify({'restaurants': main.list_restaurants()})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def nbis_api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
Return a list of identifiers instead of almost all infofrom flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def list_restaurants():
return jsonify({'restaurants': [entry['identifier'] for entry in main.list_restaurants()]})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
|
<commit_before>from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def nbis_list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def nbis_api_list_restaurants():
return jsonify({'restaurants': main.list_restaurants()})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def nbis_api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
<commit_msg>Return a list of identifiers instead of almost all info<commit_after>from flask import Flask, abort, jsonify
from flask_caching import Cache
from flask_cors import CORS
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
cors = CORS(app, resources={r"/*": {"origins": "*"}})
@app.route('/api/')
@cache.cached(timeout=3600)
def list_entities():
return jsonify({'entities': ['restaurant']})
@app.route('/api/restaurant/')
@cache.cached(timeout=3600)
def list_restaurants():
return jsonify({'restaurants': [entry['identifier'] for entry in main.list_restaurants()]})
@app.route('/api/restaurant/<name>/')
@cache.cached(timeout=3600)
def get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(status=404)
data['menu'] = [{'dish': entry} for entry in data['menu']]
return jsonify({'restaurant': data})
|
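The listing change trims the payload: main.list_restaurants() evidently
returns full records, and the endpoint now keeps only the identifier field.
The comprehension in isolation, with hypothetical rows:

rows = [{'identifier': 'jons_grill', 'menu': ['stew']},
        {'identifier': 'kims_kitchen', 'menu': ['soup']}]
print([entry['identifier'] for entry in rows])   # ['jons_grill', 'kims_kitchen']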
fef17579a8a084987ea5e413ad512662ab24aa56
|
ntm/similarities.py
|
ntm/similarities.py
|
import theano
import theano.tensor as T
import numpy as np
def cosine_similarity(x, y, eps=1e-9):
y = y.dimshuffle(1, 0)
z = T.dot(x, y)
z /= x.norm(2) * y.norm(2, axis=0).dimshuffle('x', 0) + eps
return z
|
import theano
import theano.tensor as T
import numpy as np
def cosine_similarity(x, y, eps=1e-9):
y = y.dimshuffle(1, 0)
z = T.dot(x, y)
z /= T.sqrt(T.sum(x * x) * T.sum(y * y, axis=0).dimshuffle('x', 0) + 1e-6)
return z
|
Replace T.norm in the cosine similarity
|
Replace T.norm in the cosine similarity
|
Python
|
mit
|
snipsco/ntm-lasagne
|
import theano
import theano.tensor as T
import numpy as np
def cosine_similarity(x, y, eps=1e-9):
y = y.dimshuffle(1, 0)
z = T.dot(x, y)
z /= x.norm(2) * y.norm(2, axis=0).dimshuffle('x', 0) + eps
return zReplace T.norm in the cosine similarity
|
import theano
import theano.tensor as T
import numpy as np
def cosine_similarity(x, y, eps=1e-9):
y = y.dimshuffle(1, 0)
z = T.dot(x, y)
z /= T.sqrt(T.sum(x * x) * T.sum(y * y, axis=0).dimshuffle('x', 0) + 1e-6)
return z
|
<commit_before>import theano
import theano.tensor as T
import numpy as np
def cosine_similarity(x, y, eps=1e-9):
y = y.dimshuffle(1, 0)
z = T.dot(x, y)
z /= x.norm(2) * y.norm(2, axis=0).dimshuffle('x', 0) + eps
return z<commit_msg>Replace T.norm in the cosine similarity<commit_after>
|
import theano
import theano.tensor as T
import numpy as np
def cosine_similarity(x, y, eps=1e-9):
y = y.dimshuffle(1, 0)
z = T.dot(x, y)
z /= T.sqrt(T.sum(x * x) * T.sum(y * y, axis=0).dimshuffle('x', 0) + 1e-6)
return z
|
import theano
import theano.tensor as T
import numpy as np
def cosine_similarity(x, y, eps=1e-9):
y = y.dimshuffle(1, 0)
z = T.dot(x, y)
z /= x.norm(2) * y.norm(2, axis=0).dimshuffle('x', 0) + eps
return zReplace T.norm in the cosine similarityimport theano
import theano.tensor as T
import numpy as np
def cosine_similarity(x, y, eps=1e-9):
y = y.dimshuffle(1, 0)
z = T.dot(x, y)
z /= T.sqrt(T.sum(x * x) * T.sum(y * y, axis=0).dimshuffle('x', 0) + 1e-6)
return z
|
<commit_before>import theano
import theano.tensor as T
import numpy as np
def cosine_similarity(x, y, eps=1e-9):
y = y.dimshuffle(1, 0)
z = T.dot(x, y)
z /= x.norm(2) * y.norm(2, axis=0).dimshuffle('x', 0) + eps
return z<commit_msg>Replace T.norm in the cosine similarity<commit_after>import theano
import theano.tensor as T
import numpy as np
def cosine_similarity(x, y, eps=1e-9):
y = y.dimshuffle(1, 0)
z = T.dot(x, y)
z /= T.sqrt(T.sum(x * x) * T.sum(y * y, axis=0).dimshuffle('x', 0) + 1e-6)
return z
|
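The patched expression treats x as a single vector and scores it against every row of y; note that after the change the eps argument is effectively unused, since 1e-6 is hardcoded inside the square root. A NumPy sketch of the same computation, assuming x has shape (d,) and y has shape (m, d):

import numpy as np

def cosine_similarity(x, y, eps=1e-6):
    # x: (d,), y: (m, d) -> (m,) similarities of x against each row of y
    dots = y @ x
    norms = np.sqrt(np.sum(x * x) * np.sum(y * y, axis=1) + eps)
    return dots / norms

x = np.array([1.0, 0.0])
y = np.array([[2.0, 0.0], [0.0, 3.0]])
print(cosine_similarity(x, y))   # approximately [1.0, 0.0]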
980f9abc5b95c9f9ed089b13e9538173bcac0952
|
app/user_administration/urls.py
|
app/user_administration/urls.py
|
from django.conf.urls import url
from .views import HomePage
urlpatterns = [
url(r'^', HomePage.as_view(), name='HomePage'),
]
|
from django.conf.urls import url
from .views import HomePage, LoginPage
from django.contrib.auth.views import logout_then_login
urlpatterns = [
url(r'^$', HomePage.as_view(), name='HomePage'),
url(r'^login/', LoginPage.as_view(), name='LoginPage'),
url(r'^logout/', logout_then_login, name='LoginPage'),
]
|
Add Login & Logout Routing
|
Add Login & Logout Routing
|
Python
|
mit
|
rexhepberlajolli/RHChallenge,rexhepberlajolli/RHChallenge
|
from django.conf.urls import url
from .views import HomePage
urlpatterns = [
url(r'^', HomePage.as_view(), name='HomePage'),
]
Add Login & Logout Routing
|
from django.conf.urls import url
from .views import HomePage, LoginPage
from django.contrib.auth.views import logout_then_login
urlpatterns = [
url(r'^$', HomePage.as_view(), name='HomePage'),
url(r'^login/', LoginPage.as_view(), name='LoginPage'),
url(r'^logout/', logout_then_login, name='LoginPage'),
]
|
<commit_before>from django.conf.urls import url
from .views import HomePage
urlpatterns = [
url(r'^', HomePage.as_view(), name='HomePage'),
]
<commit_msg>Add Login & Logout Routing<commit_after>
|
from django.conf.urls import url
from .views import HomePage, LoginPage
from django.contrib.auth.views import logout_then_login
urlpatterns = [
url(r'^$', HomePage.as_view(), name='HomePage'),
url(r'^login/', LoginPage.as_view(), name='LoginPage'),
url(r'^logout/', logout_then_login, name='LoginPage'),
]
|
from django.conf.urls import url
from .views import HomePage
urlpatterns = [
url(r'^', HomePage.as_view(), name='HomePage'),
]
Add Login & Logout Routingfrom django.conf.urls import url
from .views import HomePage, LoginPage
from django.contrib.auth.views import logout_then_login
urlpatterns = [
url(r'^$', HomePage.as_view(), name='HomePage'),
url(r'^login/', LoginPage.as_view(), name='LoginPage'),
url(r'^logout/', logout_then_login, name='LoginPage'),
]
|
<commit_before>from django.conf.urls import url
from .views import HomePage
urlpatterns = [
url(r'^', HomePage.as_view(), name='HomePage'),
]
<commit_msg>Add Login & Logout Routing<commit_after>from django.conf.urls import url
from .views import HomePage, LoginPage
from django.contrib.auth.views import logout_then_login
urlpatterns = [
url(r'^$', HomePage.as_view(), name='HomePage'),
url(r'^login/', LoginPage.as_view(), name='LoginPage'),
url(r'^logout/', logout_then_login, name='LoginPage'),
]
|
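Two details worth flagging in the routing above: the logout pattern reuses name='LoginPage', which looks like a copy-paste slip (reverse('LoginPage') may then resolve to a pattern other than the one intended), and adding $ to r'^' stops the home pattern from matching every URL. The LoginPage view itself is not part of this record, so the sketch below is one plausible shape with all names assumed:

from django.contrib.auth import authenticate, login
from django.shortcuts import redirect, render
from django.views import View

class LoginPage(View):
    template_name = 'login.html'   # assumed template

    def get(self, request):
        return render(request, self.template_name)

    def post(self, request):
        user = authenticate(request,
                            username=request.POST.get('username'),
                            password=request.POST.get('password'))
        if user is None:
            return render(request, self.template_name,
                          {'error': 'Invalid credentials'})
        login(request, user)
        return redirect('HomePage')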
4e95002f010fe7663bf678e5d359c6792bfc284d
|
fbmsgbot/models/attachment.py
|
fbmsgbot/models/attachment.py
|
class Button(object):
"""Button object, used for creating button messages"""
def __init__(self, type=None, title="", payload=""):
# Type: request param key
valid_types = {
'web_url': 'url',
'postback': 'payload'
}
assert type in valid_types, "Type %s is not a Button type" \
% (type,)
self.title = title
self.type = type
self.typekey = valid_types[type]
self.payload = payload
def to_json(self):
request_payload = {}
request_payload[self.typekey] = self.payload
request_payload['title'] = self.title
request_payload['type'] = self.type
return request_payload
class Element():
"""Elements are features of Templates"""
def __init__(self, title="", subtitle="", image_url="", buttons=None):
self.title = title
self.image_url = image_url
self.subtitle = subtitle
self.buttons = buttons
def to_json(self):
if self.buttons:
buttons = [button.to_json() for button in self.buttons]
payload = {
'title': self.title,
'image_url': self.image_url,
'subtitle': self.subtitle,
'buttons': buttons
}
return payload
|
class Button(object):
"""Button object, used for creating button messages"""
def __init__(self, type=None, title="", payload=""):
# Type: request param key
valid_types = {
'web_url': 'url',
'postback': 'payload'
}
assert type in valid_types, "Type %s is not a Button type" \
% (type,)
self.title = title
self.type = type
self.typekey = valid_types[type]
self.payload = payload
def to_json(self):
request_payload = {}
request_payload[self.typekey] = self.payload
request_payload['title'] = self.title
request_payload['type'] = self.type
return request_payload
class Element(object):
"""Elements are features of Templates"""
def __init__(self, title="", subtitle="", image_url="", buttons=None):
self.title = title
self.image_url = image_url
self.subtitle = subtitle
self.buttons = buttons
def to_json(self):
if self.buttons:
buttons = [button.to_json() for button in self.buttons]
payload = {
'title': self.title,
'image_url': self.image_url,
'subtitle': self.subtitle,
'buttons': buttons
}
return payload
|
Fix bug in Element to allow subclassing
|
Fix bug in Element to allow subclassing
|
Python
|
mit
|
ben-cunningham/python-messenger-bot,ben-cunningham/pybot
|
class Button(object):
"""Button object, used for creating button messages"""
def __init__(self, type=None, title="", payload=""):
# Type: request param key
valid_types = {
'web_url': 'url',
'postback': 'payload'
}
assert type in valid_types, "Type %s is not a Button type" \
% (type,)
self.title = title
self.type = type
self.typekey = valid_types[type]
self.payload = payload
def to_json(self):
request_payload = {}
request_payload[self.typekey] = self.payload
request_payload['title'] = self.title
request_payload['type'] = self.type
return request_payload
class Element():
"""Elements are features of Templates"""
def __init__(self, title="", subtitle="", image_url="", buttons=None):
self.title = title
self.image_url = image_url
self.subtitle = subtitle
self.buttons = buttons
def to_json(self):
if self.buttons:
buttons = [button.to_json() for button in self.buttons]
payload = {
'title': self.title,
'image_url': self.image_url,
'subtitle': self.subtitle,
'buttons': buttons
}
return payload
Fix bug in Element to allow subclassing
|
class Button(object):
"""Button object, used for creating button messages"""
def __init__(self, type=None, title="", payload=""):
# Type: request param key
valid_types = {
'web_url': 'url',
'postback': 'payload'
}
assert type in valid_types, "Type %s is not a Button type" \
% (type,)
self.title = title
self.type = type
self.typekey = valid_types[type]
self.payload = payload
def to_json(self):
request_payload = {}
request_payload[self.typekey] = self.payload
request_payload['title'] = self.title
request_payload['type'] = self.type
return request_payload
class Element(object):
"""Elements are features of Templates"""
def __init__(self, title="", subtitle="", image_url="", buttons=None):
self.title = title
self.image_url = image_url
self.subtitle = subtitle
self.buttons = buttons
def to_json(self):
if self.buttons:
buttons = [button.to_json() for button in self.buttons]
payload = {
'title': self.title,
'image_url': self.image_url,
'subtitle': self.subtitle,
'buttons': buttons
}
return payload
|
<commit_before>
class Button(object):
"""Button object, used for creating button messages"""
def __init__(self, type=None, title="", payload=""):
# Type: request param key
valid_types = {
'web_url': 'url',
'postback': 'payload'
}
assert type in valid_types, "Type %s is not a Button type" \
% (type,)
self.title = title
self.type = type
self.typekey = valid_types[type]
self.payload = payload
def to_json(self):
request_payload = {}
request_payload[self.typekey] = self.payload
request_payload['title'] = self.title
request_payload['type'] = self.type
return request_payload
class Element():
"""Elements are features of Templates"""
def __init__(self, title="", subtitle="", image_url="", buttons=None):
self.title = title
self.image_url = image_url
self.subtitle = subtitle
self.buttons = buttons
def to_json(self):
if self.buttons:
buttons = [button.to_json() for button in self.buttons]
payload = {
'title': self.title,
'image_url': self.image_url,
'subtitle': self.subtitle,
'buttons': buttons
}
return payload
<commit_msg>Fix bug in Element to allow subclassing<commit_after>
|
class Button(object):
"""Button object, used for creating button messages"""
def __init__(self, type=None, title="", payload=""):
# Type: request param key
valid_types = {
'web_url': 'url',
'postback': 'payload'
}
assert type in valid_types, "Type %s is not a Button type" \
% (type,)
self.title = title
self.type = type
self.typekey = valid_types[type]
self.payload = payload
def to_json(self):
request_payload = {}
request_payload[self.typekey] = self.payload
request_payload['title'] = self.title
request_payload['type'] = self.type
return request_payload
class Element(object):
"""Elements are features of Templates"""
def __init__(self, title="", subtitle="", image_url="", buttons=None):
self.title = title
self.image_url = image_url
self.subtitle = subtitle
self.buttons = buttons
def to_json(self):
if self.buttons:
buttons = [button.to_json() for button in self.buttons]
payload = {
'title': self.title,
'image_url': self.image_url,
'subtitle': self.subtitle,
'buttons': buttons
}
return payload
|
class Button(object):
"""Button object, used for creating button messages"""
def __init__(self, type=None, title="", payload=""):
# Type: request param key
valid_types = {
'web_url': 'url',
'postback': 'payload'
}
assert type in valid_types, "Type %s is not a Button type" \
% (type,)
self.title = title
self.type = type
self.typekey = valid_types[type]
self.payload = payload
def to_json(self):
request_payload = {}
request_payload[self.typekey] = self.payload
request_payload['title'] = self.title
request_payload['type'] = self.type
return request_payload
class Element():
"""Elements are features of Templates"""
def __init__(self, title="", subtitle="", image_url="", buttons=None):
self.title = title
self.image_url = image_url
self.subtitle = subtitle
self.buttons = buttons
def to_json(self):
if self.buttons:
buttons = [button.to_json() for button in self.buttons]
payload = {
'title': self.title,
'image_url': self.image_url,
'subtitle': self.subtitle,
'buttons': buttons
}
return payload
Fix bug in Element to allow subclassing
class Button(object):
"""Button object, used for creating button messages"""
def __init__(self, type=None, title="", payload=""):
# Type: request param key
valid_types = {
'web_url': 'url',
'postback': 'payload'
}
assert type in valid_types, "Type %s is not a Button type" \
% (type,)
self.title = title
self.type = type
self.typekey = valid_types[type]
self.payload = payload
def to_json(self):
request_payload = {}
request_payload[self.typekey] = self.payload
request_payload['title'] = self.title
request_payload['type'] = self.type
return request_payload
class Element(object):
"""Elements are features of Templates"""
def __init__(self, title="", subtitle="", image_url="", buttons=None):
self.title = title
self.image_url = image_url
self.subtitle = subtitle
self.buttons = buttons
def to_json(self):
if self.buttons:
buttons = [button.to_json() for button in self.buttons]
payload = {
'title': self.title,
'image_url': self.image_url,
'subtitle': self.subtitle,
'buttons': buttons
}
return payload
|
<commit_before>
class Button(object):
"""Button object, used for creating button messages"""
def __init__(self, type=None, title="", payload=""):
# Type: request param key
valid_types = {
'web_url': 'url',
'postback': 'payload'
}
assert type in valid_types, "Type %s is not a Button type" \
% (type,)
self.title = title
self.type = type
self.typekey = valid_types[type]
self.payload = payload
def to_json(self):
request_payload = {}
request_payload[self.typekey] = self.payload
request_payload['title'] = self.title
request_payload['type'] = self.type
return request_payload
class Element():
"""Elements are features of Templates"""
def __init__(self, title="", subtitle="", image_url="", buttons=None):
self.title = title
self.image_url = image_url
self.subtitle = subtitle
self.buttons = buttons
def to_json(self):
if self.buttons:
buttons = [button.to_json() for button in self.buttons]
payload = {
'title': self.title,
'image_url': self.image_url,
'subtitle': self.subtitle,
'buttons': buttons
}
return payload
<commit_msg>Fix bug in Element to allow subclassing<commit_after>
class Button(object):
"""Button object, used for creating button messages"""
def __init__(self, type=None, title="", payload=""):
# Type: request param key
valid_types = {
'web_url': 'url',
'postback': 'payload'
}
assert type in valid_types, "Type %s is not a Button type" \
% (type,)
self.title = title
self.type = type
self.typekey = valid_types[type]
self.payload = payload
def to_json(self):
request_payload = {}
request_payload[self.typekey] = self.payload
request_payload['title'] = self.title
request_payload['type'] = self.type
return request_payload
class Element(object):
"""Elements are features of Templates"""
def __init__(self, title="", subtitle="", image_url="", buttons=None):
self.title = title
self.image_url = image_url
self.subtitle = subtitle
self.buttons = buttons
def to_json(self):
if self.buttons:
buttons = [button.to_json() for button in self.buttons]
payload = {
'title': self.title,
'image_url': self.image_url,
'subtitle': self.subtitle,
'buttons': buttons
}
return payload
|
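The one-line change from class Element(): to class Element(object): only matters on Python 2, where empty parentheses still produce an old-style class; super(), properties, and the MRO all require object as a base. On Python 3 the two spellings are equivalent. A minimal Python 2 sketch of the failure the fix avoids:

# Python 2 semantics
class OldElement():            # old-style despite the parentheses
    pass

class NewElement(object):      # new-style
    pass

class GoodCard(NewElement):
    def __init__(self):
        super(GoodCard, self).__init__()     # fine: new-style base

class BadCard(OldElement):
    def __init__(self):
        super(BadCard, self).__init__()      # TypeError: must be type, not classobj

if __name__ == '__main__':
    GoodCard()
    try:
        BadCard()
    except TypeError as exc:
        print(exc)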
54655ff23297b302f12eff9900f8d1c5ce986ab2
|
moksha/tests/test_hub.py
|
moksha/tests/test_hub.py
|
"""Test Moksha's Hub """
from nose.tools import eq_, assert_true
from moksha.hub import MokshaHub
class TestHub:
def setUp(self):
self.hub = MokshaHub()
def tearDown(self):
self.hub.close()
def test_creating_queue(self):
self.hub.create_queue('test')
eq_(len(self.hub.queues), 1)
def test_delete_queue(self):
""" Test deleting a queue """
def test_subscription(self):
""" Test subscribing to a queue """
def test_unsubscribing(self):
""" Test unsubscribing to a queue """
def test_sending_message(self):
""" Test sending a simple message """
def test_receiving_message(self):
""" Test receiving a message """
def test_query(self):
""" Test querying queues """
|
"""Test Moksha's Hub """
#from nose.tools import eq_, assert_true
#from moksha.hub import MokshaHub
#
#class TestHub:
#
# def setUp(self):
# self.hub = MokshaHub()
#
# def tearDown(self):
# self.hub.close()
#
# def test_creating_queue(self):
# self.hub.create_queue('test')
# eq_(len(self.hub.queues), 1)
#
# def test_delete_queue(self):
# """ Test deleting a queue """
# def test_subscription(self):
# """ Test subscribing to a queue """
# def test_unsubscribing(self):
# """ Test unsubscribing to a queue """
# def test_sending_message(self):
# """ Test sending a simple message """
# def test_receiving_message(self):
# """ Test receiving a message """
# def test_query(self):
# """ Test querying queues """
|
Comment out some hub tests, as they do not currently work
|
Comment out some hub tests, as they do not currently work
|
Python
|
apache-2.0
|
lmacken/moksha,mokshaproject/moksha,pombredanne/moksha,ralphbean/moksha,mokshaproject/moksha,pombredanne/moksha,pombredanne/moksha,ralphbean/moksha,lmacken/moksha,mokshaproject/moksha,pombredanne/moksha,lmacken/moksha,ralphbean/moksha,mokshaproject/moksha
|
"""Test Moksha's Hub """
from nose.tools import eq_, assert_true
from moksha.hub import MokshaHub
class TestHub:
def setUp(self):
self.hub = MokshaHub()
def tearDown(self):
self.hub.close()
def test_creating_queue(self):
self.hub.create_queue('test')
eq_(len(self.hub.queues), 1)
def test_delete_queue(self):
""" Test deleting a queue """
def test_subscription(self):
""" Test subscribing to a queue """
def test_unsubscribing(self):
""" Test unsubscribing to a queue """
def test_sending_message(self):
""" Test sending a simple message """
def test_receiving_message(self):
""" Test receiving a message """
def test_query(self):
""" Test querying queues """
Comment out some hub tests, as they do not currently work
|
"""Test Moksha's Hub """
#from nose.tools import eq_, assert_true
#from moksha.hub import MokshaHub
#
#class TestHub:
#
# def setUp(self):
# self.hub = MokshaHub()
#
# def tearDown(self):
# self.hub.close()
#
# def test_creating_queue(self):
# self.hub.create_queue('test')
# eq_(len(self.hub.queues), 1)
#
# def test_delete_queue(self):
# """ Test deleting a queue """
# def test_subscription(self):
# """ Test subscribing to a queue """
# def test_unsubscribing(self):
# """ Test unsubscribing to a queue """
# def test_sending_message(self):
# """ Test sending a simple message """
# def test_receiving_message(self):
# """ Test receiving a message """
# def test_query(self):
# """ Test querying queues """
|
<commit_before>"""Test Moksha's Hub """
from nose.tools import eq_, assert_true
from moksha.hub import MokshaHub
class TestHub:
def setUp(self):
self.hub = MokshaHub()
def tearDown(self):
self.hub.close()
def test_creating_queue(self):
self.hub.create_queue('test')
eq_(len(self.hub.queues), 1)
def test_delete_queue(self):
""" Test deleting a queue """
def test_subscription(self):
""" Test subscribing to a queue """
def test_unsubscribing(self):
""" Test unsubscribing to a queue """
def test_sending_message(self):
""" Test sending a simple message """
def test_receiving_message(self):
""" Test receiving a message """
def test_query(self):
""" Test querying queues """
<commit_msg>Comment out some hub tests, as they do not currently work<commit_after>
|
"""Test Moksha's Hub """
#from nose.tools import eq_, assert_true
#from moksha.hub import MokshaHub
#
#class TestHub:
#
# def setUp(self):
# self.hub = MokshaHub()
#
# def tearDown(self):
# self.hub.close()
#
# def test_creating_queue(self):
# self.hub.create_queue('test')
# eq_(len(self.hub.queues), 1)
#
# def test_delete_queue(self):
# """ Test deleting a queue """
# def test_subscription(self):
# """ Test subscribing to a queue """
# def test_unsubscribing(self):
# """ Test unsubscribing to a queue """
# def test_sending_message(self):
# """ Test sending a simple message """
# def test_receiving_message(self):
# """ Test receiving a message """
# def test_query(self):
# """ Test querying queues """
|
"""Test Moksha's Hub """
from nose.tools import eq_, assert_true
from moksha.hub import MokshaHub
class TestHub:
def setUp(self):
self.hub = MokshaHub()
def tearDown(self):
self.hub.close()
def test_creating_queue(self):
self.hub.create_queue('test')
eq_(len(self.hub.queues), 1)
def test_delete_queue(self):
""" Test deleting a queue """
def test_subscription(self):
""" Test subscribing to a queue """
def test_unsubscribing(self):
""" Test unsubscribing to a queue """
def test_sending_message(self):
""" Test sending a simple message """
def test_receiving_message(self):
""" Test receiving a message """
def test_query(self):
""" Test querying queues """
Comment out some hub tests, as they do not currently work"""Test Moksha's Hub """
#from nose.tools import eq_, assert_true
#from moksha.hub import MokshaHub
#
#class TestHub:
#
# def setUp(self):
# self.hub = MokshaHub()
#
# def tearDown(self):
# self.hub.close()
#
# def test_creating_queue(self):
# self.hub.create_queue('test')
# eq_(len(self.hub.queues), 1)
#
# def test_delete_queue(self):
# """ Test deleting a queue """
# def test_subscription(self):
# """ Test subscribing to a queue """
# def test_unsubscribing(self):
# """ Test unsubscribing to a queue """
# def test_sending_message(self):
# """ Test sending a simple message """
# def test_receiving_message(self):
# """ Test receiving a message """
# def test_query(self):
# """ Test querying queues """
|
<commit_before>"""Test Moksha's Hub """
from nose.tools import eq_, assert_true
from moksha.hub import MokshaHub
class TestHub:
def setUp(self):
self.hub = MokshaHub()
def tearDown(self):
self.hub.close()
def test_creating_queue(self):
self.hub.create_queue('test')
eq_(len(self.hub.queues), 1)
def test_delete_queue(self):
""" Test deleting a queue """
def test_subscription(self):
""" Test subscribing to a queue """
def test_unsubscribing(self):
""" Test unsubscribing to a queue """
def test_sending_message(self):
""" Test sending a simple message """
def test_receiving_message(self):
""" Test receiving a message """
def test_query(self):
""" Test querying queues """
<commit_msg>Comment out some hub tests, as they do not currently work<commit_after>"""Test Moksha's Hub """
#from nose.tools import eq_, assert_true
#from moksha.hub import MokshaHub
#
#class TestHub:
#
# def setUp(self):
# self.hub = MokshaHub()
#
# def tearDown(self):
# self.hub.close()
#
# def test_creating_queue(self):
# self.hub.create_queue('test')
# eq_(len(self.hub.queues), 1)
#
# def test_delete_queue(self):
# """ Test deleting a queue """
# def test_subscription(self):
# """ Test subscribing to a queue """
# def test_unsubscribing(self):
# """ Test unsubscribing to a queue """
# def test_sending_message(self):
# """ Test sending a simple message """
# def test_receiving_message(self):
# """ Test receiving a message """
# def test_query(self):
# """ Test querying queues """
|
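Commenting a whole test module out hides it from every runner and report. An alternative sketch that keeps the suite collected but skipped; this reworks the nose-style class into unittest, so it is not the project's actual code, and because the class is skipped, setUp never runs and the missing MokshaHub import is never touched:

import unittest

@unittest.skip("MokshaHub tests do not currently work")
class TestHub(unittest.TestCase):

    def setUp(self):
        self.hub = MokshaHub()          # would come from moksha.hub

    def test_creating_queue(self):
        self.hub.create_queue('test')
        self.assertEqual(len(self.hub.queues), 1)

if __name__ == '__main__':
    unittest.main()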
9d7cec35a1771f45d0083a80e2f1823182d8d0b8
|
MarkovChainBibleBot/get_bible.py
|
MarkovChainBibleBot/get_bible.py
|
import requests
from os import path
project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt'
bible_filename = 'bible.txt'
bible_path = path.join('..', 'data', bible_filename)
def bible_text(url=project_gutenberg_bible_url):
"""Get the bible text"""
return requests.get(url).text
def process_gutenberg_bible(url=project_gutenberg_bible_url):
"""Remove header and footer info"""
gutenberg_header_footer_sep = '\n\r'*8
header, body, footer = bible_text(url).split(gutenberg_header_footer_sep)
return body
def save_internet_bible(url=project_gutenberg_bible_url):
"""Save bible as a text file"""
bible = process_gutenberg_bible(url)
with open(bible_path, 'w') as file:
file.write(bible)
|
import requests
from os import path, linesep
project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt'
bible_filename = 'bible.txt'
bible_path = path.join('..', 'data', bible_filename)
def bible_text(url=project_gutenberg_bible_url):
"""Get the bible text"""
return requests.get(url).text
def process_gutenberg_bible(url=project_gutenberg_bible_url):
"""Remove header and footer info"""
gutenberg_header_footer_sep = linesep*8
header, body, footer = bible_text(url).split(gutenberg_header_footer_sep)
return body
def save_internet_bible(url=project_gutenberg_bible_url):
"""Save bible as a text file"""
bible = process_gutenberg_bible(url)
with open(bible_path, 'w') as file:
file.write(bible)
|
Use OS-independent line separator
|
Use OS-independent line separator
|
Python
|
mit
|
salvor7/MarkovChainBibleBot
|
import requests
from os import path
project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt'
bible_filename = 'bible.txt'
bible_path = path.join('..', 'data', bible_filename)
def bible_text(url=project_gutenberg_bible_url):
"""Get the bible text"""
return requests.get(url).text
def process_gutenberg_bible(url=project_gutenberg_bible_url):
"""Remove header and footer info"""
gutenberg_header_footer_sep = '\n\r'*8
header, body, footer = bible_text(url).split(gutenberg_header_footer_sep)
return body
def save_internet_bible(url=project_gutenberg_bible_url):
"""Save bible as a text file"""
bible = process_gutenberg_bible(url)
with open(bible_path, 'w') as file:
file.write(bible)
Use OS-independent line separator
|
import requests
from os import path, linesep
project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt'
bible_filename = 'bible.txt'
bible_path = path.join('..', 'data', bible_filename)
def bible_text(url=project_gutenberg_bible_url):
"""Get the bible text"""
return requests.get(url).text
def process_gutenberg_bible(url=project_gutenberg_bible_url):
"""Remove header and footer info"""
gutenberg_header_footer_sep = linesep*8
header, body, footer = bible_text(url).split(gutenberg_header_footer_sep)
return body
def save_internet_bible(url=project_gutenberg_bible_url):
"""Save bible as a text file"""
bible = process_gutenberg_bible(url)
with open(bible_path, 'w') as file:
file.write(bible)
|
<commit_before>import requests
from os import path
project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt'
bible_filename = 'bible.txt'
bible_path = path.join('..', 'data', bible_filename)
def bible_text(url=project_gutenberg_bible_url):
"""Get the bible text"""
return requests.get(url).text
def process_gutenberg_bible(url=project_gutenberg_bible_url):
"""Remove header and footer info"""
gutenberg_header_footer_sep = '\n\r'*8
header, body, footer = bible_text(url).split(gutenberg_header_footer_sep)
return body
def save_internet_bible(url=project_gutenberg_bible_url):
"""Save bible as a text file"""
bible = process_gutenberg_bible(url)
with open(bible_path, 'w') as file:
file.write(bible)
<commit_msg>Use OS-independent line separator<commit_after>
|
import requests
from os import path, linesep
project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt'
bible_filename = 'bible.txt'
bible_path = path.join('..', 'data', bible_filename)
def bible_text(url=project_gutenberg_bible_url):
"""Get the bible text"""
return requests.get(url).text
def process_gutenberg_bible(url=project_gutenberg_bible_url):
"""Remove header and footer info"""
gutenberg_header_footer_sep = linesep*8
header, body, footer = bible_text(url).split(gutenberg_header_footer_sep)
return body
def save_internet_bible(url=project_gutenberg_bible_url):
"""Save bible as a text file"""
bible = process_gutenberg_bible(url)
with open(bible_path, 'w') as file:
file.write(bible)
|
import requests
from os import path
project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt'
bible_filename = 'bible.txt'
bible_path = path.join('..', 'data', bible_filename)
def bible_text(url=project_gutenberg_bible_url):
"""Get the bible text"""
return requests.get(url).text
def process_gutenberg_bible(url=project_gutenberg_bible_url):
"""Remove header and footer info"""
gutenberg_header_footer_sep = '\n\r'*8
header, body, footer = bible_text(url).split(gutenberg_header_footer_sep)
return body
def save_internet_bible(url=project_gutenberg_bible_url):
"""Save bible as a text file"""
bible = process_gutenberg_bible(url)
with open(bible_path, 'w') as file:
file.write(bible)
Use OS-independent line separatorimport requests
from os import path, linesep
project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt'
bible_filename = 'bible.txt'
bible_path = path.join('..', 'data', bible_filename)
def bible_text(url=project_gutenberg_bible_url):
"""Get the bible text"""
return requests.get(url).text
def process_gutenberg_bible(url=project_gutenberg_bible_url):
"""Remove header and footer info"""
gutenberg_header_footer_sep = linesep*8
header, body, footer = bible_text(url).split(gutenberg_header_footer_sep)
return body
def save_internet_bible(url=project_gutenberg_bible_url):
"""Save bible as a text file"""
bible = process_gutenberg_bible(url)
with open(bible_path, 'w') as file:
file.write(bible)
|
<commit_before>import requests
from os import path
project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt'
bible_filename = 'bible.txt'
bible_path = path.join('..', 'data', bible_filename)
def bible_text(url=project_gutenberg_bible_url):
"""Get the bible text"""
return requests.get(url).text
def process_gutenberg_bible(url=project_gutenberg_bible_url):
"""Remove header and footer info"""
gutenberg_header_footer_sep = '\n\r'*8
header, body, footer = bible_text(url).split(gutenberg_header_footer_sep)
return body
def save_internet_bible(url=project_gutenberg_bible_url):
"""Save bible as a text file"""
bible = process_gutenberg_bible(url)
with open(bible_path, 'w') as file:
file.write(bible)
<commit_msg>Use OS-independent line separator<commit_after>import requests
from os import path, linesep
project_gutenberg_bible_url = 'http://www.gutenberg.org/cache/epub/10/pg10.txt'
bible_filename = 'bible.txt'
bible_path = path.join('..', 'data', bible_filename)
def bible_text(url=project_gutenberg_bible_url):
"""Get the bible text"""
return requests.get(url).text
def process_gutenberg_bible(url=project_gutenberg_bible_url):
"""Remove header and footer info"""
gutenberg_header_footer_sep = linesep*8
header, body, footer = bible_text(url).split(gutenberg_header_footer_sep)
return body
def save_internet_bible(url=project_gutenberg_bible_url):
"""Save bible as a text file"""
bible = process_gutenberg_bible(url)
with open(bible_path, 'w') as file:
file.write(bible)
|
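One caveat on the fix above: os.linesep is '\r\n' on Windows and '\n' elsewhere, so linesep*8 ties the split to the OS running the script rather than to the line endings inside the downloaded file (Gutenberg plain-text files have traditionally used '\r\n'). A newline-agnostic sketch using a regular expression:

import re

def split_gutenberg(text):
    # split on a run of eight line breaks, whatever convention the file uses
    header, body, footer = re.split(r'(?:\r\n|\r|\n){8}', text, maxsplit=2)
    return header, body, footer

sample = 'HEADER' + '\r\n' * 8 + 'BODY' + '\n' * 8 + 'FOOTER'
print(split_gutenberg(sample)[1])    # BODY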
7c1173bb5b6d093b4ff7cc89cbe069e1179f1d96
|
IPython/external/appnope/__init__.py
|
IPython/external/appnope/__init__.py
|
try:
from appnope import *
except ImportError:
__version__ = '0.0.5'
import sys
import platform
from distutils.version import LooseVersion as V
if sys.platform != "darwin" or V(platform.mac_ver()[0]) < V("10.9"):
from _dummy import *
else:
from ._nope import *
del sys, platform, V
|
try:
from appnope import *
except ImportError:
__version__ = '0.0.5'
import sys
import platform
from distutils.version import LooseVersion as V
if sys.platform != "darwin" or V(platform.mac_ver()[0]) < V("10.9"):
from ._dummy import *
else:
from ._nope import *
del sys, platform, V
|
Fix relative import in appnope
|
Fix relative import in appnope
Closes gh-6409
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
try:
from appnope import *
except ImportError:
__version__ = '0.0.5'
import sys
import platform
from distutils.version import LooseVersion as V
if sys.platform != "darwin" or V(platform.mac_ver()[0]) < V("10.9"):
from _dummy import *
else:
from ._nope import *
del sys, platform, V
Fix relative import in appnope
Closes gh-6409
|
try:
from appnope import *
except ImportError:
__version__ = '0.0.5'
import sys
import platform
from distutils.version import LooseVersion as V
if sys.platform != "darwin" or V(platform.mac_ver()[0]) < V("10.9"):
from ._dummy import *
else:
from ._nope import *
del sys, platform, V
|
<commit_before>
try:
from appnope import *
except ImportError:
__version__ = '0.0.5'
import sys
import platform
from distutils.version import LooseVersion as V
if sys.platform != "darwin" or V(platform.mac_ver()[0]) < V("10.9"):
from _dummy import *
else:
from ._nope import *
del sys, platform, V
<commit_msg>Fix relative import in appnope
Closes gh-6409<commit_after>
|
try:
from appnope import *
except ImportError:
__version__ = '0.0.5'
import sys
import platform
from distutils.version import LooseVersion as V
if sys.platform != "darwin" or V(platform.mac_ver()[0]) < V("10.9"):
from ._dummy import *
else:
from ._nope import *
del sys, platform, V
|
try:
from appnope import *
except ImportError:
__version__ = '0.0.5'
import sys
import platform
from distutils.version import LooseVersion as V
if sys.platform != "darwin" or V(platform.mac_ver()[0]) < V("10.9"):
from _dummy import *
else:
from ._nope import *
del sys, platform, V
Fix relative import in appnope
Closes gh-6409
try:
from appnope import *
except ImportError:
__version__ = '0.0.5'
import sys
import platform
from distutils.version import LooseVersion as V
if sys.platform != "darwin" or V(platform.mac_ver()[0]) < V("10.9"):
from ._dummy import *
else:
from ._nope import *
del sys, platform, V
|
<commit_before>
try:
from appnope import *
except ImportError:
__version__ = '0.0.5'
import sys
import platform
from distutils.version import LooseVersion as V
if sys.platform != "darwin" or V(platform.mac_ver()[0]) < V("10.9"):
from _dummy import *
else:
from ._nope import *
del sys, platform, V
<commit_msg>Fix relative import in appnope
Closes gh-6409<commit_after>
try:
from appnope import *
except ImportError:
__version__ = '0.0.5'
import sys
import platform
from distutils.version import LooseVersion as V
if sys.platform != "darwin" or V(platform.mac_ver()[0]) < V("10.9"):
from ._dummy import *
else:
from ._nope import *
del sys, platform, V
|
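The fix above is the standard Python 3 migration: implicit relative imports were removed, so inside a package a bare 'from _dummy import *' is resolved against sys.path and fails, while 'from ._dummy import *' resolves next to the importing module. A throwaway demonstration built around a hypothetical two-module package:

import os
import sys
import tempfile

root = tempfile.mkdtemp()
pkg_dir = os.path.join(root, 'pkg')
os.mkdir(pkg_dir)
with open(os.path.join(pkg_dir, 'helper.py'), 'w') as f:
    f.write('VALUE = 42\n')
with open(os.path.join(pkg_dir, '__init__.py'), 'w') as f:
    # 'from helper import VALUE' here would raise ModuleNotFoundError on Python 3
    f.write('from .helper import VALUE\n')

sys.path.insert(0, root)
import pkg
print(pkg.VALUE)    # 42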
822175a40c7a811331593069766c75e9ee0b0c25
|
py/vtdb/__init__.py
|
py/vtdb/__init__.py
|
# Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
# PEP 249 compliant db api for Vitess
apilevel = '2.0'
# Threads may not share the module because multi_client is not thread safe.
threadsafety = 0
paramstyle = 'named'
# TODO(dumbunny): Have callers use dbexceptions.DatabaseError directly
from vtdb.dbexceptions import DatabaseError
from vtdb.dbexceptions import IntegrityError
from vtdb.dbexceptions import OperationalError
|
# Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
# PEP 249 compliant db api for Vitess
apilevel = '2.0'
# Threads may not share the module because multi_client is not thread safe.
threadsafety = 0
paramstyle = 'named'
# TODO(dumbunny): Have callers use dbexceptions.DatabaseError directly
from vtdb.dbexceptions import DatabaseError
from vtdb.dbexceptions import IntegrityError
from vtdb.dbexceptions import OperationalError
from vtdb import vtgatev2
_vtgate_client_registered_conn_class_modules = [vtgatev2]
|
Make sure vtgatev2.VTGateConnection is registered.
|
Make sure vtgatev2.VTGateConnection is registered.
|
Python
|
apache-2.0
|
tinyspeck/vitess,AndyDiamondstein/vitess,mattharden/vitess,enisoc/vitess,mahak/vitess,enisoc/vitess,rnavarro/vitess,erzel/vitess,mapbased/vitess,mattharden/vitess,davygeek/vitess,guokeno0/vitess,enisoc/vitess,pivanof/vitess,aaijazi/vitess,aaijazi/vitess,dcadevil/vitess,mahak/vitess,mapbased/vitess,davygeek/vitess,tjyang/vitess,HubSpot/vitess,sougou/vitess,dumbunny/vitess,tinyspeck/vitess,AndyDiamondstein/vitess,mahak/vitess,pivanof/vitess,mapbased/vitess,alainjobart/vitess,tinyspeck/vitess,mattharden/vitess,sougou/vitess,alainjobart/vitess,tinyspeck/vitess,applift/vitess,tjyang/vitess,dcadevil/vitess,HubSpot/vitess,tjyang/vitess,applift/vitess,dcadevil/vitess,tjyang/vitess,sougou/vitess,tirsen/vitess,AndyDiamondstein/vitess,yaoshengzhe/vitess,mattharden/vitess,tirsen/vitess,vitessio/vitess,tirsen/vitess,mahak/vitess,erzel/vitess,erzel/vitess,mahak/vitess,pivanof/vitess,rnavarro/vitess,applift/vitess,applift/vitess,mapbased/vitess,HubSpot/vitess,erzel/vitess,dumbunny/vitess,mattharden/vitess,AndyDiamondstein/vitess,mahak/vitess,HubSpot/vitess,guokeno0/vitess,mahak/vitess,dcadevil/vitess,erzel/vitess,HubSpot/vitess,mapbased/vitess,enisoc/vitess,dumbunny/vitess,yaoshengzhe/vitess,tjyang/vitess,enisoc/vitess,dumbunny/vitess,dumbunny/vitess,applift/vitess,applift/vitess,pivanof/vitess,pivanof/vitess,dumbunny/vitess,enisoc/vitess,alainjobart/vitess,applift/vitess,mattharden/vitess,AndyDiamondstein/vitess,AndyDiamondstein/vitess,guokeno0/vitess,yaoshengzhe/vitess,tirsen/vitess,dcadevil/vitess,tirsen/vitess,guokeno0/vitess,erzel/vitess,tinyspeck/vitess,pivanof/vitess,vitessio/vitess,erzel/vitess,mapbased/vitess,tinyspeck/vitess,yaoshengzhe/vitess,dumbunny/vitess,pivanof/vitess,alainjobart/vitess,mattharden/vitess,alainjobart/vitess,guokeno0/vitess,yaoshengzhe/vitess,mahak/vitess,rnavarro/vitess,AndyDiamondstein/vitess,pivanof/vitess,vitessio/vitess,sougou/vitess,sougou/vitess,sougou/vitess,davygeek/vitess,tirsen/vitess,alainjobart/vitess,vitessio/vitess,dumbunny/vitess,dumbunny/vitess,davygeek/vitess,rnavarro/vitess,alainjobart/vitess,vitessio/vitess,yaoshengzhe/vitess,alainjobart/vitess,vitessio/vitess,AndyDiamondstein/vitess,aaijazi/vitess,tjyang/vitess,applift/vitess,mattharden/vitess,erzel/vitess,davygeek/vitess,aaijazi/vitess,rnavarro/vitess,applift/vitess,tjyang/vitess,guokeno0/vitess,dcadevil/vitess,erzel/vitess,mapbased/vitess,vitessio/vitess,guokeno0/vitess,davygeek/vitess,erzel/vitess,yaoshengzhe/vitess,sougou/vitess,AndyDiamondstein/vitess,HubSpot/vitess,tirsen/vitess,mapbased/vitess,rnavarro/vitess,tinyspeck/vitess,yaoshengzhe/vitess,tirsen/vitess,tirsen/vitess,aaijazi/vitess,davygeek/vitess,aaijazi/vitess,aaijazi/vitess,sougou/vitess,mapbased/vitess,HubSpot/vitess,HubSpot/vitess,guokeno0/vitess,applift/vitess,mattharden/vitess,tjyang/vitess,guokeno0/vitess,tjyang/vitess,dumbunny/vitess,yaoshengzhe/vitess,aaijazi/vitess,rnavarro/vitess,vitessio/vitess,dcadevil/vitess,rnavarro/vitess,enisoc/vitess,aaijazi/vitess,pivanof/vitess,rnavarro/vitess,mapbased/vitess,mattharden/vitess
|
# Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
# PEP 249 compliant db api for Vitess
apilevel = '2.0'
# Threads may not share the module because multi_client is not thread safe.
threadsafety = 0
paramstyle = 'named'
# TODO(dumbunny): Have callers use dbexceptions.DatabaseError directly
from vtdb.dbexceptions import DatabaseError
from vtdb.dbexceptions import IntegrityError
from vtdb.dbexceptions import OperationalError
Make sure vtgatev2.VTGateConnection is registered.
|
# Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
# PEP 249 compliant db api for Vitess
apilevel = '2.0'
# Threads may not share the module because multi_client is not thread safe.
threadsafety = 0
paramstyle = 'named'
# TODO(dumbunny): Have callers use dbexceptions.DatabaseError directly
from vtdb.dbexceptions import DatabaseError
from vtdb.dbexceptions import IntegrityError
from vtdb.dbexceptions import OperationalError
from vtdb import vtgatev2
_vtgate_client_registered_conn_class_modules = [vtgatev2]
|
<commit_before># Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
# PEP 249 compliant db api for Vitess
apilevel = '2.0'
# Threads may not share the module because multi_client is not thread safe.
threadsafety = 0
paramstyle = 'named'
# TODO(dumbunny): Have callers use dbexceptions.DatabaseError directly
from vtdb.dbexceptions import DatabaseError
from vtdb.dbexceptions import IntegrityError
from vtdb.dbexceptions import OperationalError
<commit_msg>Make sure vtgatev2.VTGateConnection is registered.<commit_after>
|
# Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
# PEP 249 compliant db api for Vitess
apilevel = '2.0'
# Threads may not share the module because multi_client is not thread safe.
threadsafety = 0
paramstyle = 'named'
# TODO(dumbunny): Have callers use dbexceptions.DatabaseError directly
from vtdb.dbexceptions import DatabaseError
from vtdb.dbexceptions import IntegrityError
from vtdb.dbexceptions import OperationalError
from vtdb import vtgatev2
_vtgate_client_registered_conn_class_modules = [vtgatev2]
|
# Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
# PEP 249 compliant db api for Vitess
apilevel = '2.0'
# Threads may not share the module because multi_client is not thread safe.
threadsafety = 0
paramstyle = 'named'
# TODO(dumbunny): Have callers use dbexceptions.DatabaseError directly
from vtdb.dbexceptions import DatabaseError
from vtdb.dbexceptions import IntegrityError
from vtdb.dbexceptions import OperationalError
Make sure vtgatev2.VTGateConnection is registered.# Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
# PEP 249 compliant db api for Vitess
apilevel = '2.0'
# Threads may not share the module because multi_client is not thread safe.
threadsafety = 0
paramstyle = 'named'
# TODO(dumbunny): Have callers use dbexceptions.DatabaseError directly
from vtdb.dbexceptions import DatabaseError
from vtdb.dbexceptions import IntegrityError
from vtdb.dbexceptions import OperationalError
from vtdb import vtgatev2
_vtgate_client_registered_conn_class_modules = [vtgatev2]
|
<commit_before># Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
# PEP 249 compliant db api for Vitess
apilevel = '2.0'
# Threads may not share the module because multi_client is not thread safe.
threadsafety = 0
paramstyle = 'named'
# TODO(dumbunny): Have callers use dbexceptions.DatabaseError directly
from vtdb.dbexceptions import DatabaseError
from vtdb.dbexceptions import IntegrityError
from vtdb.dbexceptions import OperationalError
<commit_msg>Make sure vtgatev2.VTGateConnection is registered.<commit_after># Copyright 2012, Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can
# be found in the LICENSE file.
# PEP 249 compliant db api for Vitess
apilevel = '2.0'
# Threads may not share the module because multi_client is not thread safe.
threadsafety = 0
paramstyle = 'named'
# TODO(dumbunny): Have callers use dbexceptions.DatabaseError directly
from vtdb.dbexceptions import DatabaseError
from vtdb.dbexceptions import IntegrityError
from vtdb.dbexceptions import OperationalError
from vtdb import vtgatev2
_vtgate_client_registered_conn_class_modules = [vtgatev2]
|
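The two added lines import vtgatev2 purely for its side effects (importing the module is what makes its connection class known) and keep a module-level reference so the import is visibly intentional rather than unused. A generic sketch of that register-on-import pattern; the names below are illustrative, not Vitess's actual API:

# registry shared by the package
_CONN_CLASSES = {}

def register_conn_class(protocol, cls):
    _CONN_CLASSES[protocol] = cls

# what a module like vtgatev2 would do at import time
class VTGateConnection(object):
    pass

register_conn_class('v2', VTGateConnection)

# any importer that merely does 'import vtgatev2' now sees the registration
print(_CONN_CLASSES['v2'].__name__)    # VTGateConnection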
85214e015d4e9acc74e11e206cc753cd33d1a2e0
|
webserver.py
|
webserver.py
|
"""
This module is responsible for handling web requests using Flask.
Requests are of the form (start, end) in unix time and are passed on to the db
manager, which then returns the appropriate data to be sent back as JSON.
"""
#TODO: turn this into a daemon
from flask import Flask, request
import json
import db
import config
app = Flask(__name__)
dbm = db.DatabaseManager(config)
@app.route("/data")
def get_data():
print("getting data: {}".format(request))
try:
data_range = (request.args.get("s"), request.args.get("e"))
query_result = dbm.query_range(data_range)
#query_
#return str(data_range)
return str(query_result)
except DBException:
print("DBEXCEPTION")
#TODO
raise
def main():
pass
# Testing
#TODO: move these to main function...
if __name__ == "__main__":
import argparse
ap = argparse.ArgumentParser()
ap.add_argument("--public",
dest="public", action = "store_true",
help = "make the flask app publicly visible to the network.")
args = ap.parse_args()
# initialize a dummy database, which returns random values.
#TODO
dbm = db.DatabaseManager("dummy")
app.run('0.0.0.0' if args.public else None)
|
"""
This module is responsible for handling web requests using Flask.
Requests are of the form (start, end) in unix time and are passed on to the db
manager, which then returns the appropriate data to be sent back as JSON.
"""
#TODO: turn this into a daemon
from flask import Flask, request
import json
import db
import config
app = Flask(__name__)
cfg = config.get_config_dict()
dbm = db.DatabaseManager(cfg)
@app.route("/data")
def get_data():
print("getting data: {}".format(request))
try:
data_range = (int(request.args.get("s")), int(request.args.get("e")))
datapoints = dbm.query_range(data_range)
#query_
#return str(data_range)
#return str(query_result)
return json.dumps([[i, x] for i, x in enumerate(datapoints)])
#TODO
except db.DBException:
raise
def main():
pass
# Testing
#TODO: move these to main function...
if __name__ == "__main__":
import argparse
ap = argparse.ArgumentParser()
ap.add_argument("--public",
dest="public", action = "store_true",
help = "make the flask app publicly visible to the network.")
args = ap.parse_args()
# initialize a dummy database, which returns random values.
#TODO
dbm = db.DatabaseManager("dummy")
app.run('0.0.0.0' if args.public else None)
|
Return result as JSON, not properly formatted yet.
|
Return result as JSON, not properly formatted yet.
|
Python
|
mit
|
mgunyho/kiltiskahvi
|
"""
This module is responsible for handling web requests using Flask.
Requests are of the form (start, end) in unix time and are passed on to the db
manager, which then returns the appropriate data to be sent back as JSON.
"""
#TODO: turn this into a daemon
from flask import Flask, request
import json
import db
import config
app = Flask(__name__)
dbm = db.DatabaseManager(config)
@app.route("/data")
def get_data():
print("getting data: {}".format(request))
try:
data_range = (request.args.get("s"), request.args.get("e"))
query_result = dbm.query_range(data_range)
#query_
#return str(data_range)
return str(query_result)
except DBException:
print("DBEXCEPTION")
#TODO
raise
def main():
pass
# Testing
#TODO: move these to main function...
if __name__ == "__main__":
import argparse
ap = argparse.ArgumentParser()
ap.add_argument("--public",
dest="public", action = "store_true",
help = "make the flask app publicly visible to the network.")
args = ap.parse_args()
# initialize a dummy database, which returns random values.
#TODO
dbm = db.DatabaseManager("dummy")
app.run('0.0.0.0' if args.public else None)
Return result as JSON, not properly formatted yet.
|
"""
This module is responsible for handling web requests using Flask.
Requests are of the form (start, end) in unix time and are passed on to the db
manager, which then returns the appropriate data to be sent back as JSON.
"""
#TODO: turn this into a daemon
from flask import Flask, request
import json
import db
import config
app = Flask(__name__)
cfg = config.get_config_dict()
dbm = db.DatabaseManager(cfg)
@app.route("/data")
def get_data():
print("getting data: {}".format(request))
try:
data_range = (int(request.args.get("s")), int(request.args.get("e")))
datapoints = dbm.query_range(data_range)
#query_
#return str(data_range)
#return str(query_result)
return json.dumps([[i, x] for i, x in enumerate(datapoints)])
#TODO
except db.DBException:
raise
def main():
pass
# Testing
#TODO: move these to main function...
if __name__ == "__main__":
import argparse
ap = argparse.ArgumentParser()
ap.add_argument("--public",
dest="public", action = "store_true",
help = "make the flask app publicly visible to the network.")
args = ap.parse_args()
# initialize a dummy database, which returns random values.
#TODO
dbm = db.DatabaseManager("dummy")
app.run('0.0.0.0' if args.public else None)
|
<commit_before>"""
This module is responsible for handling web requests using Flask.
Requests are of the form (start, end) in unix time and are passed on to the db
manager, which then returns the appropriate data to be sent back as JSON.
"""
#TODO: turn this into a daemon
from flask import Flask, request
import json
import db
import config
app = Flask(__name__)
dbm = db.DatabaseManager(config)
@app.route("/data")
def get_data():
print("getting data: {}".format(request))
try:
data_range = (request.args.get("s"), request.args.get("e"))
query_result = dbm.query_range(data_range)
#query_
#return str(data_range)
return str(query_result)
except DBException:
print("DBEXCEPTION")
#TODO
raise
def main():
pass
# Testing
#TODO: move these to main function...
if __name__ == "__main__":
import argparse
ap = argparse.ArgumentParser()
ap.add_argument("--public",
dest="public", action = "store_true",
help = "make the flask app publicly visible to the network.")
args = ap.parse_args()
# initialize a dummy database, which returns random values.
#TODO
dbm = db.DatabaseManager("dummy")
app.run('0.0.0.0' if args.public else None)
<commit_msg>Return result as JSON, not properly formatted yet.<commit_after>
|
"""
This module is responsible for handling web requests using Flask.
Requests are of the form (start, end) in unix time and are passed on to the db
manager, which then returns the appropriate data to be sent back as JSON.
"""
#TODO: turn this into a daemon
from flask import Flask, request
import json
import db
import config
app = Flask(__name__)
cfg = config.get_config_dict()
dbm = db.DatabaseManager(cfg)
@app.route("/data")
def get_data():
print("getting data: {}".format(request))
try:
data_range = (int(request.args.get("s")), int(request.args.get("e")))
datapoints = dbm.query_range(data_range)
#query_
#return str(data_range)
#return str(query_result)
return json.dumps([[i, x] for i, x in enumerate(datapoints)])
#TODO
except db.DBException:
raise
def main():
pass
# Testing
#TODO: move these to main function...
if __name__ == "__main__":
import argparse
ap = argparse.ArgumentParser()
ap.add_argument("--public",
dest="public", action = "store_true",
help = "make the flask app publicly visible to the network.")
args = ap.parse_args()
# initialize a dummy database, which returns random values.
#TODO
dbm = db.DatabaseManager("dummy")
app.run('0.0.0.0' if args.public else None)
|
"""
This module is responsible for handling web requests using Flask.
Requests are of the form (start, end) in unix time and are passed on to the db
manager, which then returns the appropriate data to be sent back as JSON.
"""
#TODO: turn this into a daemon
from flask import Flask, request
import json
import db
import config
app = Flask(__name__)
dbm = db.DatabaseManager(config)
@app.route("/data")
def get_data():
print("getting data: {}".format(request))
try:
data_range = (request.args.get("s"), request.args.get("e"))
query_result = dbm.query_range(data_range)
#query_
#return str(data_range)
return str(query_result)
except DBException:
print("DBEXCEPTION")
#TODO
raise
def main():
pass
# Testing
#TODO: move these to main function...
if __name__ == "__main__":
import argparse
ap = argparse.ArgumentParser()
ap.add_argument("--public",
dest="public", action = "store_true",
help = "make the flask app publicly visible to the network.")
args = ap.parse_args()
# initialize a dummy database, which returns random values.
#TODO
dbm = db.DatabaseManager("dummy")
app.run('0.0.0.0' if args.public else None)
Return result as JSON, not properly formatted yet."""
This module is responsible for handling web requests using Flask.
Requests are of the form (start, end) in unix time and are passed on to the db
manager, which then returns the appropriate data to be sent back as JSON.
"""
#TODO: turn this into a daemon
from flask import Flask, request
import json
import db
import config
app = Flask(__name__)
cfg = config.get_config_dict()
dbm = db.DatabaseManager(cfg)
@app.route("/data")
def get_data():
print("getting data: {}".format(request))
try:
data_range = (int(request.args.get("s")), int(request.args.get("e")))
datapoints = dbm.query_range(data_range)
#query_
#return str(data_range)
#return str(query_result)
return json.dumps([[i, x] for i, x in enumerate(datapoints)])
#TODO
except db.DBException:
raise
def main():
pass
# Testing
#TODO: move these to main function...
if __name__ == "__main__":
import argparse
ap = argparse.ArgumentParser()
ap.add_argument("--public",
dest="public", action = "store_true",
help = "make the flask app publicly visible to the network.")
args = ap.parse_args()
# initialize a dummy database, which returns random values.
#TODO
dbm = db.DatabaseManager("dummy")
app.run('0.0.0.0' if args.public else None)
|
<commit_before>"""
This module is responsible for handling web requests using Flask.
Requests are of the form (start, end) in unix time and are passed on to the db
manager, which then returns the appropriate data to be sent back as JSON.
"""
#TODO: turn this into a daemon
from flask import Flask, request
import json
import db
import config
app = Flask(__name__)
dbm = db.DatabaseManager(config)
@app.route("/data")
def get_data():
print("getting data: {}".format(request))
try:
data_range = (request.args.get("s"), request.args.get("e"))
query_result = dbm.query_range(data_range)
#query_
#return str(data_range)
return str(query_result)
except DBException:
print("DBEXCEPTION")
#TODO
raise
def main():
pass
# Testing
#TODO: move these to main function...
if __name__ == "__main__":
import argparse
ap = argparse.ArgumentParser()
ap.add_argument("--public",
dest="public", action = "store_true",
help = "make the flask app publicly visible to the network.")
args = ap.parse_args()
# initialize a dummy database, which returns random values.
#TODO
dbm = db.DatabaseManager("dummy")
app.run('0.0.0.0' if args.public else None)
<commit_msg>Return result as JSON, not properly formatted yet.<commit_after>"""
This module is responsible for handling web requests using Flask.
Requests are of the form (start, end) in unix time and are passed on to the db
manager, which then returns the appropriate data to be sent back as JSON.
"""
#TODO: turn this into a daemon
from flask import Flask, request
import json
import db
import config
app = Flask(__name__)
cfg = config.get_config_dict()
dbm = db.DatabaseManager(cfg)
@app.route("/data")
def get_data():
print("getting data: {}".format(request))
try:
data_range = (int(request.args.get("s")), int(request.args.get("e")))
datapoints = dbm.query_range(data_range)
#query_
#return str(data_range)
#return str(query_result)
return json.dumps([[i, x] for i, x in enumerate(datapoints)])
#TODO
except db.DBException:
raise
def main():
pass
# Testing
#TODO: move these to main function...
if __name__ == "__main__":
import argparse
ap = argparse.ArgumentParser()
ap.add_argument("--public",
dest="public", action = "store_true",
help = "make the flask app publicly visible to the network.")
args = ap.parse_args()
# initialize a dummy database, which returns random values.
#TODO
dbm = db.DatabaseManager("dummy")
app.run('0.0.0.0' if args.public else None)
|
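Two small observations on the patched handler: the 'except db.DBException: raise' block is still a no-op re-raise (the adjacent TODO suggests it is a placeholder), and the response is a list of [index, value] pairs, a common input shape for JavaScript plotting libraries. The transformation in isolation, with made-up sample values:

import json

datapoints = [3.2, 3.5, 3.1]          # sample values, made up
payload = json.dumps([[i, x] for i, x in enumerate(datapoints)])
print(payload)                         # [[0, 3.2], [1, 3.5], [2, 3.1]]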
c3d22dd13bf56e65452e2e7d634c527d66e2a3b4
|
pyptools/objects.py
|
pyptools/objects.py
|
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
Parses an iterator/generator. Must be implemented by each parser.
:param value: Iterable containing data
:return: yields parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
Parses a string. By default accumulates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
Parses lines from a file. By default accumulates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read().splitlines(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
|
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
Parses an iterator/generator. Must be implemented by each parser.
:param value: Iterable containing data
:return: yields parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
Parses a string. By default accumulates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
Parses lines from a file. By default accumulates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
|
Fix a bug where splitlines was called twice for parse_file
|
Fix a bug where splitlines was called twice for parse_file
|
Python
|
mit
|
tandreas/pyptools
|
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
Parses an iterator/generator. Must be implemented by each parser.
:param value: Iterable containing data
:return: yields parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
        Parses a string. By default accumulates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
        Parses lines from a file. By default accumulates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read().splitlines(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
Fix a bug where splitlines was called twice for parse_file
|
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
        Parses an iterator/generator. Must be implemented by each parser.
        :param value: Iterable containing data
        :return: yields parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
        Parses a string. By default accumulates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
        Parses lines from a file. By default accumulates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
|
<commit_before>class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
        Parses an iterator/generator. Must be implemented by each parser.
        :param value: Iterable containing data
        :return: yields parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
        Parses a string. By default accumulates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
        Parses lines from a file. By default accumulates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read().splitlines(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
<commit_msg>Fix a bug where splitlines was called twice for parse_file<commit_after>
|
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
        Parses an iterator/generator. Must be implemented by each parser.
        :param value: Iterable containing data
        :return: yields parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
        Parses a string. By default accumulates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
        Parses lines from a file. By default accumulates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
|
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
        Parses an iterator/generator. Must be implemented by each parser.
        :param value: Iterable containing data
        :return: yields parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
        Parses a string. By default accumulates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
        Parses lines from a file. By default accumulates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read().splitlines(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
Fix a bug where splitlines was called twice for parse_file
class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
        Parses an iterator/generator. Must be implemented by each parser.
        :param value: Iterable containing data
        :return: yields parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
        Parses a string. By default accumulates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
        Parses lines from a file. By default accumulates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
|
<commit_before>class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
        Parses an iterator/generator. Must be implemented by each parser.
        :param value: Iterable containing data
        :return: yields parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
        Parses a string. By default accumulates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
        Parses lines from a file. By default accumulates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read().splitlines(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
<commit_msg>Fix a bug where splitlines was called twice for parse_file<commit_after>class Parser(object):
"""Base class for all parsers to inheret with common interface"""
def iterparse(self, iterator, **kwargs):
"""
        Parses an iterator/generator. Must be implemented by each parser.
        :param value: Iterable containing data
        :return: yields parsed data
"""
raise NotImplementedError('Must implement iterparse method!')
def parse(self, value, **kwargs):
"""
        Parses a string. By default accumulates from the iterparse method.
:param value:
String containing the data to parse.
:return: data structure containing parsed data
"""
result = []
value = value.splitlines()
for item in self.iterparse(iter(value), **kwargs):
result.append(item)
if len(result) == 1:
return result[0]
else:
return result
def parse_file(self, filename, **kwargs):
"""
        Parses lines from a file. By default accumulates from
iterparse_file method by splitting the file by lines.
:param filename: string with the path to the file.
:return: data structure containing parsed data
"""
with open(filename, 'rU') as value:
return self.parse(value.read(), **kwargs)
def iterparse_file(self, filename, **kwargs):
def file_generator(fname):
with open(fname, 'rU') as f:
for line in f:
yield line.strip('\r\n')
generator = file_generator(filename)
for value in self.iterparse(generator, **kwargs):
yield value
|
4987b2e5a2d5ee208a274702f6b88a9021149c86
|
tests/blueprints/user_message/test_address_formatting.py
|
tests/blueprints/user_message/test_address_formatting.py
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(application, params):
screen_name, email_address, expected = params
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', 'alice@example.com', 'Alice <alice@example.com>'),
('<AngleInvestor>', 'angleinvestor@example.com', '"<AngleInvestor>" <angleinvestor@example.com>'),
('-=]YOLO[=-', 'yolo@example.com', '"-=]YOLO[=-" <yolo@example.com>'),
])
def params(request):
yield request.param
@pytest.fixture
def application(db):
with app_context():
with database_recreated(db):
yield
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(site, params):
screen_name, email_address, expected = params
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', 'alice@example.com', 'Alice <alice@example.com>'),
('<AngleInvestor>', 'angleinvestor@example.com', '"<AngleInvestor>" <angleinvestor@example.com>'),
('-=]YOLO[=-', 'yolo@example.com', '"-=]YOLO[=-" <yolo@example.com>'),
])
def params(request):
yield request.param
@pytest.fixture(scope='module')
def site(db):
with app_context():
with database_recreated(db):
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
yield site
|
Speed up user message address formatting test
|
Speed up user message address formatting test
The common set-up is moved to the fixture, then the fixture's scope is
widened so that it is used for all test cases in the module, avoiding
duplicate work.
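The speed-up hinges on pytest fixture scoping: with scope='module' the fixture body runs once and its value is shared by every test in the module. A self-contained sketch of that mechanism (names are illustrative, not from this project):

import pytest

SETUP_CALLS = []

@pytest.fixture(scope='module')
def expensive_setup():
    # Executed once for the whole module, not once per test.
    SETUP_CALLS.append(1)
    return 'shared value'

@pytest.mark.parametrize('n', [1, 2, 3])
def test_setup_runs_once(expensive_setup, n):
    assert expensive_setup == 'shared value'
    assert len(SETUP_CALLS) == 1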
|
Python
|
bsd-3-clause
|
m-ober/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(application, params):
screen_name, email_address, expected = params
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', 'alice@example.com', 'Alice <alice@example.com>'),
('<AngleInvestor>', 'angleinvestor@example.com', '"<AngleInvestor>" <angleinvestor@example.com>'),
('-=]YOLO[=-', 'yolo@example.com', '"-=]YOLO[=-" <yolo@example.com>'),
])
def params(request):
yield request.param
@pytest.fixture
def application(db):
with app_context():
with database_recreated(db):
yield
Speed up user message address formatting test
The common set-up is moved to the fixture, then the fixture's scope is
widened so that it is used for all test cases in the module, avoiding
duplicate work.
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(site, params):
screen_name, email_address, expected = params
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', 'alice@example.com', 'Alice <alice@example.com>'),
('<AngleInvestor>', 'angleinvestor@example.com', '"<AngleInvestor>" <angleinvestor@example.com>'),
('-=]YOLO[=-', 'yolo@example.com', '"-=]YOLO[=-" <yolo@example.com>'),
])
def params(request):
yield request.param
@pytest.fixture(scope='module')
def site(db):
with app_context():
with database_recreated(db):
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
yield site
|
<commit_before>"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(application, params):
screen_name, email_address, expected = params
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', 'alice@example.com', 'Alice <alice@example.com>'),
('<AngleInvestor>', 'angleinvestor@example.com', '"<AngleInvestor>" <angleinvestor@example.com>'),
('-=]YOLO[=-', 'yolo@example.com', '"-=]YOLO[=-" <yolo@example.com>'),
])
def params(request):
yield request.param
@pytest.fixture
def application(db):
with app_context():
with database_recreated(db):
yield
<commit_msg>Speed up user message address formatting test
The common set-up is moved to the fixture, then the fixture's scope is
widened so that it is used for all test cases in the module, avoiding
duplicate work.<commit_after>
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(site, params):
screen_name, email_address, expected = params
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', 'alice@example.com', 'Alice <alice@example.com>'),
('<AngleInvestor>', 'angleinvestor@example.com', '"<AngleInvestor>" <angleinvestor@example.com>'),
('-=]YOLO[=-', 'yolo@example.com', '"-=]YOLO[=-" <yolo@example.com>'),
])
def params(request):
yield request.param
@pytest.fixture(scope='module')
def site(db):
with app_context():
with database_recreated(db):
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
yield site
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(application, params):
screen_name, email_address, expected = params
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', 'alice@example.com', 'Alice <alice@example.com>'),
('<AngleInvestor>', 'angleinvestor@example.com', '"<AngleInvestor>" <angleinvestor@example.com>'),
('-=]YOLO[=-', 'yolo@example.com', '"-=]YOLO[=-" <yolo@example.com>'),
])
def params(request):
yield request.param
@pytest.fixture
def application(db):
with app_context():
with database_recreated(db):
yield
Speed up user message address formatting test
The common set-up is moved to the fixture, then the fixture's scope is
widened so that it is used for all test cases in the module, avoiding
duplicate work.
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(site, params):
screen_name, email_address, expected = params
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', 'alice@example.com', 'Alice <alice@example.com>'),
('<AngleInvestor>', 'angleinvestor@example.com', '"<AngleInvestor>" <angleinvestor@example.com>'),
('-=]YOLO[=-', 'yolo@example.com', '"-=]YOLO[=-" <yolo@example.com>'),
])
def params(request):
yield request.param
@pytest.fixture(scope='module')
def site(db):
with app_context():
with database_recreated(db):
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
yield site
|
<commit_before>"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(application, params):
screen_name, email_address, expected = params
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', 'alice@example.com', 'Alice <alice@example.com>'),
('<AngleInvestor>', 'angleinvestor@example.com', '"<AngleInvestor>" <angleinvestor@example.com>'),
('-=]YOLO[=-', 'yolo@example.com', '"-=]YOLO[=-" <yolo@example.com>'),
])
def params(request):
yield request.param
@pytest.fixture
def application(db):
with app_context():
with database_recreated(db):
yield
<commit_msg>Speed up user message address formatting test
The common set-up is moved to the fixture, then the fixture's scope is
widened so that it is used for all test cases in the module, avoiding
duplicate work.<commit_after>"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from unittest.mock import patch
import pytest
from byceps.services.user_message import service as user_message_service
from tests.conftest import database_recreated
from tests.helpers import app_context, create_brand, create_email_config, \
create_party, create_site, create_user
def test_recipient_formatting(site, params):
screen_name, email_address, expected = params
user = create_user(screen_name, email_address=email_address)
message = user_message_service.create_message(user.id, user.id, '', '',
site.id)
assert message.recipients == [expected]
@pytest.fixture(params=[
('Alice', 'alice@example.com', 'Alice <alice@example.com>'),
('<AngleInvestor>', 'angleinvestor@example.com', '"<AngleInvestor>" <angleinvestor@example.com>'),
('-=]YOLO[=-', 'yolo@example.com', '"-=]YOLO[=-" <yolo@example.com>'),
])
def params(request):
yield request.param
@pytest.fixture(scope='module')
def site(db):
with app_context():
with database_recreated(db):
create_email_config()
brand = create_brand()
party = create_party(brand.id)
site = create_site(party.id)
yield site
|
d3fd59325f592bd3409d8466ba288e0c377c7440
|
mklocale/cmd.py
|
mklocale/cmd.py
|
import argparse
import logging
import yaml
from mklocale import transifex
from mklocale.cats import merge_by_language, write_catalog
from mklocale.utils import listify
log = logging.getLogger("mklocale")
def cmdline(argv):
logging.basicConfig(level=logging.INFO)
ap = argparse.ArgumentParser()
ap.add_argument("config")
args = ap.parse_args()
with open(args.config, "r") as infp:
config = yaml.safe_load(infp)
catalogs = []
for tx_config in listify(config.get("transifex")):
catalogs.extend(transifex.read_catalogs(tx_config))
for merged_catalog in merge_by_language(catalogs):
targets = [
t.format(lang=merged_catalog.locale)
for t
in listify(config["target"])
]
for target_file in targets:
write_catalog(target_file, merged_catalog)
|
import argparse
import hashlib
import logging
import os
import yaml
from mklocale import transifex
from mklocale.cats import merge_by_language, write_catalog
from mklocale.utils import listify
log = logging.getLogger("mklocale")
def cmdline(argv):
logging.basicConfig(level=logging.INFO)
ap = argparse.ArgumentParser()
ap.add_argument("config")
args = ap.parse_args()
try:
import requests_cache
requests_cache.install_cache(
os.path.realpath('./mklocale.%s.cache' % hashlib.md5(args.config).hexdigest()),
expire_after=86400
)
except ImportError:
pass
os.chdir(os.path.dirname(args.config))
with open(args.config, "r") as infp:
config = yaml.safe_load(infp)
catalogs = []
for tx_config in listify(config.get("transifex")):
catalogs.extend(transifex.read_catalogs(tx_config))
for merged_catalog in merge_by_language(catalogs):
targets = [
t.format(lang=merged_catalog.locale)
for t
in listify(config["target"])
]
for target_file in targets:
write_catalog(target_file, merged_catalog)
|
Add optional use of requests_cache
|
Add optional use of requests_cache
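The commit uses a common optional-dependency pattern: try to import the accelerator, configure it if present, and continue silently otherwise. A pared-down sketch (the cache file name is illustrative; `requests_cache.install_cache` is a real API that transparently caches all `requests` HTTP calls, here for 86400 seconds, i.e. one day):

try:
    import requests_cache
    # Transparently cache requests-based HTTP traffic for a day.
    requests_cache.install_cache('mklocale.cache', expire_after=86400)
except ImportError:
    # requests_cache is not installed; HTTP calls simply go uncached.
    pass

One caveat worth flagging: on Python 3 the `hashlib.md5(args.config)` call in the commit would need a bytes argument such as `args.config.encode()`; as written it assumes Python 2 strings.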
|
Python
|
mit
|
akx/mklocale
|
import argparse
import logging
import yaml
from mklocale import transifex
from mklocale.cats import merge_by_language, write_catalog
from mklocale.utils import listify
log = logging.getLogger("mklocale")
def cmdline(argv):
logging.basicConfig(level=logging.INFO)
ap = argparse.ArgumentParser()
ap.add_argument("config")
args = ap.parse_args()
with open(args.config, "r") as infp:
config = yaml.safe_load(infp)
catalogs = []
for tx_config in listify(config.get("transifex")):
catalogs.extend(transifex.read_catalogs(tx_config))
for merged_catalog in merge_by_language(catalogs):
targets = [
t.format(lang=merged_catalog.locale)
for t
in listify(config["target"])
]
for target_file in targets:
write_catalog(target_file, merged_catalog)
Add optional use of requests_cache
|
import argparse
import hashlib
import logging
import os
import yaml
from mklocale import transifex
from mklocale.cats import merge_by_language, write_catalog
from mklocale.utils import listify
log = logging.getLogger("mklocale")
def cmdline(argv):
logging.basicConfig(level=logging.INFO)
ap = argparse.ArgumentParser()
ap.add_argument("config")
args = ap.parse_args()
try:
import requests_cache
requests_cache.install_cache(
os.path.realpath('./mklocale.%s.cache' % hashlib.md5(args.config).hexdigest()),
expire_after=86400
)
except ImportError:
pass
os.chdir(os.path.dirname(args.config))
with open(args.config, "r") as infp:
config = yaml.safe_load(infp)
catalogs = []
for tx_config in listify(config.get("transifex")):
catalogs.extend(transifex.read_catalogs(tx_config))
for merged_catalog in merge_by_language(catalogs):
targets = [
t.format(lang=merged_catalog.locale)
for t
in listify(config["target"])
]
for target_file in targets:
write_catalog(target_file, merged_catalog)
|
<commit_before>import argparse
import logging
import yaml
from mklocale import transifex
from mklocale.cats import merge_by_language, write_catalog
from mklocale.utils import listify
log = logging.getLogger("mklocale")
def cmdline(argv):
logging.basicConfig(level=logging.INFO)
ap = argparse.ArgumentParser()
ap.add_argument("config")
args = ap.parse_args()
with open(args.config, "r") as infp:
config = yaml.safe_load(infp)
catalogs = []
for tx_config in listify(config.get("transifex")):
catalogs.extend(transifex.read_catalogs(tx_config))
for merged_catalog in merge_by_language(catalogs):
targets = [
t.format(lang=merged_catalog.locale)
for t
in listify(config["target"])
]
for target_file in targets:
write_catalog(target_file, merged_catalog)
<commit_msg>Add optional use of requests_cache<commit_after>
|
import argparse
import hashlib
import logging
import os
import yaml
from mklocale import transifex
from mklocale.cats import merge_by_language, write_catalog
from mklocale.utils import listify
log = logging.getLogger("mklocale")
def cmdline(argv):
logging.basicConfig(level=logging.INFO)
ap = argparse.ArgumentParser()
ap.add_argument("config")
args = ap.parse_args()
try:
import requests_cache
requests_cache.install_cache(
os.path.realpath('./mklocale.%s.cache' % hashlib.md5(args.config).hexdigest()),
expire_after=86400
)
except ImportError:
pass
os.chdir(os.path.dirname(args.config))
with open(args.config, "r") as infp:
config = yaml.safe_load(infp)
catalogs = []
for tx_config in listify(config.get("transifex")):
catalogs.extend(transifex.read_catalogs(tx_config))
for merged_catalog in merge_by_language(catalogs):
targets = [
t.format(lang=merged_catalog.locale)
for t
in listify(config["target"])
]
for target_file in targets:
write_catalog(target_file, merged_catalog)
|
import argparse
import logging
import yaml
from mklocale import transifex
from mklocale.cats import merge_by_language, write_catalog
from mklocale.utils import listify
log = logging.getLogger("mklocale")
def cmdline(argv):
logging.basicConfig(level=logging.INFO)
ap = argparse.ArgumentParser()
ap.add_argument("config")
args = ap.parse_args()
with open(args.config, "r") as infp:
config = yaml.safe_load(infp)
catalogs = []
for tx_config in listify(config.get("transifex")):
catalogs.extend(transifex.read_catalogs(tx_config))
for merged_catalog in merge_by_language(catalogs):
targets = [
t.format(lang=merged_catalog.locale)
for t
in listify(config["target"])
]
for target_file in targets:
write_catalog(target_file, merged_catalog)
Add optional use of requests_cache
import argparse
import hashlib
import logging
import os
import yaml
from mklocale import transifex
from mklocale.cats import merge_by_language, write_catalog
from mklocale.utils import listify
log = logging.getLogger("mklocale")
def cmdline(argv):
logging.basicConfig(level=logging.INFO)
ap = argparse.ArgumentParser()
ap.add_argument("config")
args = ap.parse_args()
try:
import requests_cache
requests_cache.install_cache(
os.path.realpath('./mklocale.%s.cache' % hashlib.md5(args.config).hexdigest()),
expire_after=86400
)
except ImportError:
pass
os.chdir(os.path.dirname(args.config))
with open(args.config, "r") as infp:
config = yaml.safe_load(infp)
catalogs = []
for tx_config in listify(config.get("transifex")):
catalogs.extend(transifex.read_catalogs(tx_config))
for merged_catalog in merge_by_language(catalogs):
targets = [
t.format(lang=merged_catalog.locale)
for t
in listify(config["target"])
]
for target_file in targets:
write_catalog(target_file, merged_catalog)
|
<commit_before>import argparse
import logging
import yaml
from mklocale import transifex
from mklocale.cats import merge_by_language, write_catalog
from mklocale.utils import listify
log = logging.getLogger("mklocale")
def cmdline(argv):
logging.basicConfig(level=logging.INFO)
ap = argparse.ArgumentParser()
ap.add_argument("config")
args = ap.parse_args()
with open(args.config, "r") as infp:
config = yaml.safe_load(infp)
catalogs = []
for tx_config in listify(config.get("transifex")):
catalogs.extend(transifex.read_catalogs(tx_config))
for merged_catalog in merge_by_language(catalogs):
targets = [
t.format(lang=merged_catalog.locale)
for t
in listify(config["target"])
]
for target_file in targets:
write_catalog(target_file, merged_catalog)
<commit_msg>Add optional use of requests_cache<commit_after>import argparse
import hashlib
import logging
import os
import yaml
from mklocale import transifex
from mklocale.cats import merge_by_language, write_catalog
from mklocale.utils import listify
log = logging.getLogger("mklocale")
def cmdline(argv):
logging.basicConfig(level=logging.INFO)
ap = argparse.ArgumentParser()
ap.add_argument("config")
args = ap.parse_args()
try:
import requests_cache
requests_cache.install_cache(
os.path.realpath('./mklocale.%s.cache' % hashlib.md5(args.config).hexdigest()),
expire_after=86400
)
except ImportError:
pass
os.chdir(os.path.dirname(args.config))
with open(args.config, "r") as infp:
config = yaml.safe_load(infp)
catalogs = []
for tx_config in listify(config.get("transifex")):
catalogs.extend(transifex.read_catalogs(tx_config))
for merged_catalog in merge_by_language(catalogs):
targets = [
t.format(lang=merged_catalog.locale)
for t
in listify(config["target"])
]
for target_file in targets:
write_catalog(target_file, merged_catalog)
|
7c2ec2e873fd8eb7bf1537d04c454a00ca2b40f9
|
conftest.py
|
conftest.py
|
# -*- coding: utf-8 -*-
import pytest
try:
# TODO: this file is read/executed even when called from ``readthedocsinc``,
# so it's overriding the options that we are defining in the ``conftest.py``
# from the corporate site. We need to find a better way to avoid this.
import readthedocsinc
PYTEST_OPTIONS = ()
except ImportError:
PYTEST_OPTIONS = (
# Options to set test environment
('community', True),
('corporate', False),
('environment', 'readthedocs'),
('url_scheme', 'http'),
)
def pytest_addoption(parser):
parser.addoption(
'--including-search',
action='store_true',
dest='searchtests',
default=False, help='enable search tests',
)
def pytest_configure(config):
if not config.option.searchtests:
        # Include ``not search`` in the parameters so search tests are not performed
setattr(config.option, 'markexpr', 'not search')
for option, value in PYTEST_OPTIONS:
setattr(config.option, option, value)
@pytest.fixture(autouse=True)
def settings_modification(settings):
settings.CELERY_ALWAYS_EAGER = True
|
# -*- coding: utf-8 -*-
import pytest
try:
# TODO: this file is read/executed even when called from ``readthedocsinc``,
# so it's overriding the options that we are defining in the ``conftest.py``
# from the corporate site. We need to find a better way to avoid this.
import readthedocsinc
PYTEST_OPTIONS = ()
except ImportError:
PYTEST_OPTIONS = (
# Options to set test environment
('community', True),
('corporate', False),
('environment', 'readthedocs'),
('url_scheme', 'http'),
)
def pytest_addoption(parser):
parser.addoption(
'--including-search',
action='store_true',
dest='searchtests',
default=False, help='enable search tests',
)
def pytest_configure(config):
if not config.option.searchtests:
        # Include ``not search`` in the parameters so search tests are not performed
markexpr = getattr(config.option, 'markexpr')
if markexpr:
markexpr += ' and not search'
else:
markexpr = 'not search'
setattr(config.option, 'markexpr', markexpr.strip())
for option, value in PYTEST_OPTIONS:
setattr(config.option, option, value)
@pytest.fixture(autouse=True)
def settings_modification(settings):
settings.CELERY_ALWAYS_EAGER = True
|
Extend pytest's `markexpr` conf in case it already exists
|
Extend pytest's `markexpr` conf in case it already exists
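The fix boils down to combining mark expressions with `and` instead of overwriting them. The same logic as a standalone helper (hypothetical function name, not from the commit):

def extend_markexpr(existing, extra='not search'):
    # Preserve any user-supplied -m expression; otherwise start fresh.
    if existing:
        return '{} and {}'.format(existing, extra)
    return extra

assert extend_markexpr('') == 'not search'
assert extend_markexpr('slow') == 'slow and not search'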
|
Python
|
mit
|
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
|
# -*- coding: utf-8 -*-
import pytest
try:
# TODO: this file is read/executed even when called from ``readthedocsinc``,
# so it's overriding the options that we are defining in the ``conftest.py``
# from the corporate site. We need to find a better way to avoid this.
import readthedocsinc
PYTEST_OPTIONS = ()
except ImportError:
PYTEST_OPTIONS = (
# Options to set test environment
('community', True),
('corporate', False),
('environment', 'readthedocs'),
('url_scheme', 'http'),
)
def pytest_addoption(parser):
parser.addoption(
'--including-search',
action='store_true',
dest='searchtests',
default=False, help='enable search tests',
)
def pytest_configure(config):
if not config.option.searchtests:
        # Include ``not search`` in the parameters so search tests are not performed
setattr(config.option, 'markexpr', 'not search')
for option, value in PYTEST_OPTIONS:
setattr(config.option, option, value)
@pytest.fixture(autouse=True)
def settings_modification(settings):
settings.CELERY_ALWAYS_EAGER = True
Extend pytest's `markexpr` conf in case it already exists
|
# -*- coding: utf-8 -*-
import pytest
try:
# TODO: this file is read/executed even when called from ``readthedocsinc``,
# so it's overriding the options that we are defining in the ``conftest.py``
# from the corporate site. We need to find a better way to avoid this.
import readthedocsinc
PYTEST_OPTIONS = ()
except ImportError:
PYTEST_OPTIONS = (
# Options to set test environment
('community', True),
('corporate', False),
('environment', 'readthedocs'),
('url_scheme', 'http'),
)
def pytest_addoption(parser):
parser.addoption(
'--including-search',
action='store_true',
dest='searchtests',
default=False, help='enable search tests',
)
def pytest_configure(config):
if not config.option.searchtests:
        # Include ``not search`` in the parameters so search tests are not performed
markexpr = getattr(config.option, 'markexpr')
if markexpr:
markexpr += ' and not search'
else:
markexpr = 'not search'
setattr(config.option, 'markexpr', markexpr.strip())
for option, value in PYTEST_OPTIONS:
setattr(config.option, option, value)
@pytest.fixture(autouse=True)
def settings_modification(settings):
settings.CELERY_ALWAYS_EAGER = True
|
<commit_before># -*- coding: utf-8 -*-
import pytest
try:
# TODO: this file is read/executed even when called from ``readthedocsinc``,
# so it's overriding the options that we are defining in the ``conftest.py``
# from the corporate site. We need to find a better way to avoid this.
import readthedocsinc
PYTEST_OPTIONS = ()
except ImportError:
PYTEST_OPTIONS = (
# Options to set test environment
('community', True),
('corporate', False),
('environment', 'readthedocs'),
('url_scheme', 'http'),
)
def pytest_addoption(parser):
parser.addoption(
'--including-search',
action='store_true',
dest='searchtests',
default=False, help='enable search tests',
)
def pytest_configure(config):
if not config.option.searchtests:
        # Include ``not search`` in the parameters so search tests are not performed
setattr(config.option, 'markexpr', 'not search')
for option, value in PYTEST_OPTIONS:
setattr(config.option, option, value)
@pytest.fixture(autouse=True)
def settings_modification(settings):
settings.CELERY_ALWAYS_EAGER = True
<commit_msg>Extend pytest's `markexpr` conf in case it already exists<commit_after>
|
# -*- coding: utf-8 -*-
import pytest
try:
# TODO: this file is read/executed even when called from ``readthedocsinc``,
# so it's overriding the options that we are defining in the ``conftest.py``
# from the corporate site. We need to find a better way to avoid this.
import readthedocsinc
PYTEST_OPTIONS = ()
except ImportError:
PYTEST_OPTIONS = (
# Options to set test environment
('community', True),
('corporate', False),
('environment', 'readthedocs'),
('url_scheme', 'http'),
)
def pytest_addoption(parser):
parser.addoption(
'--including-search',
action='store_true',
dest='searchtests',
default=False, help='enable search tests',
)
def pytest_configure(config):
if not config.option.searchtests:
        # Include ``not search`` in the parameters so search tests are not performed
markexpr = getattr(config.option, 'markexpr')
if markexpr:
markexpr += ' and not search'
else:
markexpr = 'not search'
setattr(config.option, 'markexpr', markexpr.strip())
for option, value in PYTEST_OPTIONS:
setattr(config.option, option, value)
@pytest.fixture(autouse=True)
def settings_modification(settings):
settings.CELERY_ALWAYS_EAGER = True
|
# -*- coding: utf-8 -*-
import pytest
try:
# TODO: this file is read/executed even when called from ``readthedocsinc``,
# so it's overriding the options that we are defining in the ``conftest.py``
# from the corporate site. We need to find a better way to avoid this.
import readthedocsinc
PYTEST_OPTIONS = ()
except ImportError:
PYTEST_OPTIONS = (
# Options to set test environment
('community', True),
('corporate', False),
('environment', 'readthedocs'),
('url_scheme', 'http'),
)
def pytest_addoption(parser):
parser.addoption(
'--including-search',
action='store_true',
dest='searchtests',
default=False, help='enable search tests',
)
def pytest_configure(config):
if not config.option.searchtests:
        # Include ``not search`` in the parameters so search tests are not performed
setattr(config.option, 'markexpr', 'not search')
for option, value in PYTEST_OPTIONS:
setattr(config.option, option, value)
@pytest.fixture(autouse=True)
def settings_modification(settings):
settings.CELERY_ALWAYS_EAGER = True
Extend pytest's `markexpr` conf in case it already exists
# -*- coding: utf-8 -*-
import pytest
try:
# TODO: this file is read/executed even when called from ``readthedocsinc``,
# so it's overriding the options that we are defining in the ``conftest.py``
# from the corporate site. We need to find a better way to avoid this.
import readthedocsinc
PYTEST_OPTIONS = ()
except ImportError:
PYTEST_OPTIONS = (
# Options to set test environment
('community', True),
('corporate', False),
('environment', 'readthedocs'),
('url_scheme', 'http'),
)
def pytest_addoption(parser):
parser.addoption(
'--including-search',
action='store_true',
dest='searchtests',
default=False, help='enable search tests',
)
def pytest_configure(config):
if not config.option.searchtests:
        # Include ``not search`` in the parameters so search tests are not performed
markexpr = getattr(config.option, 'markexpr')
if markexpr:
markexpr += ' and not search'
else:
markexpr = 'not search'
setattr(config.option, 'markexpr', markexpr.strip())
for option, value in PYTEST_OPTIONS:
setattr(config.option, option, value)
@pytest.fixture(autouse=True)
def settings_modification(settings):
settings.CELERY_ALWAYS_EAGER = True
|
<commit_before># -*- coding: utf-8 -*-
import pytest
try:
# TODO: this file is read/executed even when called from ``readthedocsinc``,
# so it's overriding the options that we are defining in the ``conftest.py``
# from the corporate site. We need to find a better way to avoid this.
import readthedocsinc
PYTEST_OPTIONS = ()
except ImportError:
PYTEST_OPTIONS = (
# Options to set test environment
('community', True),
('corporate', False),
('environment', 'readthedocs'),
('url_scheme', 'http'),
)
def pytest_addoption(parser):
parser.addoption(
'--including-search',
action='store_true',
dest='searchtests',
default=False, help='enable search tests',
)
def pytest_configure(config):
if not config.option.searchtests:
        # Include ``not search`` in the parameters so search tests are not performed
setattr(config.option, 'markexpr', 'not search')
for option, value in PYTEST_OPTIONS:
setattr(config.option, option, value)
@pytest.fixture(autouse=True)
def settings_modification(settings):
settings.CELERY_ALWAYS_EAGER = True
<commit_msg>Extend pytest's `markexpr` conf in case it already exists<commit_after># -*- coding: utf-8 -*-
import pytest
try:
# TODO: this file is read/executed even when called from ``readthedocsinc``,
# so it's overriding the options that we are defining in the ``conftest.py``
# from the corporate site. We need to find a better way to avoid this.
import readthedocsinc
PYTEST_OPTIONS = ()
except ImportError:
PYTEST_OPTIONS = (
# Options to set test environment
('community', True),
('corporate', False),
('environment', 'readthedocs'),
('url_scheme', 'http'),
)
def pytest_addoption(parser):
parser.addoption(
'--including-search',
action='store_true',
dest='searchtests',
default=False, help='enable search tests',
)
def pytest_configure(config):
if not config.option.searchtests:
        # Include ``not search`` in the parameters so search tests are not performed
markexpr = getattr(config.option, 'markexpr')
if markexpr:
markexpr += ' and not search'
else:
markexpr = 'not search'
setattr(config.option, 'markexpr', markexpr.strip())
for option, value in PYTEST_OPTIONS:
setattr(config.option, option, value)
@pytest.fixture(autouse=True)
def settings_modification(settings):
settings.CELERY_ALWAYS_EAGER = True
|
142addc801051c688252944a37081010e0f5d58f
|
api/base/settings/__init__.py
|
api/base/settings/__init__.py
|
# -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
raise ImportError("No api/base/settings/local.py settings file found. Did you remember to "
"copy local-dist.py to local.py?")
|
# -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
raise ImportWarning("No api/base/settings/local.py settings file found. Did you remember to "
"copy local-dist.py to local.py?")
|
Change import error to a warning
|
Change import error to a warning
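A note on semantics: `ImportWarning` subclasses `Warning`, which subclasses `Exception`, so `raise ImportWarning(...)` still aborts exactly like the original `ImportError` did; only the exception type changes. A genuinely non-fatal variant would go through the `warnings` module, sketched here with a hypothetical optional module:

import warnings

try:
    import local_settings  # hypothetical optional settings module
except ImportError:
    # warnings.warn emits the message and lets execution continue,
    # unlike `raise ImportWarning(...)`, which still propagates as an error.
    warnings.warn('local_settings not found; using defaults', ImportWarning)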
|
Python
|
apache-2.0
|
emetsger/osf.io,haoyuchen1992/osf.io,Johnetordoff/osf.io,mluo613/osf.io,RomanZWang/osf.io,arpitar/osf.io,cslzchen/osf.io,samanehsan/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,KAsante95/osf.io,cldershem/osf.io,njantrania/osf.io,caseyrygt/osf.io,adlius/osf.io,leb2dg/osf.io,crcresearch/osf.io,DanielSBrown/osf.io,binoculars/osf.io,billyhunt/osf.io,chennan47/osf.io,GageGaskins/osf.io,arpitar/osf.io,mluke93/osf.io,binoculars/osf.io,CenterForOpenScience/osf.io,jolene-esposito/osf.io,njantrania/osf.io,jmcarp/osf.io,doublebits/osf.io,mluo613/osf.io,icereval/osf.io,ckc6cz/osf.io,dplorimer/osf,cslzchen/osf.io,brandonPurvis/osf.io,doublebits/osf.io,felliott/osf.io,danielneis/osf.io,caseyrygt/osf.io,jinluyuan/osf.io,kch8qx/osf.io,aaxelb/osf.io,brandonPurvis/osf.io,barbour-em/osf.io,binoculars/osf.io,Nesiehr/osf.io,caseyrygt/osf.io,laurenrevere/osf.io,adlius/osf.io,zamattiac/osf.io,haoyuchen1992/osf.io,haoyuchen1992/osf.io,caseyrollins/osf.io,mattclark/osf.io,asanfilippo7/osf.io,caneruguz/osf.io,laurenrevere/osf.io,acshi/osf.io,abought/osf.io,lyndsysimon/osf.io,Ghalko/osf.io,mluke93/osf.io,ZobairAlijan/osf.io,kwierman/osf.io,cslzchen/osf.io,jinluyuan/osf.io,jolene-esposito/osf.io,monikagrabowska/osf.io,bdyetton/prettychart,MerlinZhang/osf.io,leb2dg/osf.io,crcresearch/osf.io,reinaH/osf.io,RomanZWang/osf.io,dplorimer/osf,cosenal/osf.io,brandonPurvis/osf.io,HalcyonChimera/osf.io,KAsante95/osf.io,CenterForOpenScience/osf.io,Nesiehr/osf.io,TomHeatwole/osf.io,leb2dg/osf.io,erinspace/osf.io,alexschiller/osf.io,TomBaxter/osf.io,cldershem/osf.io,petermalcolm/osf.io,icereval/osf.io,brianjgeiger/osf.io,reinaH/osf.io,adlius/osf.io,abought/osf.io,chrisseto/osf.io,billyhunt/osf.io,jinluyuan/osf.io,HarryRybacki/osf.io,HarryRybacki/osf.io,lyndsysimon/osf.io,zamattiac/osf.io,leb2dg/osf.io,alexschiller/osf.io,emetsger/osf.io,rdhyee/osf.io,acshi/osf.io,MerlinZhang/osf.io,samanehsan/osf.io,monikagrabowska/osf.io,kwierman/osf.io,jolene-esposito/osf.io,HarryRybacki/osf.io,mfraezz/osf.io,TomBaxter/osf.io,Nesiehr/osf.io,HalcyonChimera/osf.io,kwierman/osf.io,chrisseto/osf.io,barbour-em/osf.io,cldershem/osf.io,samchrisinger/osf.io,barbour-em/osf.io,Ghalko/osf.io,chennan47/osf.io,GageGaskins/osf.io,MerlinZhang/osf.io,samanehsan/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,zachjanicki/osf.io,Nesiehr/osf.io,DanielSBrown/osf.io,mluo613/osf.io,fabianvf/osf.io,RomanZWang/osf.io,sbt9uc/osf.io,GageGaskins/osf.io,amyshi188/osf.io,lyndsysimon/osf.io,TomHeatwole/osf.io,wearpants/osf.io,sbt9uc/osf.io,acshi/osf.io,ckc6cz/osf.io,DanielSBrown/osf.io,erinspace/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,crcresearch/osf.io,caseyrollins/osf.io,rdhyee/osf.io,samanehsan/osf.io,cldershem/osf.io,aaxelb/osf.io,cslzchen/osf.io,MerlinZhang/osf.io,haoyuchen1992/osf.io,chrisseto/osf.io,arpitar/osf.io,caneruguz/osf.io,jnayak1/osf.io,mfraezz/osf.io,hmoco/osf.io,emetsger/osf.io,amyshi188/osf.io,SSJohns/osf.io,GageGaskins/osf.io,samchrisinger/osf.io,billyhunt/osf.io,HarryRybacki/osf.io,mluo613/osf.io,kch8qx/osf.io,cwisecarver/osf.io,arpitar/osf.io,danielneis/osf.io,pattisdr/osf.io,samchrisinger/osf.io,Ghalko/osf.io,mfraezz/osf.io,abought/osf.io,dplorimer/osf,ZobairAlijan/osf.io,brandonPurvis/osf.io,jnayak1/osf.io,Ghalko/osf.io,fabianvf/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,saradbowman/osf.io,icereval/osf.io,cwisecarver/osf.io,mluke93/osf.io,hmoco/osf.io,ZobairAlijan/osf.io,wearpants/osf.io,mattclark/osf.io,wearpants/osf.io,lyndsysimon/osf.io,njantrania/osf.io,danielneis/osf.io,bill
yhunt/osf.io,amyshi188/osf.io,saradbowman/osf.io,adlius/osf.io,ckc6cz/osf.io,jmcarp/osf.io,jinluyuan/osf.io,alexschiller/osf.io,zachjanicki/osf.io,chrisseto/osf.io,acshi/osf.io,caseyrollins/osf.io,hmoco/osf.io,fabianvf/osf.io,pattisdr/osf.io,petermalcolm/osf.io,bdyetton/prettychart,emetsger/osf.io,kch8qx/osf.io,RomanZWang/osf.io,asanfilippo7/osf.io,dplorimer/osf,ticklemepierce/osf.io,zachjanicki/osf.io,jmcarp/osf.io,pattisdr/osf.io,asanfilippo7/osf.io,ZobairAlijan/osf.io,sbt9uc/osf.io,hmoco/osf.io,DanielSBrown/osf.io,KAsante95/osf.io,TomHeatwole/osf.io,jnayak1/osf.io,njantrania/osf.io,kch8qx/osf.io,reinaH/osf.io,jeffreyliu3230/osf.io,laurenrevere/osf.io,zachjanicki/osf.io,monikagrabowska/osf.io,mluke93/osf.io,jeffreyliu3230/osf.io,erinspace/osf.io,felliott/osf.io,sloria/osf.io,kch8qx/osf.io,jnayak1/osf.io,aaxelb/osf.io,KAsante95/osf.io,SSJohns/osf.io,ticklemepierce/osf.io,SSJohns/osf.io,jeffreyliu3230/osf.io,zamattiac/osf.io,kwierman/osf.io,fabianvf/osf.io,wearpants/osf.io,felliott/osf.io,bdyetton/prettychart,doublebits/osf.io,jeffreyliu3230/osf.io,acshi/osf.io,petermalcolm/osf.io,HalcyonChimera/osf.io,rdhyee/osf.io,cwisecarver/osf.io,zamattiac/osf.io,alexschiller/osf.io,sbt9uc/osf.io,TomBaxter/osf.io,baylee-d/osf.io,baylee-d/osf.io,baylee-d/osf.io,jolene-esposito/osf.io,TomHeatwole/osf.io,GageGaskins/osf.io,rdhyee/osf.io,monikagrabowska/osf.io,bdyetton/prettychart,monikagrabowska/osf.io,RomanZWang/osf.io,caseyrygt/osf.io,caneruguz/osf.io,petermalcolm/osf.io,ticklemepierce/osf.io,mattclark/osf.io,abought/osf.io,Johnetordoff/osf.io,brandonPurvis/osf.io,sloria/osf.io,reinaH/osf.io,danielneis/osf.io,SSJohns/osf.io,Johnetordoff/osf.io,cosenal/osf.io,caneruguz/osf.io,sloria/osf.io,jmcarp/osf.io,chennan47/osf.io,ticklemepierce/osf.io,cosenal/osf.io,billyhunt/osf.io,barbour-em/osf.io,amyshi188/osf.io,mluo613/osf.io,samchrisinger/osf.io,KAsante95/osf.io,doublebits/osf.io,cosenal/osf.io,alexschiller/osf.io,ckc6cz/osf.io,doublebits/osf.io,asanfilippo7/osf.io
|
# -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
raise ImportError("No api/base/settings/local.py settings file found. Did you remember to "
"copy local-dist.py to local.py?")
Change import error to a warning
|
# -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
raise ImportWarning("No api/base/settings/local.py settings file found. Did you remember to "
"copy local-dist.py to local.py?")
|
<commit_before># -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
raise ImportError("No api/base/settings/local.py settings file found. Did you remember to "
"copy local-dist.py to local.py?")
<commit_msg>Change import error to a warning<commit_after>
|
# -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
raise ImportWarning("No api/base/settings/local.py settings file found. Did you remember to "
"copy local-dist.py to local.py?")
|
# -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
raise ImportError("No api/base/settings/local.py settings file found. Did you remember to "
"copy local-dist.py to local.py?")
Change import error to a warning
# -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
raise ImportWarning("No api/base/settings/local.py settings file found. Did you remember to "
"copy local-dist.py to local.py?")
|
<commit_before># -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
raise ImportError("No api/base/settings/local.py settings file found. Did you remember to "
"copy local-dist.py to local.py?")
<commit_msg>Change import error to a warning<commit_after># -*- coding: utf-8 -*-
'''Consolidates settings from defaults.py and local.py.
::
>>> from api.base import settings
>>> settings.API_BASE
'v2/'
'''
from .defaults import * # noqa
try:
from .local import * # noqa
except ImportError as error:
raise ImportWarning("No api/base/settings/local.py settings file found. Did you remember to "
"copy local-dist.py to local.py?")
|
c71e494af9c861e0a5ddfff0f18d0dfe5c6a45e4
|
derrida/__init__.py
|
derrida/__init__.py
|
__version_info__ = (1, 2, 4, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
Set develop version to 1.3.0-dev
|
Set develop version to 1.3.0-dev
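The version string is derived from the tuple by the dot/dash rule in the module above; a quick self-contained check of both shapes (helper name is illustrative):

def format_version(version_info):
    # Dot-join all but the last element; dash-append the last if not None.
    version = '.'.join(str(i) for i in version_info[:-1])
    if version_info[-1] is not None:
        version += '-%s' % (version_info[-1],)
    return version

assert format_version((1, 2, 4, None)) == '1.2.4'
assert format_version((1, 3, 0, 'dev')) == '1.3.0-dev'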
|
Python
|
apache-2.0
|
Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django,Princeton-CDH/derrida-django
|
__version_info__ = (1, 2, 4, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
Set develop version to 1.3.0-dev
|
__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
<commit_before>__version_info__ = (1, 2, 4, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
<commit_msg>Set develop version to 1.3.0-dev<commit_after>
|
__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
__version_info__ = (1, 2, 4, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
Set develop version to 1.3.0-dev
__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
<commit_before>__version_info__ = (1, 2, 4, None)
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
<commit_msg>Set develop version to 1.3.0-dev<commit_after>__version_info__ = (1, 3, 0, 'dev')
# Dot-connect all but the last. Last is dash-connected if not None.
__version__ = '.'.join([str(i) for i in __version_info__[:-1]])
if __version_info__[-1] is not None:
__version__ += ('-%s' % (__version_info__[-1],))
# context processor to add version to the template environment
def context_extras(request):
return {
# software version
'SW_VERSION': __version__
}
|
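The version scheme in the record above renders a 4-tuple as dotted release parts plus an optional dash-connected suffix. A minimal standalone sketch (not taken from the repository) that exercises both branches of that logic:

def format_version(version_info):
    # Dot-connect all but the last element; dash-connect the last if not None.
    version = '.'.join(str(i) for i in version_info[:-1])
    if version_info[-1] is not None:
        version += '-%s' % (version_info[-1],)
    return version

assert format_version((1, 2, 4, None)) == '1.2.4'
assert format_version((1, 3, 0, 'dev')) == '1.3.0-dev'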
803e128b8e151c061f75051b5a4386d4c624ba56
|
core/settings-wni-Windows_NT.py
|
core/settings-wni-Windows_NT.py
|
#!/usr/bin/python2
from __future__ import absolute_import
from qubes.storage.wni import QubesWniVmStorage
def apply(system_path, vm_files, defaults):
system_path['qubes_base_dir'] = 'c:\\qubes'
system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml'
system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml'
system_path['qubes_icon_dir'] = \
'c:/program files/Invisible Things Lab/Qubes/icons'
system_path['qubesdb_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe'
system_path['qrexec_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe'
# Specific to WNI - normally VM have this file
system_path['qrexec_agent_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe'
defaults['libvirt_uri'] = 'wni:///'
defaults['storage_class'] = QubesWniVmStorage
|
#!/usr/bin/python2
from __future__ import absolute_import
from qubes.storage.wni import QubesWniVmStorage
def apply(system_path, vm_files, defaults):
system_path['qubes_base_dir'] = 'c:\\qubes'
system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml'
system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml'
system_path['qubes_icon_dir'] = \
'c:/program files/Invisible Things Lab/Qubes/icons'
system_path['qubesdb_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe'
system_path['qrexec_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe'
system_path['qrexec_client_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-client.exe'
# Specific to WNI - normally VM have this file
system_path['qrexec_agent_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe'
defaults['libvirt_uri'] = 'wni:///'
defaults['storage_class'] = QubesWniVmStorage
|
Add qrexec-client path to WNI settings
|
wni: Add qrexec-client path to WNI settings
|
Python
|
lgpl-2.1
|
marmarek/qubes-core-admin,QubesOS/qubes-core-admin,QubesOS/qubes-core-admin,woju/qubes-core-admin,marmarek/qubes-core-admin,QubesOS/qubes-core-admin,woju/qubes-core-admin,woju/qubes-core-admin,woju/qubes-core-admin,marmarek/qubes-core-admin
|
#!/usr/bin/python2
from __future__ import absolute_import
from qubes.storage.wni import QubesWniVmStorage
def apply(system_path, vm_files, defaults):
system_path['qubes_base_dir'] = 'c:\\qubes'
system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml'
system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml'
system_path['qubes_icon_dir'] = \
'c:/program files/Invisible Things Lab/Qubes/icons'
system_path['qubesdb_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe'
system_path['qrexec_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe'
# Specific to WNI - normally VM have this file
system_path['qrexec_agent_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe'
defaults['libvirt_uri'] = 'wni:///'
defaults['storage_class'] = QubesWniVmStorage
wni: Add qrexec-client path to WNI settings
|
#!/usr/bin/python2
from __future__ import absolute_import
from qubes.storage.wni import QubesWniVmStorage
def apply(system_path, vm_files, defaults):
system_path['qubes_base_dir'] = 'c:\\qubes'
system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml'
system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml'
system_path['qubes_icon_dir'] = \
'c:/program files/Invisible Things Lab/Qubes/icons'
system_path['qubesdb_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe'
system_path['qrexec_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe'
system_path['qrexec_client_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-client.exe'
# Specific to WNI - normally VM have this file
system_path['qrexec_agent_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe'
defaults['libvirt_uri'] = 'wni:///'
defaults['storage_class'] = QubesWniVmStorage
|
<commit_before>#!/usr/bin/python2
from __future__ import absolute_import
from qubes.storage.wni import QubesWniVmStorage
def apply(system_path, vm_files, defaults):
system_path['qubes_base_dir'] = 'c:\\qubes'
system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml'
system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml'
system_path['qubes_icon_dir'] = \
'c:/program files/Invisible Things Lab/Qubes/icons'
system_path['qubesdb_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe'
system_path['qrexec_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe'
# Specific to WNI - normally VM have this file
system_path['qrexec_agent_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe'
defaults['libvirt_uri'] = 'wni:///'
defaults['storage_class'] = QubesWniVmStorage
<commit_msg>wni: Add qrexec-client path to WNI settings<commit_after>
|
#!/usr/bin/python2
from __future__ import absolute_import
from qubes.storage.wni import QubesWniVmStorage
def apply(system_path, vm_files, defaults):
system_path['qubes_base_dir'] = 'c:\\qubes'
system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml'
system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml'
system_path['qubes_icon_dir'] = \
'c:/program files/Invisible Things Lab/Qubes/icons'
system_path['qubesdb_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe'
system_path['qrexec_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe'
system_path['qrexec_client_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-client.exe'
# Specific to WNI - normally VM have this file
system_path['qrexec_agent_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe'
defaults['libvirt_uri'] = 'wni:///'
defaults['storage_class'] = QubesWniVmStorage
|
#!/usr/bin/python2
from __future__ import absolute_import
from qubes.storage.wni import QubesWniVmStorage
def apply(system_path, vm_files, defaults):
system_path['qubes_base_dir'] = 'c:\\qubes'
system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml'
system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml'
system_path['qubes_icon_dir'] = \
'c:/program files/Invisible Things Lab/Qubes/icons'
system_path['qubesdb_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe'
system_path['qrexec_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe'
# Specific to WNI - normally VM have this file
system_path['qrexec_agent_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe'
defaults['libvirt_uri'] = 'wni:///'
defaults['storage_class'] = QubesWniVmStorage
wni: Add qrexec-client path to WNI settings#!/usr/bin/python2
from __future__ import absolute_import
from qubes.storage.wni import QubesWniVmStorage
def apply(system_path, vm_files, defaults):
system_path['qubes_base_dir'] = 'c:\\qubes'
system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml'
system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml'
system_path['qubes_icon_dir'] = \
'c:/program files/Invisible Things Lab/Qubes/icons'
system_path['qubesdb_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe'
system_path['qrexec_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe'
system_path['qrexec_client_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-client.exe'
# Specific to WNI - normally VM have this file
system_path['qrexec_agent_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe'
defaults['libvirt_uri'] = 'wni:///'
defaults['storage_class'] = QubesWniVmStorage
|
<commit_before>#!/usr/bin/python2
from __future__ import absolute_import
from qubes.storage.wni import QubesWniVmStorage
def apply(system_path, vm_files, defaults):
system_path['qubes_base_dir'] = 'c:\\qubes'
system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml'
system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml'
system_path['qubes_icon_dir'] = \
'c:/program files/Invisible Things Lab/Qubes/icons'
system_path['qubesdb_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe'
system_path['qrexec_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe'
# Specific to WNI - normally VM have this file
system_path['qrexec_agent_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe'
defaults['libvirt_uri'] = 'wni:///'
defaults['storage_class'] = QubesWniVmStorage
<commit_msg>wni: Add qrexec-client path to WNI settings<commit_after>#!/usr/bin/python2
from __future__ import absolute_import
from qubes.storage.wni import QubesWniVmStorage
def apply(system_path, vm_files, defaults):
system_path['qubes_base_dir'] = 'c:\\qubes'
system_path['config_template_pv'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template.xml'
system_path['config_template_hvm'] = 'c:/program files/Invisible Things Lab/Qubes/vm-template-hvm.xml'
system_path['qubes_icon_dir'] = \
'c:/program files/Invisible Things Lab/Qubes/icons'
system_path['qubesdb_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qubesdb-daemon.exe'
system_path['qrexec_daemon_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-daemon.exe'
system_path['qrexec_client_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-client.exe'
# Specific to WNI - normally VM have this file
system_path['qrexec_agent_path'] = \
'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-agent.exe'
defaults['libvirt_uri'] = 'wni:///'
defaults['storage_class'] = QubesWniVmStorage
|
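The WNI settings module above follows a mutate-in-place plugin pattern: the caller hands apply() three dicts and reads the merged configuration back. A self-contained sketch of that pattern (the Qubes imports are unavailable here, so the hook body is reduced to the keys this commit touches):

def apply(system_path, vm_files, defaults):
    # Mirror of the committed hook, minus the Qubes-specific storage class.
    system_path['qrexec_client_path'] = \
        'c:/program files/Invisible Things Lab/Qubes/bin/qrexec-client.exe'
    defaults['libvirt_uri'] = 'wni:///'

system_path, vm_files, defaults = {}, {}, {}
apply(system_path, vm_files, defaults)
assert system_path['qrexec_client_path'].endswith('qrexec-client.exe')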
8598cd0dd4938a2c5d46c350445e5c36c7792a30
|
leapp/utils/workarounds/mp.py
|
leapp/utils/workarounds/mp.py
|
import os
import multiprocessing.util
def apply_workaround():
# Implements:
# https://github.com/python/cpython/commit/e8a57b98ec8f2b161d4ad68ecc1433c9e3caad57
#
# Detection of fix: os imported to compare pids, before the fix os has not
# been imported
if getattr(multiprocessing.util, 'os', None):
return
class FixedFinalize(multiprocessing.util.Finalize):
def __init__(self, *args, **kwargs):
super(FixedFinalize, self).__init__(*args, **kwargs)
self._pid = os.getpid()
def __call__(self, *args, **kwargs):
if self._pid != os.getpid():
return None
return super(FixedFinalize, self).__call__(*args, **kwargs)
setattr(multiprocessing.util, 'Finalize', FixedFinalize)
|
import os
import multiprocessing.util
def apply_workaround():
# Implements:
# https://github.com/python/cpython/commit/e8a57b98ec8f2b161d4ad68ecc1433c9e3caad57
#
# Detection of fix: os imported to compare pids, before the fix os has not
# been imported
if getattr(multiprocessing.util, 'os', None):
return
class FixedFinalize(multiprocessing.util.Finalize):
def __init__(self, *args, **kwargs):
super(FixedFinalize, self).__init__(*args, **kwargs)
self._pid = os.getpid()
def __call__(self, *args, **kwargs): # pylint: disable=signature-differs
if self._pid != os.getpid():
return None
return super(FixedFinalize, self).__call__(*args, **kwargs)
setattr(multiprocessing.util, 'Finalize', FixedFinalize)
|
Disable pylint signature-differs in mp.py
|
Disable pylint signature-differs in mp.py
|
Python
|
lgpl-2.1
|
leapp-to/prototype,leapp-to/prototype,leapp-to/prototype,leapp-to/prototype
|
import os
import multiprocessing.util
def apply_workaround():
# Implements:
# https://github.com/python/cpython/commit/e8a57b98ec8f2b161d4ad68ecc1433c9e3caad57
#
# Detection of fix: os imported to compare pids, before the fix os has not
# been imported
if getattr(multiprocessing.util, 'os', None):
return
class FixedFinalize(multiprocessing.util.Finalize):
def __init__(self, *args, **kwargs):
super(FixedFinalize, self).__init__(*args, **kwargs)
self._pid = os.getpid()
def __call__(self, *args, **kwargs):
if self._pid != os.getpid():
return None
return super(FixedFinalize, self).__call__(*args, **kwargs)
setattr(multiprocessing.util, 'Finalize', FixedFinalize)
Disable pylint signature-differs in mp.py
|
import os
import multiprocessing.util
def apply_workaround():
# Implements:
# https://github.com/python/cpython/commit/e8a57b98ec8f2b161d4ad68ecc1433c9e3caad57
#
# Detection of fix: os imported to compare pids, before the fix os has not
# been imported
if getattr(multiprocessing.util, 'os', None):
return
class FixedFinalize(multiprocessing.util.Finalize):
def __init__(self, *args, **kwargs):
super(FixedFinalize, self).__init__(*args, **kwargs)
self._pid = os.getpid()
def __call__(self, *args, **kwargs): # pylint: disable=signature-differs
if self._pid != os.getpid():
return None
return super(FixedFinalize, self).__call__(*args, **kwargs)
setattr(multiprocessing.util, 'Finalize', FixedFinalize)
|
<commit_before>import os
import multiprocessing.util
def apply_workaround():
# Implements:
# https://github.com/python/cpython/commit/e8a57b98ec8f2b161d4ad68ecc1433c9e3caad57
#
# Detection of fix: os imported to compare pids, before the fix os has not
# been imported
if getattr(multiprocessing.util, 'os', None):
return
class FixedFinalize(multiprocessing.util.Finalize):
def __init__(self, *args, **kwargs):
super(FixedFinalize, self).__init__(*args, **kwargs)
self._pid = os.getpid()
def __call__(self, *args, **kwargs):
if self._pid != os.getpid():
return None
return super(FixedFinalize, self).__call__(*args, **kwargs)
setattr(multiprocessing.util, 'Finalize', FixedFinalize)
<commit_msg>Disable pylint signature-differs in mp.py<commit_after>
|
import os
import multiprocessing.util
def apply_workaround():
# Implements:
# https://github.com/python/cpython/commit/e8a57b98ec8f2b161d4ad68ecc1433c9e3caad57
#
# Detection of fix: os imported to compare pids, before the fix os has not
# been imported
if getattr(multiprocessing.util, 'os', None):
return
class FixedFinalize(multiprocessing.util.Finalize):
def __init__(self, *args, **kwargs):
super(FixedFinalize, self).__init__(*args, **kwargs)
self._pid = os.getpid()
def __call__(self, *args, **kwargs): # pylint: disable=signature-differs
if self._pid != os.getpid():
return None
return super(FixedFinalize, self).__call__(*args, **kwargs)
setattr(multiprocessing.util, 'Finalize', FixedFinalize)
|
import os
import multiprocessing.util
def apply_workaround():
# Implements:
# https://github.com/python/cpython/commit/e8a57b98ec8f2b161d4ad68ecc1433c9e3caad57
#
# Detection of fix: os imported to compare pids, before the fix os has not
# been imported
if getattr(multiprocessing.util, 'os', None):
return
class FixedFinalize(multiprocessing.util.Finalize):
def __init__(self, *args, **kwargs):
super(FixedFinalize, self).__init__(*args, **kwargs)
self._pid = os.getpid()
def __call__(self, *args, **kwargs):
if self._pid != os.getpid():
return None
return super(FixedFinalize, self).__call__(*args, **kwargs)
setattr(multiprocessing.util, 'Finalize', FixedFinalize)
Disable pylint signature-differs in mp.pyimport os
import multiprocessing.util
def apply_workaround():
# Implements:
# https://github.com/python/cpython/commit/e8a57b98ec8f2b161d4ad68ecc1433c9e3caad57
#
# Detection of fix: os imported to compare pids, before the fix os has not
# been imported
if getattr(multiprocessing.util, 'os', None):
return
class FixedFinalize(multiprocessing.util.Finalize):
def __init__(self, *args, **kwargs):
super(FixedFinalize, self).__init__(*args, **kwargs)
self._pid = os.getpid()
def __call__(self, *args, **kwargs): # pylint: disable=signature-differs
if self._pid != os.getpid():
return None
return super(FixedFinalize, self).__call__(*args, **kwargs)
setattr(multiprocessing.util, 'Finalize', FixedFinalize)
|
<commit_before>import os
import multiprocessing.util
def apply_workaround():
# Implements:
# https://github.com/python/cpython/commit/e8a57b98ec8f2b161d4ad68ecc1433c9e3caad57
#
# Detection of fix: os imported to compare pids, before the fix os has not
# been imported
if getattr(multiprocessing.util, 'os', None):
return
class FixedFinalize(multiprocessing.util.Finalize):
def __init__(self, *args, **kwargs):
super(FixedFinalize, self).__init__(*args, **kwargs)
self._pid = os.getpid()
def __call__(self, *args, **kwargs):
if self._pid != os.getpid():
return None
return super(FixedFinalize, self).__call__(*args, **kwargs)
setattr(multiprocessing.util, 'Finalize', FixedFinalize)
<commit_msg>Disable pylint signature-differs in mp.py<commit_after>import os
import multiprocessing.util
def apply_workaround():
# Implements:
# https://github.com/python/cpython/commit/e8a57b98ec8f2b161d4ad68ecc1433c9e3caad57
#
# Detection of fix: os imported to compare pids, before the fix os has not
# been imported
if getattr(multiprocessing.util, 'os', None):
return
class FixedFinalize(multiprocessing.util.Finalize):
def __init__(self, *args, **kwargs):
super(FixedFinalize, self).__init__(*args, **kwargs)
self._pid = os.getpid()
def __call__(self, *args, **kwargs): # pylint: disable=signature-differs
if self._pid != os.getpid():
return None
return super(FixedFinalize, self).__call__(*args, **kwargs)
setattr(multiprocessing.util, 'Finalize', FixedFinalize)
|
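The workaround above gates itself on feature detection rather than a version check: after the upstream CPython fix, multiprocessing.util imports os at module level, so the presence of that attribute marks a patched interpreter. A runnable sketch of just the detection step:

import multiprocessing.util

def needs_finalize_workaround():
    # True only on interpreters that predate the upstream fix.
    return getattr(multiprocessing.util, 'os', None) is None

print('workaround needed:', needs_finalize_workaround())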
d373404a496713596bed91f62082c5a01b1891fb
|
ydf/cli.py
|
ydf/cli.py
|
"""
ydf/cli
~~~~~~~
Defines the command-line interface.
"""
import click
import sys
from ydf import templating, yaml_ext
@click.command('ydf')
@click.argument('yaml',
type=click.File('r'))
@click.option('-v', '--variables',
type=click.Path(dir_okay=False),
help='YAML file containing variables to be exposed to YAML file and template during rendering')
@click.option('-t', '--template',
type=str,
default=templating.DEFAULT_TEMPLATE_NAME,
help='Name of Jinja2 template used to build Dockerfile')
@click.option('-s', '--search-path',
type=click.Path(file_okay=False),
default=templating.DEFAULT_TEMPLATE_PATH,
help='File system paths to search for templates')
@click.option('-o', '--output',
type=click.File('w'),
help='Dockerfile generated from translation',
default=sys.stdout)
def main(yaml, variables, template, search_path, output):
"""
YAML to Dockerfile
"""
yaml = yaml_ext.load(yaml.read())
env = templating.environ(search_path)
rendered = env.get_template(template).render(templating.render_vars(yaml))
output.write(rendered)
if __name__ == '__main__':
main()
|
"""
ydf/cli
~~~~~~~
Defines the command-line interface.
"""
import click
import sys
from ydf import templating, yaml_ext
@click.command('ydf')
@click.argument('yaml',
type=click.Path(dir_okay=False))
@click.option('-v', '--variables',
type=click.Path(dir_okay=False),
help='YAML file containing variables to be exposed to YAML file and template during rendering')
@click.option('-t', '--template',
type=str,
default=templating.DEFAULT_TEMPLATE_NAME,
help='Name of Jinja2 template used to build Dockerfile')
@click.option('-s', '--search-path',
type=click.Path(file_okay=False),
default=templating.DEFAULT_TEMPLATE_PATH,
help='File system paths to search for templates')
@click.option('-o', '--output',
type=click.File('w'),
help='Dockerfile generated from translation',
default=sys.stdout)
def main(yaml, variables, template, search_path, output):
"""
YAML to Dockerfile
"""
yaml = yaml_ext.load(yaml.read())
env = templating.environ(search_path)
rendered = env.get_template(template).render(templating.render_vars(yaml))
output.write(rendered)
if __name__ == '__main__':
main()
|
Switch yaml CLI argument from file to file path.
|
Switch yaml CLI argument from file to file path.
|
Python
|
apache-2.0
|
ahawker/ydf
|
"""
ydf/cli
~~~~~~~
Defines the command-line interface.
"""
import click
import sys
from ydf import templating, yaml_ext
@click.command('ydf')
@click.argument('yaml',
type=click.File('r'))
@click.option('-v', '--variables',
type=click.Path(dir_okay=False),
help='YAML file containing variables to be exposed to YAML file and template during rendering')
@click.option('-t', '--template',
type=str,
default=templating.DEFAULT_TEMPLATE_NAME,
help='Name of Jinja2 template used to build Dockerfile')
@click.option('-s', '--search-path',
type=click.Path(file_okay=False),
default=templating.DEFAULT_TEMPLATE_PATH,
help='File system paths to search for templates')
@click.option('-o', '--output',
type=click.File('w'),
help='Dockerfile generated from translation',
default=sys.stdout)
def main(yaml, variables, template, search_path, output):
"""
YAML to Dockerfile
"""
yaml = yaml_ext.load(yaml.read())
env = templating.environ(search_path)
rendered = env.get_template(template).render(templating.render_vars(yaml))
output.write(rendered)
if __name__ == '__main__':
main()
Switch yaml CLI argument from file to file path.
|
"""
ydf/cli
~~~~~~~
Defines the command-line interface.
"""
import click
import sys
from ydf import templating, yaml_ext
@click.command('ydf')
@click.argument('yaml',
type=click.Path(dir_okay=False))
@click.option('-v', '--variables',
type=click.Path(dir_okay=False),
help='YAML file containing variables to be exposed to YAML file and template during rendering')
@click.option('-t', '--template',
type=str,
default=templating.DEFAULT_TEMPLATE_NAME,
help='Name of Jinja2 template used to build Dockerfile')
@click.option('-s', '--search-path',
type=click.Path(file_okay=False),
default=templating.DEFAULT_TEMPLATE_PATH,
help='File system paths to search for templates')
@click.option('-o', '--output',
type=click.File('w'),
help='Dockerfile generated from translation',
default=sys.stdout)
def main(yaml, variables, template, search_path, output):
"""
YAML to Dockerfile
"""
yaml = yaml_ext.load(yaml.read())
env = templating.environ(search_path)
rendered = env.get_template(template).render(templating.render_vars(yaml))
output.write(rendered)
if __name__ == '__main__':
main()
|
<commit_before>"""
ydf/cli
~~~~~~~
Defines the command-line interface.
"""
import click
import sys
from ydf import templating, yaml_ext
@click.command('ydf')
@click.argument('yaml',
type=click.File('r'))
@click.option('-v', '--variables',
type=click.Path(dir_okay=False),
help='YAML file containing variables to be exposed to YAML file and template during rendering')
@click.option('-t', '--template',
type=str,
default=templating.DEFAULT_TEMPLATE_NAME,
help='Name of Jinja2 template used to build Dockerfile')
@click.option('-s', '--search-path',
type=click.Path(file_okay=False),
default=templating.DEFAULT_TEMPLATE_PATH,
help='File system paths to search for templates')
@click.option('-o', '--output',
type=click.File('w'),
help='Dockerfile generated from translation',
default=sys.stdout)
def main(yaml, variables, template, search_path, output):
"""
YAML to Dockerfile
"""
yaml = yaml_ext.load(yaml.read())
env = templating.environ(search_path)
rendered = env.get_template(template).render(templating.render_vars(yaml))
output.write(rendered)
if __name__ == '__main__':
main()
<commit_msg>Switch yaml CLI argument from file to file path.<commit_after>
|
"""
ydf/cli
~~~~~~~
Defines the command-line interface.
"""
import click
import sys
from ydf import templating, yaml_ext
@click.command('ydf')
@click.argument('yaml',
type=click.Path(dir_okay=False))
@click.option('-v', '--variables',
type=click.Path(dir_okay=False),
help='YAML file containing variables to be exposed to YAML file and template during rendering')
@click.option('-t', '--template',
type=str,
default=templating.DEFAULT_TEMPLATE_NAME,
help='Name of Jinja2 template used to build Dockerfile')
@click.option('-s', '--search-path',
type=click.Path(file_okay=False),
default=templating.DEFAULT_TEMPLATE_PATH,
help='File system paths to search for templates')
@click.option('-o', '--output',
type=click.File('w'),
help='Dockerfile generated from translation',
default=sys.stdout)
def main(yaml, variables, template, search_path, output):
"""
YAML to Dockerfile
"""
yaml = yaml_ext.load(yaml.read())
env = templating.environ(search_path)
rendered = env.get_template(template).render(templating.render_vars(yaml))
output.write(rendered)
if __name__ == '__main__':
main()
|
"""
ydf/cli
~~~~~~~
Defines the command-line interface.
"""
import click
import sys
from ydf import templating, yaml_ext
@click.command('ydf')
@click.argument('yaml',
type=click.File('r'))
@click.option('-v', '--variables',
type=click.Path(dir_okay=False),
help='YAML file containing variables to be exposed to YAML file and template during rendering')
@click.option('-t', '--template',
type=str,
default=templating.DEFAULT_TEMPLATE_NAME,
help='Name of Jinja2 template used to build Dockerfile')
@click.option('-s', '--search-path',
type=click.Path(file_okay=False),
default=templating.DEFAULT_TEMPLATE_PATH,
help='File system paths to search for templates')
@click.option('-o', '--output',
type=click.File('w'),
help='Dockerfile generated from translation',
default=sys.stdout)
def main(yaml, variables, template, search_path, output):
"""
YAML to Dockerfile
"""
yaml = yaml_ext.load(yaml.read())
env = templating.environ(search_path)
rendered = env.get_template(template).render(templating.render_vars(yaml))
output.write(rendered)
if __name__ == '__main__':
main()
Switch yaml CLI argument from file to file path."""
ydf/cli
~~~~~~~
Defines the command-line interface.
"""
import click
import sys
from ydf import templating, yaml_ext
@click.command('ydf')
@click.argument('yaml',
type=click.Path(dir_okay=False))
@click.option('-v', '--variables',
type=click.Path(dir_okay=False),
help='YAML file containing variables to be exposed to YAML file and template during rendering')
@click.option('-t', '--template',
type=str,
default=templating.DEFAULT_TEMPLATE_NAME,
help='Name of Jinja2 template used to build Dockerfile')
@click.option('-s', '--search-path',
type=click.Path(file_okay=False),
default=templating.DEFAULT_TEMPLATE_PATH,
help='File system paths to search for templates')
@click.option('-o', '--output',
type=click.File('w'),
help='Dockerfile generated from translation',
default=sys.stdout)
def main(yaml, variables, template, search_path, output):
"""
YAML to Dockerfile
"""
yaml = yaml_ext.load(yaml.read())
env = templating.environ(search_path)
rendered = env.get_template(template).render(templating.render_vars(yaml))
output.write(rendered)
if __name__ == '__main__':
main()
|
<commit_before>"""
ydf/cli
~~~~~~~
Defines the command-line interface.
"""
import click
import sys
from ydf import templating, yaml_ext
@click.command('ydf')
@click.argument('yaml',
type=click.File('r'))
@click.option('-v', '--variables',
type=click.Path(dir_okay=False),
help='YAML file containing variables to be exposed to YAML file and template during rendering')
@click.option('-t', '--template',
type=str,
default=templating.DEFAULT_TEMPLATE_NAME,
help='Name of Jinja2 template used to build Dockerfile')
@click.option('-s', '--search-path',
type=click.Path(file_okay=False),
default=templating.DEFAULT_TEMPLATE_PATH,
help='File system paths to search for templates')
@click.option('-o', '--output',
type=click.File('w'),
help='Dockerfile generated from translation',
default=sys.stdout)
def main(yaml, variables, template, search_path, output):
"""
YAML to Dockerfile
"""
yaml = yaml_ext.load(yaml.read())
env = templating.environ(search_path)
rendered = env.get_template(template).render(templating.render_vars(yaml))
output.write(rendered)
if __name__ == '__main__':
main()
<commit_msg>Switch yaml CLI argument from file to file path.<commit_after>"""
ydf/cli
~~~~~~~
Defines the command-line interface.
"""
import click
import sys
from ydf import templating, yaml_ext
@click.command('ydf')
@click.argument('yaml',
type=click.Path(dir_okay=False))
@click.option('-v', '--variables',
type=click.Path(dir_okay=False),
help='YAML file containing variables to be exposed to YAML file and template during rendering')
@click.option('-t', '--template',
type=str,
default=templating.DEFAULT_TEMPLATE_NAME,
help='Name of Jinja2 template used to build Dockerfile')
@click.option('-s', '--search-path',
type=click.Path(file_okay=False),
default=templating.DEFAULT_TEMPLATE_PATH,
help='File system paths to search for templates')
@click.option('-o', '--output',
type=click.File('w'),
help='Dockerfile generated from translation',
default=sys.stdout)
def main(yaml, variables, template, search_path, output):
"""
YAML to Dockerfile
"""
yaml = yaml_ext.load(yaml.read())
env = templating.environ(search_path)
rendered = env.get_template(template).render(templating.render_vars(yaml))
output.write(rendered)
if __name__ == '__main__':
main()
|
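The change above swaps click.File for click.Path, which alters what the callback receives: click.File passes an already-open file object, while click.Path passes a plain string that the handler must open itself (the committed handler still calls yaml.read() on that string, so it would also need an explicit open() to run). A small sketch of the distinction, with hypothetical command and argument names:

import click

@click.command()
@click.argument('config', type=click.Path(exists=True, dir_okay=False))
def show(config):
    # With click.Path the callback is responsible for opening the file.
    with open(config) as fh:
        click.echo(fh.read())

if __name__ == '__main__':
    show()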
913d7f39bdbce53e64ea306b7bd2d95ffa0e0adb
|
lambdawebhook/hook.py
|
lambdawebhook/hook.py
|
#!/usr/bin/env python
import os
import sys
import hashlib
# Add the lib directory to the path for Lambda to load our libs
sys.path.append(os.path.join(os.path.dirname(__file__), 'lib'))
import requests # NOQA
import hmac # NOQA
def verify_signature(secret, signature, payload):
computed_hash = hmac.new(str(secret), payload, hashlib.sha1)
computed_signature = '='.join(['sha1', computed_hash.hexdigest()])
return hmac.compare_digest(computed_signature, str(signature))
def lambda_handler(event, context):
print 'Webhook received'
verified = verify_signature(event['secret'],
event['x_hub_signature'],
event['payload'])
print 'Signature verified: ' + str(verified)
if verified:
response = requests.post(event['jenkins_url'],
headers={
'Content-Type': 'application/json',
'X-GitHub-Delivery': event['x_github_delivery'],
'X-GitHub-Event': event['x_github_event'],
'X-Hub-Signature': event['x_hub_signature']
},
data=event['payload'])
response.raise_for_status()
else:
raise requests.HTTPError('400 Client Error: Bad Request')
if __name__ == "__main__":
pass
|
#!/usr/bin/env python
import os
import sys
import hashlib
import hmac
# Add the lib directory to the path for Lambda to load our libs
sys.path.append(os.path.join(os.path.dirname(__file__), 'lib'))
import requests # NOQA
def verify_signature(secret, signature, payload):
computed_hash = hmac.new(str(secret), payload, hashlib.sha1)
computed_signature = '='.join(['sha1', computed_hash.hexdigest()])
return hmac.compare_digest(computed_signature, str(signature))
def lambda_handler(event, context):
print 'Webhook received'
verified = verify_signature(event['secret'],
event['x_hub_signature'],
event['payload'])
print 'Signature verified: ' + str(verified)
if verified:
response = requests.post(event['jenkins_url'],
headers={
'Content-Type': 'application/json',
'X-GitHub-Delivery': event['x_github_delivery'],
'X-GitHub-Event': event['x_github_event'],
'X-Hub-Signature': event['x_hub_signature']
},
data=event['payload'])
response.raise_for_status()
else:
raise requests.HTTPError('400 Client Error: Bad Request')
if __name__ == "__main__":
pass
|
Clean up order of imports
|
Clean up order of imports
|
Python
|
bsd-3-clause
|
pristineio/lambda-webhook
|
#!/usr/bin/env python
import os
import sys
import hashlib
# Add the lib directory to the path for Lambda to load our libs
sys.path.append(os.path.join(os.path.dirname(__file__), 'lib'))
import requests # NOQA
import hmac # NOQA
def verify_signature(secret, signature, payload):
computed_hash = hmac.new(str(secret), payload, hashlib.sha1)
computed_signature = '='.join(['sha1', computed_hash.hexdigest()])
return hmac.compare_digest(computed_signature, str(signature))
def lambda_handler(event, context):
print 'Webhook received'
verified = verify_signature(event['secret'],
event['x_hub_signature'],
event['payload'])
print 'Signature verified: ' + str(verified)
if verified:
response = requests.post(event['jenkins_url'],
headers={
'Content-Type': 'application/json',
'X-GitHub-Delivery': event['x_github_delivery'],
'X-GitHub-Event': event['x_github_event'],
'X-Hub-Signature': event['x_hub_signature']
},
data=event['payload'])
response.raise_for_status()
else:
raise requests.HTTPError('400 Client Error: Bad Request')
if __name__ == "__main__":
pass
Clean up order of imports
|
#!/usr/bin/env python
import os
import sys
import hashlib
import hmac
# Add the lib directory to the path for Lambda to load our libs
sys.path.append(os.path.join(os.path.dirname(__file__), 'lib'))
import requests # NOQA
def verify_signature(secret, signature, payload):
computed_hash = hmac.new(str(secret), payload, hashlib.sha1)
computed_signature = '='.join(['sha1', computed_hash.hexdigest()])
return hmac.compare_digest(computed_signature, str(signature))
def lambda_handler(event, context):
print 'Webhook received'
verified = verify_signature(event['secret'],
event['x_hub_signature'],
event['payload'])
print 'Signature verified: ' + str(verified)
if verified:
response = requests.post(event['jenkins_url'],
headers={
'Content-Type': 'application/json',
'X-GitHub-Delivery': event['x_github_delivery'],
'X-GitHub-Event': event['x_github_event'],
'X-Hub-Signature': event['x_hub_signature']
},
data=event['payload'])
response.raise_for_status()
else:
raise requests.HTTPError('400 Client Error: Bad Request')
if __name__ == "__main__":
pass
|
<commit_before>#!/usr/bin/env python
import os
import sys
import hashlib
# Add the lib directory to the path for Lambda to load our libs
sys.path.append(os.path.join(os.path.dirname(__file__), 'lib'))
import requests # NOQA
import hmac # NOQA
def verify_signature(secret, signature, payload):
computed_hash = hmac.new(str(secret), payload, hashlib.sha1)
computed_signature = '='.join(['sha1', computed_hash.hexdigest()])
return hmac.compare_digest(computed_signature, str(signature))
def lambda_handler(event, context):
print 'Webhook received'
verified = verify_signature(event['secret'],
event['x_hub_signature'],
event['payload'])
print 'Signature verified: ' + str(verified)
if verified:
response = requests.post(event['jenkins_url'],
headers={
'Content-Type': 'application/json',
'X-GitHub-Delivery': event['x_github_delivery'],
'X-GitHub-Event': event['x_github_event'],
'X-Hub-Signature': event['x_hub_signature']
},
data=event['payload'])
response.raise_for_status()
else:
raise requests.HTTPError('400 Client Error: Bad Request')
if __name__ == "__main__":
pass
<commit_msg>Clean up order of imports<commit_after>
|
#!/usr/bin/env python
import os
import sys
import hashlib
import hmac
# Add the lib directory to the path for Lambda to load our libs
sys.path.append(os.path.join(os.path.dirname(__file__), 'lib'))
import requests # NOQA
def verify_signature(secret, signature, payload):
computed_hash = hmac.new(str(secret), payload, hashlib.sha1)
computed_signature = '='.join(['sha1', computed_hash.hexdigest()])
return hmac.compare_digest(computed_signature, str(signature))
def lambda_handler(event, context):
print 'Webhook received'
verified = verify_signature(event['secret'],
event['x_hub_signature'],
event['payload'])
print 'Signature verified: ' + str(verified)
if verified:
response = requests.post(event['jenkins_url'],
headers={
'Content-Type': 'application/json',
'X-GitHub-Delivery': event['x_github_delivery'],
'X-GitHub-Event': event['x_github_event'],
'X-Hub-Signature': event['x_hub_signature']
},
data=event['payload'])
response.raise_for_status()
else:
raise requests.HTTPError('400 Client Error: Bad Request')
if __name__ == "__main__":
pass
|
#!/usr/bin/env python
import os
import sys
import hashlib
# Add the lib directory to the path for Lambda to load our libs
sys.path.append(os.path.join(os.path.dirname(__file__), 'lib'))
import requests # NOQA
import hmac # NOQA
def verify_signature(secret, signature, payload):
computed_hash = hmac.new(str(secret), payload, hashlib.sha1)
computed_signature = '='.join(['sha1', computed_hash.hexdigest()])
return hmac.compare_digest(computed_signature, str(signature))
def lambda_handler(event, context):
print 'Webhook received'
verified = verify_signature(event['secret'],
event['x_hub_signature'],
event['payload'])
print 'Signature verified: ' + str(verified)
if verified:
response = requests.post(event['jenkins_url'],
headers={
'Content-Type': 'application/json',
'X-GitHub-Delivery': event['x_github_delivery'],
'X-GitHub-Event': event['x_github_event'],
'X-Hub-Signature': event['x_hub_signature']
},
data=event['payload'])
response.raise_for_status()
else:
raise requests.HTTPError('400 Client Error: Bad Request')
if __name__ == "__main__":
pass
Clean up order of imports#!/usr/bin/env python
import os
import sys
import hashlib
import hmac
# Add the lib directory to the path for Lambda to load our libs
sys.path.append(os.path.join(os.path.dirname(__file__), 'lib'))
import requests # NOQA
def verify_signature(secret, signature, payload):
computed_hash = hmac.new(str(secret), payload, hashlib.sha1)
computed_signature = '='.join(['sha1', computed_hash.hexdigest()])
return hmac.compare_digest(computed_signature, str(signature))
def lambda_handler(event, context):
print 'Webhook received'
verified = verify_signature(event['secret'],
event['x_hub_signature'],
event['payload'])
print 'Signature verified: ' + str(verified)
if verified:
response = requests.post(event['jenkins_url'],
headers={
'Content-Type': 'application/json',
'X-GitHub-Delivery': event['x_github_delivery'],
'X-GitHub-Event': event['x_github_event'],
'X-Hub-Signature': event['x_hub_signature']
},
data=event['payload'])
response.raise_for_status()
else:
raise requests.HTTPError('400 Client Error: Bad Request')
if __name__ == "__main__":
pass
|
<commit_before>#!/usr/bin/env python
import os
import sys
import hashlib
# Add the lib directory to the path for Lambda to load our libs
sys.path.append(os.path.join(os.path.dirname(__file__), 'lib'))
import requests # NOQA
import hmac # NOQA
def verify_signature(secret, signature, payload):
computed_hash = hmac.new(str(secret), payload, hashlib.sha1)
computed_signature = '='.join(['sha1', computed_hash.hexdigest()])
return hmac.compare_digest(computed_signature, str(signature))
def lambda_handler(event, context):
print 'Webhook received'
verified = verify_signature(event['secret'],
event['x_hub_signature'],
event['payload'])
print 'Signature verified: ' + str(verified)
if verified:
response = requests.post(event['jenkins_url'],
headers={
'Content-Type': 'application/json',
'X-GitHub-Delivery': event['x_github_delivery'],
'X-GitHub-Event': event['x_github_event'],
'X-Hub-Signature': event['x_hub_signature']
},
data=event['payload'])
response.raise_for_status()
else:
raise requests.HTTPError('400 Client Error: Bad Request')
if __name__ == "__main__":
pass
<commit_msg>Clean up order of imports<commit_after>#!/usr/bin/env python
import os
import sys
import hashlib
import hmac
# Add the lib directory to the path for Lambda to load our libs
sys.path.append(os.path.join(os.path.dirname(__file__), 'lib'))
import requests # NOQA
def verify_signature(secret, signature, payload):
computed_hash = hmac.new(str(secret), payload, hashlib.sha1)
computed_signature = '='.join(['sha1', computed_hash.hexdigest()])
return hmac.compare_digest(computed_signature, str(signature))
def lambda_handler(event, context):
print 'Webhook received'
verified = verify_signature(event['secret'],
event['x_hub_signature'],
event['payload'])
print 'Signature verified: ' + str(verified)
if verified:
response = requests.post(event['jenkins_url'],
headers={
'Content-Type': 'application/json',
'X-GitHub-Delivery': event['x_github_delivery'],
'X-GitHub-Event': event['x_github_event'],
'X-Hub-Signature': event['x_hub_signature']
},
data=event['payload'])
response.raise_for_status()
else:
raise requests.HTTPError('400 Client Error: Bad Request')
if __name__ == "__main__":
pass
|
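The helper above builds the expected GitHub-style 'sha1=<hexdigest>' header and compares it with hmac.compare_digest to avoid timing leaks. A Python 3-flavoured standalone sketch of the same check (the record's handler is Python 2, hence its str() casts; the secret and payload here are made up):

import hashlib
import hmac

def verify_signature(secret, signature, payload):
    computed = 'sha1=' + hmac.new(secret, payload, hashlib.sha1).hexdigest()
    # Constant-time comparison guards against timing attacks.
    return hmac.compare_digest(computed, signature)

sig = 'sha1=' + hmac.new(b'topsecret', b'{}', hashlib.sha1).hexdigest()
assert verify_signature(b'topsecret', sig, b'{}')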
8ad5ff34a91cd103534b7b936e023462a08683fc
|
interface/backend/static/tests.py
|
interface/backend/static/tests.py
|
from django.test import TestCase
from django.urls import reverse
class SmokeTest(TestCase):
def test_landing(self):
url = reverse('static:open-image')
resp = self.client.get(url)
self.assertContains(resp, 'Concept to Clinic')
self.assertEqual(resp.status_code, 200)
|
from django.test import TestCase
from django.urls import reverse
class SmokeTest(TestCase):
def test_landing(self):
url = reverse('static:home')
resp = self.client.get(url)
self.assertContains(resp, 'Concept to Clinic')
self.assertEqual(resp.status_code, 200)
|
Fix broken URL reverse, introduced in 933649f
|
Fix broken URL reverse, introduced in 933649f
|
Python
|
mit
|
vessemer/concept-to-clinic,antonow/concept-to-clinic,antonow/concept-to-clinic,vessemer/concept-to-clinic,antonow/concept-to-clinic,vessemer/concept-to-clinic,antonow/concept-to-clinic,vessemer/concept-to-clinic
|
from django.test import TestCase
from django.urls import reverse
class SmokeTest(TestCase):
def test_landing(self):
url = reverse('static:open-image')
resp = self.client.get(url)
self.assertContains(resp, 'Concept to Clinic')
self.assertEqual(resp.status_code, 200)
Fix broken URL reverse, introduced in 933649f
|
from django.test import TestCase
from django.urls import reverse
class SmokeTest(TestCase):
def test_landing(self):
url = reverse('static:home')
resp = self.client.get(url)
self.assertContains(resp, 'Concept to Clinic')
self.assertEqual(resp.status_code, 200)
|
<commit_before>from django.test import TestCase
from django.urls import reverse
class SmokeTest(TestCase):
def test_landing(self):
url = reverse('static:open-image')
resp = self.client.get(url)
self.assertContains(resp, 'Concept to Clinic')
self.assertEqual(resp.status_code, 200)
<commit_msg>Fix broken URL reverse, introduced in 933649f<commit_after>
|
from django.test import TestCase
from django.urls import reverse
class SmokeTest(TestCase):
def test_landing(self):
url = reverse('static:home')
resp = self.client.get(url)
self.assertContains(resp, 'Concept to Clinic')
self.assertEqual(resp.status_code, 200)
|
from django.test import TestCase
from django.urls import reverse
class SmokeTest(TestCase):
def test_landing(self):
url = reverse('static:open-image')
resp = self.client.get(url)
self.assertContains(resp, 'Concept to Clinic')
self.assertEqual(resp.status_code, 200)
Fix broken URL reverse, introduced in 933649ffrom django.test import TestCase
from django.urls import reverse
class SmokeTest(TestCase):
def test_landing(self):
url = reverse('static:home')
resp = self.client.get(url)
self.assertContains(resp, 'Concept to Clinic')
self.assertEqual(resp.status_code, 200)
|
<commit_before>from django.test import TestCase
from django.urls import reverse
class SmokeTest(TestCase):
def test_landing(self):
url = reverse('static:open-image')
resp = self.client.get(url)
self.assertContains(resp, 'Concept to Clinic')
self.assertEqual(resp.status_code, 200)
<commit_msg>Fix broken URL reverse, introduced in 933649f<commit_after>from django.test import TestCase
from django.urls import reverse
class SmokeTest(TestCase):
def test_landing(self):
url = reverse('static:home')
resp = self.client.get(url)
self.assertContains(resp, 'Concept to Clinic')
self.assertEqual(resp.status_code, 200)
|
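The failing test above traces back to a renamed URL pattern: reverse() resolves names through the URLconf, so 'static:open-image' stopped matching once the pattern was renamed. A sketch of a minimal URLconf consistent with the fixed test (the view and module layout are assumptions, not the project's actual code):

from django.http import HttpResponse
from django.urls import path

app_name = 'static'
urlpatterns = [
    path('', lambda request: HttpResponse('Concept to Clinic'), name='home'),
]
# With this URLconf, reverse('static:home') resolves, while
# reverse('static:open-image') raises NoReverseMatch - exactly what the
# test surfaced before the fix.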
76da7e8bcee5cb91723ebe47006b1e3c20e7cc60
|
services/httplib.py
|
services/httplib.py
|
from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
# raise common error class
raise self.communication_error_class(message=u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
|
from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
raise self.communication_error_class(u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
|
Make HttpLibHttpService compatible with Exception (no kwarg).
|
Make HttpLibHttpService compatible with Exception (no kwarg).
|
Python
|
bsd-2-clause
|
storecast/holon
|
from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
# raise common error class
raise self.communication_error_class(message=u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
Make HttpLibHttpService compatible with Exception (no kwarg).
|
from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
raise self.communication_error_class(u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
|
<commit_before>from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
# raise common error class
raise self.communication_error_class(message=u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
<commit_msg>Make HttpLibHttpService compatible with Exception (no kwarg).<commit_after>
|
from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
raise self.communication_error_class(u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
|
from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
# raise common error class
raise self.communication_error_class(message=u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
Make HttpLibHttpService compatible with Exception (no kwarg).from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
raise self.communication_error_class(u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
|
<commit_before>from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
# raise common error class
raise self.communication_error_class(message=u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
<commit_msg>Make HttpLibHttpService compatible with Exception (no kwarg).<commit_after>from __future__ import absolute_import
from . import HttpService
from . import Response
from httplib import HTTPConnection
from httplib import HTTPException
from httplib import HTTPSConnection
from socket import timeout, error
import time
class HttpLibHttpService(HttpService):
"""
HttpService using python batteries' httplib.
"""
def __init__(self, *args, **kwargs):
super(HttpLibHttpService, self).__init__(*args, **kwargs)
if self.ssl:
self.connection_class = HTTPSConnection
else:
self.connection_class = HTTPConnection
def call(self, body):
try:
connection = self.connection_class(self.host, self.port, timeout=self.connect_timeout)
start_time = time.time()
connection.request("POST", self.path, body)
response = connection.getresponse()
except (HTTPException, timeout, error), e:
raise self.communication_error_class(u"%s failed with %s when attempting to make a call to %s with body %s" % (self.__class__.__name__, e.__class__.__name__, self.base_url, body))
else:
data = unicode(response.read(), "utf-8")
finally:
connection.close()
end_time = time.time()
return Response(response.status, data, (end_time - start_time)*1000)
|
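The commit above drops the message= keyword because plain Exception subclasses accept positional arguments only; the keyword form fails at raise time unless the error class defines its own __init__. A short sketch of both forms (Python 3 syntax; the class name is illustrative):

class CommunicationError(Exception):
    pass

CommunicationError(u'call to %s failed' % 'https://example.test')  # fine
try:
    CommunicationError(message=u'boom')
except TypeError as exc:
    # BaseException rejects keyword arguments outright.
    print('keyword form rejected:', exc)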
fdd87814f68810a390c50f7bf2a08359430722fa
|
conda_build/main_index.py
|
conda_build/main_index.py
|
from __future__ import absolute_import, division, print_function
import argparse
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda_build.index import update_index
def main():
p = argparse.ArgumentParser(
description="Update package index metadata files in given directories")
p.add_argument('dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()])
p.add_argument('-c', "--check-md5",
action="store_true",
help="Use MD5 values instead of file modification times for\
determining if a package's metadata needs to be \
updated.")
p.add_argument('-f', "--force",
action="store_true",
help="force reading all files")
p.add_argument('-q', "--quiet",
action="store_true")
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
|
from __future__ import absolute_import, division, print_function
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda.cli.conda_argparse import ArgumentParser
from conda_build.index import update_index
def main():
p = ArgumentParser(
description="Update package index metadata files in given directories.")
p.add_argument(
'dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()],
)
p.add_argument(
'-c', "--check-md5",
action="store_true",
help="""Use MD5 values instead of file modification times for determining if a
package's metadata needs to be updated.""",
)
p.add_argument(
'-f', "--force",
action="store_true",
help="Force reading all files.",
)
p.add_argument(
'-q', "--quiet",
action="store_true",
help="Don't show any output.",
)
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
|
Update command docs for conda index
|
Update command docs for conda index
|
Python
|
bsd-3-clause
|
frol/conda-build,rmcgibbo/conda-build,shastings517/conda-build,mwcraig/conda-build,mwcraig/conda-build,dan-blanchard/conda-build,mwcraig/conda-build,dan-blanchard/conda-build,sandhujasmine/conda-build,frol/conda-build,rmcgibbo/conda-build,ilastik/conda-build,dan-blanchard/conda-build,shastings517/conda-build,frol/conda-build,rmcgibbo/conda-build,sandhujasmine/conda-build,shastings517/conda-build,sandhujasmine/conda-build,ilastik/conda-build,ilastik/conda-build
|
from __future__ import absolute_import, division, print_function
import argparse
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda_build.index import update_index
def main():
p = argparse.ArgumentParser(
description="Update package index metadata files in given directories")
p.add_argument('dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()])
p.add_argument('-c', "--check-md5",
action="store_true",
help="Use MD5 values instead of file modification times for\
determining if a package's metadata needs to be \
updated.")
p.add_argument('-f', "--force",
action="store_true",
help="force reading all files")
p.add_argument('-q', "--quiet",
action="store_true")
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
Update command docs for conda index
|
from __future__ import absolute_import, division, print_function
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda.cli.conda_argparse import ArgumentParser
from conda_build.index import update_index
def main():
p = ArgumentParser(
description="Update package index metadata files in given directories.")
p.add_argument(
'dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()],
)
p.add_argument(
'-c', "--check-md5",
action="store_true",
help="""Use MD5 values instead of file modification times for determining if a
package's metadata needs to be updated.""",
)
p.add_argument(
'-f', "--force",
action="store_true",
help="Force reading all files.",
)
p.add_argument(
'-q', "--quiet",
action="store_true",
help="Don't show any output.",
)
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import, division, print_function
import argparse
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda_build.index import update_index
def main():
p = argparse.ArgumentParser(
description="Update package index metadata files in given directories")
p.add_argument('dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()])
p.add_argument('-c', "--check-md5",
action="store_true",
help="Use MD5 values instead of file modification times for\
determining if a package's metadata needs to be \
updated.")
p.add_argument('-f', "--force",
action="store_true",
help="force reading all files")
p.add_argument('-q', "--quiet",
action="store_true")
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
<commit_msg>Update command docs for conda index<commit_after>
|
from __future__ import absolute_import, division, print_function
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda.cli.conda_argparse import ArgumentParser
from conda_build.index import update_index
def main():
p = ArgumentParser(
description="Update package index metadata files in given directories.")
p.add_argument(
'dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()],
)
p.add_argument(
'-c', "--check-md5",
action="store_true",
help="""Use MD5 values instead of file modification times for determining if a
package's metadata needs to be updated.""",
)
p.add_argument(
'-f', "--force",
action="store_true",
help="Force reading all files.",
)
p.add_argument(
'-q', "--quiet",
action="store_true",
help="Don't show any output.",
)
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
|
from __future__ import absolute_import, division, print_function
import argparse
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda_build.index import update_index
def main():
p = argparse.ArgumentParser(
description="Update package index metadata files in given directories")
p.add_argument('dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()])
p.add_argument('-c', "--check-md5",
action="store_true",
help="Use MD5 values instead of file modification times for\
determining if a package's metadata needs to be \
updated.")
p.add_argument('-f', "--force",
action="store_true",
help="force reading all files")
p.add_argument('-q', "--quiet",
action="store_true")
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
Update command docs for conda index
from __future__ import absolute_import, division, print_function
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda.cli.conda_argparse import ArgumentParser
from conda_build.index import update_index
def main():
p = ArgumentParser(
description="Update package index metadata files in given directories.")
p.add_argument(
'dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()],
)
p.add_argument(
'-c', "--check-md5",
action="store_true",
help="""Use MD5 values instead of file modification times for determining if a
package's metadata needs to be updated.""",
)
p.add_argument(
'-f', "--force",
action="store_true",
help="Force reading all files.",
)
p.add_argument(
'-q', "--quiet",
action="store_true",
help="Don't show any output.",
)
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import, division, print_function
import argparse
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda_build.index import update_index
def main():
p = argparse.ArgumentParser(
description="Update package index metadata files in given directories")
p.add_argument('dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()])
p.add_argument('-c', "--check-md5",
action="store_true",
help="Use MD5 values instead of file modification times for\
determining if a package's metadata needs to be \
updated.")
p.add_argument('-f', "--force",
action="store_true",
help="force reading all files")
p.add_argument('-q', "--quiet",
action="store_true")
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
<commit_msg>Update command docs for conda index<commit_after>from __future__ import absolute_import, division, print_function
import os
from locale import getpreferredencoding
from os.path import abspath
from conda.compat import PY3
from conda.cli.conda_argparse import ArgumentParser
from conda_build.index import update_index
def main():
p = ArgumentParser(
description="Update package index metadata files in given directories.")
p.add_argument(
'dir',
help='Directory that contains an index to be updated.',
nargs='*',
default=[os.getcwd()],
)
p.add_argument(
'-c', "--check-md5",
action="store_true",
help="""Use MD5 values instead of file modification times for determining if a
package's metadata needs to be updated.""",
)
p.add_argument(
'-f', "--force",
action="store_true",
help="Force reading all files.",
)
p.add_argument(
'-q', "--quiet",
action="store_true",
help="Don't show any output.",
)
args = p.parse_args()
dir_paths = [abspath(path) for path in args.dir]
# Don't use byte strings in Python 2
if not PY3:
dir_paths = [path.decode(getpreferredencoding()) for path in dir_paths]
for path in dir_paths:
update_index(path, verbose=(not args.quiet), force=args.force)
if __name__ == '__main__':
main()
|
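Editor's note: a short runnable sketch, added for illustration and not part of the record above, of the parsing behavior the refactor preserves. Plain argparse stands in here on the assumption that conda's ArgumentParser subclasses it.

import argparse

p = argparse.ArgumentParser(
    description="Update package index metadata files in given directories.")
p.add_argument('dir', nargs='*', default=['.'])
p.add_argument('-c', '--check-md5', action='store_true')
p.add_argument('-f', '--force', action='store_true')
p.add_argument('-q', '--quiet', action='store_true')

# The namespace shape is the same before and after the refactor.
args = p.parse_args(['pkgs/noarch', '--force'])
assert args.dir == ['pkgs/noarch'] and args.force and not args.quiet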
6f557ed73372aa5823393a53b079bf4cec7511b8
|
docker/ssladapter/ssladapter.py
|
docker/ssladapter/ssladapter.py
|
""" Resolves OpenSSL issues in some servers:
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
from distutils.version import StrictVersion
from requests.adapters import HTTPAdapter
try:
from requests.packages.urllib3.poolmanager import PoolManager
except ImportError:
import urllib3
from urllib3.poolmanager import PoolManager
class SSLAdapter(HTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
def __init__(self, ssl_version=None, **kwargs):
self.ssl_version = ssl_version
super(SSLAdapter, self).__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
urllib_ver = urllib3.__version__
if urllib3 and StrictVersion(urllib_ver) <= StrictVersion('1.5'):
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block)
else:
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block,
ssl_version=self.ssl_version)
|
""" Resolves OpenSSL issues in some servers:
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
from distutils.version import StrictVersion
from requests.adapters import HTTPAdapter
try:
import requests.packages.urllib3 as urllib3
except ImportError:
import urllib3
PoolManager = urllib3.poolmanager.PoolManager
class SSLAdapter(HTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
def __init__(self, ssl_version=None, **kwargs):
self.ssl_version = ssl_version
super(SSLAdapter, self).__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
urllib_ver = urllib3.__version__.split('-')[0]
if urllib3 and urllib_ver != 'dev' and \
StrictVersion(urllib_ver) <= StrictVersion('1.5'):
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block)
else:
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block,
ssl_version=self.ssl_version)
|
Fix some urllib3 import issues
|
Fix some urllib3 import issues
|
Python
|
apache-2.0
|
runcom/docker-py,kaiyou/docker-py,TomasTomecek/docker-py,vpetersson/docker-py,dimaspivak/docker-py,paulbellamy/docker-py,ClusterHQ/docker-py,terminalmage/docker-py,ticosax/docker-py,kaiyou/docker-py,dimaspivak/docker-py,auready/docker-py,dnephin/docker-py,docker/docker-py,docker/docker-py,mrfuxi/docker-py,bboreham/docker-py,rhatdan/docker-py,mangalaman93/docker-py,hibooboo2/docker-py,rancher/docker-py,tbeadle/docker-py,vitalyisaev2/docker-py,sourcelair/docker-py,uian/docker-py,delfick/docker-py,olsaki/docker-py,youhong316/docker-py,funkyfuture/docker-py,ColinHuang/docker-py,ssanderson/docker-py,youhong316/docker-py,MohamedAshiqrh/docker-py,auready/docker-py,v-boyko/docker-py,clarete/docker-py,bfirsh/docker-py,aiden0z/docker-py,tangkun75/docker-py,mnowster/docker-py,Faylixe/docker-py,gamechanger/docker-py,shakamunyi/docker-py,jpopelka/docker-py,mikedougherty/docker-py,zwqzhangweiqiang/docker-py,mark-adams/docker-py,funkyfuture/docker-py,shishir-a412ed/docker-py,minzhang28/docker-py,shakamunyi/docker-py,ColinHuang/docker-py,tshauck/docker-py,vpetersson/docker-py,Faylixe/docker-py,jamesmarva/docker-py,wehkamp/docker-py,wallrj/docker-py,vdemeester/docker-py,minzhang28/docker-py,leolujuyi/docker-py,mohitsoni/docker-py,Melraidin/docker-py,bfirsh/docker-py,aiden0z/docker-py,vdemeester/docker-py,Ye-Yong-Chi/docker-py,erikced/docker-py,PierreF/docker-py,kpavel/docker-py,schu/docker-py,tristan0x/docker-py,rastaman/docker-py,stevenewey/docker-py,rschmidtz/docker-py,dlorenc/docker-py,jhowardmsft/docker-py,wlan0/docker-py,uggla/docker-py
|
""" Resolves OpenSSL issues in some servers:
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
from distutils.version import StrictVersion
from requests.adapters import HTTPAdapter
try:
from requests.packages.urllib3.poolmanager import PoolManager
except ImportError:
import urllib3
from urllib3.poolmanager import PoolManager
class SSLAdapter(HTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
def __init__(self, ssl_version=None, **kwargs):
self.ssl_version = ssl_version
super(SSLAdapter, self).__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
urllib_ver = urllib3.__version__
if urllib3 and StrictVersion(urllib_ver) <= StrictVersion('1.5'):
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block)
else:
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block,
ssl_version=self.ssl_version)
Fix some urllib3 import issues
|
""" Resolves OpenSSL issues in some servers:
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
from distutils.version import StrictVersion
from requests.adapters import HTTPAdapter
try:
import requests.packages.urllib3 as urllib3
except ImportError:
import urllib3
PoolManager = urllib3.poolmanager.PoolManager
class SSLAdapter(HTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
def __init__(self, ssl_version=None, **kwargs):
self.ssl_version = ssl_version
super(SSLAdapter, self).__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
urllib_ver = urllib3.__version__.split('-')[0]
if urllib3 and urllib_ver != 'dev' and \
StrictVersion(urllib_ver) <= StrictVersion('1.5'):
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block)
else:
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block,
ssl_version=self.ssl_version)
|
<commit_before>""" Resolves OpenSSL issues in some servers:
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
from distutils.version import StrictVersion
from requests.adapters import HTTPAdapter
try:
from requests.packages.urllib3.poolmanager import PoolManager
except ImportError:
import urllib3
from urllib3.poolmanager import PoolManager
class SSLAdapter(HTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
def __init__(self, ssl_version=None, **kwargs):
self.ssl_version = ssl_version
super(SSLAdapter, self).__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
urllib_ver = urllib3.__version__
if urllib3 and StrictVersion(urllib_ver) <= StrictVersion('1.5'):
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block)
else:
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block,
ssl_version=self.ssl_version)
<commit_msg>Fix some urllib3 import issues<commit_after>
|
""" Resolves OpenSSL issues in some servers:
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
from distutils.version import StrictVersion
from requests.adapters import HTTPAdapter
try:
import requests.packages.urllib3 as urllib3
except ImportError:
import urllib3
PoolManager = urllib3.poolmanager.PoolManager
class SSLAdapter(HTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
def __init__(self, ssl_version=None, **kwargs):
self.ssl_version = ssl_version
super(SSLAdapter, self).__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
urllib_ver = urllib3.__version__.split('-')[0]
if urllib3 and urllib_ver != 'dev' and \
StrictVersion(urllib_ver) <= StrictVersion('1.5'):
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block)
else:
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block,
ssl_version=self.ssl_version)
|
""" Resolves OpenSSL issues in some servers:
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
from distutils.version import StrictVersion
from requests.adapters import HTTPAdapter
try:
from requests.packages.urllib3.poolmanager import PoolManager
except ImportError:
import urllib3
from urllib3.poolmanager import PoolManager
class SSLAdapter(HTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
def __init__(self, ssl_version=None, **kwargs):
self.ssl_version = ssl_version
super(SSLAdapter, self).__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
urllib_ver = urllib3.__version__
if urllib3 and StrictVersion(urllib_ver) <= StrictVersion('1.5'):
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block)
else:
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block,
ssl_version=self.ssl_version)
Fix some urllib3 import issues""" Resolves OpenSSL issues in some servers:
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
from distutils.version import StrictVersion
from requests.adapters import HTTPAdapter
try:
import requests.packages.urllib3 as urllib3
except ImportError:
import urllib3
PoolManager = urllib3.poolmanager.PoolManager
class SSLAdapter(HTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
def __init__(self, ssl_version=None, **kwargs):
self.ssl_version = ssl_version
super(SSLAdapter, self).__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
urllib_ver = urllib3.__version__.split('-')[0]
if urllib3 and urllib_ver != 'dev' and \
StrictVersion(urllib_ver) <= StrictVersion('1.5'):
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block)
else:
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block,
ssl_version=self.ssl_version)
|
<commit_before>""" Resolves OpenSSL issues in some servers:
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
from distutils.version import StrictVersion
from requests.adapters import HTTPAdapter
try:
from requests.packages.urllib3.poolmanager import PoolManager
except ImportError:
import urllib3
from urllib3.poolmanager import PoolManager
class SSLAdapter(HTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
def __init__(self, ssl_version=None, **kwargs):
self.ssl_version = ssl_version
super(SSLAdapter, self).__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
urllib_ver = urllib3.__version__
if urllib3 and StrictVersion(urllib_ver) <= StrictVersion('1.5'):
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block)
else:
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block,
ssl_version=self.ssl_version)
<commit_msg>Fix some urllib3 import issues<commit_after>""" Resolves OpenSSL issues in some servers:
https://lukasa.co.uk/2013/01/Choosing_SSL_Version_In_Requests/
https://github.com/kennethreitz/requests/pull/799
"""
from distutils.version import StrictVersion
from requests.adapters import HTTPAdapter
try:
import requests.packages.urllib3 as urllib3
except ImportError:
import urllib3
PoolManager = urllib3.poolmanager.PoolManager
class SSLAdapter(HTTPAdapter):
'''An HTTPS Transport Adapter that uses an arbitrary SSL version.'''
def __init__(self, ssl_version=None, **kwargs):
self.ssl_version = ssl_version
super(SSLAdapter, self).__init__(**kwargs)
def init_poolmanager(self, connections, maxsize, block=False):
urllib_ver = urllib3.__version__.split('-')[0]
if urllib3 and urllib_ver != 'dev' and \
StrictVersion(urllib_ver) <= StrictVersion('1.5'):
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block)
else:
self.poolmanager = PoolManager(num_pools=connections,
maxsize=maxsize,
block=block,
ssl_version=self.ssl_version)
|
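Editor's note: a hypothetical usage sketch, not part of the record above, showing how such an adapter is typically mounted on a requests Session. The import path mirrors this commit and assumes docker-py is installed; the URL is a placeholder.

import ssl
import requests
from docker.ssladapter.ssladapter import SSLAdapter

session = requests.Session()
# HTTPS requests made through this session now pin the negotiated SSL version.
session.mount('https://', SSLAdapter(ssl_version=ssl.PROTOCOL_TLSv1))
# session.get('https://docker-host:2376/version')  # placeholder endpoint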
bf91b77db4327c698b7ed6fe5d0790aea3799e3c
|
dsub/_dsub_version.py
|
dsub/_dsub_version.py
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3.dev0'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3'
|
Update dsub version to 0.4.3
|
Update dsub version to 0.4.3
PiperOrigin-RevId: 343898649
|
Python
|
apache-2.0
|
DataBiosphere/dsub,DataBiosphere/dsub
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3.dev0'
Update dsub version to 0.4.3
PiperOrigin-RevId: 343898649
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3.dev0'
<commit_msg>Update dsub version to 0.4.3
PiperOrigin-RevId: 343898649<commit_after>
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3'
|
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3.dev0'
Update dsub version to 0.4.3
PiperOrigin-RevId: 343898649
# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3'
|
<commit_before># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3.dev0'
<commit_msg>Update dsub version to 0.4.3
PiperOrigin-RevId: 343898649<commit_after># Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Single source of truth for dsub's version.
This must remain small and dependency-free so that any dsub module may
import it without creating circular dependencies. Note that this module
is parsed as a text file by setup.py and changes to the format of this
file could break setup.py.
The version should follow formatting requirements specified in PEP-440.
- https://www.python.org/dev/peps/pep-0440
A typical release sequence will be versioned as:
0.1.3.dev0 -> 0.1.3 -> 0.1.4.dev0 -> ...
"""
DSUB_VERSION = '0.4.3'
|
d58a85b922d2159b16bc16be46b5c09175567ece
|
dockci/migrations/0002.py
|
dockci/migrations/0002.py
|
"""
Migrate config to docker hosts list
"""
import os
import yaml
filename = os.path.join('data', 'configs.yaml')
with open(filename) as handle:
data = yaml.load(handle)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
|
"""
Migrate config to docker hosts list
"""
import os
import sys
import yaml
filename = os.path.join('data', 'configs.yaml')
try:
with open(filename) as handle:
data = yaml.load(handle)
except FileNotFoundError:
# This is fine; will fail for new installs
sys.exit(0)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
|
Handle no config.yaml in migrations
|
Handle no config.yaml in migrations
|
Python
|
isc
|
RickyCook/DockCI,RickyCook/DockCI,RickyCook/DockCI,sprucedev/DockCI,sprucedev/DockCI,sprucedev/DockCI,sprucedev/DockCI-Agent,sprucedev/DockCI,RickyCook/DockCI,sprucedev/DockCI-Agent
|
"""
Migrate config to docker hosts list
"""
import os
import yaml
filename = os.path.join('data', 'configs.yaml')
with open(filename) as handle:
data = yaml.load(handle)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
Handle no config.yaml in migrations
|
"""
Migrate config to docker hosts list
"""
import os
import sys
import yaml
filename = os.path.join('data', 'configs.yaml')
try:
with open(filename) as handle:
data = yaml.load(handle)
except FileNotFoundError:
# This is fine; will fail for new installs
sys.exit(0)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
|
<commit_before>"""
Migrate config to docker hosts list
"""
import os
import yaml
filename = os.path.join('data', 'configs.yaml')
with open(filename) as handle:
data = yaml.load(handle)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
<commit_msg>Handle no config.yaml in migrations<commit_after>
|
"""
Migrate config to docker hosts list
"""
import os
import sys
import yaml
filename = os.path.join('data', 'configs.yaml')
try:
with open(filename) as handle:
data = yaml.load(handle)
except FileNotFoundError:
# This is fine; will fail for new installs
sys.exit(0)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
|
"""
Migrate config to docker hosts list
"""
import os
import yaml
filename = os.path.join('data', 'configs.yaml')
with open(filename) as handle:
data = yaml.load(handle)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
Handle no config.yaml in migrations"""
Migrate config to docker hosts list
"""
import os
import sys
import yaml
filename = os.path.join('data', 'configs.yaml')
try:
with open(filename) as handle:
data = yaml.load(handle)
except FileNotFoundError:
# This is fine; will fail for new installs
sys.exit(0)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
|
<commit_before>"""
Migrate config to docker hosts list
"""
import os
import yaml
filename = os.path.join('data', 'configs.yaml')
with open(filename) as handle:
data = yaml.load(handle)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
<commit_msg>Handle no config.yaml in migrations<commit_after>"""
Migrate config to docker hosts list
"""
import os
import sys
import yaml
filename = os.path.join('data', 'configs.yaml')
try:
with open(filename) as handle:
data = yaml.load(handle)
except FileNotFoundError:
# This is fine; will fail for new installs
sys.exit(0)
host = data.pop('docker_host')
data['docker_hosts'] = [host]
with open(filename, 'w') as handle:
yaml.dump(data, handle, default_flow_style=False)
|
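Editor's note: a self-contained sketch, added for illustration, of the transformation this migration applies, run on an in-memory document instead of data/configs.yaml. The host value is made up; PyYAML is assumed available.

import yaml

before = yaml.safe_load("docker_host: tcp://127.0.0.1:2375\n")
host = before.pop('docker_host')
before['docker_hosts'] = [host]
assert before == {'docker_hosts': ['tcp://127.0.0.1:2375']}
print(yaml.dump(before, default_flow_style=False))
# docker_hosts:
# - tcp://127.0.0.1:2375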
b56320247044e1a3187d59b003e1fd5c9e4d49cd
|
cq/utils.py
|
cq/utils.py
|
import time
from contextlib import contextmanager
import six
import inspect
import importlib
import logging
from redis.exceptions import RedisError
from django_redis import get_redis_connection
logger = logging.getLogger('cq')
def to_import_string(func):
if inspect.isfunction(func) or inspect.isbuiltin(func):
name = '{0}.{1}'.format(func.__module__, func.__name__)
elif isinstance(func, six.string_types):
name = str(func)
elif inspect.isclass(func):
return '{0}.{1}'.format(func.__module__, func.__name__)
else:
msg = 'Expected a callable or a string, but got: {}'.format(func)
raise TypeError(msg)
return name
def import_attribute(name):
"""Return an attribute from a dotted path name (e.g. "path.to.func").
"""
module_name, attribute = name.rsplit('.', 1)
module = importlib.import_module(module_name)
return getattr(module, attribute)
@contextmanager
def redis_connection(retries=3, sleep_time=0.5):
while 1:
try:
conn = get_redis_connection()
break
except RedisError:
if retries is None or retries == 0:
raise
retries -= 1
time.sleep(sleep_time)
try:
logger.debug('Using connection, {}, at {}'.format(conn, hex(id(conn))))
yield conn
finally:
pass
# This is actually not needed. The call to `get_redis_connection`
# shares a single connection.
# conn.release()
|
import time
from contextlib import contextmanager
import six
import inspect
import importlib
import logging
from redis.exceptions import RedisError
from django_redis import get_redis_connection
logger = logging.getLogger('cq')
def to_import_string(func):
if inspect.isfunction(func) or inspect.isbuiltin(func):
name = '{0}.{1}'.format(func.__module__, func.__name__)
elif isinstance(func, six.string_types):
name = str(func)
elif inspect.isclass(func):
return '{0}.{1}'.format(func.__module__, func.__name__)
else:
msg = 'Expected a callable or a string, but got: {}'.format(func)
raise TypeError(msg)
return name
def import_attribute(name):
"""Return an attribute from a dotted path name (e.g. "path.to.func").
"""
module_name, attribute = name.rsplit('.', 1)
module = importlib.import_module(module_name)
return getattr(module, attribute)
@contextmanager
def redis_connection(retries=3, sleep_time=0.5):
while 1:
try:
conn = get_redis_connection()
break
except RedisError:
if retries is None or retries == 0:
raise
retries -= 1
time.sleep(sleep_time)
try:
yield conn
finally:
pass
# This is actually not needed. The call to `get_redis_connection`
# shares a single connection.
# conn.release()
|
Remove a logging call, not needed anymore.
|
Remove a logging call, not needed anymore.
|
Python
|
bsd-3-clause
|
furious-luke/django-cq
|
import time
from contextlib import contextmanager
import six
import inspect
import importlib
import logging
from redis.exceptions import RedisError
from django_redis import get_redis_connection
logger = logging.getLogger('cq')
def to_import_string(func):
if inspect.isfunction(func) or inspect.isbuiltin(func):
name = '{0}.{1}'.format(func.__module__, func.__name__)
elif isinstance(func, six.string_types):
name = str(func)
elif inspect.isclass(func):
return '{0}.{1}'.format(func.__module__, func.__name__)
else:
msg = 'Expected a callable or a string, but got: {}'.format(func)
raise TypeError(msg)
return name
def import_attribute(name):
"""Return an attribute from a dotted path name (e.g. "path.to.func").
"""
module_name, attribute = name.rsplit('.', 1)
module = importlib.import_module(module_name)
return getattr(module, attribute)
@contextmanager
def redis_connection(retries=3, sleep_time=0.5):
while 1:
try:
conn = get_redis_connection()
break
except RedisError:
if retries is None or retries == 0:
raise
retries -= 1
time.sleep(sleep_time)
try:
logger.debug('Using connection, {}, at {}'.format(conn, hex(id(conn))))
yield conn
finally:
pass
# This is actually not needed. The call to `get_redis_connection`
# shares a single connection.
# conn.release()
Remove a logging call, not needed anymore.
|
import time
from contextlib import contextmanager
import six
import inspect
import importlib
import logging
from redis.exceptions import RedisError
from django_redis import get_redis_connection
logger = logging.getLogger('cq')
def to_import_string(func):
if inspect.isfunction(func) or inspect.isbuiltin(func):
name = '{0}.{1}'.format(func.__module__, func.__name__)
elif isinstance(func, six.string_types):
name = str(func)
elif inspect.isclass(func):
return '{0}.{1}'.format(func.__module__, func.__name__)
else:
msg = 'Expected a callable or a string, but got: {}'.format(func)
raise TypeError(msg)
return name
def import_attribute(name):
"""Return an attribute from a dotted path name (e.g. "path.to.func").
"""
module_name, attribute = name.rsplit('.', 1)
module = importlib.import_module(module_name)
return getattr(module, attribute)
@contextmanager
def redis_connection(retries=3, sleep_time=0.5):
while 1:
try:
conn = get_redis_connection()
break
except RedisError:
if retries is None or retries == 0:
raise
retries -= 1
time.sleep(sleep_time)
try:
yield conn
finally:
pass
# This is actually not needed. The call to `get_redis_connection`
# shares a single connection.
# conn.release()
|
<commit_before>import time
from contextlib import contextmanager
import six
import inspect
import importlib
import logging
from redis.exceptions import RedisError
from django_redis import get_redis_connection
logger = logging.getLogger('cq')
def to_import_string(func):
if inspect.isfunction(func) or inspect.isbuiltin(func):
name = '{0}.{1}'.format(func.__module__, func.__name__)
elif isinstance(func, six.string_types):
name = str(func)
elif inspect.isclass(func):
return '{0}.{1}'.format(func.__module__, func.__name__)
else:
msg = 'Expected a callable or a string, but got: {}'.format(func)
raise TypeError(msg)
return name
def import_attribute(name):
"""Return an attribute from a dotted path name (e.g. "path.to.func").
"""
module_name, attribute = name.rsplit('.', 1)
module = importlib.import_module(module_name)
return getattr(module, attribute)
@contextmanager
def redis_connection(retries=3, sleep_time=0.5):
while 1:
try:
conn = get_redis_connection()
break
except RedisError:
if retries is None or retries == 0:
raise
retries -= 1
time.sleep(sleep_time)
try:
logger.debug('Using connection, {}, at {}'.format(conn, hex(id(conn))))
yield conn
finally:
pass
# This is actually not needed. The call to `get_redis_connection`
# shares a single connection.
# conn.release()
<commit_msg>Remove a logging call, not needed anymore.<commit_after>
|
import time
from contextlib import contextmanager
import six
import inspect
import importlib
import logging
from redis.exceptions import RedisError
from django_redis import get_redis_connection
logger = logging.getLogger('cq')
def to_import_string(func):
if inspect.isfunction(func) or inspect.isbuiltin(func):
name = '{0}.{1}'.format(func.__module__, func.__name__)
elif isinstance(func, six.string_types):
name = str(func)
elif inspect.isclass(func):
return '{0}.{1}'.format(func.__module__, func.__name__)
else:
msg = 'Expected a callable or a string, but got: {}'.format(func)
raise TypeError(msg)
return name
def import_attribute(name):
"""Return an attribute from a dotted path name (e.g. "path.to.func").
"""
module_name, attribute = name.rsplit('.', 1)
module = importlib.import_module(module_name)
return getattr(module, attribute)
@contextmanager
def redis_connection(retries=3, sleep_time=0.5):
while 1:
try:
conn = get_redis_connection()
break
except RedisError:
if retries is None or retries == 0:
raise
retries -= 1
time.sleep(sleep_time)
try:
yield conn
finally:
pass
# This is actually not needed. The call to `get_redis_connection`
# shares a single connection.
# conn.release()
|
import time
from contextlib import contextmanager
import six
import inspect
import importlib
import logging
from redis.exceptions import RedisError
from django_redis import get_redis_connection
logger = logging.getLogger('cq')
def to_import_string(func):
if inspect.isfunction(func) or inspect.isbuiltin(func):
name = '{0}.{1}'.format(func.__module__, func.__name__)
elif isinstance(func, six.string_types):
name = str(func)
elif inspect.isclass(func):
return '{0}.{1}'.format(func.__module__, func.__name__)
else:
msg = 'Expected a callable or a string, but got: {}'.format(func)
raise TypeError(msg)
return name
def import_attribute(name):
"""Return an attribute from a dotted path name (e.g. "path.to.func").
"""
module_name, attribute = name.rsplit('.', 1)
module = importlib.import_module(module_name)
return getattr(module, attribute)
@contextmanager
def redis_connection(retries=3, sleep_time=0.5):
while 1:
try:
conn = get_redis_connection()
break
except RedisError:
if retries is None or retries == 0:
raise
retries -= 1
time.sleep(sleep_time)
try:
logger.debug('Using connection, {}, at {}'.format(conn, hex(id(conn))))
yield conn
finally:
pass
# This is actually not needed. The call to `get_redis_connection`
# shares a single connection.
# conn.release()
Remove a logging call, not needed anymore.
import time
from contextlib import contextmanager
import six
import inspect
import importlib
import logging
from redis.exceptions import RedisError
from django_redis import get_redis_connection
logger = logging.getLogger('cq')
def to_import_string(func):
if inspect.isfunction(func) or inspect.isbuiltin(func):
name = '{0}.{1}'.format(func.__module__, func.__name__)
elif isinstance(func, six.string_types):
name = str(func)
elif inspect.isclass(func):
return '{0}.{1}'.format(func.__module__, func.__name__)
else:
msg = 'Expected a callable or a string, but got: {}'.format(func)
raise TypeError(msg)
return name
def import_attribute(name):
"""Return an attribute from a dotted path name (e.g. "path.to.func").
"""
module_name, attribute = name.rsplit('.', 1)
module = importlib.import_module(module_name)
return getattr(module, attribute)
@contextmanager
def redis_connection(retries=3, sleep_time=0.5):
while 1:
try:
conn = get_redis_connection()
break
except RedisError:
if retries is None or retries == 0:
raise
retries -= 1
time.sleep(sleep_time)
try:
yield conn
finally:
pass
# This is actually not needed. The call to `get_redis_connection`
# shares a single connection.
# conn.release()
|
<commit_before>import time
from contextlib import contextmanager
import six
import inspect
import importlib
import logging
from redis.exceptions import RedisError
from django_redis import get_redis_connection
logger = logging.getLogger('cq')
def to_import_string(func):
if inspect.isfunction(func) or inspect.isbuiltin(func):
name = '{0}.{1}'.format(func.__module__, func.__name__)
elif isinstance(func, six.string_types):
name = str(func)
elif inspect.isclass(func):
return '{0}.{1}'.format(func.__module__, func.__name__)
else:
msg = 'Expected a callable or a string, but got: {}'.format(func)
raise TypeError(msg)
return name
def import_attribute(name):
"""Return an attribute from a dotted path name (e.g. "path.to.func").
"""
module_name, attribute = name.rsplit('.', 1)
module = importlib.import_module(module_name)
return getattr(module, attribute)
@contextmanager
def redis_connection(retries=3, sleep_time=0.5):
while 1:
try:
conn = get_redis_connection()
break
except RedisError:
if retries is None or retries == 0:
raise
retries -= 1
time.sleep(sleep_time)
try:
logger.debug('Using connection, {}, at {}'.format(conn, hex(id(conn))))
yield conn
finally:
pass
# This is actually not needed. The call to `get_redis_connection`
# shares a single connection.
# conn.release()
<commit_msg>Remove a logging call, not needed anymore.<commit_after>import time
from contextlib import contextmanager
import six
import inspect
import importlib
import logging
from redis.exceptions import RedisError
from django_redis import get_redis_connection
logger = logging.getLogger('cq')
def to_import_string(func):
if inspect.isfunction(func) or inspect.isbuiltin(func):
name = '{0}.{1}'.format(func.__module__, func.__name__)
elif isinstance(func, six.string_types):
name = str(func)
elif inspect.isclass(func):
return '{0}.{1}'.format(func.__module__, func.__name__)
else:
msg = 'Expected a callable or a string, but got: {}'.format(func)
raise TypeError(msg)
return name
def import_attribute(name):
"""Return an attribute from a dotted path name (e.g. "path.to.func").
"""
module_name, attribute = name.rsplit('.', 1)
module = importlib.import_module(module_name)
return getattr(module, attribute)
@contextmanager
def redis_connection(retries=3, sleep_time=0.5):
while 1:
try:
conn = get_redis_connection()
break
except RedisError:
if retries is None or retries == 0:
raise
retries -= 1
time.sleep(sleep_time)
try:
yield conn
finally:
pass
# This is actually not needed. The call to `get_redis_connection`
# shares a single connection.
# conn.release()
|
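Editor's note: a stand-alone re-creation, added for illustration, of the retry loop inside redis_connection; a fake connector replaces get_redis_connection so the sketch runs without Redis or Django settings.

import time
from contextlib import contextmanager

class FakeRedisError(Exception):
    pass

attempts = {'n': 0}

def flaky_get_connection():
    # Stand-in for get_redis_connection(): fails twice, then succeeds.
    attempts['n'] += 1
    if attempts['n'] < 3:
        raise FakeRedisError
    return 'conn'

@contextmanager
def fake_redis_connection(retries=3, sleep_time=0.01):
    # Same retry structure as the cq.utils version above.
    while 1:
        try:
            conn = flaky_get_connection()
            break
        except FakeRedisError:
            if retries is None or retries == 0:
                raise
            retries -= 1
            time.sleep(sleep_time)
    yield conn

with fake_redis_connection() as conn:
    assert conn == 'conn' and attempts['n'] == 3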
3e4360e831d98dadca3f9346f324f3d17769257f
|
alg_selection_sort.py
|
alg_selection_sort.py
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(a_list):
"""Selection Sort algortihm.
Time complexity: O(n^2).
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
a_list[select_slot], a_list[max_slot] = (
a_list[max_slot], a_list[select_slot])
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(ls):
"""Selection Sort algortihm.
Time complexity: O(n^2).
Space complexity: O(1).
"""
    # Start from the last element, iterating in reverse: len(ls) - 1, ..., 0.
for i_max in reversed(range(len(ls))):
        # Select the next max, and interchange it with the corresponding element.
s = 0
for i in range(1, i_max + 1):
if ls[i] > ls[s]:
s = i
ls[s], ls[i_max] = ls[i_max], ls[s]
def main():
ls = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('List: {}'.format(ls))
print('By selection sort: ')
selection_sort(ls)
print(ls)
if __name__ == '__main__':
main()
|
Refactor selection sort w/ adding comments
|
Refactor selection sort w/ adding comments
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(a_list):
"""Selection Sort algortihm.
Time complexity: O(n^2).
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
a_list[select_slot], a_list[max_slot] = (
a_list[max_slot], a_list[select_slot])
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
Refactor selection sort w/ adding comments
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(ls):
"""Selection Sort algortihm.
Time complexity: O(n^2).
Space complexity: O(1).
"""
    # Start from the last element, iterating in reverse: len(ls) - 1, ..., 0.
for i_max in reversed(range(len(ls))):
        # Select the next max, and interchange it with the corresponding element.
s = 0
for i in range(1, i_max + 1):
if ls[i] > ls[s]:
s = i
ls[s], ls[i_max] = ls[i_max], ls[s]
def main():
ls = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('List: {}'.format(ls))
print('By selection sort: ')
selection_sort(ls)
print(ls)
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(a_list):
"""Selection Sort algortihm.
Time complexity: O(n^2).
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
a_list[select_slot], a_list[max_slot] = (
a_list[max_slot], a_list[select_slot])
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
<commit_msg>Refactor selection sort w/ adding comments<commit_after>
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(ls):
"""Selection Sort algortihm.
Time complexity: O(n^2).
Space complexity: O(1).
"""
    # Start from the last element, iterating in reverse: len(ls) - 1, ..., 0.
for i_max in reversed(range(len(ls))):
        # Select the next max, and interchange it with the corresponding element.
s = 0
for i in range(1, i_max + 1):
if ls[i] > ls[s]:
s = i
ls[s], ls[i_max] = ls[i_max], ls[s]
def main():
ls = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('List: {}'.format(ls))
print('By selection sort: ')
selection_sort(ls)
print(ls)
if __name__ == '__main__':
main()
|
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(a_list):
"""Selection Sort algortihm.
Time complexity: O(n^2).
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
a_list[select_slot], a_list[max_slot] = (
a_list[max_slot], a_list[select_slot])
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
Refactor selection sort w/ adding commentsfrom __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(ls):
"""Selection Sort algortihm.
Time complexity: O(n^2).
Space complexity: O(1).
"""
# Start from the last element and iterate in reverse: len(ls) - 1, ..., 0.
for i_max in reversed(range(len(ls))):
# Select the next max, and interchange it with the corresponding element.
s = 0
for i in range(1, i_max + 1):
if ls[i] > ls[s]:
s = i
ls[s], ls[i_max] = ls[i_max], ls[s]
def main():
ls = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('List: {}'.format(ls))
print('By selection sort: ')
selection_sort(ls)
print(ls)
if __name__ == '__main__':
main()
|
<commit_before>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(a_list):
"""Selection Sort algortihm.
Time complexity: O(n^2).
"""
for max_slot in reversed(range(len(a_list))):
select_slot = 0
for slot in range(1, max_slot + 1):
if a_list[slot] > a_list[select_slot]:
select_slot = slot
a_list[select_slot], a_list[max_slot] = (
a_list[max_slot], a_list[select_slot])
def main():
a_list = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('a_list: {}'.format(a_list))
print('By selection sort: ')
selection_sort(a_list)
print(a_list)
if __name__ == '__main__':
main()
<commit_msg>Refactor selection sort w/ adding comments<commit_after>from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
def selection_sort(ls):
"""Selection Sort algortihm.
Time complexity: O(n^2).
Space complexity: O(1).
"""
# Start from the last element and iterate in reverse: len(ls) - 1, ..., 0.
for i_max in reversed(range(len(ls))):
# Select the next max, and interchange it with the corresponding element.
s = 0
for i in range(1, i_max + 1):
if ls[i] > ls[s]:
s = i
ls[s], ls[i_max] = ls[i_max], ls[s]
def main():
ls = [54, 26, 93, 17, 77, 31, 44, 55, 20]
print('List: {}'.format(ls))
print('By selection sort: ')
selection_sort(ls)
print(ls)
if __name__ == '__main__':
main()
|
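The refactored selection_sort above preserves the O(n^2) scan-and-swap loop, so its output can be sanity-checked against Python's built-in sorted(). A minimal self-contained property test (the function is restated so the snippet runs on its own; the random inputs are illustrative):

import random

def selection_sort(ls):
    # Repeatedly move the current maximum to the end of the unsorted prefix.
    for i_max in reversed(range(len(ls))):
        s = 0
        for i in range(1, i_max + 1):
            if ls[i] > ls[s]:
                s = i
        ls[s], ls[i_max] = ls[i_max], ls[s]

for _ in range(100):
    data = [random.randint(0, 99) for _ in range(20)]
    expected = sorted(data)
    selection_sort(data)
    assert data == expected, (data, expected)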
2555988a8eaf8e5620a8bf964092f23d1e309e91
|
examples/traffic_light.py
|
examples/traffic_light.py
|
import statemachine as fsm
class TrafficLight(fsm.Machine):
initial_state = 'red'
count = 0
@fsm.after_transition('red', 'green')
def chime(self):
print 'GO GO GO'
self.count += 1
@fsm.after_transition('*', 'red')
def apply_brakes(self):
self.stopped = True
@fsm.event
def cycle(self):
yield 'red', 'green'
yield 'green', 'yellow'
yield 'yellow', 'red'
|
from __future__ import print_function
import statemachine as fsm
class TrafficLight(fsm.Machine):
initial_state = 'red'
count = 0
@fsm.after_transition('red', 'green')
def chime(self):
print('GO GO GO')
self.count += 1
@fsm.after_transition('*', 'red')
def apply_brakes(self):
self.stopped = True
@fsm.event
def cycle(self):
yield 'red', 'green'
yield 'green', 'yellow'
yield 'yellow', 'red'
|
Use the print function for python 3 support
|
Use the print function for python 3 support
|
Python
|
mit
|
kyleconroy/statemachine
|
import statemachine as fsm
class TrafficLight(fsm.Machine):
initial_state = 'red'
count = 0
@fsm.after_transition('red', 'green')
def chime(self):
print 'GO GO GO'
self.count += 1
@fsm.after_transition('*', 'red')
def apply_brakes(self):
self.stopped = True
@fsm.event
def cycle(self):
yield 'red', 'green'
yield 'green', 'yellow'
yield 'yellow', 'red'
Use the print function for python 3 support
|
from __future__ import print_function
import statemachine as fsm
class TrafficLight(fsm.Machine):
initial_state = 'red'
count = 0
@fsm.after_transition('red', 'green')
def chime(self):
print('GO GO GO')
self.count += 1
@fsm.after_transition('*', 'red')
def apply_brakes(self):
self.stopped = True
@fsm.event
def cycle(self):
yield 'red', 'green'
yield 'green', 'yellow'
yield 'yellow', 'red'
|
<commit_before>import statemachine as fsm
class TrafficLight(fsm.Machine):
initial_state = 'red'
count = 0
@fsm.after_transition('red', 'green')
def chime(self):
print 'GO GO GO'
self.count += 1
@fsm.after_transition('*', 'red')
def apply_brakes(self):
self.stopped = True
@fsm.event
def cycle(self):
yield 'red', 'green'
yield 'green', 'yellow'
yield 'yellow', 'red'
<commit_msg>Use the print function for python 3 support<commit_after>
|
from __future__ import print_function
import statemachine as fsm
class TrafficLight(fsm.Machine):
initial_state = 'red'
count = 0
@fsm.after_transition('red', 'green')
def chime(self):
print('GO GO GO')
self.count += 1
@fsm.after_transition('*', 'red')
def apply_brakes(self):
self.stopped = True
@fsm.event
def cycle(self):
yield 'red', 'green'
yield 'green', 'yellow'
yield 'yellow', 'red'
|
import statemachine as fsm
class TrafficLight(fsm.Machine):
initial_state = 'red'
count = 0
@fsm.after_transition('red', 'green')
def chime(self):
print 'GO GO GO'
self.count += 1
@fsm.after_transition('*', 'red')
def apply_brakes(self):
self.stopped = True
@fsm.event
def cycle(self):
yield 'red', 'green'
yield 'green', 'yellow'
yield 'yellow', 'red'
Use the print function for python 3 supportfrom __future__ import print_function
import statemachine as fsm
class TrafficLight(fsm.Machine):
initial_state = 'red'
count = 0
@fsm.after_transition('red', 'green')
def chime(self):
print('GO GO GO')
self.count += 1
@fsm.after_transition('*', 'red')
def apply_brakes(self):
self.stopped = True
@fsm.event
def cycle(self):
yield 'red', 'green'
yield 'green', 'yellow'
yield 'yellow', 'red'
|
<commit_before>import statemachine as fsm
class TrafficLight(fsm.Machine):
initial_state = 'red'
count = 0
@fsm.after_transition('red', 'green')
def chime(self):
print 'GO GO GO'
self.count += 1
@fsm.after_transition('*', 'red')
def apply_brakes(self):
self.stopped = True
@fsm.event
def cycle(self):
yield 'red', 'green'
yield 'green', 'yellow'
yield 'yellow', 'red'
<commit_msg>Use the print function for python 3 support<commit_after>from __future__ import print_function
import statemachine as fsm
class TrafficLight(fsm.Machine):
initial_state = 'red'
count = 0
@fsm.after_transition('red', 'green')
def chime(self):
print('GO GO GO')
self.count += 1
@fsm.after_transition('*', 'red')
def apply_brakes(self):
self.stopped = True
@fsm.event
def cycle(self):
yield 'red', 'green'
yield 'green', 'yellow'
yield 'yellow', 'red'
|
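The one-line fix above works because the __future__ import replaces the Python 2 print statement with the print function for that module, so the same call form parses and behaves identically on both interpreters. A quick illustration:

from __future__ import print_function
import sys

# Valid under Python 2.6+ and 3.x once the import is in effect; keyword
# arguments such as file= are only possible with the function form.
print('GO GO GO', file=sys.stdout)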
e6723a804803457f635307bd0de66175f00c8c0e
|
day1/part2.py
|
day1/part2.py
|
x = y = direction = 0
moves = open('input.txt', 'r').readline().strip().split(', ')
visited = set()
for move in moves:
if move[0] == 'L':
if direction == 0:
direction = 3
else:
direction -= 1
elif move[0] == 'R':
if direction == 3:
direction = 0
else:
direction += 1
dist = int(''.join(move[1:]))
for _ in range(dist):
if direction == 0:
y -= 1
elif direction == 1:
x += 1
elif direction == 2:
y += 1
elif direction == 3:
x -= 1
if (x, y) in visited:
print(abs(x) + abs(y))
input()
exit()
else:
visited.add((x, y))
|
x = y = direction = 0
moves = open('input.txt', 'r').readline().strip().split(', ')
visited = {(0, 0)}
for move in moves:
if move[0] == 'L':
if direction == 0:
direction = 3
else:
direction -= 1
elif move[0] == 'R':
if direction == 3:
direction = 0
else:
direction += 1
dist = int(''.join(move[1:]))
for _ in range(dist):
if direction == 0:
y -= 1
elif direction == 1:
x += 1
elif direction == 2:
y += 1
elif direction == 3:
x -= 1
if (x, y) in visited:
print(abs(x) + abs(y))
input()
exit()
else:
visited.add((x, y))
|
Add starting position to the list of visited locations
|
Add starting position to the list of visited locations
|
Python
|
unlicense
|
ultramega/adventofcode2016
|
x = y = direction = 0
moves = open('input.txt', 'r').readline().strip().split(', ')
visited = set()
for move in moves:
if move[0] == 'L':
if direction == 0:
direction = 3
else:
direction -= 1
elif move[0] == 'R':
if direction == 3:
direction = 0
else:
direction += 1
dist = int(''.join(move[1:]))
for _ in range(dist):
if direction == 0:
y -= 1
elif direction == 1:
x += 1
elif direction == 2:
y += 1
elif direction == 3:
x -= 1
if (x, y) in visited:
print(abs(x) + abs(y))
input()
exit()
else:
visited.add((x, y))
Add starting position to the list of visited locations
|
x = y = direction = 0
moves = open('input.txt', 'r').readline().strip().split(', ')
visited = {(0, 0)}
for move in moves:
if move[0] == 'L':
if direction == 0:
direction = 3
else:
direction -= 1
elif move[0] == 'R':
if direction == 3:
direction = 0
else:
direction += 1
dist = int(''.join(move[1:]))
for _ in range(dist):
if direction == 0:
y -= 1
elif direction == 1:
x += 1
elif direction == 2:
y += 1
elif direction == 3:
x -= 1
if (x, y) in visited:
print(abs(x) + abs(y))
input()
exit()
else:
visited.add((x, y))
|
<commit_before>x = y = direction = 0
moves = open('input.txt', 'r').readline().strip().split(', ')
visited = set()
for move in moves:
if move[0] == 'L':
if direction == 0:
direction = 3
else:
direction -= 1
elif move[0] == 'R':
if direction == 3:
direction = 0
else:
direction += 1
dist = int(''.join(move[1:]))
for _ in range(dist):
if direction == 0:
y -= 1
elif direction == 1:
x += 1
elif direction == 2:
y += 1
elif direction == 3:
x -= 1
if (x, y) in visited:
print(abs(x) + abs(y))
input()
exit()
else:
visited.add((x, y))
<commit_msg>Add starting position to the list of visited locations<commit_after>
|
x = y = direction = 0
moves = open('input.txt', 'r').readline().strip().split(', ')
visited = {(0, 0)}
for move in moves:
if move[0] == 'L':
if direction == 0:
direction = 3
else:
direction -= 1
elif move[0] == 'R':
if direction == 3:
direction = 0
else:
direction += 1
dist = int(''.join(move[1:]))
for _ in range(dist):
if direction == 0:
y -= 1
elif direction == 1:
x += 1
elif direction == 2:
y += 1
elif direction == 3:
x -= 1
if (x, y) in visited:
print(abs(x) + abs(y))
input()
exit()
else:
visited.add((x, y))
|
x = y = direction = 0
moves = open('input.txt', 'r').readline().strip().split(', ')
visited = set()
for move in moves:
if move[0] == 'L':
if direction == 0:
direction = 3
else:
direction -= 1
elif move[0] == 'R':
if direction == 3:
direction = 0
else:
direction += 1
dist = int(''.join(move[1:]))
for _ in range(dist):
if direction == 0:
y -= 1
elif direction == 1:
x += 1
elif direction == 2:
y += 1
elif direction == 3:
x -= 1
if (x, y) in visited:
print(abs(x) + abs(y))
input()
exit()
else:
visited.add((x, y))
Add starting position to the list of visited locationsx = y = direction = 0
moves = open('input.txt', 'r').readline().strip().split(', ')
visited = {(0, 0)}
for move in moves:
if move[0] == 'L':
if direction == 0:
direction = 3
else:
direction -= 1
elif move[0] == 'R':
if direction == 3:
direction = 0
else:
direction += 1
dist = int(''.join(move[1:]))
for _ in range(dist):
if direction == 0:
y -= 1
elif direction == 1:
x += 1
elif direction == 2:
y += 1
elif direction == 3:
x -= 1
if (x, y) in visited:
print(abs(x) + abs(y))
input()
exit()
else:
visited.add((x, y))
|
<commit_before>x = y = direction = 0
moves = open('input.txt', 'r').readline().strip().split(', ')
visited = set()
for move in moves:
if move[0] == 'L':
if direction == 0:
direction = 3
else:
direction -= 1
elif move[0] == 'R':
if direction == 3:
direction = 0
else:
direction += 1
dist = int(''.join(move[1:]))
for _ in range(dist):
if direction == 0:
y -= 1
elif direction == 1:
x += 1
elif direction == 2:
y += 1
elif direction == 3:
x -= 1
if (x, y) in visited:
print(abs(x) + abs(y))
input()
exit()
else:
visited.add((x, y))
<commit_msg>Add starting position to the list of visited locations<commit_after>x = y = direction = 0
moves = open('input.txt', 'r').readline().strip().split(', ')
visited = {(0, 0)}
for move in moves:
if move[0] == 'L':
if direction == 0:
direction = 3
else:
direction -= 1
elif move[0] == 'R':
if direction == 3:
direction = 0
else:
direction += 1
dist = int(''.join(move[1:]))
for _ in range(dist):
if direction == 0:
y -= 1
elif direction == 1:
x += 1
elif direction == 2:
y += 1
elif direction == 3:
x -= 1
if (x, y) in visited:
print(abs(x) + abs(y))
input()
exit()
else:
visited.add((x, y))
|
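A note on the seeding literal: set() iterates over its single argument, so set((0, 0)) would build {0} rather than a set containing the coordinate pair, which is why the set literal form is used above to record the origin. A two-line demonstration:

# set((0, 0)) consumes the tuple as an iterable of its elements.
assert set((0, 0)) == {0}
# The literal form stores the pair itself, matching the later (x, y) lookups.
assert (0, 0) in {(0, 0)}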
8ada9ee4b394119a73de8d85a9db2be9df547aae
|
lib/pegasus/python/Pegasus/cli/startup-validation.py
|
lib/pegasus/python/Pegasus/cli/startup-validation.py
|
#!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write('Pegasus requires Python 3.5 or above\n')
sys.exit(1)
try:
import yaml
except:
sys.stderr.write('Pegasus requires the Python3 YAML module to be installed\n')
sys.exit(1)
try:
import OpenSSL
except:
sys.stderr.write('Pegasus requires the Python3 PyOpenSSL module to be installed\n')
sys.exit(1)
|
#!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write("Pegasus requires Python 3.5 or above\n")
sys.exit(1)
try:
import yaml
except:
sys.stderr.write("Pegasus requires the Python3 YAML module to be installed\n")
sys.exit(1)
|
Remove check for pyOpenSSL as it is only needed in pegasus-service to use ssl certs.
|
Remove check for pyOpenSSL as it is only needed in pegasus-service to use ssl certs.
|
Python
|
apache-2.0
|
pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus,pegasus-isi/pegasus
|
#!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write('Pegasus requires Python 3.5 or above\n')
sys.exit(1)
try:
import yaml
except:
sys.stderr.write('Pegasus requires the Python3 YAML module to be installed\n')
sys.exit(1)
try:
import OpenSSL
except:
sys.stderr.write('Pegasus requires the Python3 PyOpenSSL module to be installed\n')
sys.exit(1)
Remove check for pyOpenSSL as it is only needed in pegasus-service to use ssl certs.
|
#!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write("Pegasus requires Python 3.5 or above\n")
sys.exit(1)
try:
import yaml
except:
sys.stderr.write("Pegasus requires the Python3 YAML module to be installed\n")
sys.exit(1)
|
<commit_before>#!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write('Pegasus requires Python 3.5 or above\n')
sys.exit(1)
try:
import yaml
except:
sys.stderr.write('Pegasus requires the Python3 YAML module to be installed\n')
sys.exit(1)
try:
import OpenSSL
except:
sys.stderr.write('Pegasus requires the Python3 PyOpenSSL module to be installed\n')
sys.exit(1)
<commit_msg>Remove check for pyOpenSSL as it is only needed in pegasus-service to use ssl certs.<commit_after>
|
#!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write("Pegasus requires Python 3.5 or above\n")
sys.exit(1)
try:
import yaml
except:
sys.stderr.write("Pegasus requires the Python3 YAML module to be installed\n")
sys.exit(1)
|
#!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write('Pegasus requires Python 3.5 or above\n')
sys.exit(1)
try:
import yaml
except:
sys.stderr.write('Pegasus requires the Python3 YAML module to be installed\n')
sys.exit(1)
try:
import OpenSSL
except:
sys.stderr.write('Pegasus requires the Python3 PyOpenSSL module to be installed\n')
sys.exit(1)
Remove check for pyOpenSSL as it is only needed in pegasus-service to use ssl certs.#!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write("Pegasus requires Python 3.5 or above\n")
sys.exit(1)
try:
import yaml
except:
sys.stderr.write("Pegasus requires the Python3 YAML module to be installed\n")
sys.exit(1)
|
<commit_before>#!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write('Pegasus requires Python 3.5 or above\n')
sys.exit(1)
try:
import yaml
except:
sys.stderr.write('Pegasus requires the Python3 YAML module to be installed\n')
sys.exit(1)
try:
import OpenSSL
except:
sys.stderr.write('Pegasus requires the Python3 PyOpenSSL module to be installed\n')
sys.exit(1)
<commit_msg>Remove check for pyOpenSSL as it is only needed in pegasus-service to use ssl certs.<commit_after>#!/usr/bin/python3
import sys
if not sys.version_info >= (3, 5):
sys.stderr.write("Pegasus requires Python 3.5 or above\n")
sys.exit(1)
try:
import yaml
except:
sys.stderr.write("Pegasus requires the Python3 YAML module to be installed\n")
sys.exit(1)
|
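The version guard relies on sys.version_info comparing element-wise like any tuple, so (3, 5) admits 3.5.0 and everything newer. A quick check:

import sys

print(tuple(sys.version_info[:3]))  # e.g. (3, 9, 7) on a 3.9 interpreter
print(sys.version_info >= (3, 5))   # True on any interpreter the script accepts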
c00f0f2c0f89b1596c73cb671ef7127ecf56150f
|
features/steps/sensors.py
|
features/steps/sensors.py
|
from behave import given, when, then
from mock import patch, call
from tinkerforge.ip_connection import IPConnection
from pytoon.models import Electricity, db
from pytoon.connection import BrickConnection
@given('we connect to the master brick')
@patch('pytoon.connection.IPConnection')
def step_impl(context, mock_class):
host = None
port = None
context.brick_conn = BrickConnection(host, port, db)
@then('we are connected')
def step_impl(context):
calls = [call(), call()]
context.brick_conn.connection.register_callback.has_calls(calls)
@when('we have an {sensor_type} sensor')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
assert (context.brick_conn.ambient is not None )
@then('we can measure {sensor_type}')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_ambient()
@then('we store the {sensor_type} measurements in the database')
def step_impl(context, sensor_type):
result = Electricity.query.all()
assert result is not None
|
from behave import given, when, then
from mock import patch, call
from tinkerforge.ip_connection import IPConnection
from pytoon.models import Electricity, db
from pytoon.connection import BrickConnection
@given('we connect to the master brick')
@patch('pytoon.connection.IPConnection')
def step_impl(context, mock_class):
host = None
port = None
context.brick_conn = BrickConnection(host, port, db)
@then('we are connected')
def step_impl(context):
calls = [call(), call()]
context.brick_conn.connection.register_callback.has_calls(calls)
@when('we have an {sensor_type} sensor')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
assert (context.brick_conn.ambient is not None )
@then('we can measure {sensor_type}')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_ambient()
@then('we store the {sensor_type} measurements in the database')
def step_impl(context, sensor_type):
db.create_all()
result = Electricity.query.all()
assert result is not None
|
Create database if it doesn't exist
|
Create database if it doesn't exist
|
Python
|
bsd-3-clause
|
marcoplaisier/pytoon,marcofinalist/pytoon,marcoplaisier/pytoon,marcofinalist/pytoon
|
from behave import given, when, then
from mock import patch, call
from tinkerforge.ip_connection import IPConnection
from pytoon.models import Electricity, db
from pytoon.connection import BrickConnection
@given('we connect to the master brick')
@patch('pytoon.connection.IPConnection')
def step_impl(context, mock_class):
host = None
port = None
context.brick_conn = BrickConnection(host, port, db)
@then('we are connected')
def step_impl(context):
calls = [call(), call()]
context.brick_conn.connection.register_callback.has_calls(calls)
@when('we have an {sensor_type} sensor')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
assert (context.brick_conn.ambient is not None )
@then('we can measure {sensor_type}')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_ambient()
@then('we store the {sensor_type} measurements in the database')
def step_impl(context, sensor_type):
result = Electricity.query.all()
assert result is not NoneCreate database if it doesn't exist
|
from behave import given, when, then
from mock import patch, call
from tinkerforge.ip_connection import IPConnection
from pytoon.models import Electricity, db
from pytoon.connection import BrickConnection
@given('we connect to the master brick')
@patch('pytoon.connection.IPConnection')
def step_impl(context, mock_class):
host = None
port = None
context.brick_conn = BrickConnection(host, port, db)
@then('we are connected')
def step_impl(context):
calls = [call(), call()]
context.brick_conn.connection.register_callback.has_calls(calls)
@when('we have an {sensor_type} sensor')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
assert (context.brick_conn.ambient is not None )
@then('we can measure {sensor_type}')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_ambient()
@then('we store the {sensor_type} measurements in the database')
def step_impl(context, sensor_type):
db.create_all()
result = Electricity.query.all()
assert result is not None
|
<commit_before>from behave import given, when, then
from mock import patch, call
from tinkerforge.ip_connection import IPConnection
from pytoon.models import Electricity, db
from pytoon.connection import BrickConnection
@given('we connect to the master brick')
@patch('pytoon.connection.IPConnection')
def step_impl(context, mock_class):
host = None
port = None
context.brick_conn = BrickConnection(host, port, db)
@then('we are connected')
def step_impl(context):
calls = [call(), call()]
context.brick_conn.connection.register_callback.has_calls(calls)
@when('we have an {sensor_type} sensor')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
assert (context.brick_conn.ambient is not None )
@then('we can measure {sensor_type}')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_ambient()
@then('we store the {sensor_type} measurements in the database')
def step_impl(context, sensor_type):
result = Electricity.query.all()
assert result is not None<commit_msg>Create database if it doesn't exist<commit_after>
|
from behave import given, when, then
from mock import patch, call
from tinkerforge.ip_connection import IPConnection
from pytoon.models import Electricity, db
from pytoon.connection import BrickConnection
@given('we connect to the master brick')
@patch('pytoon.connection.IPConnection')
def step_impl(context, mock_class):
host = None
port = None
context.brick_conn = BrickConnection(host, port, db)
@then('we are connected')
def step_impl(context):
calls = [call(), call()]
context.brick_conn.connection.register_callback.has_calls(calls)
@when('we have an {sensor_type} sensor')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
assert (context.brick_conn.ambient is not None )
@then('we can measure {sensor_type}')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_ambient()
@then('we store the {sensor_type} measurements in the database')
def step_impl(context, sensor_type):
db.create_all()
result = Electricity.query.all()
assert result is not None
|
from behave import given, when, then
from mock import patch, call
from tinkerforge.ip_connection import IPConnection
from pytoon.models import Electricity, db
from pytoon.connection import BrickConnection
@given('we connect to the master brick')
@patch('pytoon.connection.IPConnection')
def step_impl(context, mock_class):
host = None
port = None
context.brick_conn = BrickConnection(host, port, db)
@then('we are connected')
def step_impl(context):
calls = [call(), call()]
context.brick_conn.connection.register_callback.has_calls(calls)
@when('we have an {sensor_type} sensor')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
assert (context.brick_conn.ambient is not None )
@then('we can measure {sensor_type}')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_ambient()
@then('we store the {sensor_type} measurements in the database')
def step_impl(context, sensor_type):
result = Electricity.query.all()
assert result is not NoneCreate database if it doesn't existfrom behave import given, when, then
from mock import patch, call
from tinkerforge.ip_connection import IPConnection
from pytoon.models import Electricity, db
from pytoon.connection import BrickConnection
@given('we connect to the master brick')
@patch('pytoon.connection.IPConnection')
def step_impl(context, mock_class):
host = None
port = None
context.brick_conn = BrickConnection(host, port, db)
@then('we are connected')
def step_impl(context):
calls = [call(), call()]
context.brick_conn.connection.register_callback.has_calls(calls)
@when('we have an {sensor_type} sensor')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
assert (context.brick_conn.ambient is not None )
@then('we can measure {sensor_type}')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_ambient()
@then('we store the {sensor_type} measurements in the database')
def step_impl(context, sensor_type):
db.create_all()
result = Electricity.query.all()
assert result is not None
|
<commit_before>from behave import given, when, then
from mock import patch, call
from tinkerforge.ip_connection import IPConnection
from pytoon.models import Electricity, db
from pytoon.connection import BrickConnection
@given('we connect to the master brick')
@patch('pytoon.connection.IPConnection')
def step_impl(context, mock_class):
host = None
port = None
context.brick_conn = BrickConnection(host, port, db)
@then('we are connected')
def step_impl(context):
calls = [call(), call()]
context.brick_conn.connection.register_callback.has_calls(calls)
@when('we have an {sensor_type} sensor')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
assert (context.brick_conn.ambient is not None )
@then('we can measure {sensor_type}')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_ambient()
@then('we store the {sensor_type} measurements in the database')
def step_impl(context, sensor_type):
result = Electricity.query.all()
assert result is not None<commit_msg>Create database if it doesn't exist<commit_after>from behave import given, when, then
from mock import patch, call
from tinkerforge.ip_connection import IPConnection
from pytoon.models import Electricity, db
from pytoon.connection import BrickConnection
@given('we connect to the master brick')
@patch('pytoon.connection.IPConnection')
def step_impl(context, mock_class):
host = None
port = None
context.brick_conn = BrickConnection(host, port, db)
@then('we are connected')
def step_impl(context):
calls = [call(), call()]
context.brick_conn.connection.register_callback.has_calls(calls)
@when('we have an {sensor_type} sensor')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_enumerate('3', None, None, None, None, device_identifier=21,
enumeration_type=IPConnection.ENUMERATION_TYPE_CONNECTED)
assert (context.brick_conn.ambient is not None )
@then('we can measure {sensor_type}')
def step_impl(context, sensor_type):
if sensor_type == 'electricity':
context.brick_conn.cb_ambient()
@then('we store the {sensor_type} measurements in the database')
def step_impl(context, sensor_type):
db.create_all()
result = Electricity.query.all()
assert result is not None
|
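Calling db.create_all() from a step that may run more than once is safe because SQLAlchemy emits CREATE TABLE with checkfirst semantics by default, skipping tables that already exist. A self-contained illustration with plain SQLAlchemy (the table name is illustrative):

from sqlalchemy import Column, Integer, MetaData, Table, create_engine

engine = create_engine('sqlite:///:memory:')
metadata = MetaData()
Table('electricity', metadata, Column('id', Integer, primary_key=True))
metadata.create_all(engine)
metadata.create_all(engine)  # idempotent: the existing table is skipped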
ef8a6616876ee044d07cf8f30b51af0cbb2bc7e4
|
geozones/factories.py
|
geozones/factories.py
|
# coding: utf-8
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
cityId = factory.SubFactory(CityFactory)
slug = factory.LazyAttribute(lambda a: a.name.lower())
zoomLvl = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
|
# coding: utf-8
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
slug = factory.LazyAttribute(lambda a: a.name.lower())
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
zoom = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
|
Fix region factory to reflect region model
|
Fix region factory to reflect region model
|
Python
|
mit
|
sarutobi/Rynda,sarutobi/Rynda,sarutobi/ritmserdtsa,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/Rynda,sarutobi/Rynda,sarutobi/flowofkindness,sarutobi/flowofkindness,sarutobi/flowofkindness,sarutobi/ritmserdtsa,sarutobi/ritmserdtsa
|
# coding: utf-8
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
cityId = factory.SubFactory(CityFactory)
slug = factory.LazyAttribute(lambda a: a.name.lower())
zoomLvl = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
Fix region factory to reflect region model
|
# coding: utf-8
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
slug = factory.LazyAttribute(lambda a: a.name.lower())
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
zoom = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
|
<commit_before># coding: utf-8
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
cityId = factory.SubFactory(CityFactory)
slug = factory.LazyAttribute(lambda a: a.name.lower())
zoomLvl = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
<commit_msg>Fix region factory to reflect region model<commit_after>
|
# coding: utf-8
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
slug = factory.LazyAttribute(lambda a: a.name.lower())
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
zoom = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
|
# coding: utf-8
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
cityId = factory.SubFactory(CityFactory)
slug = factory.LazyAttribute(lambda a: a.name.lower())
zoomLvl = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
Fix region factory to reflect region model# coding: utf-8
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
slug = factory.LazyAttribute(lambda a: a.name.lower())
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
zoom = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
|
<commit_before># coding: utf-8
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
cityId = factory.SubFactory(CityFactory)
slug = factory.LazyAttribute(lambda a: a.name.lower())
zoomLvl = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
<commit_msg>Fix region factory to reflect region model<commit_after># coding: utf-8
import factory
import random
from .models import Location, Region
class RegionFactory(factory.Factory):
FACTORY_FOR = Region
name = factory.Sequence(lambda n: "Region_%s" % n)
slug = factory.LazyAttribute(lambda a: a.name.lower())
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
zoom = random.randint(1, 10)
order = factory.Sequence(lambda n: n)
class LocationFactory(factory.Factory):
FACTORY_FOR = Location
latitude = random.uniform(-90.0, 90.0)
longtitude = random.uniform(-180.0, 180.0)
name = factory.Sequence(lambda n: "Location_%s" % n)
regionId = factory.SubFactory(RegionFactory)
|
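One caveat with the module-level random calls in these factories: random.uniform(...) is evaluated once when the class body executes, so every object built from the factory shares the same coordinates. To draw a fresh value per instance, factory_boy offers lazy declarations; a sketch assuming a reasonably recent factory_boy, with a dict standing in for the real Region model:

import random
import factory

class RegionCoordsFactory(factory.Factory):
    class Meta:
        model = dict  # illustrative stand-in for the Region model

    # LazyFunction defers evaluation until each instance is built.
    latitude = factory.LazyFunction(lambda: random.uniform(-90.0, 90.0))
    longtitude = factory.LazyFunction(lambda: random.uniform(-180.0, 180.0))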
03a95c87dde1a5b20658b3b61b4c4abc070e3bf3
|
flowtype/commands/base.py
|
flowtype/commands/base.py
|
import abc
import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand, metaclass=abc.ABCMeta):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
@abc.abstractmethod
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
@abc.abstractmethod
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
|
import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
|
Fix travis by removing abc metaclass.
|
Fix travis by removing abc metaclass.
|
Python
|
mit
|
Pegase745/sublime-flowtype
|
import abc
import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand, metaclass=abc.ABCMeta):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
@abc.abstractmethod
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
@abc.abstractmethod
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
Fix travis by removing abc metaclass.
|
import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
|
<commit_before>import abc
import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand, metaclass=abc.ABCMeta):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
@abc.abstractmethod
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
@abc.abstractmethod
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
<commit_msg>Fix travis by removing abc metaclass.<commit_after>
|
import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
|
import abc
import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand, metaclass=abc.ABCMeta):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
@abc.abstractmethod
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
@abc.abstractmethod
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
Fix travis by removing abc metaclass.import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
|
<commit_before>import abc
import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand, metaclass=abc.ABCMeta):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
@abc.abstractmethod
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
@abc.abstractmethod
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
<commit_msg>Fix travis by removing abc metaclass.<commit_after>import sublime
import sublime_plugin
from ..helpers import is_js_source
class BaseCommand(sublime_plugin.TextCommand):
"""Common properties and methods for children commands."""
def get_content(self):
"""Return file content."""
return self.view.substr(sublime.Region(0, self.view.size()))
def get_cmd(self):
"""Construct cli command."""
raise NotImplementedError('get_cmd method must be defined')
def handle_process(self, returncode, stdout, error):
"""Handle the output from the threaded process."""
raise NotImplementedError('handle_process method must be defined')
def check_thread(self, thread, i=0, dir=1):
"""Check if the thread is still running."""
before = i % 8
after = (7) - before
if not after:
dir = -1
if not before:
dir = 1
i += dir
self.view.set_status(
'flow_type',
'FlowType [%s=%s]' % (' ' * before, ' ' * after)
)
if thread.is_alive():
return sublime.set_timeout(lambda: self.check_thread(
thread, i, dir), 100)
self.view.erase_status('flow_type')
self.handle_process(thread.returncode, thread.stdout, thread.stderr)
def is_enabled(self):
"""Enable the command only on Javascript files."""
return is_js_source(self.view)
|
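Dropping the ABC metaclass trades an early failure for a late one: without abstractmethod, a subclass that forgets get_cmd() can still be instantiated and only raises when the method is invoked. A minimal illustration of the difference:

class Base(object):
    def get_cmd(self):
        raise NotImplementedError('get_cmd method must be defined')

b = Base()        # instantiation succeeds without the metaclass
try:
    b.get_cmd()   # the NotImplementedError surfaces only at call time
except NotImplementedError as exc:
    print(exc)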
c8962d382f52b172ebc3a0d562597936dcf902ba
|
fmn/web/default_config.py
|
fmn/web/default_config.py
|
SECRET_KEY = 'changeme please'
# TODO -- May I set this to true?
FAS_OPENID_CHECK_CERT = False
#ADMIN_GROUPS = ['sysadmin-web']
FMN_FEDORA_OPENID = 'https://id.fedoraproject.org'
|
SECRET_KEY = 'changeme please'
# TODO -- May I set this to true?
FAS_OPENID_CHECK_CERT = False
#ADMIN_GROUPS = ['sysadmin-web']
FMN_FEDORA_OPENID = 'https://id.fedoraproject.org'
FMN_ALLOW_FAS_OPENID = True
FMN_ALLOW_GOOGLE_OPENID = True
FMN_ALLOW_YAHOO_OPENID = True
FMN_ALLOW_GENERIC_OPENID = True
|
Add configuration keys to allow turning on/off easy access to openid servers
|
Add configuration keys to allow turning on/off easy access to openid servers
|
Python
|
lgpl-2.1
|
jeremycline/fmn,jeremycline/fmn,jeremycline/fmn
|
SECRET_KEY = 'changeme please'
# TODO -- May I set this to true?
FAS_OPENID_CHECK_CERT = False
#ADMIN_GROUPS = ['sysadmin-web']
FMN_FEDORA_OPENID = 'https://id.fedoraproject.org'
Add configuration keys to allow turning on/off easy access to openid servers
|
SECRET_KEY = 'changeme please'
# TODO -- May I set this to true?
FAS_OPENID_CHECK_CERT = False
#ADMIN_GROUPS = ['sysadmin-web']
FMN_FEDORA_OPENID = 'https://id.fedoraproject.org'
FMN_ALLOW_FAS_OPENID = True
FMN_ALLOW_GOOGLE_OPENID = True
FMN_ALLOW_YAHOO_OPENID = True
FMN_ALLOW_GENERIC_OPENID = True
|
<commit_before>SECRET_KEY = 'changeme please'
# TODO -- May I set this to true?
FAS_OPENID_CHECK_CERT = False
#ADMIN_GROUPS = ['sysadmin-web']
FMN_FEDORA_OPENID = 'https://id.fedoraproject.org'
<commit_msg>Add configuration keys to allow turning on/off easy access to openid servers<commit_after>
|
SECRET_KEY = 'changeme please'
# TODO -- May I set this to true?
FAS_OPENID_CHECK_CERT = False
#ADMIN_GROUPS = ['sysadmin-web']
FMN_FEDORA_OPENID = 'https://id.fedoraproject.org'
FMN_ALLOW_FAS_OPENID = True
FMN_ALLOW_GOOGLE_OPENID = True
FMN_ALLOW_YAHOO_OPENID = True
FMN_ALLOW_GENERIC_OPENID = True
|
SECRET_KEY = 'changeme please'
# TODO -- May I set this to true?
FAS_OPENID_CHECK_CERT = False
#ADMIN_GROUPS = ['sysadmin-web']
FMN_FEDORA_OPENID = 'https://id.fedoraproject.org'
Add configuration keys to allow turning on/off easy access to openid serversSECRET_KEY = 'changeme please'
# TODO -- May I set this to true?
FAS_OPENID_CHECK_CERT = False
#ADMIN_GROUPS = ['sysadmin-web']
FMN_FEDORA_OPENID = 'https://id.fedoraproject.org'
FMN_ALLOW_FAS_OPENID = True
FMN_ALLOW_GOOGLE_OPENID = True
FMN_ALLOW_YAHOO_OPENID = True
FMN_ALLOW_GENERIC_OPENID = True
|
<commit_before>SECRET_KEY = 'changeme please'
# TODO -- May I set this to true?
FAS_OPENID_CHECK_CERT = False
#ADMIN_GROUPS = ['sysadmin-web']
FMN_FEDORA_OPENID = 'https://id.fedoraproject.org'
<commit_msg>Add configuration keys to allow turning on/off easy access to openid servers<commit_after>SECRET_KEY = 'changeme please'
# TODO -- May I set this to true?
FAS_OPENID_CHECK_CERT = False
#ADMIN_GROUPS = ['sysadmin-web']
FMN_FEDORA_OPENID = 'https://id.fedoraproject.org'
FMN_ALLOW_FAS_OPENID = True
FMN_ALLOW_GOOGLE_OPENID = True
FMN_ALLOW_YAHOO_OPENID = True
FMN_ALLOW_GENERIC_OPENID = True
|
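A hypothetical way such boolean flags are consumed from Flask configuration (the helper name is illustrative, not taken from the fmn codebase):

from flask import current_app

def enabled_openid_providers():
    # Default to True so deployments without the new keys keep all providers.
    names = ('FAS', 'GOOGLE', 'YAHOO', 'GENERIC')
    return [n for n in names
            if current_app.config.get('FMN_ALLOW_%s_OPENID' % n, True)]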
c99ad137bd2251584d8529e3b7d06aea2ca25967
|
app/logger.py
|
app/logger.py
|
import logging
from configs import EnjoliverConfig
formatter = logging.Formatter('\r%(levelname)-7s %(module)-13s %(funcName)s %(message)s')
consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(logging.INFO)
consoleHandler.setFormatter(formatter)
ec = EnjoliverConfig()
def get_logger(name):
logger = logging.getLogger(name)
if ec.logging_level == "DEBUG":
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
logger.addHandler(consoleHandler)
return logger
|
import logging
from configs import EnjoliverConfig
formatter = logging.Formatter('\r%(levelname)-7s %(module)-13s %(funcName)s %(message)s')
ec = EnjoliverConfig()
def get_logger(name):
logger = logging.getLogger(name)
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
if ec.logging_level == "DEBUG":
logger.setLevel(logging.DEBUG)
console_handler.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
console_handler.setLevel(logging.INFO)
logger.addHandler(console_handler)
return logger
|
Fix the handler for the log level
|
Fix the handler for the log level
|
Python
|
mit
|
nyodas/enjoliver,JulienBalestra/enjoliver,JulienBalestra/enjoliver,JulienBalestra/enjoliver,nyodas/enjoliver,kirek007/enjoliver,JulienBalestra/enjoliver,nyodas/enjoliver,nyodas/enjoliver,kirek007/enjoliver,nyodas/enjoliver,kirek007/enjoliver,kirek007/enjoliver,JulienBalestra/enjoliver,kirek007/enjoliver
|
import logging
from configs import EnjoliverConfig
formatter = logging.Formatter('\r%(levelname)-7s %(module)-13s %(funcName)s %(message)s')
consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(logging.INFO)
consoleHandler.setFormatter(formatter)
ec = EnjoliverConfig()
def get_logger(name):
logger = logging.getLogger(name)
if ec.logging_level == "DEBUG":
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
logger.addHandler(consoleHandler)
return logger
Fix the handler for the log level
|
import logging
from configs import EnjoliverConfig
formatter = logging.Formatter('\r%(levelname)-7s %(module)-13s %(funcName)s %(message)s')
ec = EnjoliverConfig()
def get_logger(name):
logger = logging.getLogger(name)
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
if ec.logging_level == "DEBUG":
logger.setLevel(logging.DEBUG)
console_handler.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
console_handler.setLevel(logging.INFO)
logger.addHandler(console_handler)
return logger
|
<commit_before>import logging
from configs import EnjoliverConfig
formatter = logging.Formatter('\r%(levelname)-7s %(module)-13s %(funcName)s %(message)s')
consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(logging.INFO)
consoleHandler.setFormatter(formatter)
ec = EnjoliverConfig()
def get_logger(name):
logger = logging.getLogger(name)
if ec.logging_level == "DEBUG":
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
logger.addHandler(consoleHandler)
return logger
<commit_msg>Fix the handler for the log level<commit_after>
|
import logging
from configs import EnjoliverConfig
formatter = logging.Formatter('\r%(levelname)-7s %(module)-13s %(funcName)s %(message)s')
ec = EnjoliverConfig()
def get_logger(name):
logger = logging.getLogger(name)
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
if ec.logging_level == "DEBUG":
logger.setLevel(logging.DEBUG)
console_handler.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
console_handler.setLevel(logging.INFO)
logger.addHandler(console_handler)
return logger
|
import logging
from configs import EnjoliverConfig
formatter = logging.Formatter('\r%(levelname)-7s %(module)-13s %(funcName)s %(message)s')
consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(logging.INFO)
consoleHandler.setFormatter(formatter)
ec = EnjoliverConfig()
def get_logger(name):
logger = logging.getLogger(name)
if ec.logging_level == "DEBUG":
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
logger.addHandler(consoleHandler)
return logger
Fix the handler for the log levelimport logging
from configs import EnjoliverConfig
formatter = logging.Formatter('\r%(levelname)-7s %(module)-13s %(funcName)s %(message)s')
ec = EnjoliverConfig()
def get_logger(name):
logger = logging.getLogger(name)
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
if ec.logging_level == "DEBUG":
logger.setLevel(logging.DEBUG)
console_handler.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
console_handler.setLevel(logging.INFO)
logger.addHandler(console_handler)
return logger
|
<commit_before>import logging
from configs import EnjoliverConfig
formatter = logging.Formatter('\r%(levelname)-7s %(module)-13s %(funcName)s %(message)s')
consoleHandler = logging.StreamHandler()
consoleHandler.setLevel(logging.INFO)
consoleHandler.setFormatter(formatter)
ec = EnjoliverConfig()
def get_logger(name):
logger = logging.getLogger(name)
if ec.logging_level == "DEBUG":
logger.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
logger.addHandler(consoleHandler)
return logger
<commit_msg>Fix the handler for the log level<commit_after>import logging
from configs import EnjoliverConfig
formatter = logging.Formatter('\r%(levelname)-7s %(module)-13s %(funcName)s %(message)s')
ec = EnjoliverConfig()
def get_logger(name):
logger = logging.getLogger(name)
console_handler = logging.StreamHandler()
console_handler.setFormatter(formatter)
if ec.logging_level == "DEBUG":
logger.setLevel(logging.DEBUG)
console_handler.setLevel(logging.DEBUG)
else:
logger.setLevel(logging.INFO)
console_handler.setLevel(logging.INFO)
logger.addHandler(console_handler)
return logger
|
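One caveat with building the handler inside get_logger(): calling it twice with the same name attaches two StreamHandlers to the same underlying logger, so every record prints twice. A common guard, sketched on top of the version above:

import logging

def get_logger_once(name, level=logging.INFO):
    logger = logging.getLogger(name)
    if not logger.handlers:  # attach a handler only on first use
        handler = logging.StreamHandler()
        handler.setLevel(level)
        logger.addHandler(handler)
    logger.setLevel(level)
    return logger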
93c978ba422b26971180a4277a0b69e82848ee78
|
src/yunohost/data_migrations/0009_migrate_to_apps_json.py
|
src/yunohost/data_migrations/0009_migrate_to_apps_json.py
|
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
# Remove apps.json list
app_removelist(name="yunohost")
# Replace by official.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/official.json")
|
import os
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list, APPSLISTS_JSON
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
BASE_CONF_PATH = '/home/yunohost.conf'
BACKUP_CONF_DIR = os.path.join(BASE_CONF_PATH, 'backup')
APPSLISTS_BACKUP = os.path.join(BACKUP_CONF_DIR, "appslist_before_migration_0009.json")
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Backup current app list json
os.system("cp %s %s") % (APPSLISTS_JSON, APPSLISTS_BACKUP)
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
if os.path.exists(APPSLISTS_BACKUP):
os.system("cp %s %s") % (APPSLISTS_BACKUP, APPSLISTS_JSON)
|
Backup / restore original appslist to handle backward case properly
|
Backup / restore original appslist to handle backward case properly
|
Python
|
agpl-3.0
|
YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/moulinette-yunohost,YunoHost/yunohost,YunoHost/yunohost
|
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
# Remove apps.json list
app_removelist(name="yunohost")
# Replace by official.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/official.json")
Backup / restore original appslist to handle backward case properly
|
import os
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list, APPSLISTS_JSON
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
BASE_CONF_PATH = '/home/yunohost.conf'
BACKUP_CONF_DIR = os.path.join(BASE_CONF_PATH, 'backup')
APPSLISTS_BACKUP = os.path.join(BACKUP_CONF_DIR, "appslist_before_migration_0009.json")
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Backup current app list json
os.system("cp %s %s") % (APPSLISTS_JSON, APPSLISTS_BACKUP)
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
if os.path.exists(APPSLISTS_BACKUP):
os.system("cp %s %s") % (APPSLISTS_BACKUP, APPSLISTS_JSON)
|
<commit_before>from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
# Remove apps.json list
app_removelist(name="yunohost")
# Replace by official.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/official.json")
<commit_msg>Backup / restore original appslist to handle backward case properly<commit_after>
|
import os
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list, APPSLISTS_JSON
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
BASE_CONF_PATH = '/home/yunohost.conf'
BACKUP_CONF_DIR = os.path.join(BASE_CONF_PATH, 'backup')
APPSLISTS_BACKUP = os.path.join(BACKUP_CONF_DIR, "appslist_before_migration_0009.json")
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Backup current app list json
os.system("cp %s %s") % (APPSLISTS_JSON, APPSLISTS_BACKUP)
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
if os.path.exists(APPSLISTS_BACKUP):
os.system("cp %s %s") % (APPSLISTS_BACKUP, APPSLISTS_JSON)
|
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
# Remove apps.json list
app_removelist(name="yunohost")
# Replace by official.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/official.json")
Backup / restore original appslist to handle backward case properly
import os
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list, APPSLISTS_JSON
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
BASE_CONF_PATH = '/home/yunohost.conf'
BACKUP_CONF_DIR = os.path.join(BASE_CONF_PATH, 'backup')
APPSLISTS_BACKUP = os.path.join(BACKUP_CONF_DIR, "appslist_before_migration_0009.json")
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Backup current app list json
os.system("cp %s %s") % (APPSLISTS_JSON, APPSLISTS_BACKUP)
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
if os.path.exists(APPSLISTS_BACKUP):
os.system("cp %s %s") % (APPSLISTS_BACKUP, APPSLISTS_JSON)
|
<commit_before>from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
# Remove apps.json list
app_removelist(name="yunohost")
# Replace by official.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/official.json")
<commit_msg>Backup / restore original appslist to handle backward case properly<commit_after>import os
from moulinette.utils.log import getActionLogger
from yunohost.app import app_fetchlist, app_removelist, _read_appslist_list, APPSLISTS_JSON
from yunohost.tools import Migration
logger = getActionLogger('yunohost.migration')
BASE_CONF_PATH = '/home/yunohost.conf'
BACKUP_CONF_DIR = os.path.join(BASE_CONF_PATH, 'backup')
APPSLISTS_BACKUP = os.path.join(BACKUP_CONF_DIR, "appslist_before_migration_0009.json")
class MyMigration(Migration):
"Migrate from official.json to apps.json"
def migrate(self):
# Backup current app list json
os.system("cp %s %s") % (APPSLISTS_JSON, APPSLISTS_BACKUP)
# Remove all the deprecated lists
lists_to_remove = [
"https://app.yunohost.org/official.json",
"https://app.yunohost.org/community.json",
"https://labriqueinter.net/apps/labriqueinternet.json"
]
appslists = _read_appslist_list()
for appslist, infos in appslists.items():
if infos["url"] in lists_to_remove:
app_removelist(name=appslist)
# Replace by apps.json list
app_fetchlist(name="yunohost",
url="https://app.yunohost.org/apps.json")
def backward(self):
if os.path.exists(APPSLISTS_BACKUP):
os.system("cp %s %s") % (APPSLISTS_BACKUP, APPSLISTS_JSON)
|
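A stdlib alternative to the shell copy used in the migration above, sketched only for comparison; the helper name is hypothetical and this is not part of the commit:

# illustrative replacement for the os.system("cp ...") calls
import os
import shutil

def copy_appslist(src, dest):
    backup_dir = os.path.dirname(dest)
    if not os.path.isdir(backup_dir):
        os.makedirs(backup_dir)  # create the backup directory if needed
    shutil.copy(src, dest)  # raises IOError (Python 2) if src is missing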
b4f2d120c600cbbd3696766473e6cd18cb597728
|
src/evesrp/util/models.py
|
src/evesrp/util/models.py
|
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime as dt
from sqlalchemy.types import DateTime
from sqlalchemy.ext.declarative import declared_attr
from .datetime import DateTime
from .datetime import utc
from .. import db
class AutoID(object):
"""Mixin adding a primary key integer column named 'id'."""
id = db.Column(db.Integer, primary_key=True)
def _json(self, extended=False):
try:
parent = super(AutoName, self)._json(extended)
except AttributeError:
parent = {}
parent[u'id'] = self.id
return parent
def _utcnow(arg):
return dt.datetime.now(utc)
class Timestamped(object):
"""Mixin adding a timestamp column.
The timestamp defaults to the current time.
"""
timestamp = db.Column(DateTime, nullable=False,
default=_utcnow)
class AutoName(object):
@declared_attr
def __tablename__(cls):
"""SQLAlchemy late-binding attribute to set the table name.
Implemented this way so subclasses do not need to specify a table name
themselves.
"""
return cls.__name__.lower()
|
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime as dt
from sqlalchemy.types import DateTime
from sqlalchemy.ext.declarative import declared_attr
from .datetime import DateTime
from .datetime import utc
from .. import db
class AutoID(object):
"""Mixin adding a primary key integer column named 'id'."""
id = db.Column(db.Integer, primary_key=True)
def _json(self, extended=False):
try:
parent = super(AutoID, self)._json(extended)
except AttributeError:
parent = {}
parent[u'id'] = self.id
return parent
def _utcnow(arg):
return dt.datetime.now(utc)
class Timestamped(object):
"""Mixin adding a timestamp column.
The timestamp defaults to the current time.
"""
timestamp = db.Column(DateTime, nullable=False,
default=_utcnow)
class AutoName(object):
@declared_attr
def __tablename__(cls):
"""SQLAlchemy late-binding attribute to set the table name.
Implemented this way so subclasses do not need to specify a table name
themselves.
"""
return cls.__name__.lower()
|
Fix typo in getting superclass of AutoID
|
Fix typo in getting superclass of AutoID
|
Python
|
bsd-2-clause
|
paxswill/evesrp,paxswill/evesrp,paxswill/evesrp
|
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime as dt
from sqlalchemy.types import DateTime
from sqlalchemy.ext.declarative import declared_attr
from .datetime import DateTime
from .datetime import utc
from .. import db
class AutoID(object):
"""Mixin adding a primary key integer column named 'id'."""
id = db.Column(db.Integer, primary_key=True)
def _json(self, extended=False):
try:
parent = super(AutoName, self)._json(extended)
except AttributeError:
parent = {}
parent[u'id'] = self.id
return parent
def _utcnow(arg):
return dt.datetime.now(utc)
class Timestamped(object):
"""Mixin adding a timestamp column.
The timestamp defaults to the current time.
"""
timestamp = db.Column(DateTime, nullable=False,
default=_utcnow)
class AutoName(object):
@declared_attr
def __tablename__(cls):
"""SQLAlchemy late-binding attribute to set the table name.
Implemented this way so subclasses do not need to specify a table name
themselves.
"""
return cls.__name__.lower()
Fix typo in getting superclass of AutoID
|
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime as dt
from sqlalchemy.types import DateTime
from sqlalchemy.ext.declarative import declared_attr
from .datetime import DateTime
from .datetime import utc
from .. import db
class AutoID(object):
"""Mixin adding a primary key integer column named 'id'."""
id = db.Column(db.Integer, primary_key=True)
def _json(self, extended=False):
try:
parent = super(AutoID, self)._json(extended)
except AttributeError:
parent = {}
parent[u'id'] = self.id
return parent
def _utcnow(arg):
return dt.datetime.now(utc)
class Timestamped(object):
"""Mixin adding a timestamp column.
The timestamp defaults to the current time.
"""
timestamp = db.Column(DateTime, nullable=False,
default=_utcnow)
class AutoName(object):
@declared_attr
def __tablename__(cls):
"""SQLAlchemy late-binding attribute to set the table name.
Implemented this way so subclasses do not need to specify a table name
themselves.
"""
return cls.__name__.lower()
|
<commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
import datetime as dt
from sqlalchemy.types import DateTime
from sqlalchemy.ext.declarative import declared_attr
from .datetime import DateTime
from .datetime import utc
from .. import db
class AutoID(object):
"""Mixin adding a primary key integer column named 'id'."""
id = db.Column(db.Integer, primary_key=True)
def _json(self, extended=False):
try:
parent = super(AutoName, self)._json(extended)
except AttributeError:
parent = {}
parent[u'id'] = self.id
return parent
def _utcnow(arg):
return dt.datetime.now(utc)
class Timestamped(object):
"""Mixin adding a timestamp column.
The timestamp defaults to the current time.
"""
timestamp = db.Column(DateTime, nullable=False,
default=_utcnow)
class AutoName(object):
@declared_attr
def __tablename__(cls):
"""SQLAlchemy late-binding attribute to set the table name.
Implemented this way so subclasses do not need to specify a table name
themselves.
"""
return cls.__name__.lower()
<commit_msg>Fix typo in getting superclass of AutoID<commit_after>
|
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime as dt
from sqlalchemy.types import DateTime
from sqlalchemy.ext.declarative import declared_attr
from .datetime import DateTime
from .datetime import utc
from .. import db
class AutoID(object):
"""Mixin adding a primary key integer column named 'id'."""
id = db.Column(db.Integer, primary_key=True)
def _json(self, extended=False):
try:
parent = super(AutoID, self)._json(extended)
except AttributeError:
parent = {}
parent[u'id'] = self.id
return parent
def _utcnow(arg):
return dt.datetime.now(utc)
class Timestamped(object):
"""Mixin adding a timestamp column.
The timestamp defaults to the current time.
"""
timestamp = db.Column(DateTime, nullable=False,
default=_utcnow)
class AutoName(object):
@declared_attr
def __tablename__(cls):
"""SQLAlchemy late-binding attribute to set the table name.
Implemented this way so subclasses do not need to specify a table name
themselves.
"""
return cls.__name__.lower()
|
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime as dt
from sqlalchemy.types import DateTime
from sqlalchemy.ext.declarative import declared_attr
from .datetime import DateTime
from .datetime import utc
from .. import db
class AutoID(object):
"""Mixin adding a primary key integer column named 'id'."""
id = db.Column(db.Integer, primary_key=True)
def _json(self, extended=False):
try:
parent = super(AutoName, self)._json(extended)
except AttributeError:
parent = {}
parent[u'id'] = self.id
return parent
def _utcnow(arg):
return dt.datetime.now(utc)
class Timestamped(object):
"""Mixin adding a timestamp column.
The timestamp defaults to the current time.
"""
timestamp = db.Column(DateTime, nullable=False,
default=_utcnow)
class AutoName(object):
@declared_attr
def __tablename__(cls):
"""SQLAlchemy late-binding attribute to set the table name.
Implemented this way so subclasses do not need to specify a table name
themselves.
"""
return cls.__name__.lower()
Fix typo in getting superclass of AutoID
from __future__ import absolute_import
from __future__ import unicode_literals
import datetime as dt
from sqlalchemy.types import DateTime
from sqlalchemy.ext.declarative import declared_attr
from .datetime import DateTime
from .datetime import utc
from .. import db
class AutoID(object):
"""Mixin adding a primary key integer column named 'id'."""
id = db.Column(db.Integer, primary_key=True)
def _json(self, extended=False):
try:
parent = super(AutoID, self)._json(extended)
except AttributeError:
parent = {}
parent[u'id'] = self.id
return parent
def _utcnow(arg):
return dt.datetime.now(utc)
class Timestamped(object):
"""Mixin adding a timestamp column.
The timestamp defaults to the current time.
"""
timestamp = db.Column(DateTime, nullable=False,
default=_utcnow)
class AutoName(object):
@declared_attr
def __tablename__(cls):
"""SQLAlchemy late-binding attribute to set the table name.
Implemented this way so subclasses do not need to specify a table name
themselves.
"""
return cls.__name__.lower()
|
<commit_before>from __future__ import absolute_import
from __future__ import unicode_literals
import datetime as dt
from sqlalchemy.types import DateTime
from sqlalchemy.ext.declarative import declared_attr
from .datetime import DateTime
from .datetime import utc
from .. import db
class AutoID(object):
"""Mixin adding a primary key integer column named 'id'."""
id = db.Column(db.Integer, primary_key=True)
def _json(self, extended=False):
try:
parent = super(AutoName, self)._json(extended)
except AttributeError:
parent = {}
parent[u'id'] = self.id
return parent
def _utcnow(arg):
return dt.datetime.now(utc)
class Timestamped(object):
"""Mixin adding a timestamp column.
The timestamp defaults to the current time.
"""
timestamp = db.Column(DateTime, nullable=False,
default=_utcnow)
class AutoName(object):
@declared_attr
def __tablename__(cls):
"""SQLAlchemy late-binding attribute to set the table name.
Implemented this way so subclasses do not need to specify a table name
themselves.
"""
return cls.__name__.lower()
<commit_msg>Fix typo in getting superclass of AutoID<commit_after>from __future__ import absolute_import
from __future__ import unicode_literals
import datetime as dt
from sqlalchemy.types import DateTime
from sqlalchemy.ext.declarative import declared_attr
from .datetime import DateTime
from .datetime import utc
from .. import db
class AutoID(object):
"""Mixin adding a primary key integer column named 'id'."""
id = db.Column(db.Integer, primary_key=True)
def _json(self, extended=False):
try:
parent = super(AutoID, self)._json(extended)
except AttributeError:
parent = {}
parent[u'id'] = self.id
return parent
def _utcnow(arg):
return dt.datetime.now(utc)
class Timestamped(object):
"""Mixin adding a timestamp column.
The timestamp defaults to the current time.
"""
timestamp = db.Column(DateTime, nullable=False,
default=_utcnow)
class AutoName(object):
@declared_attr
def __tablename__(cls):
"""SQLAlchemy late-binding attribute to set the table name.
Implemented this way so subclasses do not need to specify a table name
themselves.
"""
return cls.__name__.lower()
|
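A compact sketch of why the class named in super() matters in the mixin above; the classes below are hypothetical stand-ins, not the application's models:

# illustrative only: cooperative super() resumes the MRO walk *after* the
# class you name, so naming the wrong class breaks the chain
class Base(object):
    def _json(self, extended=False):
        return {}

class AutoID(object):
    def _json(self, extended=False):
        parent = super(AutoID, self)._json(extended)  # correct: resumes after AutoID
        parent['id'] = 1
        return parent

class Model(AutoID, Base):
    pass

Model()._json()  # returns {'id': 1}
# super(AutoName, self) would instead raise TypeError here, since self is
# not an instance of AutoName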
92ecaea827da56a15297ffc240312b1767ebb845
|
ibmcnx/doc/DataSources.py
|
ibmcnx/doc/DataSources.py
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(db).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print "AdminConfig.list( db ): "
AdminConfig.list ( db )
print "AdminConfig.showAttribute( db, 'name' ): "
AdminConfig.showAttribute( db, 'name' )
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(db).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print "AdminConfig.list( db ): "
try:
AdminConfig.list ( db )
except:
print "error on: " + db
print "AdminConfig.showAttribute( db, 'name' ): "
try:
AdminConfig.showAttribute( db, 'name' )
except:
print "error on: " + db
|
Create documentation of DataSource Settings
|
8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
|
Python
|
apache-2.0
|
stoeps13/ibmcnx2,stoeps13/ibmcnx2
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(db).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print "AdminConfig.list( db ): "
AdminConfig.list ( db )
print "AdminConfig.showAttribute( db, 'name' ): "
    AdminConfig.showAttribute( db, 'name' )
8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(db).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print "AdminConfig.list( db ): "
try:
AdminConfig.list ( db )
except:
print "error on: " + db
print "AdminConfig.showAttribute( db, 'name' ): "
try:
AdminConfig.showAttribute( db, 'name' )
except:
print "error on: " + db
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(db).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print "AdminConfig.list( db ): "
AdminConfig.list ( db )
print "AdminConfig.showAttribute( db, 'name' ): "
AdminConfig.showAttribute( db, 'name' )<commit_msg>8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8<commit_after>
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(db).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print "AdminConfig.list( db ): "
try:
AdminConfig.list ( db )
except:
print "error on: " + db
print "AdminConfig.showAttribute( db, 'name' ): "
try:
AdminConfig.showAttribute( db, 'name' )
except:
print "error on: " + db
|
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(db).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print "AdminConfig.list( db ): "
AdminConfig.list ( db )
print "AdminConfig.showAttribute( db, 'name' ): "
    AdminConfig.showAttribute( db, 'name' )
8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8
######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(db).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print "AdminConfig.list( db ): "
try:
AdminConfig.list ( db )
except:
print "error on: " + db
print "AdminConfig.showAttribute( db, 'name' ): "
try:
AdminConfig.showAttribute( db, 'name' )
except:
print "error on: " + db
|
<commit_before>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(db).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print "AdminConfig.list( db ): "
AdminConfig.list ( db )
print "AdminConfig.showAttribute( db, 'name' ): "
AdminConfig.showAttribute( db, 'name' )<commit_msg>8: Create documentation of DataSource Settings
Task-Url: http://github.com/stoeps13/ibmcnx2/issues/issue/8<commit_after>######
# Check ExId (GUID) by Email through JDBC
#
# Author: Christoph Stoettner
# Mail: christoph.stoettner@stoeps.de
# Documentation: http://scripting101.stoeps.de
#
# Version: 2.0
# Date: 2014-06-04
#
# License: Apache 2.0
#
# Check ExId of a User in all Connections Applications
import ibmcnx.functions
cell = AdminControl.getCell()
cellname = "/Cell:" + cell + "/"
# Get a list of all databases except DefaultEJBTimerDataSource and OTiSDataSource
dbs = AdminConfig.list('DataSource',AdminConfig.getid(cellname)).splitlines()
dblist = []
# remove unwanted databases
for db in dbs:
dbname = db.split('(')
n = 0
for i in dbname:
# i is only the name of the DataSource, db is DataSource ID!
if n == 0 and i != "DefaultEJBTimerDataSource" and i != 'OTiSDataSource':
dblist.append(str(db).replace('"',''))
n += 1
dblist.sort()
for db in dblist:
print "AdminConfig.list( db ): "
try:
AdminConfig.list ( db )
except:
print "error on: " + db
print "AdminConfig.showAttribute( db, 'name' ): "
try:
AdminConfig.showAttribute( db, 'name' )
except:
print "error on: " + db
|
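Under wsadmin's Jython a bare except also catches Java-side exceptions, which a Python-level except Exception may miss; a variant that additionally surfaces the error text is sketched below (illustrative, not part of the commit):

# illustrative variant of the try/except blocks above
import sys
for db in dblist:
    try:
        AdminConfig.list(db)
    except:
        exc_type, exc_value = sys.exc_info()[:2]
        print "error on: %s (%s: %s)" % (db, exc_type, exc_value)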
b2e1fd5727eed1818d0ddc3c29a1cf9f7e38d024
|
wger/exercises/management/commands/submitted-exercises.py
|
wger/exercises/management/commands/submitted-exercises.py
|
# -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.core.management.base import BaseCommand
from wger.exercises.models import Exercise
class Command(BaseCommand):
'''
Read out the user submitted exercise.
Used to generate the AUTHORS file for a release
'''
help = 'Read out the user submitted exercise'
def handle(self, *args, **options):
exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED)
usernames = []
for exercise in exercises:
username = exercise.user.username
if username not in usernames:
usernames.append(username)
self.stdout.write('{0}\n'.format(username))
|
# -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.core.management.base import BaseCommand
from wger.exercises.models import Exercise
class Command(BaseCommand):
'''
Read out the user submitted exercise.
Used to generate the AUTHORS file for a release
'''
help = 'Read out the user submitted exercise'
def handle(self, *args, **options):
exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED)
usernames = []
for exercise in exercises:
if exercise.user not in usernames:
usernames.append(exercise.user)
self.stdout.write(exercise.user)
|
Fix management command for submitted exercises
|
Fix management command for submitted exercises
|
Python
|
agpl-3.0
|
DeveloperMal/wger,kjagoo/wger_stark,rolandgeider/wger,rolandgeider/wger,DeveloperMal/wger,petervanderdoes/wger,kjagoo/wger_stark,wger-project/wger,wger-project/wger,petervanderdoes/wger,wger-project/wger,kjagoo/wger_stark,rolandgeider/wger,rolandgeider/wger,petervanderdoes/wger,DeveloperMal/wger,DeveloperMal/wger,wger-project/wger,petervanderdoes/wger,kjagoo/wger_stark
|
# -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.core.management.base import BaseCommand
from wger.exercises.models import Exercise
class Command(BaseCommand):
'''
Read out the user submitted exercise.
Used to generate the AUTHORS file for a release
'''
help = 'Read out the user submitted exercise'
def handle(self, *args, **options):
exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED)
usernames = []
for exercise in exercises:
username = exercise.user.username
if username not in usernames:
usernames.append(username)
self.stdout.write('{0}\n'.format(username))
Fix management command for submitted exercises
|
# -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.core.management.base import BaseCommand
from wger.exercises.models import Exercise
class Command(BaseCommand):
'''
Read out the user submitted exercise.
Used to generate the AUTHORS file for a release
'''
help = 'Read out the user submitted exercise'
def handle(self, *args, **options):
exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED)
usernames = []
for exercise in exercises:
if exercise.user not in usernames:
usernames.append(exercise.user)
self.stdout.write(exercise.user)
|
<commit_before># -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.core.management.base import BaseCommand
from wger.exercises.models import Exercise
class Command(BaseCommand):
'''
Read out the user submitted exercise.
Used to generate the AUTHORS file for a release
'''
help = 'Read out the user submitted exercise'
def handle(self, *args, **options):
exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED)
usernames = []
for exercise in exercises:
username = exercise.user.username
if username not in usernames:
usernames.append(username)
self.stdout.write('{0}\n'.format(username))
<commit_msg>Fix management command for submitted exercises<commit_after>
|
# -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.core.management.base import BaseCommand
from wger.exercises.models import Exercise
class Command(BaseCommand):
'''
Read out the user submitted exercise.
Used to generate the AUTHORS file for a release
'''
help = 'Read out the user submitted exercise'
def handle(self, *args, **options):
exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED)
usernames = []
for exercise in exercises:
if exercise.user not in usernames:
usernames.append(exercise.user)
self.stdout.write(exercise.user)
|
# -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.core.management.base import BaseCommand
from wger.exercises.models import Exercise
class Command(BaseCommand):
'''
Read out the user submitted exercise.
Used to generate the AUTHORS file for a release
'''
help = 'Read out the user submitted exercise'
def handle(self, *args, **options):
exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED)
usernames = []
for exercise in exercises:
username = exercise.user.username
if username not in usernames:
usernames.append(username)
self.stdout.write('{0}\n'.format(username))
Fix management command for submitted exercises
# -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.core.management.base import BaseCommand
from wger.exercises.models import Exercise
class Command(BaseCommand):
'''
Read out the user submitted exercise.
Used to generate the AUTHORS file for a release
'''
help = 'Read out the user submitted exercise'
def handle(self, *args, **options):
exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED)
usernames = []
for exercise in exercises:
if exercise.user not in usernames:
usernames.append(exercise.user)
self.stdout.write(exercise.user)
|
<commit_before># -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.core.management.base import BaseCommand
from wger.exercises.models import Exercise
class Command(BaseCommand):
'''
Read out the user submitted exercise.
Used to generate the AUTHORS file for a release
'''
help = 'Read out the user submitted exercise'
def handle(self, *args, **options):
exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED)
usernames = []
for exercise in exercises:
username = exercise.user.username
if username not in usernames:
usernames.append(username)
self.stdout.write('{0}\n'.format(username))
<commit_msg>Fix management command for submitted exercises<commit_after># -*- coding: utf-8 *-*
# This file is part of wger Workout Manager.
#
# wger Workout Manager is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# wger Workout Manager is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
from django.core.management.base import BaseCommand
from wger.exercises.models import Exercise
class Command(BaseCommand):
'''
Read out the user submitted exercise.
Used to generate the AUTHORS file for a release
'''
help = 'Read out the user submitted exercise'
def handle(self, *args, **options):
exercises = Exercise.objects.filter(status=Exercise.EXERCISE_STATUS_ACCEPTED)
usernames = []
for exercise in exercises:
if exercise.user not in usernames:
usernames.append(exercise.user)
self.stdout.write(exercise.user)
|
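The manual dedup loop above can also be expressed as a single ORM query; a sketch using values_list, with field names taken from the code above (illustrative, not part of the commit):

# illustrative alternative body for handle()
usernames = (Exercise.objects
             .filter(status=Exercise.EXERCISE_STATUS_ACCEPTED)
             .values_list('user__username', flat=True)
             .distinct())
for username in usernames:
    self.stdout.write(u'{0}\n'.format(username))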
1004cc88032e816116bd46f2eb66e4b89f3f766f
|
tests/test_web_caller.py
|
tests/test_web_caller.py
|
from unittest import TestCase
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
|
from unittest import TestCase
from requests.exceptions import ConnectionError
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
MOCK_GOOGLE_URL = 'http://not-going-to-work!!!'
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
@patch('modules.web_caller.GOOGLE_URL', MOCK_GOOGLE_URL)
def test_get_google_with_exception(self):
"""
Call the `get_google` function while using `patch` to create an
unreachable URL.
Assert that the error happened and capture it using assertRaises.
"""
# Call the function via assertRaises and confirm the exception is
# raised by making the call
self.assertRaises(ConnectionError, get_google)
@patch('modules.web_caller.GOOGLE_URL', MOCK_GOOGLE_URL)
def test_get_google_with_exception_context_manager(self):
"""
Call the `get_google` function while using `patch` to create an
unreachable URL.
Assert that the error happened and capture it using the assertRaises
context manager.
"""
# Establish an assertRaises context manager
with self.assertRaises(ConnectionError):
# Call the function
get_google()
|
Add test examples to assert against exceptions
|
Add test examples to assert against exceptions
|
Python
|
mit
|
tkh/test-examples,tkh/test-examples
|
from unittest import TestCase
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
Add test examples to assert against exceptions
|
from unittest import TestCase
from requests.exceptions import ConnectionError
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
MOCK_GOOGLE_URL = 'http://not-going-to-work!!!'
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
@patch('modules.web_caller.GOOGLE_URL', MOCK_GOOGLE_URL)
def test_get_google_with_exception(self):
"""
Call the `get_google` function while using `patch` to create an
unreachable URL.
Assert that the error happened and capture it using assertRaises.
"""
# Call the function via assertRaises and confirm the exception is
# raised by making the call
self.assertRaises(ConnectionError, get_google)
@patch('modules.web_caller.GOOGLE_URL', MOCK_GOOGLE_URL)
def test_get_google_with_exception_context_manager(self):
"""
Call the `get_google` function while using `patch` to create an
unreachable URL.
Assert that the error happened and capture it using the assertRaises
context manager.
"""
# Establish an assertRaises context manager
with self.assertRaises(ConnectionError):
# Call the function
get_google()
|
<commit_before>from unittest import TestCase
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
<commit_msg>Add test examples to assert against exceptions<commit_after>
|
from unittest import TestCase
from requests.exceptions import ConnectionError
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
MOCK_GOOGLE_URL = 'http://not-going-to-work!!!'
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
@patch('modules.web_caller.GOOGLE_URL', MOCK_GOOGLE_URL)
def test_get_google_with_exception(self):
"""
Call the `get_google` function while using `patch` to create an
unreachable URL.
Assert that the error happened and capture it using assertRaises.
"""
# Call the function via assertRaises and confirm the exception is
# raised by making the call
self.assertRaises(ConnectionError, get_google)
@patch('modules.web_caller.GOOGLE_URL', MOCK_GOOGLE_URL)
def test_get_google_with_exception_context_manager(self):
"""
Call the `get_google` function while using `patch` to create an
unreachable URL.
Assert that the error happened and capture it using the assertRaises
context manager.
"""
# Establish an assertRaises context manager
with self.assertRaises(ConnectionError):
# Call the function
get_google()
|
from unittest import TestCase
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
Add test examples to assert against exceptions
from unittest import TestCase
from requests.exceptions import ConnectionError
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
MOCK_GOOGLE_URL = 'http://not-going-to-work!!!'
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
@patch('modules.web_caller.GOOGLE_URL', MOCK_GOOGLE_URL)
def test_get_google_with_exception(self):
"""
Call the `get_google` function while using `patch` to create an
unreachable URL.
Assert that the error happened and capture it using assertRaises.
"""
# Call the function via assertRaises and confirm the exception is
# raised by making the call
self.assertRaises(ConnectionError, get_google)
@patch('modules.web_caller.GOOGLE_URL', MOCK_GOOGLE_URL)
def test_get_google_with_exception_context_manager(self):
"""
Call the `get_google` function while using `patch` to create an
unreachable URL.
Assert that the error happened and capture it using the assertRaises
context manager.
"""
# Establish an assertRaises context manager
with self.assertRaises(ConnectionError):
# Call the function
get_google()
|
<commit_before>from unittest import TestCase
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
<commit_msg>Add test examples to assert against exceptions<commit_after>from unittest import TestCase
from requests.exceptions import ConnectionError
from mock import NonCallableMock, patch
from modules.web_caller import get_google, GOOGLE_URL
MOCK_GOOGLE_URL = 'http://not-going-to-work!!!'
class TestWebCaller(TestCase):
"""
Tests for the `web_caller` module.
"""
@patch('modules.web_caller.requests.get')
def test_get_google(self, get):
"""
Calling `get_google` works as expected.
"""
# Create a mock response
mock_response = NonCallableMock(
status_code=200,
)
# Assign the mock response as the requests.get return value
get.return_value = mock_response
# Call the function
response = get_google()
# Check that requests.get was called with the expected URL
get.assert_called_once_with(GOOGLE_URL)
# Check that the mock response is returned
self.assertIs(mock_response, response)
# Check that the mocked response.status_code is as expected
self.assertEqual(200, response.status_code)
@patch('modules.web_caller.GOOGLE_URL', MOCK_GOOGLE_URL)
def test_get_google_with_exception(self):
"""
Call the `get_google` function while using `patch` to create an
unreachable URL.
Assert that the error happened and capture it using assertRaises.
"""
# Call the function via assertRaises and confirm the exception is
# raised by making the call
self.assertRaises(ConnectionError, get_google)
@patch('modules.web_caller.GOOGLE_URL', MOCK_GOOGLE_URL)
def test_get_google_with_exception_context_manager(self):
"""
Call the `get_google` function while using `patch` to create an
unreachable URL.
Assert that the error happened and capture it using the assertRaises
context manager.
"""
# Establish an assertRaises context manager
with self.assertRaises(ConnectionError):
# Call the function
get_google()
|
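A side note on the two assertRaises styles the record above demonstrates: the context-manager form also keeps the raised exception object around for inspection. A minimal, self-contained sketch of both styles (the parse_positive helper is invented purely for this illustration):

import unittest

def parse_positive(value):
    # Hypothetical helper, invented only for this illustration.
    number = int(value)
    if number <= 0:
        raise ValueError("expected a positive integer, got %r" % value)
    return number

class TestAssertRaisesStyles(unittest.TestCase):
    def test_callable_form(self):
        # One-shot form: pass the callable and its arguments.
        self.assertRaises(ValueError, parse_positive, "-3")

    def test_context_manager_form(self):
        # Context-manager form: the exception object survives on the
        # context, so its message can be checked afterwards.
        with self.assertRaises(ValueError) as ctx:
            parse_positive("-3")
        self.assertIn("positive", str(ctx.exception))

if __name__ == "__main__":
    unittest.main()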
e82045217fa262fbfe30563fef9945a67024d27f
|
project/creditor/management/commands/addrecurring.py
|
project/creditor/management/commands/addrecurring.py
|
# -*- coding: utf-8 -*-
from creditor.models import RecurringTransaction
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = 'Gets all RecurringTransactions and runs conditional_add_transaction()'
def handle(self, *args, **options):
for t in RecurringTransaction.objects.all():
ret = t.conditional_add_transaction()
if ret:
if options['verbosity'] > 1:
print("Created transaction %s" % ret)
|
# -*- coding: utf-8 -*-
import datetime
import itertools
import dateutil.parser
from creditor.models import RecurringTransaction
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from asylum.utils import datetime_proxy, months
class Command(BaseCommand):
help = 'Gets all RecurringTransactions and runs conditional_add_transaction()'
def add_arguments(self, parser):
parser.add_argument('since', type=str, nargs='?', default=datetime_proxy(), help='Run for each month since the date, defaults to yesterday midnight')
def handle(self, *args, **options):
since_parsed = timezone.make_aware(dateutil.parser.parse(options['since']))
if options['verbosity'] > 2:
print("Processing since %s" % since_parsed.isoformat())
for t in RecurringTransaction.objects.all():
if options['verbosity'] > 2:
print("Processing: %s" % t)
for month in months(since_parsed, timezone.now()):
if options['verbosity'] > 2:
print(" month %s" % month.isoformat())
ret = t.conditional_add_transaction(month)
if ret:
if options['verbosity'] > 1:
print("Created transaction %s" % ret)
|
Add "since" parameter to this command
|
Add "since" parameter to this command
Fixes #25
|
Python
|
mit
|
HelsinkiHacklab/asylum,hacklab-fi/asylum,rambo/asylum,rambo/asylum,HelsinkiHacklab/asylum,jautero/asylum,hacklab-fi/asylum,HelsinkiHacklab/asylum,rambo/asylum,HelsinkiHacklab/asylum,jautero/asylum,jautero/asylum,jautero/asylum,hacklab-fi/asylum,hacklab-fi/asylum,rambo/asylum
|
# -*- coding: utf-8 -*-
from creditor.models import RecurringTransaction
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = 'Gets all RecurringTransactions and runs conditional_add_transaction()'
def handle(self, *args, **options):
for t in RecurringTransaction.objects.all():
ret = t.conditional_add_transaction()
if ret:
if options['verbosity'] > 1:
print("Created transaction %s" % ret)
Add "since" parameter to this command
Fixes #25
|
# -*- coding: utf-8 -*-
import datetime
import itertools
import dateutil.parser
from creditor.models import RecurringTransaction
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from asylum.utils import datetime_proxy, months
class Command(BaseCommand):
help = 'Gets all RecurringTransactions and runs conditional_add_transaction()'
def add_arguments(self, parser):
parser.add_argument('since', type=str, nargs='?', default=datetime_proxy(), help='Run for each month since the date, defaults to yesterday midnight')
def handle(self, *args, **options):
since_parsed = timezone.make_aware(dateutil.parser.parse(options['since']))
if options['verbosity'] > 2:
print("Processing since %s" % since_parsed.isoformat())
for t in RecurringTransaction.objects.all():
if options['verbosity'] > 2:
print("Processing: %s" % t)
for month in months(since_parsed, timezone.now()):
if options['verbosity'] > 2:
print(" month %s" % month.isoformat())
ret = t.conditional_add_transaction(month)
if ret:
if options['verbosity'] > 1:
print("Created transaction %s" % ret)
|
<commit_before># -*- coding: utf-8 -*-
from creditor.models import RecurringTransaction
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = 'Gets all RecurringTransactions and runs conditional_add_transaction()'
def handle(self, *args, **options):
for t in RecurringTransaction.objects.all():
ret = t.conditional_add_transaction()
if ret:
if options['verbosity'] > 1:
print("Created transaction %s" % ret)
<commit_msg>Add "since" parameter to this command
Fixes #25<commit_after>
|
# -*- coding: utf-8 -*-
import datetime
import itertools
import dateutil.parser
from creditor.models import RecurringTransaction
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from asylum.utils import datetime_proxy, months
class Command(BaseCommand):
help = 'Gets all RecurringTransactions and runs conditional_add_transaction()'
def add_arguments(self, parser):
parser.add_argument('since', type=str, nargs='?', default=datetime_proxy(), help='Run for each month since the date, defaults to yesterday midnight')
def handle(self, *args, **options):
since_parsed = timezone.make_aware(dateutil.parser.parse(options['since']))
if options['verbosity'] > 2:
print("Processing since %s" % since_parsed.isoformat())
for t in RecurringTransaction.objects.all():
if options['verbosity'] > 2:
print("Processing: %s" % t)
for month in months(since_parsed, timezone.now()):
if options['verbosity'] > 2:
print(" month %s" % month.isoformat())
ret = t.conditional_add_transaction(month)
if ret:
if options['verbosity'] > 1:
print("Created transaction %s" % ret)
|
# -*- coding: utf-8 -*-
from creditor.models import RecurringTransaction
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = 'Gets all RecurringTransactions and runs conditional_add_transaction()'
def handle(self, *args, **options):
for t in RecurringTransaction.objects.all():
ret = t.conditional_add_transaction()
if ret:
if options['verbosity'] > 1:
print("Created transaction %s" % ret)
Add "since" parameter to this command
Fixes #25
# -*- coding: utf-8 -*-
import datetime
import itertools
import dateutil.parser
from creditor.models import RecurringTransaction
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from asylum.utils import datetime_proxy, months
class Command(BaseCommand):
help = 'Gets all RecurringTransactions and runs conditional_add_transaction()'
def add_arguments(self, parser):
parser.add_argument('since', type=str, nargs='?', default=datetime_proxy(), help='Run for each month since the date, defaults to yesterday midnight')
def handle(self, *args, **options):
since_parsed = timezone.make_aware(dateutil.parser.parse(options['since']))
if options['verbosity'] > 2:
print("Processing since %s" % since_parsed.isoformat())
for t in RecurringTransaction.objects.all():
if options['verbosity'] > 2:
print("Processing: %s" % t)
for month in months(since_parsed, timezone.now()):
if options['verbosity'] > 2:
print(" month %s" % month.isoformat())
ret = t.conditional_add_transaction(month)
if ret:
if options['verbosity'] > 1:
print("Created transaction %s" % ret)
|
<commit_before># -*- coding: utf-8 -*-
from creditor.models import RecurringTransaction
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
help = 'Gets all RecurringTransactions and runs conditional_add_transaction()'
def handle(self, *args, **options):
for t in RecurringTransaction.objects.all():
ret = t.conditional_add_transaction()
if ret:
if options['verbosity'] > 1:
print("Created transaction %s" % ret)
<commit_msg>Add "since" parameter to this command
Fixes #25<commit_after># -*- coding: utf-8 -*-
import datetime
import itertools
import dateutil.parser
from creditor.models import RecurringTransaction
from django.core.management.base import BaseCommand, CommandError
from django.utils import timezone
from asylum.utils import datetime_proxy, months
class Command(BaseCommand):
help = 'Gets all RecurringTransactions and runs conditional_add_transaction()'
def add_arguments(self, parser):
parser.add_argument('since', type=str, nargs='?', default=datetime_proxy(), help='Run for each month since the date, defaults to yesterday midnight')
def handle(self, *args, **options):
since_parsed = timezone.make_aware(dateutil.parser.parse(options['since']))
if options['verbosity'] > 2:
print("Processing since %s" % since_parsed.isoformat())
for t in RecurringTransaction.objects.all():
if options['verbosity'] > 2:
print("Processing: %s" % t)
for month in months(since_parsed, timezone.now()):
if options['verbosity'] > 2:
print(" month %s" % month.isoformat())
ret = t.conditional_add_transaction(month)
if ret:
if options['verbosity'] > 1:
print("Created transaction %s" % ret)
|
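The "since" record above depends on two helpers from asylum.utils, datetime_proxy and months, neither of which appears in the record. A plausible sketch of a months generator with the shape the command iterates over, offered as an assumption rather than the project's actual implementation:

import datetime

def months(since, until):
    # Yield the first moment of each month, from the month containing
    # `since` up to and including the month containing `until`.
    current = since.replace(day=1, hour=0, minute=0,
                            second=0, microsecond=0)
    while current <= until:
        yield current
        if current.month == 12:
            current = current.replace(year=current.year + 1, month=1)
        else:
            current = current.replace(month=current.month + 1)

With helpers of this shape in place, the command would be invoked as, for example, `python manage.py addrecurring 2015-01-01 -v 2` to backfill one conditional transaction per elapsed month.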
a9fb3353c32a9b53b8e58912a98cf61176a57f04
|
test/test_molecule.py
|
test/test_molecule.py
|
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/master/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
|
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/molecule3/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
|
Update tests for molecule 3 compatibility
|
Update tests for molecule 3 compatibility
|
Python
|
mit
|
nephelaiio/cookiecutter-ansible-role
|
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/master/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
Update tests for molecule 3 compatibility
|
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/molecule3/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
|
<commit_before>import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/master/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
<commit_msg>Update tests for molecule 3 compatibility<commit_after>
|
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/molecule3/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
|
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/master/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
Update tests for molecule 3 compatibility
import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/molecule3/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
|
<commit_before>import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/master/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
<commit_msg>Update tests for molecule 3 compatibility<commit_after>import pytest
import os
import shutil
from subprocess import call
from cookiecutter.main import cookiecutter
playbook_setup_commands = ['pip install -r https://raw.githubusercontent.com/nephelaiio/ansible-role-requirements/molecule3/requirements.txt']
playbook_setup_success = 0
playbook_test_command = "molecule test"
playbook_test_success = 0
@pytest.mark.parametrize('role_name', ['tree'])
def test_role_name(role_name):
last_dir = os.path.curdir
project_name = "ansible-role-{0}".format(role_name)
test_dir = project_name
try:
shutil.rmtree(test_dir, ignore_errors=True)
cookiecutter(
'.',
no_input=True,
overwrite_if_exists=True,
extra_context={
'role_name': role_name,
'project_name': project_name}
)
for command in playbook_setup_commands:
assert call(command.split()) == playbook_setup_success
os.chdir(test_dir)
assert call(playbook_test_command.split()) == playbook_test_success
finally:
os.chdir(last_dir)
shutil.rmtree(test_dir, ignore_errors=True)
|
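One caveat on the record above: both the old and new requirements URLs pin a branch (master, then molecule3), and branches move over time. For reproducible CI it is common to pin a tag or commit instead; a sketch of the idea, where <SHA> is a placeholder rather than a real revision:

# Branch-pinned: resolves to whatever the branch points at today.
#   .../ansible-role-requirements/molecule3/requirements.txt
# Commit-pinned: immutable (replace <SHA> with a real revision).
playbook_setup_commands = [
    'pip install -r https://raw.githubusercontent.com/'
    'nephelaiio/ansible-role-requirements/<SHA>/requirements.txt'
]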
ade2562d3ba731aed66542c00f4465b698d0a999
|
grammpy/Rules/__init__.py
|
grammpy/Rules/__init__.py
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 16.08.2017 19:28
:Licence GNUv3
Part of grammpy
"""
from .RuleChainable import RuleConnectable
from .Rule import Rule
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 16.08.2017 19:28
:Licence GNUv3
Part of grammpy
"""
from .RuleConnectable import RuleConnectable
from .Rule import Rule
|
FIX import of RuleConnectable instead of RuleChainable
|
FIX import of RuleConnectable instead of RuleChainable
|
Python
|
mit
|
PatrikValkovic/grammpy
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 16.08.2017 19:28
:Licence GNUv3
Part of grammpy
"""
from .RuleChainable import RuleConnectable
from .Rule import Rule
FIX import of RuleConnectable instead of RuleChainable
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 16.08.2017 19:28
:Licence GNUv3
Part of grammpy
"""
from .RuleConnectable import RuleConnectable
from .Rule import Rule
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 16.08.2017 19:28
:Licence GNUv3
Part of grammpy
"""
from .RuleChainable import RuleConnectable
from .Rule import Rule
<commit_msg>FIX import of RuleConnectable instead of RuleChainable<commit_after>
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 16.08.2017 19:28
:Licence GNUv3
Part of grammpy
"""
from .RuleConnectable import RuleConnectable
from .Rule import Rule
|
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 16.08.2017 19:28
:Licence GNUv3
Part of grammpy
"""
from .RuleChainable import RuleConnectable
from .Rule import Rule
FIX import of RuleConnectable instead of RuleChainable
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 16.08.2017 19:28
:Licence GNUv3
Part of grammpy
"""
from .RuleConnectable import RuleConnectable
from .Rule import Rule
|
<commit_before>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 16.08.2017 19:28
:Licence GNUv3
Part of grammpy
"""
from .RuleChainable import RuleConnectable
from .Rule import Rule
<commit_msg>FIX import of RuleConnectable instead of RuleChainable<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 16.08.2017 19:28
:Licence GNUv3
Part of grammpy
"""
from .RuleConnectable import RuleConnectable
from .Rule import Rule
|
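For context on the one-line fix above: the module file is named RuleConnectable.py, so the pre-fix import of .RuleChainable fails the moment the package is imported. A tiny reproduction of the failure mode (module names mirror the record; nothing else is assumed):

try:
    # With the typo in place this raises ImportError
    # (ModuleNotFoundError on Python 3): no RuleChainable
    # module exists on disk.
    from grammpy.Rules import RuleConnectable, Rule
except ImportError as exc:
    print("import failed:", exc)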
8fbb173a11bee6eb7178b8276a594af4d3473442
|
python/setup.py
|
python/setup.py
|
from distutils.core import setup
setup(
name = 'fancypants',
version = '1.1',
py_modules = ['fancypants'],
url = 'http://netcetera.org/',
author = 'Simon Whitaker',
author_email = 'sw@netcetera.org',
description = 'A collection of data visualisation trinkets',
license = 'unlicense.org',
)
|
from distutils.core import setup
setup(
name = 'fancypants',
version = '1.2',
py_modules = ['fancypants'],
url = 'http://netcetera.org/',
author = 'Simon Whitaker',
author_email = 'sw@netcetera.org',
description = 'A collection of data visualisation trinkets',
license = 'unlicense.org',
)
|
Increment version number to 1.2
|
Increment version number to 1.2
|
Python
|
unlicense
|
simonwhitaker/fancypants,simonwhitaker/fancypants
|
from distutils.core import setup
setup(
name = 'fancypants',
version = '1.1',
py_modules = ['fancypants'],
url = 'http://netcetera.org/',
author = 'Simon Whitaker',
author_email = 'sw@netcetera.org',
description = 'A collection of data visualisation trinkets',
license = 'unlicense.org',
)
Increment version number to 1.2
|
from distutils.core import setup
setup(
name = 'fancypants',
version = '1.2',
py_modules = ['fancypants'],
url = 'http://netcetera.org/',
author = 'Simon Whitaker',
author_email = 'sw@netcetera.org',
description = 'A collection of data visualisation trinkets',
license = 'unlicense.org',
)
|
<commit_before>from distutils.core import setup
setup(
name = 'fancypants',
version = '1.1',
py_modules = ['fancypants'],
url = 'http://netcetera.org/',
author = 'Simon Whitaker',
author_email = 'sw@netcetera.org',
description = 'A collection of data visualisation trinkets',
license = 'unlicense.org',
)<commit_msg>Increment version number to 1.2<commit_after>
|
from distutils.core import setup
setup(
name = 'fancypants',
version = '1.2',
py_modules = ['fancypants'],
url = 'http://netcetera.org/',
author = 'Simon Whitaker',
author_email = 'sw@netcetera.org',
description = 'A collection of data visualisation trinkets',
license = 'unlicense.org',
)
|
from distutils.core import setup
setup(
name = 'fancypants',
version = '1.1',
py_modules = ['fancypants'],
url = 'http://netcetera.org/',
author = 'Simon Whitaker',
author_email = 'sw@netcetera.org',
description = 'A collection of data visualisation trinkets',
license = 'unlicense.org',
)
Increment version number to 1.2
from distutils.core import setup
setup(
name = 'fancypants',
version = '1.2',
py_modules = ['fancypants'],
url = 'http://netcetera.org/',
author = 'Simon Whitaker',
author_email = 'sw@netcetera.org',
description = 'A collection of data visualisation trinkets',
license = 'unlicense.org',
)
|
<commit_before>from distutils.core import setup
setup(
name = 'fancypants',
version = '1.1',
py_modules = ['fancypants'],
url = 'http://netcetera.org/',
author = 'Simon Whitaker',
author_email = 'sw@netcetera.org',
description = 'A collection of data visualisation trinkets',
license = 'unlicense.org',
)<commit_msg>Increment version number to 1.2<commit_after>from distutils.core import setup
setup(
name = 'fancypants',
version = '1.2',
py_modules = ['fancypants'],
url = 'http://netcetera.org/',
author = 'Simon Whitaker',
author_email = 'sw@netcetera.org',
description = 'A collection of data visualisation trinkets',
license = 'unlicense.org',
)
|
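The record above is a by-hand version bump in setup.py. A common alternative is to single-source the version from the module itself, so a release touches one place only; a hedged sketch (this assumes fancypants defines a __version__ attribute, which the record does not show):

from distutils.core import setup
import fancypants  # the single module this package ships

setup(
    name = 'fancypants',
    # Read the version from the module so it is bumped in one place.
    version = fancypants.__version__,
    py_modules = ['fancypants'],
)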
8f7640fd5a145dba724974ca5a46f73b9c991c45
|
cloud_notes/templatetags/markdown_filters.py
|
cloud_notes/templatetags/markdown_filters.py
|
from django import template
import markdown as md
import bleach
register = template.Library()
def markdown(value):
"""convert to markdown"""
return md.markdown(bleach.clean(value))
register.filter('markdown', markdown)
|
from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)
|
Fix blockquote missing from markdown filter
|
Fix blockquote missing from markdown filter
|
Python
|
apache-2.0
|
kiwiheretic/logos-v2,kiwiheretic/logos-v2,kiwiheretic/logos-v2,kiwiheretic/logos-v2
|
from django import template
import markdown as md
import bleach
register = template.Library()
def markdown(value):
"""convert to markdown"""
return md.markdown(bleach.clean(value))
register.filter('markdown', markdown)
Fix blockquote missing from markdown filter
|
from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)
|
<commit_before>from django import template
import markdown as md
import bleach
register = template.Library()
def markdown(value):
"""convert to markdown"""
return md.markdown(bleach.clean(value))
register.filter('markdown', markdown)<commit_msg>Fix blockquote missing from markdown filter<commit_after>
|
from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)
|
from django import template
import markdown as md
import bleach
register = template.Library()
def markdown(value):
"""convert to markdown"""
return md.markdown(bleach.clean(value))
register.filter('markdown', markdown)
Fix blockquote missing from markdown filter
from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)
|
<commit_before>from django import template
import markdown as md
import bleach
register = template.Library()
def markdown(value):
"""convert to markdown"""
return md.markdown(bleach.clean(value))
register.filter('markdown', markdown)<commit_msg>Fix blockquote missing from markdown filter<commit_after>from django import template
import markdown as md
import bleach
import copy
register = template.Library()
def markdown(value):
"""convert to markdown"""
allowed_tags = bleach.ALLOWED_TAGS + ['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
return bleach.clean(md.markdown(value), tags = allowed_tags)
register.filter('markdown', markdown)
|
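The interesting part of the record above is the reordering: rendering first and cleaning second means bleach vets the HTML that markdown produced, so block-level output survives only if whitelisted. Bleach's default list already includes blockquote, which is why the quoted-text bug disappears once cleaning runs last. A standalone illustration of the two orderings, using the same two libraries (ALLOWED_TAGS is a list in the bleach versions this record targets):

import markdown as md
import bleach

text = "> quoted line\n\n# A heading"

# Clean first, render second: the markdown-generated tags
# (<blockquote>, <h1>, ...) are never inspected by bleach.
clean_then_render = md.markdown(bleach.clean(text))

# Render first, clean second: every rendered tag must be on the
# whitelist or bleach strips it, hence the extended tag list.
allowed = bleach.ALLOWED_TAGS + ['p', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6']
render_then_clean = bleach.clean(md.markdown(text), tags=allowed)

print(clean_then_render)
print(render_then_clean)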
ec1f7db3f1bd637807b4b9d69a0b702af36fbef1
|
morenines/ignores.py
|
morenines/ignores.py
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
@classmethod
def read(cls, path):
ignores = cls()
if path:
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
import os
from fnmatch import fnmatchcase
import click
from morenines.util import find_file
class Ignores(object):
@classmethod
def read(cls, path):
if not path:
path = find_file('.mnignore')
ignores = cls()
if path:
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
Make Ignores try to find '.mnignore'
|
Make Ignores try to find '.mnignore'
If it doesn't find it, that's okay, and no action is required.
|
Python
|
mit
|
mcgid/morenines,mcgid/morenines
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
@classmethod
def read(cls, path):
ignores = cls()
if path:
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
Make Ignores try to find '.mnignore'
If it doesn't find it, that's okay, and no action is required.
|
import os
from fnmatch import fnmatchcase
import click
from morenines.util import find_file
class Ignores(object):
@classmethod
def read(cls, path):
if not path:
path = find_file('.mnignore')
ignores = cls()
if path:
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
<commit_before>import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
@classmethod
def read(cls, path):
ignores = cls()
if path:
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
<commit_msg>Make Ignores try to find '.mnignore'
If it doesn't find it, that's okay, and no action is required.<commit_after>
|
import os
from fnmatch import fnmatchcase
import click
from morenines.util import find_file
class Ignores(object):
@classmethod
def read(cls, path):
if not path:
path = find_file('.mnignore')
ignores = cls()
if path:
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
@classmethod
def read(cls, path):
ignores = cls()
if path:
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
Make Ignores try to find '.mnignore'
If it doesn't find it, that's okay, and no action is required.
import os
from fnmatch import fnmatchcase
import click
from morenines.util import find_file
class Ignores(object):
@classmethod
def read(cls, path):
if not path:
path = find_file('.mnignore')
ignores = cls()
if path:
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
<commit_before>import os
from fnmatch import fnmatchcase
import click
class Ignores(object):
@classmethod
def read(cls, path):
ignores = cls()
if path:
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
<commit_msg>Make Ignores try to find '.mnignore'
If it doesn't find it, that's okay, and no action is required.<commit_after>import os
from fnmatch import fnmatchcase
import click
from morenines.util import find_file
class Ignores(object):
@classmethod
def read(cls, path):
if not path:
path = find_file('.mnignore')
ignores = cls()
if path:
with click.open_file(path, 'r') as stream:
ignores.patterns = [line.strip() for line in stream]
return ignores
def __init__(self):
self.patterns = []
def match(self, path):
filename = os.path.basename(path)
if any(fnmatchcase(filename, pattern) for pattern in self.patterns):
return True
else:
return False
|
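The record above imports find_file from morenines.util without showing it. A plausible sketch of such a helper, searching upward from the working directory; this is an assumption for illustration, not the project's actual code:

import os

def find_file(filename):
    # Walk from the current directory toward the filesystem root,
    # returning the first match or None when nothing is found.
    directory = os.getcwd()
    while True:
        candidate = os.path.join(directory, filename)
        if os.path.isfile(candidate):
            return candidate
        parent = os.path.dirname(directory)
        if parent == directory:  # hit the root
            return None
        directory = parent

Returning None fits how Ignores.read() is written: a falsy path simply means no ignore patterns are loaded.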
9a81879bd4eb01be5ed74acfdaf22acb635a9817
|
pikalang/__init__.py
|
pikalang/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""pikalang module.
A brainfuck derivative based off the vocabulary of Pikachu from Pokemon.
Copyright (c) 2019 Blake Grotewold
"""
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""pikalang module.
A brainfuck derivative based off the vocabulary of Pikachu from Pokemon.
Copyright (c) 2019 Blake Grotewold
"""
from __future__ import print_function
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
|
Add proper printing for py2
|
Add proper printing for py2
|
Python
|
mit
|
groteworld/pikalang,grotewold/pikalang
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""pikalang module.
A brainfuck derivative based off the vocabulary of Pikachu from Pokemon.
Copyright (c) 2019 Blake Grotewold
"""
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
Add proper printing for py2
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""pikalang module.
A brainfuck derivative based off the vocabulary of Pikachu from Pokemon.
Copyright (c) 2019 Blake Grotewold
"""
from __future__ import print_function
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""pikalang module.
A brainfuck derivative based off the vocabulary of Pikachu from Pokemon.
Copyright (c) 2019 Blake Grotewold
"""
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
<commit_msg>Add proper printing for py2<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""pikalang module.
A brainfuck derivative based off the vocabulary of Pikachu from Pokemon.
Copyright (c) 2019 Blake Grotewold
"""
from __future__ import print_function
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""pikalang module.
A brainfuck derivative based off the vocabulary of Pikachu from Pokemon.
Copyright (c) 2019 Blake Grotewold
"""
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
Add proper printing for py2
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""pikalang module.
A brainfuck derivative based off the vocabulary of Pikachu from Pokemon.
Copyright (c) 2019 Blake Grotewold
"""
from __future__ import print_function
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""pikalang module.
A brainfuck derivative based off the vocabulary of Pikachu from Pokemon.
Copyright (c) 2019 Blake Grotewold
"""
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
<commit_msg>Add proper printing for py2<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""pikalang module.
A brainfuck derivative based off the vocabulary of Pikachu from Pokemon.
Copyright (c) 2019 Blake Grotewold
"""
from __future__ import print_function
import sys
import os
from pikalang.interpreter import PikalangProgram
def load_source(file):
if os.path.isfile(file):
if os.path.splitext(file)[1] == ".pokeball":
with open(file, "r") as pikalang_file:
pikalang_data = pikalang_file.read()
return pikalang_data
else:
print("pikalang: file is not a pokeball", file=sys.stderr)
return False
else:
print("pikalang: file does not exist", file=sys.stderr)
return False
def evaluate(source):
"""Run Pikalang system."""
program = PikalangProgram(source)
program.run()
|
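Why the record above needs the __future__ import: the module calls print with a file= keyword, and under Python 2 without the import that line is not merely wrong, it is a SyntaxError, because print is a statement there. A minimal demonstration that parses and runs on both interpreters:

from __future__ import print_function
import sys

# Without the import, Python 2 reads
#     print("message", file=sys.stderr)
# as a print *statement* followed by a parenthesized expression, and
# the keyword argument makes the whole line a SyntaxError.
print("diagnostics belong on stderr", file=sys.stderr)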
07ae9397835bc064d0119d2f35b2c1255597ea63
|
dipy/io/tests/test_utils.py
|
dipy/io/tests/test_utils.py
|
from dipy.io.utils import decfa
from nibabel import Nifti1Image
import numpy as np
def test_decfa():
data_orig = np.zeros((4, 4, 4, 3))
data_orig[0, 0, 0] = np.array([1, 0, 0])
img_orig = Nifti1Image(data_orig, np.eye(4))
img_new = decfa(img_orig)
data_new = img_new.get_data()
assert data_new[0, 0, 0] = (1, 0, 0)
|
from dipy.io.utils import decfa
from nibabel import Nifti1Image
import numpy as np
def test_decfa():
data_orig = np.zeros((4, 4, 4, 3))
data_orig[0, 0, 0] = np.array([1, 0, 0])
img_orig = Nifti1Image(data_orig, np.eye(4))
img_new = decfa(img_orig)
data_new = img_new.get_data()
assert data_new[0, 0, 0] == (1, 0, 0)
assert data_new.dtype == np.dtype([('R', 'uint8'), ('G', 'uint8'), ('B', 'uint8')])
|
Test properly, including the dtype.
|
TST: Test properly, including the dtype.
|
Python
|
bsd-3-clause
|
FrancoisRheaultUS/dipy,FrancoisRheaultUS/dipy
|
from dipy.io.utils import decfa
from nibabel import Nifti1Image
import numpy as np
def test_decfa():
data_orig = np.zeros((4, 4, 4, 3))
data_orig[0, 0, 0] = np.array([1, 0, 0])
img_orig = Nifti1Image(data_orig, np.eye(4))
img_new = decfa(img_orig)
data_new = img_new.get_data()
assert data_new[0, 0, 0] = (1, 0, 0)
TST: Test properly, including the dtype.
|
from dipy.io.utils import decfa
from nibabel import Nifti1Image
import numpy as np
def test_decfa():
data_orig = np.zeros((4, 4, 4, 3))
data_orig[0, 0, 0] = np.array([1, 0, 0])
img_orig = Nifti1Image(data_orig, np.eye(4))
img_new = decfa(img_orig)
data_new = img_new.get_data()
assert data_new[0, 0, 0] == (1, 0, 0)
assert data_new.dtype == np.dtype([('R', 'uint8'), ('G', 'uint8'), ('B', 'uint8')])
|
<commit_before>
from dipy.io.utils import decfa
from nibabel import Nifti1Image
import numpy as np
def test_decfa():
data_orig = np.zeros((4, 4, 4, 3))
data_orig[0, 0, 0] = np.array([1, 0, 0])
img_orig = Nifti1Image(data_orig, np.eye(4))
img_new = decfa(img_orig)
data_new = img_new.get_data()
assert data_new[0, 0, 0] = (1, 0, 0)<commit_msg>TST: Test properly, including the dtype.<commit_after>
|
from dipy.io.utils import decfa
from nibabel import Nifti1Image
import numpy as np
def test_decfa():
data_orig = np.zeros((4, 4, 4, 3))
data_orig[0, 0, 0] = np.array([1, 0, 0])
img_orig = Nifti1Image(data_orig, np.eye(4))
img_new = decfa(img_orig)
data_new = img_new.get_data()
assert data_new[0, 0, 0] == (1, 0, 0)
assert data_new.dtype == np.dtype([('R', 'uint8'), ('G', 'uint8'), ('B', 'uint8')])
|
from dipy.io.utils import decfa
from nibabel import Nifti1Image
import numpy as np
def test_decfa():
data_orig = np.zeros((4, 4, 4, 3))
data_orig[0, 0, 0] = np.array([1, 0, 0])
img_orig = Nifti1Image(data_orig, np.eye(4))
img_new = decfa(img_orig)
data_new = img_new.get_data()
assert data_new[0, 0, 0] = (1, 0, 0)
TST: Test properly, including the dtype.
from dipy.io.utils import decfa
from nibabel import Nifti1Image
import numpy as np
def test_decfa():
data_orig = np.zeros((4, 4, 4, 3))
data_orig[0, 0, 0] = np.array([1, 0, 0])
img_orig = Nifti1Image(data_orig, np.eye(4))
img_new = decfa(img_orig)
data_new = img_new.get_data()
assert data_new[0, 0, 0] == (1, 0, 0)
assert data_new.dtype == np.dtype([('R', 'uint8'), ('G', 'uint8'), ('B', 'uint8')])
|
<commit_before>
from dipy.io.utils import decfa
from nibabel import Nifti1Image
import numpy as np
def test_decfa():
data_orig = np.zeros((4, 4, 4, 3))
data_orig[0, 0, 0] = np.array([1, 0, 0])
img_orig = Nifti1Image(data_orig, np.eye(4))
img_new = decfa(img_orig)
data_new = img_new.get_data()
assert data_new[0, 0, 0] = (1, 0, 0)<commit_msg>TST: Test properly, including the dtype.<commit_after>from dipy.io.utils import decfa
from nibabel import Nifti1Image
import numpy as np
def test_decfa():
data_orig = np.zeros((4, 4, 4, 3))
data_orig[0, 0, 0] = np.array([1, 0, 0])
img_orig = Nifti1Image(data_orig, np.eye(4))
img_new = decfa(img_orig)
data_new = img_new.get_data()
assert data_new[0, 0, 0] == (1, 0, 0)
assert data_new.dtype == np.dtype([('R', 'uint8'), ('G', 'uint8'), ('B', 'uint8')])
|
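Two NumPy details the fixed test above relies on: an element of a structured array compares field-wise against a plain tuple, and the array's dtype compares equal to a dtype built from the same field list. A small standalone check mirroring the record's RGB dtype:

import numpy as np

rgb = np.dtype([('R', 'uint8'), ('G', 'uint8'), ('B', 'uint8')])
data = np.zeros((2, 2), dtype=rgb)
data[0, 0] = (1, 0, 0)

# Structured scalar vs. tuple: compared field by field.
assert data[0, 0] == (1, 0, 0)
# dtype equality: same fields, same order, same types.
assert data.dtype == rgb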
9258d026f7782084cd75b78e13872bc6b3f65c8d
|
keras/dtensor/__init__.py
|
keras/dtensor/__init__.py
|
# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = False
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
from tensorflow.compat.v2.experimental import dtensor as dtensor_api # pylint: disable=g-import-not-at-top
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
|
# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = True
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
from tensorflow.compat.v2.experimental import dtensor as dtensor_api # pylint: disable=g-import-not-at-top
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
|
Enable the keras dtensor API in OSS.
|
Enable the keras dtensor API in OSS.
PiperOrigin-RevId: 438858608
|
Python
|
apache-2.0
|
keras-team/keras,keras-team/keras
|
# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = False
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
from tensorflow.compat.v2.experimental import dtensor as dtensor_api # pylint: disable=g-import-not-at-top
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
Enable the keras dtensor API in OSS.
PiperOrigin-RevId: 438858608
|
# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = True
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
from tensorflow.compat.v2.experimental import dtensor as dtensor_api # pylint: disable=g-import-not-at-top
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
|
<commit_before># Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = False
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
from tensorflow.compat.v2.experimental import dtensor as dtensor_api # pylint: disable=g-import-not-at-top
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
<commit_msg>Enable the keras dtensor API in OSS.
PiperOrigin-RevId: 438858608<commit_after>
|
# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = True
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
from tensorflow.compat.v2.experimental import dtensor as dtensor_api # pylint: disable=g-import-not-at-top
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
|
# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = False
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
from tensorflow.compat.v2.experimental import dtensor as dtensor_api # pylint: disable=g-import-not-at-top
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
Enable the keras dtensor API in OSS.
PiperOrigin-RevId: 438858608
# Copyright 2022 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras' DTensor library."""
_DTENSOR_API_ENABLED = True
# Conditional import the dtensor API, since it is currently broken in OSS.
if _DTENSOR_API_ENABLED:
from tensorflow.compat.v2.experimental import dtensor as dtensor_api # pylint: disable=g-import-not-at-top
else:
# Leave it with a placeholder, so that the import line from other python file
# will not break.
dtensor_api = None
|
093127f85f6d8f3f0ef669abfc0ba7cc9778fbe5
|
chef/data_bag.py
|
chef/data_bag.py
|
import abc
import collections
from chef.base import ChefObject, ChefQuery, ChefObjectMeta
class DataBagMeta(ChefObjectMeta, abc.ABCMeta):
"""A metaclass to allow DataBag to use multiple inheritance."""
class DataBag(ChefObject, ChefQuery):
__metaclass__ = DataBagMeta
url = '/data'
def _populate(self, data):
self.obj_class = DataBagItem
self.names = data.keys()
self.parent = self
class DataBagItem(ChefObject, collections.Mapping):
__metaclass__ = DataBagMeta
url = '/data'
def __init__(self, name, api=None, skip_load=False, parent=None):
self.bag = parent
super(DataBagItem, self).__init__(parent.name+'/'+name, api=api, skip_load=skip_load)
self.name = name
def _populate(self, data):
self.data = data
def __len__(self):
return len(self.data)
def __iter__(self):
return iter(self.data)
def __getitem__(self, key):
return self.data[key]
|
import abc
import collections
from chef.base import ChefObject, ChefQuery, ChefObjectMeta
class DataBagMeta(ChefObjectMeta, abc.ABCMeta):
"""A metaclass to allow DataBag to use multiple inheritance."""
class DataBag(ChefObject, ChefQuery):
__metaclass__ = DataBagMeta
url = '/data'
def _populate(self, data):
self.obj_class = DataBagItem
self.names = data.keys()
self.parent = self
class DataBagItem(ChefObject, collections.Mapping):
__metaclass__ = DataBagMeta
url = '/data'
attributes = {
'raw_data': dict,
}
def __init__(self, name, api=None, skip_load=False, parent=None):
self.bag = parent
super(DataBagItem, self).__init__(parent.name+'/'+name, api=api, skip_load=skip_load)
self.name = name
def _populate(self, data):
if 'json_class' in data:
self.raw_data = data['raw_data']
else:
self.raw_data = data
def __len__(self):
return len(self.raw_data)
def __iter__(self):
return iter(self.raw_data)
def __getitem__(self, key):
return self.raw_data[key]
|
Handle both possible JSON formats for data bag items.
|
Handle both possible JSON formats for data bag items.
This won't work if there is an actual data bag
item key called 'json_class', but that would be silly.
|
Python
|
apache-2.0
|
dipakvwarade/pychef,coderanger/pychef,Scalr/pychef,coderanger/pychef,jarosser06/pychef,dipakvwarade/pychef,cread/pychef,Scalr/pychef,jarosser06/pychef,cread/pychef
|
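For context on the commit above: a Chef server can return a data bag item either wrapped in envelope metadata or as the bare attribute mapping. A minimal sketch of the normalization branch (the wrapped shape here is assumed from the commit message, not shown in this record):

# Assumed wrapped form: envelope metadata around the real attributes.
wrapped = {
    "json_class": "Chef::DataBagItem",
    "raw_data": {"id": "item1", "port": 8080},
}
# Bare form: the attributes are the top-level mapping.
bare = {"id": "item1", "port": 8080}

def normalize(data):
    # Same branch as the commit: unwrap only when the marker key is present.
    return data["raw_data"] if "json_class" in data else data

assert normalize(wrapped) == normalize(bare)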
3fec3b367eb406b53238832cf5531901455f7907
|
lit/lit/LitFormats.py
|
lit/lit/LitFormats.py
|
from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
|
from TestFormats import GoogleTest, ShTest, TclTest
from TestFormats import SyntaxCheckTest, OneCommandPerFileTest
|
Test commit (removed extra blank line)
|
Test commit (removed extra blank line)
git-svn-id: a4a6f32337ebd29ad4763b423022f00f68d1c7b7@98988 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
bsd-3-clause
|
lodyagin/bare_cxx,lodyagin/bare_cxx,lodyagin/bare_cxx,lodyagin/bare_cxx,lodyagin/bare_cxx
|
a273342b6e89709fc838dfd6abcee0a525272cea
|
management/admin.py
|
management/admin.py
|
from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from .models import (Location, Permanence, Equipment, Lending, Position,
MembershipType, Membership, PublicFile, PublicImage,
ProtectedFile, ProtectedImage, AdminFile, AdminImage)
class AdminFileInline(GenericTabularInline):
model = AdminFile
class AdminImageInline(GenericTabularInline):
model = AdminImage
class ProtectedFileInline(GenericTabularInline):
model = ProtectedFile
class ProtectedImageInline(GenericTabularInline):
model = ProtectedImage
class PublicFileInline(GenericTabularInline):
model = PublicFile
class PublicImageInline(GenericTabularInline):
model = PublicImage
class MembershipInline(admin.StackedInline):
model = Membership
extra = 0
@admin.register(MembershipType)
class MembershipTypeAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Position)
class PositionAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
admin.site.register(Location)
admin.site.register(Permanence)
admin.site.register(Equipment)
admin.site.register(Lending)
admin.site.register(PublicFile)
admin.site.register(PublicImage)
|
from django.contrib import admin
from django.contrib.contenttypes.admin import GenericTabularInline
from .models import (Location, Permanence, Equipment, Lending, Position,
MembershipType, Membership, PublicFile, PublicImage,
ProtectedFile, ProtectedImage, AdminFile, AdminImage)
class AdminFileInline(GenericTabularInline):
model = AdminFile
class AdminImageInline(GenericTabularInline):
model = AdminImage
class ProtectedFileInline(GenericTabularInline):
model = ProtectedFile
class ProtectedImageInline(GenericTabularInline):
model = ProtectedImage
class PublicFileInline(GenericTabularInline):
model = PublicFile
class PublicImageInline(GenericTabularInline):
model = PublicImage
class MembershipInline(admin.StackedInline):
model = Membership
extra = 0
@admin.register(MembershipType)
class MembershipTypeAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
@admin.register(Position)
class PositionAdmin(admin.ModelAdmin):
class Media:
js = ('tinymce/tinymce.min.js', 'js/tinymce_4_config.js')
admin.site.register(Location)
admin.site.register(Permanence)
admin.site.register(Equipment)
admin.site.register(Lending)
admin.site.register(PublicFile)
admin.site.register(PublicImage)
admin.site.register(ProtectedFile)
admin.site.register(ProtectedImage)
admin.site.register(AdminFile)
admin.site.register(AdminImage)
|
Add ProtectedFile, ProtectedImage, AdminFile and AdminImage.
|
Add ProtectedFile, ProtectedImage, AdminFile and AdminImage.
|
Python
|
mit
|
QSchulz/sportassociation,QSchulz/sportassociation,QSchulz/sportassociation
|
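Worth noting about the record above: the GenericTabularInline subclasses are defined but not yet attached to any ModelAdmin. Wiring one up would look roughly like this hypothetical snippet (it would replace the plain admin.site.register(Equipment) call, since a model cannot be registered twice):

# Hypothetical follow-up, not part of the commit: attach file/image
# inlines to Equipment via a ModelAdmin instead of a bare register().
@admin.register(Equipment)
class EquipmentAdmin(admin.ModelAdmin):
    inlines = [PublicFileInline, PublicImageInline]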
e118ee78b534a83b33f91b27cfc1f75d64e8e924
|
test_utils/testmaker/base_serializer.py
|
test_utils/testmaker/base_serializer.py
|
import cPickle as pickle
import logging
import time
ser = logging.getLogger('testserializer')
class Serializer(object):
"""A pluggable Serializer class"""
name = "default"
def __init__(self, name='default'):
"""Constructor"""
self.data = {}
self.name = name
def save_request(self, request):
"""Saves the Request to the serialization stream"""
request_dict = {
'name': self.name,
'time': time.time(),
'path': request.path,
'get': request.GET,
'post': request.POST,
'arg_dict': request.REQUEST,
}
ser.info(pickle.dumps(request_dict))
ser.info('---REQUEST_BREAK---')
def save_response(self, path, response):
"""Saves the Response-like objects information that might be tested"""
response_dict = {
'name': self.name,
'time': time.time(),
'path': path,
'context': response.context,
'content': response.content,
'status_code': response.status_code,
'cookies': response.cookies,
'headers': response._headers,
}
try:
ser.info(pickle.dumps(response_dict))
ser.info('---RESPONSE_BREAK---')
except (TypeError, pickle.PicklingError):
#Can't pickle wsgi.error objects
pass
|
import cPickle as pickle
import logging
import time
class Serializer(object):
"""A pluggable Serializer class"""
name = "default"
def __init__(self, name='default'):
"""Constructor"""
self.ser = logging.getLogger('testserializer')
self.data = {}
self.name = name
def process_request(self, request):
request_dict = {
'name': self.name,
'time': time.time(),
'path': request.path,
'get': request.GET,
'post': request.POST,
'arg_dict': request.REQUEST,
'method': request.method,
}
return request_dict
def save_request(self, request):
"""Saves the Request to the serialization stream"""
request_dict = self.process_request(request)
self.ser.info(pickle.dumps(request_dict))
self.ser.info('---REQUEST_BREAK---')
def process_response(self, path, response):
response_dict = {
'name': self.name,
'time': time.time(),
'path': path,
'context': response.context,
'content': response.content,
'status_code': response.status_code,
'cookies': response.cookies,
'headers': response._headers,
}
return response_dict
def save_response(self, path, response):
"""Saves the Response-like objects information that might be tested"""
response_dict = self.process_response(path, response)
try:
self.ser.info(pickle.dumps(response_dict))
self.ser.info('---RESPONSE_BREAK---')
except (TypeError, pickle.PicklingError):
#Can't pickle wsgi.error objects
pass
|
Move serializer into the class so it can be subclassed.
|
Move serializer into the class so it can be subclassed.
|
Python
|
mit
|
frac/django-test-utils,acdha/django-test-utils,ericholscher/django-test-utils,frac/django-test-utils,ericholscher/django-test-utils,acdha/django-test-utils
|
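The point of the refactor above is that the logger now lives on the instance and the dict-building is split into process_request/process_response hooks, so a subclass can override just one piece. A sketch of such a subclass (the extra 'meta' field is hypothetical):

class VerboseSerializer(Serializer):
    """Example subclass: record the WSGI environ alongside each request."""

    def process_request(self, request):
        request_dict = super(VerboseSerializer, self).process_request(request)
        request_dict['meta'] = dict(request.META)  # hypothetical extra field
        return request_dict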
b3935065232a97b7eb65c38e5c7bc60570467c71
|
news/urls.py
|
news/urls.py
|
from django.conf.urls import url
from . import views
app_name = 'news'
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^(?P<year>\d{4})/(?P<month>\d{2})/(?P<day>\d{2})/(?P<slug>[-\w]+)/$',
views.ArticleView.as_view(), name='article'),
]
|
from django.urls import include, path
from . import views
app_name = 'news'
urlpatterns = [
path('', views.IndexView.as_view(), name='index'),
path('<int:year>/<int:month>/<int:day>/<slug:slug>/', views.ArticleView.as_view(), name='article'),
]
|
Move news urlpatterns to Django 2.0 preferred method
|
Move news urlpatterns to Django 2.0 preferred method
|
Python
|
mit
|
evanepio/dotmanca,evanepio/dotmanca,evanepio/dotmanca
|
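The conversion rule behind this commit: each named regex group becomes a typed path converter, which also coerces the captured value before it reaches the view. A minimal standalone sketch (the no-op lambda is a stand-in for a real view):

from django.urls import path

# <int:...> and <slug:...> replace the hand-written \d and [-\w] regexes
# and hand the view an int / str instead of raw match strings.
urlpatterns_sketch = [
    path('<int:year>/<int:month>/<int:day>/<slug:slug>/',
         lambda request, **kwargs: None,  # stand-in for a real view
         name='article-sketch'),
]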
910d9e724e1e80d967853b21f553d753c70fefc0
|
noah/noah.py
|
noah/noah.py
|
import json
import random
class Noah(object):
def __init__(self, dictionary_file):
self.dictionary = json.load(dictionary_file)
def list(self):
return '\n'.join([entry['word'] for entry in self.dictionary])
def define(self, word):
entry = next((x for x in self.dictionary if x['word'] == word), None)
if not entry is None:
return '%s (%s)' % (entry['word'], entry['part_of_speech'])
def random(self):
return(random.choice(self.dictionary))
def main():
with open('../dictionaries/english.json') as dictionary:
n = Noah(dictionary)
print n.list()
print n.define('aardvark')
print n.random()
if __name__ == '__main__':
main()
|
import json
import random
import pprint
class Noah(object):
def __init__(self, dictionary_file):
self.dictionary = json.load(dictionary_file)
def list(self):
return '\n'.join([entry['word'] for entry in self.dictionary])
def define(self, word):
return self.output(filter(lambda x: x['word'] == word, self.dictionary))
def random(self):
return self.output(random.choice(self.dictionary))
def output(self, data):
return json.dumps(data, indent=4)
def main():
with open('../dictionaries/english.json') as dictionary:
n = Noah(dictionary)
print n.list()
print n.define('run')
print n.random()
if __name__ == '__main__':
main()
|
Define returns all entries for given query.
|
Define returns all entries for given query.
|
Python
|
mit
|
maxdeviant/noah
|
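One portability caveat for the define() change above: on Python 3, filter() returns an iterator, which json.dumps cannot serialize. A list comprehension keeps the behaviour identical on both interpreter lines (sketch):

def define_portable(self, word):
    # Materialize the matches so json.dumps always sees a list.
    return self.output([x for x in self.dictionary if x['word'] == word])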
418e1e0ba2dcedd20966eea76699eb754eef53b4
|
node/sort.py
|
node/sort.py
|
#!/usr/bin/env python
from nodes import Node
class Sort(Node):
char = "S"
args = 1
results = 1
@Node.test_func([[2,3,4,1]], [[1,2,3,4]])
@Node.test_func(["test"], ["estt"])
def func(self, a: Node.indexable):
"""sorted(a) - returns the same type as given"""
if isinstance(a, tuple):
return [tuple(sorted(a))]
if isinstance(a, str):
return "".join(sorted(a))
return [sorted(a)]
@Node.test_func([3], [[1,2]])
def one_range(self, a:int):
"""range(1,a)"""
return [list(range(1,a))]
|
#!/usr/bin/env python
from nodes import Node
class Sort(Node):
char = "S"
args = 1
results = 1
@Node.test_func([[2,3,4,1]], [[1,2,3,4]])
@Node.test_func(["test"], ["estt"])
def func(self, a: Node.indexable):
"""sorted(a) - returns the same type as given"""
if isinstance(a, tuple):
return [tuple(sorted(a))]
if isinstance(a, str):
return "".join(sorted(a))
return [sorted(a)]
@Node.test_func([3], [[1,2,3]])
def one_range(self, a:int):
"""range(1,a)"""
return [list(range(1,a+1))]
|
Change 1 based range so it counts up to n
|
Change 1 based range so it counts up to n
|
Python
|
mit
|
muddyfish/PYKE,muddyfish/PYKE
|
c246f0e9add0a5b6d7fce9b9e2107671440b5f90
|
mica/starcheck/tests/make_database.py
|
mica/starcheck/tests/make_database.py
|
import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# This still uses the silly find files newer than this other file method, so
# set the time stamp on that reference file
if not os.path.exists(os.path.join(TESTDIR, 'starcheck')):
os.makedirs(os.path.join(TESTDIR, 'starcheck'))
bash("touch -d {} {}".format(DateTime(-15).iso, mica.starcheck.process.FILES['touch_file']))
# And just check that the update script didn't raise any exceptions
mica.starcheck.process.update()
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
|
import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# And just check (implicitly) that the update script didn't raise any exceptions
config = mica.starcheck.process.DEFAULT_CONFIG
config['start'] = DateTime() - 30
mica.starcheck.process.update(config)
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
|
Update test script to use a provided start time
|
Update test script to use a provided start time
|
Python
|
bsd-3-clause
|
sot/mica,sot/mica
|
import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# This still uses the silly find files newer than this other file method, so
# set the time stamp on that reference file
if not os.path.exists(os.path.join(TESTDIR, 'starcheck')):
os.makedirs(os.path.join(TESTDIR, 'starcheck'))
bash("touch -d {} {}".format(DateTime(-15).iso, mica.starcheck.process.FILES['touch_file']))
# And just check that the update script didn't raise any exceptions
mica.starcheck.process.update()
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
Update test script to use a provided start time
|
import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# And just check (implicitly) that the update script didn't raise any exceptions
config = mica.starcheck.process.DEFAULT_CONFIG
config['start'] = DateTime() - 30
mica.starcheck.process.update(config)
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
|
<commit_before>import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# This still uses the silly find files newer than this other file method, so
# set the time stamp on that reference file
if not os.path.exists(os.path.join(TESTDIR, 'starcheck')):
os.makedirs(os.path.join(TESTDIR, 'starcheck'))
bash("touch -d {} {}".format(DateTime(-15).iso, mica.starcheck.process.FILES['touch_file']))
# And just check that the update script didn't raise any exceptions
mica.starcheck.process.update()
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
<commit_msg>Update test script to use a provided start time<commit_after>
|
import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# And just check (implicitly) that the update script didn't raise any exceptions
config = mica.starcheck.process.DEFAULT_CONFIG
config['start'] = DateTime() - 30
mica.starcheck.process.update(config)
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
|
import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# This still uses the silly find files newer than this other file method, so
# set the time stamp on that reference file
if not os.path.exists(os.path.join(TESTDIR, 'starcheck')):
os.makedirs(os.path.join(TESTDIR, 'starcheck'))
bash("touch -d {} {}".format(DateTime(-15).iso, mica.starcheck.process.FILES['touch_file']))
# And just check that the update script didn't raise any exceptions
mica.starcheck.process.update()
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
Update test script to use a provided start timeimport os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# And just check (implicitly) that the update script didn't raise any exceptions
config = mica.starcheck.process.DEFAULT_CONFIG
config['start'] = DateTime() - 30
mica.starcheck.process.update(config)
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
|
<commit_before>import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# This still uses the silly find files newer than this other file method, so
# set the time stamp on that reference file
if not os.path.exists(os.path.join(TESTDIR, 'starcheck')):
os.makedirs(os.path.join(TESTDIR, 'starcheck'))
bash("touch -d {} {}".format(DateTime(-15).iso, mica.starcheck.process.FILES['touch_file']))
# And just check that the update script didn't raise any exceptions
mica.starcheck.process.update()
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
<commit_msg>Update test script to use a provided start time<commit_after>import os
import tempfile
from Chandra.Time import DateTime
from Ska.Shell import bash
import mica.common
# Override MICA_ARCHIVE with a temporary directory
TESTDIR = tempfile.mkdtemp()
mica.common.MICA_ARCHIVE = TESTDIR
# import mica.starcheck.starcheck after setting MICA_ARCHIVE
import mica.starcheck.process
# Just ingest files from the last couple of weeks or so
# And just check (implicitly) that the update script didn't raise any exceptions
config = mica.starcheck.process.DEFAULT_CONFIG
config['start'] = DateTime() - 30
mica.starcheck.process.update(config)
# Cleanup manually
bash("rm -r {}".format(TESTDIR))
|
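A hedged side note on the pattern above, not part of the recorded commit: assigning DEFAULT_CONFIG directly and mutating it leaks the test's 'start' value into any other code that imports the same dict, so a copy-first sketch (same names as the test script, assumed importable) looks like this:

import copy

config = copy.deepcopy(mica.starcheck.process.DEFAULT_CONFIG)
config['start'] = DateTime() - 30
mica.starcheck.process.update(config)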
2fd5f0656434340763cc51c47238d4a40e61789b
|
modernrpc/__init__.py
|
modernrpc/__init__.py
|
# coding: utf-8
from packaging.version import Version
import django
# Set default_app_config only with Django up to 3.1. This prevents a Warning on newer releases
# See https://docs.djangoproject.com/fr/3.2/releases/3.2/#automatic-appconfig-discovery
if Version(django.get_version()) < Version("3.2"):
default_app_config = "modernrpc.apps.ModernRpcConfig"
# Package version is now stored in pyproject.toml only. To retrieve it from code, use:
# import pkg_resources; version = pkg_resources.get_distribution('django-modern-rpc').version
|
# coding: utf-8
from distutils.version import StrictVersion
import django
# distutils.version, overridden by setuptools._distutils.version in recent python releases, is deprecated
# and will be removed in Python 3.12. We will probably drop Django < 3.2 by then, so this should be fine
if StrictVersion(django.get_version()) < StrictVersion("3.2"):
# Set default_app_config only with Django up to 3.1. This prevents a Warning on newer releases
# See https://docs.djangoproject.com/fr/3.2/releases/3.2/#automatic-appconfig-discovery
default_app_config = "modernrpc.apps.ModernRpcConfig"
# Package version is now stored in pyproject.toml only. To retrieve it from code, use:
# import pkg_resources; version = pkg_resources.get_distribution('django-modern-rpc').version
|
Remove unwanted dependency to 'packaging'
|
Remove unwanted dependency to 'packaging'
|
Python
|
mit
|
alorence/django-modern-rpc,alorence/django-modern-rpc
|
# coding: utf-8
from packaging.version import Version
import django
# Set default_app_config only with Django up to 3.1. This prevents a Warning on newer releases
# See https://docs.djangoproject.com/fr/3.2/releases/3.2/#automatic-appconfig-discovery
if Version(django.get_version()) < Version("3.2"):
default_app_config = "modernrpc.apps.ModernRpcConfig"
# Package version is now stored in pyproject.toml only. To retrieve it from code, use:
# import pkg_resources; version = pkg_resources.get_distribution('django-modern-rpc').version
Remove unwanted dependency to 'packaging'
|
# coding: utf-8
from distutils.version import StrictVersion
import django
# distutils.version, overridden by setuptools._distutils.version in recent python releases, is deprecated
# and will be removed in Python 3.12. We will probably drop Django < 3.2 by then, so this should be fine
if StrictVersion(django.get_version()) < StrictVersion("3.2"):
# Set default_app_config only with Django up to 3.1. This prevents a Warning on newer releases
# See https://docs.djangoproject.com/fr/3.2/releases/3.2/#automatic-appconfig-discovery
default_app_config = "modernrpc.apps.ModernRpcConfig"
# Package version is now stored in pyproject.toml only. To retrieve it from code, use:
# import pkg_resources; version = pkg_resources.get_distribution('django-modern-rpc').version
|
<commit_before># coding: utf-8
from packaging.version import Version
import django
# Set default_app_config only with Django up to 3.1. This prevents a Warning on newer releases
# See https://docs.djangoproject.com/fr/3.2/releases/3.2/#automatic-appconfig-discovery
if Version(django.get_version()) < Version("3.2"):
default_app_config = "modernrpc.apps.ModernRpcConfig"
# Package version is now stored in pyproject.toml only. To retrieve it from code, use:
# import pkg_resources; version = pkg_resources.get_distribution('django-modern-rpc').version
<commit_msg>Remove unwanted dependency to 'packaging'<commit_after>
|
# coding: utf-8
from distutils.version import StrictVersion
import django
# distutils.version, overridden by setuptools._distutils.version in recent python releases, is deprecated
# and will be removed in Python 3.12. We will probably drop Django < 3.2 by then, so this should be fine
if StrictVersion(django.get_version()) < StrictVersion("3.2"):
# Set default_app_config only with Django up to 3.1. This prevents a Warning on newer releases
# See https://docs.djangoproject.com/fr/3.2/releases/3.2/#automatic-appconfig-discovery
default_app_config = "modernrpc.apps.ModernRpcConfig"
# Package version is now stored in pyproject.toml only. To retrieve it from code, use:
# import pkg_resources; version = pkg_resources.get_distribution('django-modern-rpc').version
|
# coding: utf-8
from packaging.version import Version
import django
# Set default_app_config only with Django up to 3.1. This prevents a Warning on newer releases
# See https://docs.djangoproject.com/fr/3.2/releases/3.2/#automatic-appconfig-discovery
if Version(django.get_version()) < Version("3.2"):
default_app_config = "modernrpc.apps.ModernRpcConfig"
# Package version is now stored in pyproject.toml only. To retrieve it from code, use:
# import pkg_resources; version = pkg_resources.get_distribution('django-modern-rpc').version
Remove unwanted dependency to 'packaging'# coding: utf-8
from distutils.version import StrictVersion
import django
# distutils.version, overridden by setuptools._distutils.version in recent python releases, is deprecated
# and will be removed in Python 3.12. We will probably drop Django < 3.2 by then, so this should be fine
if StrictVersion(django.get_version()) < StrictVersion("3.2"):
# Set default_app_config only with Django up to 3.1. This prevents a Warning on newer releases
# See https://docs.djangoproject.com/fr/3.2/releases/3.2/#automatic-appconfig-discovery
default_app_config = "modernrpc.apps.ModernRpcConfig"
# Package version is now stored in pyproject.toml only. To retrieve it from code, use:
# import pkg_resources; version = pkg_resources.get_distribution('django-modern-rpc').version
|
<commit_before># coding: utf-8
from packaging.version import Version
import django
# Set default_app_config only with Django up to 3.1. This prevents a Warning on newer releases
# See https://docs.djangoproject.com/fr/3.2/releases/3.2/#automatic-appconfig-discovery
if Version(django.get_version()) < Version("3.2"):
default_app_config = "modernrpc.apps.ModernRpcConfig"
# Package version is now stored in pyproject.toml only. To retrieve it from code, use:
# import pkg_resources; version = pkg_resources.get_distribution('django-modern-rpc').version
<commit_msg>Remove unwanted dependency to 'packaging'<commit_after># coding: utf-8
from distutils.version import StrictVersion
import django
# distutils.version, overridden by setuptools._distutils.version in recent python releases, is deprecated
# and will be removed in Python 3.12. We will probably drop Django < 3.2 by then, so this should be fine
if StrictVersion(django.get_version()) < StrictVersion("3.2"):
# Set default_app_config only with Django up to 3.1. This prevents a Warning on newer releases
# See https://docs.djangoproject.com/fr/3.2/releases/3.2/#automatic-appconfig-discovery
default_app_config = "modernrpc.apps.ModernRpcConfig"
# Package version is now stored in pyproject.toml only. To retrieve it from code, use:
# import pkg_resources; version = pkg_resources.get_distribution('django-modern-rpc').version
|
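A minimal alternative sketch, relying only on a documented Django API rather than either helper: django.VERSION is a tuple such as (3, 2, 0, 'final', 0), so a plain tuple comparison needs neither `packaging` nor the deprecated `distutils.version`.

import django

# Tuple comparison is lexicographic, so (3, 1, ...) < (3, 2) holds.
if django.VERSION < (3, 2):
    default_app_config = "modernrpc.apps.ModernRpcConfig"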
1f76df1fe6b77850f8741b2f52b2509ce204f93f
|
stats-to-datadog.py
|
stats-to-datadog.py
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
sys.argv[2], sys.argv[3]
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
            looplord['amount'],
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
Add stats for each partition.
|
Add stats for each partition.
|
Python
|
mit
|
evertrue/capillary,evertrue/capillary,evertrue/capillary,keenlabs/capillary,evertrue/capillary,keenlabs/capillary,keenlabs/capillary
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
sys.argv[2], sys.argv[3]
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
Add stats for each partition.
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
            looplord['amount'],
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
<commit_before>import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
sys.argv[2], sys.argv[3]
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
<commit_msg>Add stats for each partition.<commit_after>
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
            looplord['amount'],
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
sys.argv[2], sys.argv[3]
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
Add stats for each partition.import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
            looplord['amount'],
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
<commit_before>import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
sys.argv[2], sys.argv[3]
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
<commit_msg>Add stats for each partition.<commit_after>import urllib2
import json
import sys
from statsd import statsd
statsd.connect('localhost', 8125)
topology = sys.argv[1]
toporoot = sys.argv[2]
topic = sys.argv[3]
state = urllib2.urlopen(
"http://localhost:9000/api/status?toporoot={}&topic={}".format(
toporoot, topic
)
).read()
data = json.loads(state)
amount = 0
for looplord in data:
if looplord['amount'] is not None:
statsd.histogram(
'razor.kafkamon.topology.partition',
            looplord['amount'],
tags = [
"topic:{}".format(sys),
"topology:{}".format(topology),
"partition:{}".format(looplord['partition'])
]
)
amount += looplord['amount']
statsd.histogram(
'razor.kafkamon.total_delta',
amount, tags = [ "topology:{}".format(topology) ]
)
|
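For illustration only, a sketch of the same three positional inputs parsed with argparse so each argument is self-documenting; the parser and its argument names are hypothetical, not part of the original script.

import argparse

parser = argparse.ArgumentParser(
    description='Report Kafka consumer lag to Datadog')
parser.add_argument('topology')
parser.add_argument('toporoot')
parser.add_argument('topic')
args = parser.parse_args()
# args.topology, args.toporoot and args.topic stand in for sys.argv[1:4]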
6a8f39104a1a7722ee0a0a2437256dd3c123ab18
|
src/newt/db/tests/base.py
|
src/newt/db/tests/base.py
|
import gc
import sys
PYPY = hasattr(sys, 'pypy_version_info')
from .. import pg_connection
class DBSetup(object):
maxDiff = None
@property
def dsn(self):
return 'postgresql://localhost/' + self.dbname
def setUp(self, call_super=True):
self.dbname = self.__class__.__name__.lower() + '_newt_test_database'
self.base_conn = pg_connection('')
self.base_conn.autocommit = True
self.base_cursor = self.base_conn.cursor()
self.drop_db()
self.base_cursor.execute('create database ' + self.dbname)
self.call_super = call_super
if call_super:
super(DBSetup, self).setUp()
def drop_db(self):
self.base_cursor.execute('drop database if exists ' + self.dbname)
def tearDown(self):
if self.call_super:
super(DBSetup, self).tearDown()
if PYPY:
# Make sure there aren't any leaked connections around
# that would keep us from dropping the DB
# (https://travis-ci.org/newtdb/db/jobs/195267673)
# This needs a fix from RelStorage post 2.0.0.
gc.collect()
gc.collect()
self.drop_db()
self.base_cursor.close()
self.base_conn.close()
|
import gc
import sys
import unittest
PYPY = hasattr(sys, 'pypy_version_info')
from .. import pg_connection
from .._util import closing
class DBSetup(object):
maxDiff = None
@property
def dsn(self):
return 'postgresql://localhost/' + self.dbname
def setUp(self, call_super=True):
self.dbname = self.__class__.__name__.lower() + '_newt_test_database'
self.base_conn = pg_connection('')
self.base_conn.autocommit = True
self.base_cursor = self.base_conn.cursor()
self.drop_db()
self.base_cursor.execute('create database ' + self.dbname)
self.call_super = call_super
if call_super:
super(DBSetup, self).setUp()
def drop_db(self):
self.base_cursor.execute(
"""
select pg_terminate_backend(pid) from pg_stat_activity
where datname = %s
""", (self.dbname,))
self.base_cursor.execute('drop database if exists ' + self.dbname)
def tearDown(self):
if self.call_super:
super(DBSetup, self).tearDown()
if PYPY:
# Make sure there aren't any leaked connections around
# that would keep us from dropping the DB
# (https://travis-ci.org/newtdb/db/jobs/195267673)
# This needs a fix from RelStorage post 2.0.0.
gc.collect()
gc.collect()
self.drop_db()
self.base_cursor.close()
self.base_conn.close()
class TestCase(DBSetup, unittest.TestCase):
pass
|
Make it easier to clean up tests by closing db sessions
|
Make it easier to clean up tests by closing db sessions
Also added a convenience test base class
|
Python
|
mit
|
newtdb/db
|
import gc
import sys
PYPY = hasattr(sys, 'pypy_version_info')
from .. import pg_connection
class DBSetup(object):
maxDiff = None
@property
def dsn(self):
return 'postgresql://localhost/' + self.dbname
def setUp(self, call_super=True):
self.dbname = self.__class__.__name__.lower() + '_newt_test_database'
self.base_conn = pg_connection('')
self.base_conn.autocommit = True
self.base_cursor = self.base_conn.cursor()
self.drop_db()
self.base_cursor.execute('create database ' + self.dbname)
self.call_super = call_super
if call_super:
super(DBSetup, self).setUp()
def drop_db(self):
self.base_cursor.execute('drop database if exists ' + self.dbname)
def tearDown(self):
if self.call_super:
super(DBSetup, self).tearDown()
if PYPY:
# Make sure there aren't any leaked connections around
# that would keep us from dropping the DB
# (https://travis-ci.org/newtdb/db/jobs/195267673)
# This needs a fix from RelStorage post 2.0.0.
gc.collect()
gc.collect()
self.drop_db()
self.base_cursor.close()
self.base_conn.close()
Make it easier to clean up tests by closing db sessions
Also added a convenience test base class
|
import gc
import sys
import unittest
PYPY = hasattr(sys, 'pypy_version_info')
from .. import pg_connection
from .._util import closing
class DBSetup(object):
maxDiff = None
@property
def dsn(self):
return 'postgresql://localhost/' + self.dbname
def setUp(self, call_super=True):
self.dbname = self.__class__.__name__.lower() + '_newt_test_database'
self.base_conn = pg_connection('')
self.base_conn.autocommit = True
self.base_cursor = self.base_conn.cursor()
self.drop_db()
self.base_cursor.execute('create database ' + self.dbname)
self.call_super = call_super
if call_super:
super(DBSetup, self).setUp()
def drop_db(self):
self.base_cursor.execute(
"""
select pg_terminate_backend(pid) from pg_stat_activity
where datname = %s
""", (self.dbname,))
self.base_cursor.execute('drop database if exists ' + self.dbname)
def tearDown(self):
if self.call_super:
super(DBSetup, self).tearDown()
if PYPY:
# Make sure there aren't any leaked connections around
# that would keep us from dropping the DB
# (https://travis-ci.org/newtdb/db/jobs/195267673)
# This needs a fix from RelStorage post 2.0.0.
gc.collect()
gc.collect()
self.drop_db()
self.base_cursor.close()
self.base_conn.close()
class TestCase(DBSetup, unittest.TestCase):
pass
|
<commit_before>import gc
import sys
PYPY = hasattr(sys, 'pypy_version_info')
from .. import pg_connection
class DBSetup(object):
maxDiff = None
@property
def dsn(self):
return 'postgresql://localhost/' + self.dbname
def setUp(self, call_super=True):
self.dbname = self.__class__.__name__.lower() + '_newt_test_database'
self.base_conn = pg_connection('')
self.base_conn.autocommit = True
self.base_cursor = self.base_conn.cursor()
self.drop_db()
self.base_cursor.execute('create database ' + self.dbname)
self.call_super = call_super
if call_super:
super(DBSetup, self).setUp()
def drop_db(self):
self.base_cursor.execute('drop database if exists ' + self.dbname)
def tearDown(self):
if self.call_super:
super(DBSetup, self).tearDown()
if PYPY:
# Make sure there aren't any leaked connections around
# that would keep us from dropping the DB
# (https://travis-ci.org/newtdb/db/jobs/195267673)
# This needs a fix from RelStorage post 2.0.0.
gc.collect()
gc.collect()
self.drop_db()
self.base_cursor.close()
self.base_conn.close()
<commit_msg>Make it easier to clean up tests by closing db sessions
Also added a convenience test base class<commit_after>
|
import gc
import sys
import unittest
PYPY = hasattr(sys, 'pypy_version_info')
from .. import pg_connection
from .._util import closing
class DBSetup(object):
maxDiff = None
@property
def dsn(self):
return 'postgresql://localhost/' + self.dbname
def setUp(self, call_super=True):
self.dbname = self.__class__.__name__.lower() + '_newt_test_database'
self.base_conn = pg_connection('')
self.base_conn.autocommit = True
self.base_cursor = self.base_conn.cursor()
self.drop_db()
self.base_cursor.execute('create database ' + self.dbname)
self.call_super = call_super
if call_super:
super(DBSetup, self).setUp()
def drop_db(self):
self.base_cursor.execute(
"""
select pg_terminate_backend(pid) from pg_stat_activity
where datname = %s
""", (self.dbname,))
self.base_cursor.execute('drop database if exists ' + self.dbname)
def tearDown(self):
if self.call_super:
super(DBSetup, self).tearDown()
if PYPY:
# Make sure there aren't any leaked connections around
# that would keep us from dropping the DB
# (https://travis-ci.org/newtdb/db/jobs/195267673)
# This needs a fix from RelStorage post 2.0.0.
gc.collect()
gc.collect()
self.drop_db()
self.base_cursor.close()
self.base_conn.close()
class TestCase(DBSetup, unittest.TestCase):
pass
|
import gc
import sys
PYPY = hasattr(sys, 'pypy_version_info')
from .. import pg_connection
class DBSetup(object):
maxDiff = None
@property
def dsn(self):
return 'postgresql://localhost/' + self.dbname
def setUp(self, call_super=True):
self.dbname = self.__class__.__name__.lower() + '_newt_test_database'
self.base_conn = pg_connection('')
self.base_conn.autocommit = True
self.base_cursor = self.base_conn.cursor()
self.drop_db()
self.base_cursor.execute('create database ' + self.dbname)
self.call_super = call_super
if call_super:
super(DBSetup, self).setUp()
def drop_db(self):
self.base_cursor.execute('drop database if exists ' + self.dbname)
def tearDown(self):
if self.call_super:
super(DBSetup, self).tearDown()
if PYPY:
# Make sure there aren't any leaked connections around
# that would keep us from dropping the DB
# (https://travis-ci.org/newtdb/db/jobs/195267673)
# This needs a fix from RelStorage post 2.0.0.
gc.collect()
gc.collect()
self.drop_db()
self.base_cursor.close()
self.base_conn.close()
Make it easier to clean up tests by closing db sessions
Also added a convenience test base classimport gc
import sys
import unittest
PYPY = hasattr(sys, 'pypy_version_info')
from .. import pg_connection
from .._util import closing
class DBSetup(object):
maxDiff = None
@property
def dsn(self):
return 'postgresql://localhost/' + self.dbname
def setUp(self, call_super=True):
self.dbname = self.__class__.__name__.lower() + '_newt_test_database'
self.base_conn = pg_connection('')
self.base_conn.autocommit = True
self.base_cursor = self.base_conn.cursor()
self.drop_db()
self.base_cursor.execute('create database ' + self.dbname)
self.call_super = call_super
if call_super:
super(DBSetup, self).setUp()
def drop_db(self):
self.base_cursor.execute(
"""
select pg_terminate_backend(pid) from pg_stat_activity
where datname = %s
""", (self.dbname,))
self.base_cursor.execute('drop database if exists ' + self.dbname)
def tearDown(self):
if self.call_super:
super(DBSetup, self).tearDown()
if PYPY:
# Make sure there aren't any leaked connections around
# that would keep us from dropping the DB
# (https://travis-ci.org/newtdb/db/jobs/195267673)
# This needs a fix from RelStorage post 2.0.0.
gc.collect()
gc.collect()
self.drop_db()
self.base_cursor.close()
self.base_conn.close()
class TestCase(DBSetup, unittest.TestCase):
pass
|
<commit_before>import gc
import sys
PYPY = hasattr(sys, 'pypy_version_info')
from .. import pg_connection
class DBSetup(object):
maxDiff = None
@property
def dsn(self):
return 'postgresql://localhost/' + self.dbname
def setUp(self, call_super=True):
self.dbname = self.__class__.__name__.lower() + '_newt_test_database'
self.base_conn = pg_connection('')
self.base_conn.autocommit = True
self.base_cursor = self.base_conn.cursor()
self.drop_db()
self.base_cursor.execute('create database ' + self.dbname)
self.call_super = call_super
if call_super:
super(DBSetup, self).setUp()
def drop_db(self):
self.base_cursor.execute('drop database if exists ' + self.dbname)
def tearDown(self):
if self.call_super:
super(DBSetup, self).tearDown()
if PYPY:
# Make sure there aren't any leaked connections around
# that would keep us from dropping the DB
# (https://travis-ci.org/newtdb/db/jobs/195267673)
# This needs a fix from RelStorage post 2.0.0.
gc.collect()
gc.collect()
self.drop_db()
self.base_cursor.close()
self.base_conn.close()
<commit_msg>Make it easier to clean up tests by closing db sessions
Also added a convenience test base class<commit_after>import gc
import sys
import unittest
PYPY = hasattr(sys, 'pypy_version_info')
from .. import pg_connection
from .._util import closing
class DBSetup(object):
maxDiff = None
@property
def dsn(self):
return 'postgresql://localhost/' + self.dbname
def setUp(self, call_super=True):
self.dbname = self.__class__.__name__.lower() + '_newt_test_database'
self.base_conn = pg_connection('')
self.base_conn.autocommit = True
self.base_cursor = self.base_conn.cursor()
self.drop_db()
self.base_cursor.execute('create database ' + self.dbname)
self.call_super = call_super
if call_super:
super(DBSetup, self).setUp()
def drop_db(self):
self.base_cursor.execute(
"""
select pg_terminate_backend(pid) from pg_stat_activity
where datname = %s
""", (self.dbname,))
self.base_cursor.execute('drop database if exists ' + self.dbname)
def tearDown(self):
if self.call_super:
super(DBSetup, self).tearDown()
if PYPY:
# Make sure there aren't any leaked connections around
# that would keep us from dropping the DB
# (https://travis-ci.org/newtdb/db/jobs/195267673)
# This needs a fix from RelStorage post 2.0.0.
gc.collect()
gc.collect()
self.drop_db()
self.base_cursor.close()
self.base_conn.close()
class TestCase(DBSetup, unittest.TestCase):
pass
|
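Hypothetical usage of the convenience TestCase added above: each test method gets a freshly created PostgreSQL database, and teardown terminates lingering sessions before dropping it. The pg_connection call and .closed check assume a psycopg2-style connection, as in the module's imports.

class ConnectionSmokeTest(TestCase):

    def test_dsn_is_reachable(self):
        # self.dsn points at the per-test database created in setUp
        conn = pg_connection(self.dsn)
        try:
            self.assertFalse(conn.closed)
        finally:
            conn.close()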
c6b82ffcef179cd9c51f6a98124ea80dbd9d60fd
|
pylua/interpreter.py
|
pylua/interpreter.py
|
from pylua.objspace import ObjectSpace
from pylua.luaframe import LuaBuiltinFrame, SReturnValue
from pylua.helpers import debug_print
from pylua.bytecode import Constant
def m_print(arg):
print(arg)
class Interpreter(object):
def __init__(self, flags, frames):
self.flags = flags
self.frames = frames
self.num_frames = len(frames)
def run(self):
returnvalue = None
space = ObjectSpace()
space.globals['print'] = Constant(f_val=LuaBuiltinFrame(m_print))
while True:
frame_ind = 0
next_frame = self.frames[frame_ind]
frame_ind += 1
returnvalue = next_frame.execute_frame(space)
if frame_ind == self.num_frames or returnvalue is not None:
break
debug_print("Finished intepreting")
return returnvalue
|
from pylua.objspace import ObjectSpace
from pylua.luaframe import LuaBuiltinFrame, SReturnValue
from pylua.helpers import debug_print
from pylua.bytecode import Constant
"""
prints arg to std out
"""
def m_print(arg):
print(arg)
class Interpreter(object):
def __init__(self, flags, frames):
self.flags = flags
self.frames = frames
self.num_frames = len(frames)
def run(self):
returnvalue = None
space = ObjectSpace()
# register a global print function, only works with one argument at the
# moment and is a hack
space.globals['print'] = Constant(f_val=LuaBuiltinFrame(m_print))
while True:
frame_ind = 0
next_frame = self.frames[frame_ind]
frame_ind += 1
returnvalue = next_frame.execute_frame(space)
if frame_ind == self.num_frames or returnvalue is not None:
break
debug_print("Finished intepreting")
return returnvalue
|
Add a comment for my print hack
|
Add a comment for my print hack
|
Python
|
bsd-3-clause
|
fhahn/luna,fhahn/luna
|
from pylua.objspace import ObjectSpace
from pylua.luaframe import LuaBuiltinFrame, SReturnValue
from pylua.helpers import debug_print
from pylua.bytecode import Constant
def m_print(arg):
print(arg)
class Interpreter(object):
def __init__(self, flags, frames):
self.flags = flags
self.frames = frames
self.num_frames = len(frames)
def run(self):
returnvalue = None
space = ObjectSpace()
space.globals['print'] = Constant(f_val=LuaBuiltinFrame(m_print))
while True:
frame_ind = 0
next_frame = self.frames[frame_ind]
frame_ind += 1
returnvalue = next_frame.execute_frame(space)
if frame_ind == self.num_frames or returnvalue is not None:
break
debug_print("Finished intepreting")
return returnvalue
Add a comment for my print hack
|
from pylua.objspace import ObjectSpace
from pylua.luaframe import LuaBuiltinFrame, SReturnValue
from pylua.helpers import debug_print
from pylua.bytecode import Constant
"""
prints arg to std out
"""
def m_print(arg):
print(arg)
class Interpreter(object):
def __init__(self, flags, frames):
self.flags = flags
self.frames = frames
self.num_frames = len(frames)
def run(self):
returnvalue = None
space = ObjectSpace()
# register a global print function, only works with one argument at the
# moment and is a hack
space.globals['print'] = Constant(f_val=LuaBuiltinFrame(m_print))
while True:
frame_ind = 0
next_frame = self.frames[frame_ind]
frame_ind += 1
returnvalue = next_frame.execute_frame(space)
if frame_ind == self.num_frames or returnvalue is not None:
break
debug_print("Finished intepreting")
return returnvalue
|
<commit_before>from pylua.objspace import ObjectSpace
from pylua.luaframe import LuaBuiltinFrame, SReturnValue
from pylua.helpers import debug_print
from pylua.bytecode import Constant
def m_print(arg):
print(arg)
class Interpreter(object):
def __init__(self, flags, frames):
self.flags = flags
self.frames = frames
self.num_frames = len(frames)
def run(self):
returnvalue = None
space = ObjectSpace()
space.globals['print'] = Constant(f_val=LuaBuiltinFrame(m_print))
while True:
frame_ind = 0
next_frame = self.frames[frame_ind]
frame_ind += 1
returnvalue = next_frame.execute_frame(space)
if frame_ind == self.num_frames or returnvalue is not None:
break
debug_print("Finished intepreting")
return returnvalue
<commit_msg>Add a comment for my print hack<commit_after>
|
from pylua.objspace import ObjectSpace
from pylua.luaframe import LuaBuiltinFrame, SReturnValue
from pylua.helpers import debug_print
from pylua.bytecode import Constant
"""
prints arg to std out
"""
def m_print(arg):
print(arg)
class Interpreter(object):
def __init__(self, flags, frames):
self.flags = flags
self.frames = frames
self.num_frames = len(frames)
def run(self):
returnvalue = None
space = ObjectSpace()
# register a global print function, only works with one argument at the
# moment and is a hack
space.globals['print'] = Constant(f_val=LuaBuiltinFrame(m_print))
while True:
frame_ind = 0
next_frame = self.frames[frame_ind]
frame_ind += 1
returnvalue = next_frame.execute_frame(space)
if frame_ind == self.num_frames or returnvalue is not None:
break
debug_print("Finished intepreting")
return returnvalue
|
from pylua.objspace import ObjectSpace
from pylua.luaframe import LuaBuiltinFrame, SReturnValue
from pylua.helpers import debug_print
from pylua.bytecode import Constant
def m_print(arg):
print(arg)
class Interpreter(object):
def __init__(self, flags, frames):
self.flags = flags
self.frames = frames
self.num_frames = len(frames)
def run(self):
returnvalue = None
space = ObjectSpace()
space.globals['print'] = Constant(f_val=LuaBuiltinFrame(m_print))
while True:
frame_ind = 0
next_frame = self.frames[frame_ind]
frame_ind += 1
returnvalue = next_frame.execute_frame(space)
if frame_ind == self.num_frames or returnvalue is not None:
break
debug_print("Finished intepreting")
return returnvalue
Add a comment for my print hackfrom pylua.objspace import ObjectSpace
from pylua.luaframe import LuaBuiltinFrame, SReturnValue
from pylua.helpers import debug_print
from pylua.bytecode import Constant
"""
prints arg to std out
"""
def m_print(arg):
print(arg)
class Interpreter(object):
def __init__(self, flags, frames):
self.flags = flags
self.frames = frames
self.num_frames = len(frames)
def run(self):
returnvalue = None
space = ObjectSpace()
# register a global print function, only works with one argument at the
# moment and is a hack
space.globals['print'] = Constant(f_val=LuaBuiltinFrame(m_print))
while True:
frame_ind = 0
next_frame = self.frames[frame_ind]
frame_ind += 1
returnvalue = next_frame.execute_frame(space)
if frame_ind == self.num_frames or returnvalue is not None:
break
debug_print("Finished intepreting")
return returnvalue
|
<commit_before>from pylua.objspace import ObjectSpace
from pylua.luaframe import LuaBuiltinFrame, SReturnValue
from pylua.helpers import debug_print
from pylua.bytecode import Constant
def m_print(arg):
print(arg)
class Interpreter(object):
def __init__(self, flags, frames):
self.flags = flags
self.frames = frames
self.num_frames = len(frames)
def run(self):
returnvalue = None
space = ObjectSpace()
space.globals['print'] = Constant(f_val=LuaBuiltinFrame(m_print))
while True:
frame_ind = 0
next_frame = self.frames[frame_ind]
frame_ind += 1
returnvalue = next_frame.execute_frame(space)
if frame_ind == self.num_frames or returnvalue is not None:
break
debug_print("Finished intepreting")
return returnvalue
<commit_msg>Add a comment for my print hack<commit_after>from pylua.objspace import ObjectSpace
from pylua.luaframe import LuaBuiltinFrame, SReturnValue
from pylua.helpers import debug_print
from pylua.bytecode import Constant
"""
prints arg to std out
"""
def m_print(arg):
print(arg)
class Interpreter(object):
def __init__(self, flags, frames):
self.flags = flags
self.frames = frames
self.num_frames = len(frames)
def run(self):
returnvalue = None
space = ObjectSpace()
# register a global print function, only works with one argument at the
# moment and is a hack
space.globals['print'] = Constant(f_val=LuaBuiltinFrame(m_print))
while True:
frame_ind = 0
next_frame = self.frames[frame_ind]
frame_ind += 1
returnvalue = next_frame.execute_frame(space)
if frame_ind == self.num_frames or returnvalue is not None:
break
debug_print("Finished intepreting")
return returnvalue
|
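A hedged sketch of reusing the same registration pattern for a second builtin inside run(); m_len is hypothetical and, like the print hack, handles exactly one argument.

def m_len(arg):
    print(len(arg))

# registered next to 'print', assuming the same ObjectSpace instance
space.globals['len'] = Constant(f_val=LuaBuiltinFrame(m_len))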
0128ca2edb48cb58c8a68b4b6e9a8eaeba53518c
|
python/play/dwalk.py
|
python/play/dwalk.py
|
#! /usr/bin/env python
"""dwalk: walk a directory tree, printing entries hierarchically
+ top
| > file
| > file
| + dir
| | > file
| | > file
| | + dir
| | | > file
| | + dir
| | | > file
| | | > file
| + dir
| | + dir
| | | > file"""
import os
import sys
def dwalk(path, header=''):
print header + '+ ' + path
files = []
dirs = []
for e in os.listdir(path):
epath = os.path.join(path, e)
if os.path.isdir(epath):
# print header + '{} is a dir'.format(e)
dirs.append(e)
else:
# print header + '{} is a file'.format(e)
files.append(e)
# print '{} dirs: {}'.format(header, dirs)
# print '{} files: {}'.format(header, files)
for f in sorted(files):
# print header + '| > ' + os.path.join(path, f)
print header + '| > ' + f
for d in sorted(dirs):
dwalk(os.path.join(path,d), header+'| ')
|
#! /usr/bin/env python
# vim: set sw=4 ai et sm:
"""dwalk: walk a directory tree, printing entries hierarchically
+ top
| > file
| > file
| + dir
| | > file
| | > file
| | + dir
| | | > file
| | + dir
| | | > file
| | | > file
| + dir
| | + dir
| | | > file"""
import os
import sys
def dwalk(path, header=''):
print header + '+ ' + path
files = []
dirs = []
for e in os.listdir(path):
epath = os.path.join(path, e)
if os.path.isdir(epath):
# print header + '{} is a dir'.format(e)
dirs.append(e)
else:
# print header + '{} is a file'.format(e)
files.append(e)
# print '{} dirs: {}'.format(header, dirs)
# print '{} files: {}'.format(header, files)
for f in sorted(files):
# print header + '| > ' + os.path.join(path, f)
print header + '| > ' + f
for d in sorted(dirs):
dwalk(os.path.join(path,d), header+'| ')
|
Convert tabs to spaces, and set up vim modeline to expand tabs going forward.
|
Convert tabs to spaces, and set up vim modeline to expand tabs going forward.
|
Python
|
bsd-2-clause
|
tedzo/python_play
|
#! /usr/bin/env python
"""dwalk: walk a directory tree, printing entries hierarchically
+ top
| > file
| > file
| + dir
| | > file
| | > file
| | + dir
| | | > file
| | + dir
| | | > file
| | | > file
| + dir
| | + dir
| | | > file"""
import os
import sys
def dwalk(path, header=''):
print header + '+ ' + path
files = []
dirs = []
for e in os.listdir(path):
epath = os.path.join(path, e)
if os.path.isdir(epath):
# print header + '{} is a dir'.format(e)
dirs.append(e)
else:
# print header + '{} is a file'.format(e)
files.append(e)
# print '{} dirs: {}'.format(header, dirs)
# print '{} files: {}'.format(header, files)
for f in sorted(files):
# print header + '| > ' + os.path.join(path, f)
print header + '| > ' + f
for d in sorted(dirs):
dwalk(os.path.join(path,d), header+'| ')
Convert tabs to spaces, and set up vim modeline to expand tabs going forward.
|
#! /usr/bin/env python
# vim: set sw=4 ai et sm:
"""dwalk: walk a directory tree, printing entries hierarchically
+ top
| > file
| > file
| + dir
| | > file
| | > file
| | + dir
| | | > file
| | + dir
| | | > file
| | | > file
| + dir
| | + dir
| | | > file"""
import os
import sys
def dwalk(path, header=''):
print header + '+ ' + path
files = []
dirs = []
for e in os.listdir(path):
epath = os.path.join(path, e)
if os.path.isdir(epath):
# print header + '{} is a dir'.format(e)
dirs.append(e)
else:
# print header + '{} is a file'.format(e)
files.append(e)
# print '{} dirs: {}'.format(header, dirs)
# print '{} files: {}'.format(header, files)
for f in sorted(files):
# print header + '| > ' + os.path.join(path, f)
print header + '| > ' + f
for d in sorted(dirs):
dwalk(os.path.join(path,d), header+'| ')
|
<commit_before>#! /usr/bin/env python
"""dwalk: walk a directory tree, printing entries hierarchically
+ top
| > file
| > file
| + dir
| | > file
| | > file
| | + dir
| | | > file
| | + dir
| | | > file
| | | > file
| + dir
| | + dir
| | | > file"""
import os
import sys
def dwalk(path, header=''):
print header + '+ ' + path
files = []
dirs = []
for e in os.listdir(path):
epath = os.path.join(path, e)
if os.path.isdir(epath):
# print header + '{} is a dir'.format(e)
dirs.append(e)
else:
# print header + '{} is a file'.format(e)
files.append(e)
# print '{} dirs: {}'.format(header, dirs)
# print '{} files: {}'.format(header, files)
for f in sorted(files):
# print header + '| > ' + os.path.join(path, f)
print header + '| > ' + f
for d in sorted(dirs):
dwalk(os.path.join(path,d), header+'| ')
<commit_msg>Convert tabs to spaces, and set up vim modeline to expand tabs going forward.<commit_after>
|
#! /usr/bin/env python
# vim: set sw=4 ai et sm:
"""dwalk: walk a directory tree, printing entries hierarchically
+ top
| > file
| > file
| + dir
| | > file
| | > file
| | + dir
| | | > file
| | + dir
| | | > file
| | | > file
| + dir
| | + dir
| | | > file"""
import os
import sys
def dwalk(path, header=''):
print header + '+ ' + path
files = []
dirs = []
for e in os.listdir(path):
epath = os.path.join(path, e)
if os.path.isdir(epath):
# print header + '{} is a dir'.format(e)
dirs.append(e)
else:
# print header + '{} is a file'.format(e)
files.append(e)
# print '{} dirs: {}'.format(header, dirs)
# print '{} files: {}'.format(header, files)
for f in sorted(files):
# print header + '| > ' + os.path.join(path, f)
print header + '| > ' + f
for d in sorted(dirs):
dwalk(os.path.join(path,d), header+'| ')
|
#! /usr/bin/env python
"""dwalk: walk a directory tree, printing entries hierarchically
+ top
| > file
| > file
| + dir
| | > file
| | > file
| | + dir
| | | > file
| | + dir
| | | > file
| | | > file
| + dir
| | + dir
| | | > file"""
import os
import sys
def dwalk(path, header=''):
print header + '+ ' + path
files = []
dirs = []
for e in os.listdir(path):
epath = os.path.join(path, e)
if os.path.isdir(epath):
# print header + '{} is a dir'.format(e)
dirs.append(e)
else:
# print header + '{} is a file'.format(e)
files.append(e)
# print '{} dirs: {}'.format(header, dirs)
# print '{} files: {}'.format(header, files)
for f in sorted(files):
# print header + '| > ' + os.path.join(path, f)
print header + '| > ' + f
for d in sorted(dirs):
dwalk(os.path.join(path,d), header+'| ')
Convert tabs to spaces, and set up vim modeline to expand tabs going forward.#! /usr/bin/env python
# vim: set sw=4 ai et sm:
"""dwalk: walk a directory tree, printing entries hierarchically
+ top
| > file
| > file
| + dir
| | > file
| | > file
| | + dir
| | | > file
| | + dir
| | | > file
| | | > file
| + dir
| | + dir
| | | > file"""
import os
import sys
def dwalk(path, header=''):
print header + '+ ' + path
files = []
dirs = []
for e in os.listdir(path):
epath = os.path.join(path, e)
if os.path.isdir(epath):
# print header + '{} is a dir'.format(e)
dirs.append(e)
else:
# print header + '{} is a file'.format(e)
files.append(e)
# print '{} dirs: {}'.format(header, dirs)
# print '{} files: {}'.format(header, files)
for f in sorted(files):
# print header + '| > ' + os.path.join(path, f)
print header + '| > ' + f
for d in sorted(dirs):
dwalk(os.path.join(path,d), header+'| ')
|
<commit_before>#! /usr/bin/env python
"""dwalk: walk a directory tree, printing entries hierarchically
+ top
| > file
| > file
| + dir
| | > file
| | > file
| | + dir
| | | > file
| | + dir
| | | > file
| | | > file
| + dir
| | + dir
| | | > file"""
import os
import sys
def dwalk(path, header=''):
print header + '+ ' + path
files = []
dirs = []
for e in os.listdir(path):
epath = os.path.join(path, e)
if os.path.isdir(epath):
# print header + '{} is a dir'.format(e)
dirs.append(e)
else:
# print header + '{} is a file'.format(e)
files.append(e)
# print '{} dirs: {}'.format(header, dirs)
# print '{} files: {}'.format(header, files)
for f in sorted(files):
# print header + '| > ' + os.path.join(path, f)
print header + '| > ' + f
for d in sorted(dirs):
dwalk(os.path.join(path,d), header+'| ')
<commit_msg>Convert tabs to spaces, and set up vim modeline to expand tabs going forward.<commit_after>#! /usr/bin/env python
# vim: set sw=4 ai et sm:
"""dwalk: walk a directory tree, printing entries hierarchically
+ top
| > file
| > file
| + dir
| | > file
| | > file
| | + dir
| | | > file
| | + dir
| | | > file
| | | > file
| + dir
| | + dir
| | | > file"""
import os
import sys
def dwalk(path, header=''):
print header + '+ ' + path
files = []
dirs = []
for e in os.listdir(path):
epath = os.path.join(path, e)
if os.path.isdir(epath):
# print header + '{} is a dir'.format(e)
dirs.append(e)
else:
# print header + '{} is a file'.format(e)
files.append(e)
# print '{} dirs: {}'.format(header, dirs)
# print '{} files: {}'.format(header, files)
for f in sorted(files):
# print header + '| > ' + os.path.join(path, f)
print header + '| > ' + f
for d in sorted(dirs):
dwalk(os.path.join(path,d), header+'| ')
|
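For comparison, a non-recursive sketch built on os.walk that prints the same kind of tree; the recursive dwalk above remains the canonical version, and this variant assumes only the standard library.

import os

def dwalk_flat(top):
    for root, dirs, files in os.walk(top):
        depth = root[len(top):].count(os.sep)
        header = '| ' * depth
        print(header + '+ ' + root)
        for f in sorted(files):
            print(header + '| > ' + f)
        dirs.sort()  # sort in place so os.walk descends in order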
f1be3f0920bbd270a5906364e77182b67ae4c354
|
rejected/__init__.py
|
rejected/__init__.py
|
"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.7.0"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
|
"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.7.0"
import logging
try:
# not available in python 2.6
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(NullHandler())
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
|
Include a NullHandler to avoid logging warnings
|
Include a NullHandler to avoid logging warnings
|
Python
|
bsd-3-clause
|
gmr/rejected,gmr/rejected
|
"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.7.0"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
Include a NullHandler to avoid logging warnings
|
"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.7.0"
import logging
try:
# not available in python 2.6
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(NullHandler())
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
|
<commit_before>"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.7.0"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
<commit_msg>Include a NullHandler to avoid logging warnings<commit_after>
|
"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.7.0"
import logging
try:
# not available in python 2.6
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(NullHandler())
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
|
"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.7.0"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
Include a NullHandler to avoid logging warnings"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.7.0"
import logging
try:
# not available in python 2.6
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(NullHandler())
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
|
<commit_before>"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.7.0"
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
<commit_msg>Include a NullHandler to avoid logging warnings<commit_after>"""
Rejected is a Python RabbitMQ Consumer Framework and Controller Daemon
"""
__author__ = 'Gavin M. Roy <gavinmroy@gmail.com>'
__since__ = "2009-09-10"
__version__ = "3.7.0"
import logging
try:
# not available in python 2.6
from logging import NullHandler
except ImportError:
class NullHandler(logging.Handler):
def emit(self, record):
pass
# Add NullHandler to prevent logging warnings
logging.getLogger(__name__).addHandler(NullHandler())
from consumer import Consumer
from consumer import PublishingConsumer
from consumer import SmartConsumer
from consumer import SmartPublishingConsumer
from consumer import ConsumerException
from consumer import MessageException
|
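A short usage note, hedged: the NullHandler only suppresses the "no handlers could be found" warning for library users who never configure logging. An application that wants output still opts in as usual:

import logging

logging.basicConfig(level=logging.INFO)
logging.getLogger('rejected').info('records now reach the root handler')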
6e425ca6dfeb668b0cb85dd54e83e3296aec970f
|
logTemps.py
|
logTemps.py
|
######################################################
# logs time, fahrenheit and humidity every 5 minutes
#
######################################################
import time
import HTU21DF
import logging
logging.basicConfig(filename='sampleDay.log',level=logging.DEBUG,format='%(asctime)s\t%(message)s',datefmt='%Y-%m-%d %H:%M:%S')
def celcius_to_fahrenheit(celcius):
return (celcius * 1.8) + 32
while True:
HTU21DF.htu_reset
temp_fahrenheit = celcius_to_fahrenheit(HTU21DF.read_temperature())
humidity = HTU21DF.read_humidity()
logging.info('%f\t%F', temp_fahrenheit, humidity)
time.sleep(300)
|
######################################################
# logs time, fahrenheit and humidity every 5 minutes
#
######################################################
import time
import HTU21DF
import logging
logging.basicConfig(filename='var/log/temperature/temp-humidity.log',level=logging.DEBUG,format='%(asctime)s\t%(message)s',datefmt='%Y-%m-%d %H:%M:%S')
def celcius_to_fahrenheit(celcius):
return (celcius * 1.8) + 32
while True:
HTU21DF.htu_reset
temp_fahrenheit = celcius_to_fahrenheit(HTU21DF.read_temperature())
humidity = HTU21DF.read_humidity()
logging.info('%f\t%F', temp_fahrenheit, humidity)
time.sleep(300)
|
Update to new log location
|
Update to new log location
|
Python
|
mit
|
khuisman/project-cool-attic
|
######################################################
# logs time, fahrenheit and humidity every 5 minutes
#
######################################################
import time
import HTU21DF
import logging
logging.basicConfig(filename='sampleDay.log',level=logging.DEBUG,format='%(asctime)s\t%(message)s',datefmt='%Y-%m-%d %H:%M:%S')
def celcius_to_fahrenheit(celcius):
return (celcius * 1.8) + 32
while True:
HTU21DF.htu_reset
temp_fahrenheit = celcius_to_fahrenheit(HTU21DF.read_temperature())
humidity = HTU21DF.read_humidity()
logging.info('%f\t%F', temp_fahrenheit, humidity)
time.sleep(300)
Update to new log location
|
######################################################
# logs time, fahrenheit and humidity every 5 minutes
#
######################################################
import time
import HTU21DF
import logging
logging.basicConfig(filename='var/log/temperature/temp-humidity.log',level=logging.DEBUG,format='%(asctime)s\t%(message)s',datefmt='%Y-%m-%d %H:%M:%S')
def celcius_to_fahrenheit(celcius):
return (celcius * 1.8) + 32
while True:
HTU21DF.htu_reset
temp_fahrenheit = celcius_to_fahrenheit(HTU21DF.read_temperature())
humidity = HTU21DF.read_humidity()
logging.info('%f\t%F', temp_fahrenheit, humidity)
time.sleep(300)
|
<commit_before>######################################################
# logs time, fahrenheit and humidity every 5 minutes
#
######################################################
import time
import HTU21DF
import logging
logging.basicConfig(filename='sampleDay.log',level=logging.DEBUG,format='%(asctime)s\t%(message)s',datefmt='%Y-%m-%d %H:%M:%S')
def celcius_to_fahrenheit(celcius):
return (celcius * 1.8) + 32
while True:
HTU21DF.htu_reset
temp_fahrenheit = celcius_to_fahrenheit(HTU21DF.read_temperature())
humidity = HTU21DF.read_humidity()
logging.info('%f\t%F', temp_fahrenheit, humidity)
time.sleep(300)
<commit_msg>Update to new log location<commit_after>
|
######################################################
# logs time, fahrenheit and humidity every 5 minutes
#
######################################################
import time
import HTU21DF
import logging
logging.basicConfig(filename='var/log/temperature/temp-humidity.log',level=logging.DEBUG,format='%(asctime)s\t%(message)s',datefmt='%Y-%m-%d %H:%M:%S')
def celcius_to_fahrenheit(celcius):
return (celcius * 1.8) + 32
while True:
HTU21DF.htu_reset
temp_fahrenheit = celcius_to_fahrenheit(HTU21DF.read_temperature())
humidity = HTU21DF.read_humidity()
logging.info('%f\t%F', temp_fahrenheit, humidity)
time.sleep(300)
|
######################################################
# logs time, fahrenheit and humidity every 5 minutes
#
######################################################
import time
import HTU21DF
import logging
logging.basicConfig(filename='sampleDay.log',level=logging.DEBUG,format='%(asctime)s\t%(message)s',datefmt='%Y-%m-%d %H:%M:%S')
def celcius_to_fahrenheit(celcius):
return (celcius * 1.8) + 32
while True:
HTU21DF.htu_reset
temp_fahrenheit = celcius_to_fahrenheit(HTU21DF.read_temperature())
humidity = HTU21DF.read_humidity()
logging.info('%f\t%F', temp_fahrenheit, humidity)
time.sleep(300)
Update to new log location######################################################
# logs time, fahrenheit and humidity every 5 minutes
#
######################################################
import time
import HTU21DF
import logging
logging.basicConfig(filename='var/log/temperature/temp-humidity.log',level=logging.DEBUG,format='%(asctime)s\t%(message)s',datefmt='%Y-%m-%d %H:%M:%S')
def celcius_to_fahrenheit(celcius):
return (celcius * 1.8) + 32
while True:
HTU21DF.htu_reset
temp_fahrenheit = celcius_to_fahrenheit(HTU21DF.read_temperature())
humidity = HTU21DF.read_humidity()
logging.info('%f\t%F', temp_fahrenheit, humidity)
time.sleep(300)
|
<commit_before>######################################################
# logs time, fahrenheit and humidity every 5 minutes
#
######################################################
import time
import HTU21DF
import logging
logging.basicConfig(filename='sampleDay.log',level=logging.DEBUG,format='%(asctime)s\t%(message)s',datefmt='%Y-%m-%d %H:%M:%S')
def celcius_to_fahrenheit(celcius):
return (celcius * 1.8) + 32
while True:
HTU21DF.htu_reset
temp_fahrenheit = celcius_to_fahrenheit(HTU21DF.read_temperature())
humidity = HTU21DF.read_humidity()
logging.info('%f\t%F', temp_fahrenheit, humidity)
time.sleep(300)
<commit_msg>Update to new log location<commit_after>######################################################
# logs time, fahrenheit and humidity every 5 minutes
#
######################################################
import time
import HTU21DF
import logging
logging.basicConfig(filename='var/log/temperature/temp-humidity.log',level=logging.DEBUG,format='%(asctime)s\t%(message)s',datefmt='%Y-%m-%d %H:%M:%S')
def celcius_to_fahrenheit(celcius):
return (celcius * 1.8) + 32
while True:
HTU21DF.htu_reset
temp_fahrenheit = celcius_to_fahrenheit(HTU21DF.read_temperature())
humidity = HTU21DF.read_humidity()
logging.info('%f\t%F', temp_fahrenheit, humidity)
time.sleep(300)
|
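Two caveats are worth noting in the record above. First, HTU21DF.htu_reset lacks parentheses, so that line only references the function and never resets the sensor. Second, the new filename 'var/log/temperature/temp-humidity.log' is a relative path (no leading slash), and logging.basicConfig raises IOError when the target directory is missing. A hedged sketch of a more defensive setup, assuming the intended path is the absolute /var/log/temperature/ and that the script is allowed to create it:

import errno
import logging
import os

LOG_PATH = '/var/log/temperature/temp-humidity.log'  # assumed absolute path

# basicConfig() cannot create directories, so make sure the parent exists.
try:
    os.makedirs(os.path.dirname(LOG_PATH))
except OSError as exc:
    if exc.errno != errno.EEXIST:  # tolerate "already exists" only
        raise

logging.basicConfig(filename=LOG_PATH, level=logging.DEBUG,
                    format='%(asctime)s\t%(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S')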
85de2a0bb8727583fef61fdadcca6bb3e649a454
|
apps/addons/api/views.py
|
apps/addons/api/views.py
|
from rest_framework import generics, serializers
from rest_framework.response import Response
from waffle.decorators import waffle_switch
import amo
from addons.models import Addon
class AddonSerializer(serializers.ModelSerializer):
addon_type = serializers.SerializerMethodField('get_addon_type')
description = serializers.CharField()
icons = serializers.SerializerMethodField('get_icons')
name = serializers.CharField()
rating = serializers.FloatField(source='average_rating')
summary = serializers.CharField()
class Meta:
model = Addon
fields = [
'addon_type',
'description',
'icons',
'id',
'guid',
'name',
'rating',
'slug',
'summary',
]
def get_addon_type(self, instance):
return unicode(amo.ADDON_TYPE[instance.type])
def get_icons(self, instance):
return {
'32': instance.get_icon_url(32),
'64': instance.get_icon_url(64),
}
class SearchView(generics.RetrieveAPIView):
serializer_class = AddonSerializer
@waffle_switch('frontend-prototype')
def retrieve(self, request, *args, **kwargs):
queryset = Addon.objects.filter(type__in=amo.GROUP_TYPE_ADDON)
if 'q' in request.GET:
queryset = queryset.filter(slug__contains=request.GET['q'])
serializer = self.get_serializer(queryset[:20], many=True)
return Response(serializer.data)
|
from rest_framework import generics, serializers
from rest_framework.response import Response
from waffle.decorators import waffle_switch
import amo
from addons.models import Addon
class AddonSerializer(serializers.ModelSerializer):
addon_type = serializers.SerializerMethodField('get_addon_type')
description = serializers.CharField()
icons = serializers.SerializerMethodField('get_icons')
name = serializers.CharField()
rating = serializers.FloatField(source='average_rating')
summary = serializers.CharField()
class Meta:
model = Addon
fields = [
'addon_type',
'description',
'icons',
'id',
'guid',
'name',
'rating',
'slug',
'summary',
]
def get_addon_type(self, instance):
return unicode(amo.ADDON_TYPE[instance.type])
def get_icons(self, instance):
return {
'32': instance.get_icon_url(32),
'64': instance.get_icon_url(64),
}
class SearchView(generics.RetrieveAPIView):
serializer_class = AddonSerializer
@waffle_switch('frontend-prototype')
def retrieve(self, request, *args, **kwargs):
queryset = Addon.objects.filter(type__in=amo.GROUP_TYPE_ADDON)
if 'q' in request.GET:
queryset = queryset.filter(slug__contains=request.GET['q'])
serializer = self.get_serializer(queryset[:20], many=True)
return Response(serializer.data, headers={
'Access-Control-Allow-Origin': '*',
})
|
Set CORS header for add-on search API
|
Set CORS header for add-on search API
|
Python
|
bsd-3-clause
|
eviljeff/olympia,harry-7/addons-server,mozilla/olympia,Prashant-Surya/addons-server,mozilla/addons-server,atiqueahmedziad/addons-server,mozilla/olympia,harikishen/addons-server,mozilla/addons-server,bqbn/addons-server,mstriemer/olympia,jpetto/olympia,Prashant-Surya/addons-server,psiinon/addons-server,mozilla/addons-server,harikishen/addons-server,kumar303/olympia,mdaif/olympia,jpetto/olympia,wagnerand/addons-server,kumar303/olympia,kumar303/addons-server,tsl143/addons-server,mstriemer/olympia,bqbn/addons-server,wagnerand/addons-server,jpetto/olympia,diox/olympia,Revanth47/addons-server,kumar303/olympia,kumar303/addons-server,wagnerand/addons-server,wagnerand/olympia,tsl143/addons-server,eviljeff/olympia,kumar303/addons-server,harry-7/addons-server,andymckay/addons-server,wagnerand/addons-server,harikishen/addons-server,andymckay/olympia,eviljeff/olympia,mstriemer/addons-server,wagnerand/olympia,psiinon/addons-server,andymckay/addons-server,aviarypl/mozilla-l10n-addons-server,bqbn/addons-server,mstriemer/addons-server,andymckay/addons-server,harikishen/addons-server,lavish205/olympia,mdaif/olympia,psiinon/addons-server,mstriemer/olympia,wagnerand/olympia,bqbn/addons-server,kumar303/olympia,harry-7/addons-server,Revanth47/addons-server,atiqueahmedziad/addons-server,Prashant-Surya/addons-server,harry-7/addons-server,mozilla/olympia,aviarypl/mozilla-l10n-addons-server,andymckay/olympia,andymckay/olympia,atiqueahmedziad/addons-server,mstriemer/addons-server,aviarypl/mozilla-l10n-addons-server,mstriemer/addons-server,Prashant-Surya/addons-server,lavish205/olympia,kumar303/addons-server,eviljeff/olympia,mstriemer/olympia,lavish205/olympia,mozilla/olympia,psiinon/addons-server,jpetto/olympia,diox/olympia,Revanth47/addons-server,mdaif/olympia,aviarypl/mozilla-l10n-addons-server,tsl143/addons-server,lavish205/olympia,mozilla/addons-server,Revanth47/addons-server,mdaif/olympia,andymckay/addons-server,tsl143/addons-server,diox/olympia,wagnerand/olympia,mdaif/olympia,atiqueahmedziad/addons-server,diox/olympia,andymckay/olympia
|
from rest_framework import generics, serializers
from rest_framework.response import Response
from waffle.decorators import waffle_switch
import amo
from addons.models import Addon
class AddonSerializer(serializers.ModelSerializer):
addon_type = serializers.SerializerMethodField('get_addon_type')
description = serializers.CharField()
icons = serializers.SerializerMethodField('get_icons')
name = serializers.CharField()
rating = serializers.FloatField(source='average_rating')
summary = serializers.CharField()
class Meta:
model = Addon
fields = [
'addon_type',
'description',
'icons',
'id',
'guid',
'name',
'rating',
'slug',
'summary',
]
def get_addon_type(self, instance):
return unicode(amo.ADDON_TYPE[instance.type])
def get_icons(self, instance):
return {
'32': instance.get_icon_url(32),
'64': instance.get_icon_url(64),
}
class SearchView(generics.RetrieveAPIView):
serializer_class = AddonSerializer
@waffle_switch('frontend-prototype')
def retrieve(self, request, *args, **kwargs):
queryset = Addon.objects.filter(type__in=amo.GROUP_TYPE_ADDON)
if 'q' in request.GET:
queryset = queryset.filter(slug__contains=request.GET['q'])
serializer = self.get_serializer(queryset[:20], many=True)
return Response(serializer.data)
Set CORS header for add-on search API
|
from rest_framework import generics, serializers
from rest_framework.response import Response
from waffle.decorators import waffle_switch
import amo
from addons.models import Addon
class AddonSerializer(serializers.ModelSerializer):
addon_type = serializers.SerializerMethodField('get_addon_type')
description = serializers.CharField()
icons = serializers.SerializerMethodField('get_icons')
name = serializers.CharField()
rating = serializers.FloatField(source='average_rating')
summary = serializers.CharField()
class Meta:
model = Addon
fields = [
'addon_type',
'description',
'icons',
'id',
'guid',
'name',
'rating',
'slug',
'summary',
]
def get_addon_type(self, instance):
return unicode(amo.ADDON_TYPE[instance.type])
def get_icons(self, instance):
return {
'32': instance.get_icon_url(32),
'64': instance.get_icon_url(64),
}
class SearchView(generics.RetrieveAPIView):
serializer_class = AddonSerializer
@waffle_switch('frontend-prototype')
def retrieve(self, request, *args, **kwargs):
queryset = Addon.objects.filter(type__in=amo.GROUP_TYPE_ADDON)
if 'q' in request.GET:
queryset = queryset.filter(slug__contains=request.GET['q'])
serializer = self.get_serializer(queryset[:20], many=True)
return Response(serializer.data, headers={
'Access-Control-Allow-Origin': '*',
})
|
<commit_before>from rest_framework import generics, serializers
from rest_framework.response import Response
from waffle.decorators import waffle_switch
import amo
from addons.models import Addon
class AddonSerializer(serializers.ModelSerializer):
addon_type = serializers.SerializerMethodField('get_addon_type')
description = serializers.CharField()
icons = serializers.SerializerMethodField('get_icons')
name = serializers.CharField()
rating = serializers.FloatField(source='average_rating')
summary = serializers.CharField()
class Meta:
model = Addon
fields = [
'addon_type',
'description',
'icons',
'id',
'guid',
'name',
'rating',
'slug',
'summary',
]
def get_addon_type(self, instance):
return unicode(amo.ADDON_TYPE[instance.type])
def get_icons(self, instance):
return {
'32': instance.get_icon_url(32),
'64': instance.get_icon_url(64),
}
class SearchView(generics.RetrieveAPIView):
serializer_class = AddonSerializer
@waffle_switch('frontend-prototype')
def retrieve(self, request, *args, **kwargs):
queryset = Addon.objects.filter(type__in=amo.GROUP_TYPE_ADDON)
if 'q' in request.GET:
queryset = queryset.filter(slug__contains=request.GET['q'])
serializer = self.get_serializer(queryset[:20], many=True)
return Response(serializer.data)
<commit_msg>Set CORS header for add-on search API<commit_after>
|
from rest_framework import generics, serializers
from rest_framework.response import Response
from waffle.decorators import waffle_switch
import amo
from addons.models import Addon
class AddonSerializer(serializers.ModelSerializer):
addon_type = serializers.SerializerMethodField('get_addon_type')
description = serializers.CharField()
icons = serializers.SerializerMethodField('get_icons')
name = serializers.CharField()
rating = serializers.FloatField(source='average_rating')
summary = serializers.CharField()
class Meta:
model = Addon
fields = [
'addon_type',
'description',
'icons',
'id',
'guid',
'name',
'rating',
'slug',
'summary',
]
def get_addon_type(self, instance):
return unicode(amo.ADDON_TYPE[instance.type])
def get_icons(self, instance):
return {
'32': instance.get_icon_url(32),
'64': instance.get_icon_url(64),
}
class SearchView(generics.RetrieveAPIView):
serializer_class = AddonSerializer
@waffle_switch('frontend-prototype')
def retrieve(self, request, *args, **kwargs):
queryset = Addon.objects.filter(type__in=amo.GROUP_TYPE_ADDON)
if 'q' in request.GET:
queryset = queryset.filter(slug__contains=request.GET['q'])
serializer = self.get_serializer(queryset[:20], many=True)
return Response(serializer.data, headers={
'Access-Control-Allow-Origin': '*',
})
|
from rest_framework import generics, serializers
from rest_framework.response import Response
from waffle.decorators import waffle_switch
import amo
from addons.models import Addon
class AddonSerializer(serializers.ModelSerializer):
addon_type = serializers.SerializerMethodField('get_addon_type')
description = serializers.CharField()
icons = serializers.SerializerMethodField('get_icons')
name = serializers.CharField()
rating = serializers.FloatField(source='average_rating')
summary = serializers.CharField()
class Meta:
model = Addon
fields = [
'addon_type',
'description',
'icons',
'id',
'guid',
'name',
'rating',
'slug',
'summary',
]
def get_addon_type(self, instance):
return unicode(amo.ADDON_TYPE[instance.type])
def get_icons(self, instance):
return {
'32': instance.get_icon_url(32),
'64': instance.get_icon_url(64),
}
class SearchView(generics.RetrieveAPIView):
serializer_class = AddonSerializer
@waffle_switch('frontend-prototype')
def retrieve(self, request, *args, **kwargs):
queryset = Addon.objects.filter(type__in=amo.GROUP_TYPE_ADDON)
if 'q' in request.GET:
queryset = queryset.filter(slug__contains=request.GET['q'])
serializer = self.get_serializer(queryset[:20], many=True)
return Response(serializer.data)
Set CORS header for add-on search APIfrom rest_framework import generics, serializers
from rest_framework.response import Response
from waffle.decorators import waffle_switch
import amo
from addons.models import Addon
class AddonSerializer(serializers.ModelSerializer):
addon_type = serializers.SerializerMethodField('get_addon_type')
description = serializers.CharField()
icons = serializers.SerializerMethodField('get_icons')
name = serializers.CharField()
rating = serializers.FloatField(source='average_rating')
summary = serializers.CharField()
class Meta:
model = Addon
fields = [
'addon_type',
'description',
'icons',
'id',
'guid',
'name',
'rating',
'slug',
'summary',
]
def get_addon_type(self, instance):
return unicode(amo.ADDON_TYPE[instance.type])
def get_icons(self, instance):
return {
'32': instance.get_icon_url(32),
'64': instance.get_icon_url(64),
}
class SearchView(generics.RetrieveAPIView):
serializer_class = AddonSerializer
@waffle_switch('frontend-prototype')
def retrieve(self, request, *args, **kwargs):
queryset = Addon.objects.filter(type__in=amo.GROUP_TYPE_ADDON)
if 'q' in request.GET:
queryset = queryset.filter(slug__contains=request.GET['q'])
serializer = self.get_serializer(queryset[:20], many=True)
return Response(serializer.data, headers={
'Access-Control-Allow-Origin': '*',
})
|
<commit_before>from rest_framework import generics, serializers
from rest_framework.response import Response
from waffle.decorators import waffle_switch
import amo
from addons.models import Addon
class AddonSerializer(serializers.ModelSerializer):
addon_type = serializers.SerializerMethodField('get_addon_type')
description = serializers.CharField()
icons = serializers.SerializerMethodField('get_icons')
name = serializers.CharField()
rating = serializers.FloatField(source='average_rating')
summary = serializers.CharField()
class Meta:
model = Addon
fields = [
'addon_type',
'description',
'icons',
'id',
'guid',
'name',
'rating',
'slug',
'summary',
]
def get_addon_type(self, instance):
return unicode(amo.ADDON_TYPE[instance.type])
def get_icons(self, instance):
return {
'32': instance.get_icon_url(32),
'64': instance.get_icon_url(64),
}
class SearchView(generics.RetrieveAPIView):
serializer_class = AddonSerializer
@waffle_switch('frontend-prototype')
def retrieve(self, request, *args, **kwargs):
queryset = Addon.objects.filter(type__in=amo.GROUP_TYPE_ADDON)
if 'q' in request.GET:
queryset = queryset.filter(slug__contains=request.GET['q'])
serializer = self.get_serializer(queryset[:20], many=True)
return Response(serializer.data)
<commit_msg>Set CORS header for add-on search API<commit_after>from rest_framework import generics, serializers
from rest_framework.response import Response
from waffle.decorators import waffle_switch
import amo
from addons.models import Addon
class AddonSerializer(serializers.ModelSerializer):
addon_type = serializers.SerializerMethodField('get_addon_type')
description = serializers.CharField()
icons = serializers.SerializerMethodField('get_icons')
name = serializers.CharField()
rating = serializers.FloatField(source='average_rating')
summary = serializers.CharField()
class Meta:
model = Addon
fields = [
'addon_type',
'description',
'icons',
'id',
'guid',
'name',
'rating',
'slug',
'summary',
]
def get_addon_type(self, instance):
return unicode(amo.ADDON_TYPE[instance.type])
def get_icons(self, instance):
return {
'32': instance.get_icon_url(32),
'64': instance.get_icon_url(64),
}
class SearchView(generics.RetrieveAPIView):
serializer_class = AddonSerializer
@waffle_switch('frontend-prototype')
def retrieve(self, request, *args, **kwargs):
queryset = Addon.objects.filter(type__in=amo.GROUP_TYPE_ADDON)
if 'q' in request.GET:
queryset = queryset.filter(slug__contains=request.GET['q'])
serializer = self.get_serializer(queryset[:20], many=True)
return Response(serializer.data, headers={
'Access-Control-Allow-Origin': '*',
})
|
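The fix above attaches the CORS header to a single response, which is enough for simple GETs from a prototype frontend; note that it does not answer OPTIONS preflight requests, so non-simple requests would still be blocked by the browser. A standalone sketch of the same per-response pattern in Django REST framework (the PingView name and payload are illustrative, not part of olympia):

from rest_framework.response import Response
from rest_framework.views import APIView


class PingView(APIView):
    """Minimal view showing a per-response CORS header."""

    def get(self, request, *args, **kwargs):
        # DRF copies the `headers` mapping onto the underlying
        # HttpResponse, so the browser receives the header verbatim.
        return Response({'pong': True},
                        headers={'Access-Control-Allow-Origin': '*'})

For anything beyond a prototype, middleware such as django-cors-headers centralizes this policy instead of repeating the header in every view.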
6ba1d1805a65ff7e07b795ed7b54fc3375a1e3e4
|
main_AWS.py
|
main_AWS.py
|
def process_single_user(username, password):
try:
lingo = duolingo.Duolingo(username, password)
except ValueError:
raise Exception("Username Invalid")
print("Trying to Buy Streak Freeze for " + username)
if(lingo.buy_streak_freeze()):
print("Bought streak freeze for " + username)
else:
print("Unable to buy streak freeze")
try:
print("Trying to Buy Double or nothing for " + username)
lingo.buy_item('rupee_wager', 'en')
print("Bought Double or nothing for " + username)
except:
print("Unable to buy double or nothing")
def main(a, b):
import duolingo, os
usernames = os.environ['usernames'].split(',')
passwords = os.environ['passwords'].split(',')
list(map(process_single_user, usernames, passwords))
|
def process_single_user(username, password):
import duolingo
try:
lingo = duolingo.Duolingo(username, password)
except ValueError:
raise Exception("Username Invalid")
stuff_to_purchase = ['streak_freeze', 'rupee_wager']
for item in stuff_to_purchase:
try:
print("Trying to Buy " + item + " for " + username)
lingo.buy_item(item, 'en')
print("Bought " + item + " for " + username)
except duolingo.AlreadyHaveStoreItemException:
print("Item Already Equipped")
except Exception:
raise ValueError("Unable to buy double or nothing")
def main(a, b):
import duolingo, os
usernames = os.environ['usernames'].split(',')
passwords = os.environ['passwords'].split(',')
list(map(process_single_user, usernames, passwords))
|
Update to duolingo for the API
|
Update to duolingo for the API
|
Python
|
mit
|
alexsanjoseph/duolingo-save-streak,alexsanjoseph/duolingo-save-streak
|
def process_single_user(username, password):
try:
lingo = duolingo.Duolingo(username, password)
except ValueError:
raise Exception("Username Invalid")
print("Trying to Buy Streak Freeze for " + username)
if(lingo.buy_streak_freeze()):
print("Bought streak freeze for " + username)
else:
print("Unable to buy streak freeze")
try:
print("Trying to Buy Double or nothing for " + username)
lingo.buy_item('rupee_wager', 'en')
print("Bought Double or nothing for " + username)
except:
print("Unable to buy double or nothing")
def main(a, b):
import duolingo, os
usernames = os.environ['usernames'].split(',')
passwords = os.environ['passwords'].split(',')
list(map(process_single_user, usernames, passwords))
Update to duolingo for the API
|
def process_single_user(username, password):
import duolingo
try:
lingo = duolingo.Duolingo(username, password)
except ValueError:
raise Exception("Username Invalid")
stuff_to_purchase = ['streak_freeze', 'rupee_wager']
for item in stuff_to_purchase:
try:
print("Trying to Buy " + item + " for " + username)
lingo.buy_item(item, 'en')
print("Bought " + item + " for " + username)
except duolingo.AlreadyHaveStoreItemException:
print("Item Already Equipped")
except Exception:
raise ValueError("Unable to buy double or nothing")
def main(a, b):
import duolingo, os
usernames = os.environ['usernames'].split(',')
passwords = os.environ['passwords'].split(',')
list(map(process_single_user, usernames, passwords))
|
<commit_before>def process_single_user(username, password):
try:
lingo = duolingo.Duolingo(username, password)
except ValueError:
raise Exception("Username Invalid")
print("Trying to Buy Streak Freeze for " + username)
if(lingo.buy_streak_freeze()):
print("Bought streak freeze for " + username)
else:
print("Unable to buy streak freeze")
try:
print("Trying to Buy Double or nothing for " + username)
lingo.buy_item('rupee_wager', 'en')
print("Bought Double or nothing for " + username)
except:
print("Unable to buy double or nothing")
def main(a, b):
import duolingo, os
usernames = os.environ['usernames'].split(',')
passwords = os.environ['passwords'].split(',')
list(map(process_single_user, usernames, passwords))
<commit_msg>Update to duolingo for the API<commit_after>
|
def process_single_user(username, password):
import duolingo
try:
lingo = duolingo.Duolingo(username, password)
except ValueError:
raise Exception("Username Invalid")
stuff_to_purchase = ['streak_freeze', 'rupee_wager']
for item in stuff_to_purchase:
try:
print("Trying to Buy " + item + " for " + username)
lingo.buy_item(item, 'en')
print("Bought " + item + " for " + username)
except duolingo.AlreadyHaveStoreItemException:
print("Item Already Equipped")
except Exception:
raise ValueError("Unable to buy double or nothing")
def main(a, b):
import duolingo, os
usernames = os.environ['usernames'].split(',')
passwords = os.environ['passwords'].split(',')
list(map(process_single_user, usernames, passwords))
|
def process_single_user(username, password):
try:
lingo = duolingo.Duolingo(username, password)
except ValueError:
raise Exception("Username Invalid")
print("Trying to Buy Streak Freeze for " + username)
if(lingo.buy_streak_freeze()):
print("Bought streak freeze for " + username)
else:
print("Unable to buy streak freeze")
try:
print("Trying to Buy Double or nothing for " + username)
lingo.buy_item('rupee_wager', 'en')
print("Bought Double or nothing for " + username)
except:
print("Unable to buy double or nothing")
def main(a, b):
import duolingo, os
usernames = os.environ['usernames'].split(',')
passwords = os.environ['passwords'].split(',')
list(map(process_single_user, usernames, passwords))
Update to duolingo for the APIdef process_single_user(username, password):
import duolingo
try:
lingo = duolingo.Duolingo(username, password)
except ValueError:
raise Exception("Username Invalid")
stuff_to_purchase = ['streak_freeze', 'rupee_wager']
for item in stuff_to_purchase:
try:
print("Trying to Buy " + item + " for " + username)
lingo.buy_item(item, 'en')
print("Bought " + item + " for " + username)
except duolingo.AlreadyHaveStoreItemException:
print("Item Already Equipped")
except Exception:
raise ValueError("Unable to buy double or nothing")
def main(a, b):
import duolingo, os
usernames = os.environ['usernames'].split(',')
passwords = os.environ['passwords'].split(',')
list(map(process_single_user, usernames, passwords))
|
<commit_before>def process_single_user(username, password):
try:
lingo = duolingo.Duolingo(username, password)
except ValueError:
raise Exception("Username Invalid")
print("Trying to Buy Streak Freeze for " + username)
if(lingo.buy_streak_freeze()):
print("Bought streak freeze for " + username)
else:
print("Unable to buy streak freeze")
try:
print("Trying to Buy Double or nothing for " + username)
lingo.buy_item('rupee_wager', 'en')
print("Bought Double or nothing for " + username)
except:
print("Unable to buy double or nothing")
def main(a, b):
import duolingo, os
usernames = os.environ['usernames'].split(',')
passwords = os.environ['passwords'].split(',')
list(map(process_single_user, usernames, passwords))
<commit_msg>Update to duolingo for the API<commit_after>def process_single_user(username, password):
import duolingo
try:
lingo = duolingo.Duolingo(username, password)
except ValueError:
raise Exception("Username Invalid")
stuff_to_purchase = ['streak_freeze', 'rupee_wager']
for item in stuff_to_purchase:
try:
print("Trying to Buy " + item + " for " + username)
lingo.buy_item(item, 'en')
print("Bought " + item + " for " + username)
except duolingo.AlreadyHaveStoreItemException:
print("Item Already Equipped")
except Exception:
raise ValueError("Unable to buy double or nothing")
def main(a, b):
import duolingo, os
usernames = os.environ['usernames'].split(',')
passwords = os.environ['passwords'].split(',')
list(map(process_single_user, usernames, passwords))
|
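One behavioral detail of the rewrite above: the bare `except Exception: raise ValueError(...)` inside the loop aborts on the first failed purchase, so a failure on streak_freeze also skips rupee_wager, and the fixed message "Unable to buy double or nothing" can name the wrong item. A hedged sketch that keeps iterating and reports all failures at the end (buy_item and AlreadyHaveStoreItemException are taken from the record; the failure collection is an assumption):

import duolingo  # third-party client used throughout the record


def buy_items(lingo, items, language='en'):
    """Attempt every purchase independently and report failures once."""
    failures = []
    for item in items:
        try:
            print("Trying to buy " + item)
            lingo.buy_item(item, language)
            print("Bought " + item)
        except duolingo.AlreadyHaveStoreItemException:
            print(item + " already equipped")
        except Exception as exc:
            failures.append((item, exc))  # remember the failure, keep going
    if failures:
        raise ValueError("Unable to buy: " +
                         ", ".join(item for item, _ in failures))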
fa36fc3301e7db47d72d0cd7c47bddf30cd7719d
|
06_test/unit_test_func.py
|
06_test/unit_test_func.py
|
#/usr/bin/env python
import unittest
from my_calc import my_add
class test_func(unittest.TestCase):
def test_my_add(self):
res = my_add(1, 2)
self.assertEqual(res, 3)
if __name__ == "__main__":
unittest.main()
|
#/usr/bin/env python
import unittest
from my_calc import my_add
class test_func(unittest.TestCase):
def test_my_add(self):
print("Test begin")
res = my_add(1, 2)
self.assertEqual(res, 3)
def setUp(self):
print("Setup")
def tearDown(self):
print("Tear down")
if __name__ == "__main__":
unittest.main()
|
Test unit test constructor and destructor
|
Test unit test constructor and destructor
|
Python
|
bsd-2-clause
|
zzz0072/Python_Exercises,zzz0072/Python_Exercises
|
#/usr/bin/env python
import unittest
from my_calc import my_add
class test_func(unittest.TestCase):
def test_my_add(self):
res = my_add(1, 2)
self.assertEqual(res, 3)
if __name__ == "__main__":
unittest.main()
Test unit test constructor and destructor
|
#/usr/bin/env python
import unittest
from my_calc import my_add
class test_func(unittest.TestCase):
def test_my_add(self):
print("Test begin")
res = my_add(1, 2)
self.assertEqual(res, 3)
def setUp(self):
print("Setup")
def tearDown(self):
print("Tear down")
if __name__ == "__main__":
unittest.main()
|
<commit_before>#/usr/bin/env python
import unittest
from my_calc import my_add
class test_func(unittest.TestCase):
def test_my_add(self):
res = my_add(1, 2)
self.assertEqual(res, 3)
if __name__ == "__main__":
unittest.main()
<commit_msg>Test unit test constructor and destructor<commit_after>
|
#/usr/bin/env python
import unittest
from my_calc import my_add
class test_func(unittest.TestCase):
def test_my_add(self):
print("Test begin")
res = my_add(1, 2)
self.assertEqual(res, 3)
def setUp(self):
print("Setup")
def tearDown(self):
print("Tear down")
if __name__ == "__main__":
unittest.main()
|
#/usr/bin/env python
import unittest
from my_calc import my_add
class test_func(unittest.TestCase):
def test_my_add(self):
res = my_add(1, 2)
self.assertEqual(res, 3)
if __name__ == "__main__":
unittest.main()
Test unit test constructor and destructor#/usr/bin/env python
import unittest
from my_calc import my_add
class test_func(unittest.TestCase):
def test_my_add(self):
print("Test begin")
res = my_add(1, 2)
self.assertEqual(res, 3)
def setUp(self):
print("Setup")
def tearDown(self):
print("Tear down")
if __name__ == "__main__":
unittest.main()
|
<commit_before>#/usr/bin/env python
import unittest
from my_calc import my_add
class test_func(unittest.TestCase):
def test_my_add(self):
res = my_add(1, 2)
self.assertEqual(res, 3)
if __name__ == "__main__":
unittest.main()
<commit_msg>Test unit test constructor and destructor<commit_after>#/usr/bin/env python
import unittest
from my_calc import my_add
class test_func(unittest.TestCase):
def test_my_add(self):
print("Test begin")
res = my_add(1, 2)
self.assertEqual(res, 3)
def setUp(self):
print("Setup")
def tearDown(self):
print("Tear down")
if __name__ == "__main__":
unittest.main()
|
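For reference on the fixture hooks introduced above: unittest runs setUp before and tearDown after every individual test method (tearDown also runs when the test body fails), while one-time work for the whole class belongs in setUpClass/tearDownClass. A small self-contained sketch showing the ordering:

import unittest


class FixtureOrder(unittest.TestCase):
    @classmethod
    def setUpClass(cls):
        print("setUpClass: once for the whole class")

    def setUp(self):
        print("setUp: before each test")

    def tearDown(self):
        print("tearDown: after each test, even if it failed")

    def test_one(self):
        self.assertTrue(True)

    def test_two(self):
        self.assertEqual(1 + 2, 3)


if __name__ == "__main__":
    unittest.main()

Running this prints the setUp/tearDown pair twice, once around each test method.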