commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
91dc57bb07d6bfd17af378fd6fccf353317cb06c
|
tests/test_scatter_series.py
|
tests/test_scatter_series.py
|
from unittest import TestCase
from unittest.mock import patch
from quickplots.series import ScatterSeries, Series
class LineSeriesCreationTests(TestCase):
def test_can_create_line_series(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertIsInstance(series, Series)
@patch("quickplots.series.Series.__init__")
def test_scatter_chart_uses_chart_initialisation(self, mock):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertTrue(mock.called)
|
from unittest import TestCase
from unittest.mock import patch
from quickplots.series import ScatterSeries, Series
class LineSeriesCreationTests(TestCase):
def test_can_create_line_series(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertIsInstance(series, Series)
@patch("quickplots.series.Series.__init__")
def test_scatter_chart_uses_chart_initialisation(self, mock):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertTrue(mock.called)
def test_scatter_series_repr(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertEqual(str(series), "<ScatterSeries (3 data points)>")
series = ScatterSeries((1, 1), (2, 4), (3, 9), name="line")
self.assertEqual(str(series), "<ScatterSeries 'line' (3 data points)>")
|
Add check on ScatterSeries repr
|
Add check on ScatterSeries repr
|
Python
|
mit
|
samirelanduk/quickplots
|
from unittest import TestCase
from unittest.mock import patch
from quickplots.series import ScatterSeries, Series
class LineSeriesCreationTests(TestCase):
def test_can_create_line_series(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertIsInstance(series, Series)
@patch("quickplots.series.Series.__init__")
def test_scatter_chart_uses_chart_initialisation(self, mock):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertTrue(mock.called)
Add check on ScatterSeries repr
|
from unittest import TestCase
from unittest.mock import patch
from quickplots.series import ScatterSeries, Series
class LineSeriesCreationTests(TestCase):
def test_can_create_line_series(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertIsInstance(series, Series)
@patch("quickplots.series.Series.__init__")
def test_scatter_chart_uses_chart_initialisation(self, mock):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertTrue(mock.called)
def test_scatter_series_repr(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertEqual(str(series), "<ScatterSeries (3 data points)>")
series = ScatterSeries((1, 1), (2, 4), (3, 9), name="line")
self.assertEqual(str(series), "<ScatterSeries 'line' (3 data points)>")
|
<commit_before>from unittest import TestCase
from unittest.mock import patch
from quickplots.series import ScatterSeries, Series
class LineSeriesCreationTests(TestCase):
def test_can_create_line_series(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertIsInstance(series, Series)
@patch("quickplots.series.Series.__init__")
def test_scatter_chart_uses_chart_initialisation(self, mock):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertTrue(mock.called)
<commit_msg>Add check on ScatterSeries repr<commit_after>
|
from unittest import TestCase
from unittest.mock import patch
from quickplots.series import ScatterSeries, Series
class LineSeriesCreationTests(TestCase):
def test_can_create_line_series(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertIsInstance(series, Series)
@patch("quickplots.series.Series.__init__")
def test_scatter_chart_uses_chart_initialisation(self, mock):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertTrue(mock.called)
def test_scatter_series_repr(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertEqual(str(series), "<ScatterSeries (3 data points)>")
series = ScatterSeries((1, 1), (2, 4), (3, 9), name="line")
self.assertEqual(str(series), "<ScatterSeries 'line' (3 data points)>")
|
from unittest import TestCase
from unittest.mock import patch
from quickplots.series import ScatterSeries, Series
class LineSeriesCreationTests(TestCase):
def test_can_create_line_series(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertIsInstance(series, Series)
@patch("quickplots.series.Series.__init__")
def test_scatter_chart_uses_chart_initialisation(self, mock):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertTrue(mock.called)
Add check on ScatterSeries reprfrom unittest import TestCase
from unittest.mock import patch
from quickplots.series import ScatterSeries, Series
class LineSeriesCreationTests(TestCase):
def test_can_create_line_series(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertIsInstance(series, Series)
@patch("quickplots.series.Series.__init__")
def test_scatter_chart_uses_chart_initialisation(self, mock):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertTrue(mock.called)
def test_scatter_series_repr(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertEqual(str(series), "<ScatterSeries (3 data points)>")
series = ScatterSeries((1, 1), (2, 4), (3, 9), name="line")
self.assertEqual(str(series), "<ScatterSeries 'line' (3 data points)>")
|
<commit_before>from unittest import TestCase
from unittest.mock import patch
from quickplots.series import ScatterSeries, Series
class LineSeriesCreationTests(TestCase):
def test_can_create_line_series(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertIsInstance(series, Series)
@patch("quickplots.series.Series.__init__")
def test_scatter_chart_uses_chart_initialisation(self, mock):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertTrue(mock.called)
<commit_msg>Add check on ScatterSeries repr<commit_after>from unittest import TestCase
from unittest.mock import patch
from quickplots.series import ScatterSeries, Series
class LineSeriesCreationTests(TestCase):
def test_can_create_line_series(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertIsInstance(series, Series)
@patch("quickplots.series.Series.__init__")
def test_scatter_chart_uses_chart_initialisation(self, mock):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertTrue(mock.called)
def test_scatter_series_repr(self):
series = ScatterSeries((1, 1), (2, 4), (3, 9))
self.assertEqual(str(series), "<ScatterSeries (3 data points)>")
series = ScatterSeries((1, 1), (2, 4), (3, 9), name="line")
self.assertEqual(str(series), "<ScatterSeries 'line' (3 data points)>")
|
48fc33be592e27e632958a58de99356494a4e511
|
test/dbusdef.py
|
test/dbusdef.py
|
import dbus
bus = dbus.SystemBus()
dummy = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.freedesktop.DBus.Introspectable')
#print dummy.Introspect()
manager = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.bluez.Manager')
database = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.bluez.Database')
try:
adapter = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Adapter')
test = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Test')
rfcomm = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.RFCOMM')
except:
adapter = ""
test = ""
rfcomm = ""
def create_service(identifier):
try:
path = manager.FindService(identifier)
except:
path = ""
if (path != ""):
return dbus.Interface(bus.get_object('org.bluez', path), 'org.bluez.Service')
echo = create_service("echo")
transfer = create_service("transfer")
network = create_service("network")
input = create_service("input")
audio = create_service("audio")
headset = create_service("headset")
def connect_service(identifier):
try:
conn = manager.ActivateService(identifier)
except:
conn = ""
if (conn != ""):
return dbus.Interface(bus.get_object(conn, "/org/bluez/" + identifier), 'org.bluez.' + identifier + '.Manager')
|
import dbus
bus = dbus.SystemBus()
dummy = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.freedesktop.DBus.Introspectable')
#print dummy.Introspect()
manager = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.bluez.Manager')
try:
adapter = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Adapter')
except:
pass
|
Remove old 3.x API cruft
|
Remove old 3.x API cruft
|
Python
|
lgpl-2.1
|
pkarasev3/bluez,mapfau/bluez,silent-snowman/bluez,ComputeCycles/bluez,pstglia/external-bluetooth-bluez,ComputeCycles/bluez,pkarasev3/bluez,ComputeCycles/bluez,silent-snowman/bluez,pstglia/external-bluetooth-bluez,pstglia/external-bluetooth-bluez,mapfau/bluez,mapfau/bluez,silent-snowman/bluez,mapfau/bluez,pkarasev3/bluez,silent-snowman/bluez,pkarasev3/bluez,ComputeCycles/bluez,pstglia/external-bluetooth-bluez
|
import dbus
bus = dbus.SystemBus()
dummy = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.freedesktop.DBus.Introspectable')
#print dummy.Introspect()
manager = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.bluez.Manager')
database = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.bluez.Database')
try:
adapter = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Adapter')
test = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Test')
rfcomm = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.RFCOMM')
except:
adapter = ""
test = ""
rfcomm = ""
def create_service(identifier):
try:
path = manager.FindService(identifier)
except:
path = ""
if (path != ""):
return dbus.Interface(bus.get_object('org.bluez', path), 'org.bluez.Service')
echo = create_service("echo")
transfer = create_service("transfer")
network = create_service("network")
input = create_service("input")
audio = create_service("audio")
headset = create_service("headset")
def connect_service(identifier):
try:
conn = manager.ActivateService(identifier)
except:
conn = ""
if (conn != ""):
return dbus.Interface(bus.get_object(conn, "/org/bluez/" + identifier), 'org.bluez.' + identifier + '.Manager')
Remove old 3.x API cruft
|
import dbus
bus = dbus.SystemBus()
dummy = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.freedesktop.DBus.Introspectable')
#print dummy.Introspect()
manager = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.bluez.Manager')
try:
adapter = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Adapter')
except:
pass
|
<commit_before>import dbus
bus = dbus.SystemBus()
dummy = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.freedesktop.DBus.Introspectable')
#print dummy.Introspect()
manager = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.bluez.Manager')
database = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.bluez.Database')
try:
adapter = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Adapter')
test = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Test')
rfcomm = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.RFCOMM')
except:
adapter = ""
test = ""
rfcomm = ""
def create_service(identifier):
try:
path = manager.FindService(identifier)
except:
path = ""
if (path != ""):
return dbus.Interface(bus.get_object('org.bluez', path), 'org.bluez.Service')
echo = create_service("echo")
transfer = create_service("transfer")
network = create_service("network")
input = create_service("input")
audio = create_service("audio")
headset = create_service("headset")
def connect_service(identifier):
try:
conn = manager.ActivateService(identifier)
except:
conn = ""
if (conn != ""):
return dbus.Interface(bus.get_object(conn, "/org/bluez/" + identifier), 'org.bluez.' + identifier + '.Manager')
<commit_msg>Remove old 3.x API cruft<commit_after>
|
import dbus
bus = dbus.SystemBus()
dummy = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.freedesktop.DBus.Introspectable')
#print dummy.Introspect()
manager = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.bluez.Manager')
try:
adapter = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Adapter')
except:
pass
|
import dbus
bus = dbus.SystemBus()
dummy = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.freedesktop.DBus.Introspectable')
#print dummy.Introspect()
manager = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.bluez.Manager')
database = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.bluez.Database')
try:
adapter = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Adapter')
test = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Test')
rfcomm = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.RFCOMM')
except:
adapter = ""
test = ""
rfcomm = ""
def create_service(identifier):
try:
path = manager.FindService(identifier)
except:
path = ""
if (path != ""):
return dbus.Interface(bus.get_object('org.bluez', path), 'org.bluez.Service')
echo = create_service("echo")
transfer = create_service("transfer")
network = create_service("network")
input = create_service("input")
audio = create_service("audio")
headset = create_service("headset")
def connect_service(identifier):
try:
conn = manager.ActivateService(identifier)
except:
conn = ""
if (conn != ""):
return dbus.Interface(bus.get_object(conn, "/org/bluez/" + identifier), 'org.bluez.' + identifier + '.Manager')
Remove old 3.x API cruftimport dbus
bus = dbus.SystemBus()
dummy = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.freedesktop.DBus.Introspectable')
#print dummy.Introspect()
manager = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.bluez.Manager')
try:
adapter = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Adapter')
except:
pass
|
<commit_before>import dbus
bus = dbus.SystemBus()
dummy = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.freedesktop.DBus.Introspectable')
#print dummy.Introspect()
manager = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.bluez.Manager')
database = dbus.Interface(bus.get_object('org.bluez', '/org/bluez'), 'org.bluez.Database')
try:
adapter = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Adapter')
test = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Test')
rfcomm = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.RFCOMM')
except:
adapter = ""
test = ""
rfcomm = ""
def create_service(identifier):
try:
path = manager.FindService(identifier)
except:
path = ""
if (path != ""):
return dbus.Interface(bus.get_object('org.bluez', path), 'org.bluez.Service')
echo = create_service("echo")
transfer = create_service("transfer")
network = create_service("network")
input = create_service("input")
audio = create_service("audio")
headset = create_service("headset")
def connect_service(identifier):
try:
conn = manager.ActivateService(identifier)
except:
conn = ""
if (conn != ""):
return dbus.Interface(bus.get_object(conn, "/org/bluez/" + identifier), 'org.bluez.' + identifier + '.Manager')
<commit_msg>Remove old 3.x API cruft<commit_after>import dbus
bus = dbus.SystemBus()
dummy = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.freedesktop.DBus.Introspectable')
#print dummy.Introspect()
manager = dbus.Interface(bus.get_object('org.bluez', '/'), 'org.bluez.Manager')
try:
adapter = dbus.Interface(bus.get_object('org.bluez', manager.DefaultAdapter()), 'org.bluez.Adapter')
except:
pass
|
5b3cdb3a1735a02fc10761b96f2d74b2fab8cd61
|
test_scraper.py
|
test_scraper.py
|
from scraper import search_CL
def test_search_CL():
test_body, test_encoding = search_CL(minAsk=100)
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
|
from scraper import search_CL
from scraper import read_search_results
def test_search_CL():
test_body, test_encoding = search_CL(minAsk=100)
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
def test_read_search_result():
test_body, test_encoding = read_search_results()
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
|
Add test_read_search_result() to test reading data from a local .html
|
Add test_read_search_result() to test reading data from a local .html
|
Python
|
mit
|
jefrailey/basic-scraper
|
from scraper import search_CL
def test_search_CL():
test_body, test_encoding = search_CL(minAsk=100)
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'Add test_read_search_result() to test reading data from a local .html
|
from scraper import search_CL
from scraper import read_search_results
def test_search_CL():
test_body, test_encoding = search_CL(minAsk=100)
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
def test_read_search_result():
test_body, test_encoding = read_search_results()
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
|
<commit_before>from scraper import search_CL
def test_search_CL():
test_body, test_encoding = search_CL(minAsk=100)
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'<commit_msg>Add test_read_search_result() to test reading data from a local .html<commit_after>
|
from scraper import search_CL
from scraper import read_search_results
def test_search_CL():
test_body, test_encoding = search_CL(minAsk=100)
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
def test_read_search_result():
test_body, test_encoding = read_search_results()
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
|
from scraper import search_CL
def test_search_CL():
test_body, test_encoding = search_CL(minAsk=100)
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'Add test_read_search_result() to test reading data from a local .htmlfrom scraper import search_CL
from scraper import read_search_results
def test_search_CL():
test_body, test_encoding = search_CL(minAsk=100)
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
def test_read_search_result():
test_body, test_encoding = read_search_results()
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
|
<commit_before>from scraper import search_CL
def test_search_CL():
test_body, test_encoding = search_CL(minAsk=100)
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'<commit_msg>Add test_read_search_result() to test reading data from a local .html<commit_after>from scraper import search_CL
from scraper import read_search_results
def test_search_CL():
test_body, test_encoding = search_CL(minAsk=100)
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
def test_read_search_result():
test_body, test_encoding = read_search_results()
assert "<span class=\"desktop\">craigslist</span>" in test_body
assert test_encoding == 'utf-8'
|
01ef56e5ef825897648de792d1734a336499fb0b
|
ynr/apps/candidates/views/version_data.py
|
ynr/apps/candidates/views/version_data.py
|
import sys
from datetime import datetime
from random import randint
def get_client_ip(request):
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
ip = x_forwarded_for.split(",")[-1].strip()
else:
ip = request.META.get("REMOTE_ADDR")
return ip
def create_version_id():
"""Generate a random ID to use to identify a person version"""
return "{:016x}".format(randint(0, sys.maxsize))
def get_current_timestamp():
return datetime.utcnow().isoformat()
def get_change_metadata(request, information_source):
result = {
"information_source": information_source,
"version_id": create_version_id(),
"timestamp": get_current_timestamp(),
}
if request is not None:
result["username"] = request.user.username
return result
|
import sys
from datetime import datetime
from random import randint
def get_client_ip(request):
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
ip = x_forwarded_for.split(",")[-1].strip()
else:
ip = request.META.get("REMOTE_ADDR")
return ip
def create_version_id():
"""Generate a random ID to use to identify a person version"""
return "{:016x}".format(randint(0, sys.maxsize))
def get_current_timestamp():
return datetime.utcnow().isoformat()
def get_change_metadata(request, information_source, user=None):
"""
:type user: django.contrib.auth.models.User
"""
result = {
"information_source": information_source,
"version_id": create_version_id(),
"timestamp": get_current_timestamp(),
}
if request is not None:
result["username"] = request.user.username
if user:
result["username"] = user.username
return result
|
Allow passing User directly to get_change_metadata
|
Allow passing User directly to get_change_metadata
|
Python
|
agpl-3.0
|
DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative
|
import sys
from datetime import datetime
from random import randint
def get_client_ip(request):
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
ip = x_forwarded_for.split(",")[-1].strip()
else:
ip = request.META.get("REMOTE_ADDR")
return ip
def create_version_id():
"""Generate a random ID to use to identify a person version"""
return "{:016x}".format(randint(0, sys.maxsize))
def get_current_timestamp():
return datetime.utcnow().isoformat()
def get_change_metadata(request, information_source):
result = {
"information_source": information_source,
"version_id": create_version_id(),
"timestamp": get_current_timestamp(),
}
if request is not None:
result["username"] = request.user.username
return result
Allow passing User directly to get_change_metadata
|
import sys
from datetime import datetime
from random import randint
def get_client_ip(request):
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
ip = x_forwarded_for.split(",")[-1].strip()
else:
ip = request.META.get("REMOTE_ADDR")
return ip
def create_version_id():
"""Generate a random ID to use to identify a person version"""
return "{:016x}".format(randint(0, sys.maxsize))
def get_current_timestamp():
return datetime.utcnow().isoformat()
def get_change_metadata(request, information_source, user=None):
"""
:type user: django.contrib.auth.models.User
"""
result = {
"information_source": information_source,
"version_id": create_version_id(),
"timestamp": get_current_timestamp(),
}
if request is not None:
result["username"] = request.user.username
if user:
result["username"] = user.username
return result
|
<commit_before>import sys
from datetime import datetime
from random import randint
def get_client_ip(request):
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
ip = x_forwarded_for.split(",")[-1].strip()
else:
ip = request.META.get("REMOTE_ADDR")
return ip
def create_version_id():
"""Generate a random ID to use to identify a person version"""
return "{:016x}".format(randint(0, sys.maxsize))
def get_current_timestamp():
return datetime.utcnow().isoformat()
def get_change_metadata(request, information_source):
result = {
"information_source": information_source,
"version_id": create_version_id(),
"timestamp": get_current_timestamp(),
}
if request is not None:
result["username"] = request.user.username
return result
<commit_msg>Allow passing User directly to get_change_metadata<commit_after>
|
import sys
from datetime import datetime
from random import randint
def get_client_ip(request):
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
ip = x_forwarded_for.split(",")[-1].strip()
else:
ip = request.META.get("REMOTE_ADDR")
return ip
def create_version_id():
"""Generate a random ID to use to identify a person version"""
return "{:016x}".format(randint(0, sys.maxsize))
def get_current_timestamp():
return datetime.utcnow().isoformat()
def get_change_metadata(request, information_source, user=None):
"""
:type user: django.contrib.auth.models.User
"""
result = {
"information_source": information_source,
"version_id": create_version_id(),
"timestamp": get_current_timestamp(),
}
if request is not None:
result["username"] = request.user.username
if user:
result["username"] = user.username
return result
|
import sys
from datetime import datetime
from random import randint
def get_client_ip(request):
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
ip = x_forwarded_for.split(",")[-1].strip()
else:
ip = request.META.get("REMOTE_ADDR")
return ip
def create_version_id():
"""Generate a random ID to use to identify a person version"""
return "{:016x}".format(randint(0, sys.maxsize))
def get_current_timestamp():
return datetime.utcnow().isoformat()
def get_change_metadata(request, information_source):
result = {
"information_source": information_source,
"version_id": create_version_id(),
"timestamp": get_current_timestamp(),
}
if request is not None:
result["username"] = request.user.username
return result
Allow passing User directly to get_change_metadataimport sys
from datetime import datetime
from random import randint
def get_client_ip(request):
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
ip = x_forwarded_for.split(",")[-1].strip()
else:
ip = request.META.get("REMOTE_ADDR")
return ip
def create_version_id():
"""Generate a random ID to use to identify a person version"""
return "{:016x}".format(randint(0, sys.maxsize))
def get_current_timestamp():
return datetime.utcnow().isoformat()
def get_change_metadata(request, information_source, user=None):
"""
:type user: django.contrib.auth.models.User
"""
result = {
"information_source": information_source,
"version_id": create_version_id(),
"timestamp": get_current_timestamp(),
}
if request is not None:
result["username"] = request.user.username
if user:
result["username"] = user.username
return result
|
<commit_before>import sys
from datetime import datetime
from random import randint
def get_client_ip(request):
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
ip = x_forwarded_for.split(",")[-1].strip()
else:
ip = request.META.get("REMOTE_ADDR")
return ip
def create_version_id():
"""Generate a random ID to use to identify a person version"""
return "{:016x}".format(randint(0, sys.maxsize))
def get_current_timestamp():
return datetime.utcnow().isoformat()
def get_change_metadata(request, information_source):
result = {
"information_source": information_source,
"version_id": create_version_id(),
"timestamp": get_current_timestamp(),
}
if request is not None:
result["username"] = request.user.username
return result
<commit_msg>Allow passing User directly to get_change_metadata<commit_after>import sys
from datetime import datetime
from random import randint
def get_client_ip(request):
x_forwarded_for = request.META.get("HTTP_X_FORWARDED_FOR")
if x_forwarded_for:
ip = x_forwarded_for.split(",")[-1].strip()
else:
ip = request.META.get("REMOTE_ADDR")
return ip
def create_version_id():
"""Generate a random ID to use to identify a person version"""
return "{:016x}".format(randint(0, sys.maxsize))
def get_current_timestamp():
return datetime.utcnow().isoformat()
def get_change_metadata(request, information_source, user=None):
"""
:type user: django.contrib.auth.models.User
"""
result = {
"information_source": information_source,
"version_id": create_version_id(),
"timestamp": get_current_timestamp(),
}
if request is not None:
result["username"] = request.user.username
if user:
result["username"] = user.username
return result
|
aaf3db955585396fe30c341fd406cd98ebdd8f7a
|
src/info_retrieval/info_retrieval.py
|
src/info_retrieval/info_retrieval.py
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = new Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
|
Stop writing Java code :/
|
Stop writing Java code :/
|
Python
|
mit
|
amkahn/question-answering,amkahn/question-answering
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = new Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
Stop writing Java code :/
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
|
<commit_before># LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = new Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
<commit_msg>Stop writing Java code :/<commit_after>
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
|
# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = new Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
Stop writing Java code :/# LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
|
<commit_before># LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = new Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
<commit_msg>Stop writing Java code :/<commit_after># LING 573 Question Answering System
# Code last updated 4/17/14 by Clara Gordon
# This code implements an InfoRetriever for the question answering system.
from pymur import *
from general_classes import *
class InfoRetriever:
# builds a QueryEnvironment associated with the indexed document collection
def __init__(self, index_path):
# how to get this to link up to the doc collection?
self.path_to_idx = index_path
self.index = Index(self.path_to_idx)
self.query_env = QueryEnvironment()
self.query_env.addIndex(self.path_to_idx)
# creates a list of all the passages returned by all the queries generated by
# the query-processing module
def retrieve_passages(self, queries):
passages = []
for query in queries:
# second argument is the number of documents desired
docs = self.query_env.runQuery("#combine[passage50:25](" + query + ")", 20)
for doc in docs:
doc_num = doc.document
begin = doc.begin
end = doc.end
doc_id = self.query_env.documents([doc_num])[0].metadata['docno'] # need this for output
passage = Passage(self.index.document(doc_num, True)[begin, end], doc.score, doc_id)
passages.append(passage)
return passages
|
be75e40136bcba9e3c02a811beedf9a800381062
|
changeling/api.py
|
changeling/api.py
|
import changeling.models
class ChangeAPI(object):
def __init__(self, config, storage):
self.config = config
self.storage = storage
def list(self):
for change_data in self.storage.list_changes():
yield changeling.models.Change.from_dict(change_data)
def save(self, change):
data = change.to_dict()
self.storage.save_change(data)
|
import changeling.models
class ChangeAPI(object):
def __init__(self, storage):
self.storage = storage
def list(self):
for change_data in self.storage.list_changes():
yield changeling.models.Change.from_dict(change_data)
def save(self, change):
data = change.to_dict()
self.storage.save_change(data)
|
Drop config argument from ChangeAPI
|
Drop config argument from ChangeAPI
|
Python
|
apache-2.0
|
bcwaldon/changeling,bcwaldon/changeling
|
import changeling.models
class ChangeAPI(object):
def __init__(self, config, storage):
self.config = config
self.storage = storage
def list(self):
for change_data in self.storage.list_changes():
yield changeling.models.Change.from_dict(change_data)
def save(self, change):
data = change.to_dict()
self.storage.save_change(data)
Drop config argument from ChangeAPI
|
import changeling.models
class ChangeAPI(object):
def __init__(self, storage):
self.storage = storage
def list(self):
for change_data in self.storage.list_changes():
yield changeling.models.Change.from_dict(change_data)
def save(self, change):
data = change.to_dict()
self.storage.save_change(data)
|
<commit_before>import changeling.models
class ChangeAPI(object):
def __init__(self, config, storage):
self.config = config
self.storage = storage
def list(self):
for change_data in self.storage.list_changes():
yield changeling.models.Change.from_dict(change_data)
def save(self, change):
data = change.to_dict()
self.storage.save_change(data)
<commit_msg>Drop config argument from ChangeAPI<commit_after>
|
import changeling.models
class ChangeAPI(object):
def __init__(self, storage):
self.storage = storage
def list(self):
for change_data in self.storage.list_changes():
yield changeling.models.Change.from_dict(change_data)
def save(self, change):
data = change.to_dict()
self.storage.save_change(data)
|
import changeling.models
class ChangeAPI(object):
def __init__(self, config, storage):
self.config = config
self.storage = storage
def list(self):
for change_data in self.storage.list_changes():
yield changeling.models.Change.from_dict(change_data)
def save(self, change):
data = change.to_dict()
self.storage.save_change(data)
Drop config argument from ChangeAPIimport changeling.models
class ChangeAPI(object):
def __init__(self, storage):
self.storage = storage
def list(self):
for change_data in self.storage.list_changes():
yield changeling.models.Change.from_dict(change_data)
def save(self, change):
data = change.to_dict()
self.storage.save_change(data)
|
<commit_before>import changeling.models
class ChangeAPI(object):
def __init__(self, config, storage):
self.config = config
self.storage = storage
def list(self):
for change_data in self.storage.list_changes():
yield changeling.models.Change.from_dict(change_data)
def save(self, change):
data = change.to_dict()
self.storage.save_change(data)
<commit_msg>Drop config argument from ChangeAPI<commit_after>import changeling.models
class ChangeAPI(object):
def __init__(self, storage):
self.storage = storage
def list(self):
for change_data in self.storage.list_changes():
yield changeling.models.Change.from_dict(change_data)
def save(self, change):
data = change.to_dict()
self.storage.save_change(data)
|
2b372d479f8c022d72954396be9a4a045596f497
|
tests/test52.py
|
tests/test52.py
|
import judicious
judicious.seed("cc722bf6-e319-cf63-a671-cbae64dfdb0f")
# 1 (complete): 3799aa89-ccae-c268-d0e8-cc4e9ddddee4
# 2 (timeout) : 4d30601d-dfe3-ee53-8594-7fc0aa8e68ec
# 3 (complete): fe07a885-53c3-9a22-c93e-91436e5d8f0c
# 1 (complete): 4f4d13ed-7d1c-cbee-638d-6aee5188c929
# 2 (timeout) : 720ebe41-5987-b9f0-b571-fd7fb50f2b05
# 3 (timeout) : 358e7d25-af92-8a18-23ec-49025aecc87b
# 4 (complete) : cab5c911-741c-8721-d851-483669940626
def experiment():
with judicious.Person(lifetime=60) as person:
consent = person.consent()
j1 = person.joke()
j2 = person.joke()
j3 = person.joke()
j4 = person.joke()
person.complete()
return [j1, j2, j3, j4]
results = judicious.map3(experiment, [None for _ in range(100)])
print(results)
|
import judicious
# judicious.register("https://imprudent.herokuapp.com")
# judicious.seed("cc722bf6-e319-cf63-a671-cbae64dfd40f")
def experiment():
with judicious.Person(lifetime=60) as person:
if not person.consent():
return None
j1 = person.joke()
j2 = person.joke()
j3 = person.joke()
j4 = person.joke()
person.complete()
return (j1, j2, j3, j4)
results = judicious.map3(experiment, [None for _ in range(1)])
print(results)
|
Update context manager test script
|
Update context manager test script
|
Python
|
mit
|
suchow/judicious,suchow/judicious,suchow/judicious
|
import judicious
judicious.seed("cc722bf6-e319-cf63-a671-cbae64dfdb0f")
# 1 (complete): 3799aa89-ccae-c268-d0e8-cc4e9ddddee4
# 2 (timeout) : 4d30601d-dfe3-ee53-8594-7fc0aa8e68ec
# 3 (complete): fe07a885-53c3-9a22-c93e-91436e5d8f0c
# 1 (complete): 4f4d13ed-7d1c-cbee-638d-6aee5188c929
# 2 (timeout) : 720ebe41-5987-b9f0-b571-fd7fb50f2b05
# 3 (timeout) : 358e7d25-af92-8a18-23ec-49025aecc87b
# 4 (complete) : cab5c911-741c-8721-d851-483669940626
def experiment():
with judicious.Person(lifetime=60) as person:
consent = person.consent()
j1 = person.joke()
j2 = person.joke()
j3 = person.joke()
j4 = person.joke()
person.complete()
return [j1, j2, j3, j4]
results = judicious.map3(experiment, [None for _ in range(100)])
print(results)
Update context manager test script
|
import judicious
# judicious.register("https://imprudent.herokuapp.com")
# judicious.seed("cc722bf6-e319-cf63-a671-cbae64dfd40f")
def experiment():
with judicious.Person(lifetime=60) as person:
if not person.consent():
return None
j1 = person.joke()
j2 = person.joke()
j3 = person.joke()
j4 = person.joke()
person.complete()
return (j1, j2, j3, j4)
results = judicious.map3(experiment, [None for _ in range(1)])
print(results)
|
<commit_before>import judicious
judicious.seed("cc722bf6-e319-cf63-a671-cbae64dfdb0f")
# 1 (complete): 3799aa89-ccae-c268-d0e8-cc4e9ddddee4
# 2 (timeout) : 4d30601d-dfe3-ee53-8594-7fc0aa8e68ec
# 3 (complete): fe07a885-53c3-9a22-c93e-91436e5d8f0c
# 1 (complete): 4f4d13ed-7d1c-cbee-638d-6aee5188c929
# 2 (timeout) : 720ebe41-5987-b9f0-b571-fd7fb50f2b05
# 3 (timeout) : 358e7d25-af92-8a18-23ec-49025aecc87b
# 4 (complete) : cab5c911-741c-8721-d851-483669940626
def experiment():
with judicious.Person(lifetime=60) as person:
consent = person.consent()
j1 = person.joke()
j2 = person.joke()
j3 = person.joke()
j4 = person.joke()
person.complete()
return [j1, j2, j3, j4]
results = judicious.map3(experiment, [None for _ in range(100)])
print(results)
<commit_msg>Update context manager test script<commit_after>
|
import judicious
# judicious.register("https://imprudent.herokuapp.com")
# judicious.seed("cc722bf6-e319-cf63-a671-cbae64dfd40f")
def experiment():
with judicious.Person(lifetime=60) as person:
if not person.consent():
return None
j1 = person.joke()
j2 = person.joke()
j3 = person.joke()
j4 = person.joke()
person.complete()
return (j1, j2, j3, j4)
results = judicious.map3(experiment, [None for _ in range(1)])
print(results)
|
import judicious
judicious.seed("cc722bf6-e319-cf63-a671-cbae64dfdb0f")
# 1 (complete): 3799aa89-ccae-c268-d0e8-cc4e9ddddee4
# 2 (timeout) : 4d30601d-dfe3-ee53-8594-7fc0aa8e68ec
# 3 (complete): fe07a885-53c3-9a22-c93e-91436e5d8f0c
# 1 (complete): 4f4d13ed-7d1c-cbee-638d-6aee5188c929
# 2 (timeout) : 720ebe41-5987-b9f0-b571-fd7fb50f2b05
# 3 (timeout) : 358e7d25-af92-8a18-23ec-49025aecc87b
# 4 (complete) : cab5c911-741c-8721-d851-483669940626
def experiment():
with judicious.Person(lifetime=60) as person:
consent = person.consent()
j1 = person.joke()
j2 = person.joke()
j3 = person.joke()
j4 = person.joke()
person.complete()
return [j1, j2, j3, j4]
results = judicious.map3(experiment, [None for _ in range(100)])
print(results)
Update context manager test scriptimport judicious
# judicious.register("https://imprudent.herokuapp.com")
# judicious.seed("cc722bf6-e319-cf63-a671-cbae64dfd40f")
def experiment():
with judicious.Person(lifetime=60) as person:
if not person.consent():
return None
j1 = person.joke()
j2 = person.joke()
j3 = person.joke()
j4 = person.joke()
person.complete()
return (j1, j2, j3, j4)
results = judicious.map3(experiment, [None for _ in range(1)])
print(results)
|
<commit_before>import judicious
judicious.seed("cc722bf6-e319-cf63-a671-cbae64dfdb0f")
# 1 (complete): 3799aa89-ccae-c268-d0e8-cc4e9ddddee4
# 2 (timeout) : 4d30601d-dfe3-ee53-8594-7fc0aa8e68ec
# 3 (complete): fe07a885-53c3-9a22-c93e-91436e5d8f0c
# 1 (complete): 4f4d13ed-7d1c-cbee-638d-6aee5188c929
# 2 (timeout) : 720ebe41-5987-b9f0-b571-fd7fb50f2b05
# 3 (timeout) : 358e7d25-af92-8a18-23ec-49025aecc87b
# 4 (complete) : cab5c911-741c-8721-d851-483669940626
def experiment():
with judicious.Person(lifetime=60) as person:
consent = person.consent()
j1 = person.joke()
j2 = person.joke()
j3 = person.joke()
j4 = person.joke()
person.complete()
return [j1, j2, j3, j4]
results = judicious.map3(experiment, [None for _ in range(100)])
print(results)
<commit_msg>Update context manager test script<commit_after>import judicious
# judicious.register("https://imprudent.herokuapp.com")
# judicious.seed("cc722bf6-e319-cf63-a671-cbae64dfd40f")
def experiment():
with judicious.Person(lifetime=60) as person:
if not person.consent():
return None
j1 = person.joke()
j2 = person.joke()
j3 = person.joke()
j4 = person.joke()
person.complete()
return (j1, j2, j3, j4)
results = judicious.map3(experiment, [None for _ in range(1)])
print(results)
|
ed3e70d26f70e701e29e5d0798f5f55229ba5f6d
|
tracker/servers/cron_jobs.py
|
tracker/servers/cron_jobs.py
|
from servers.models import HttpSeries
from django_cron import CronJobBase, Schedule
class HttpSeriesCronJob(CronJobBase):
RUN_EVERY_MINS = 2
schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
code = "servers.cron.HttpSeriesCronJob"
def do(self):
for series in HttpSeries.objects.all():
series.create_next_value()
|
from servers.models import HttpSeries
from django_cron import CronJobBase, Schedule
class HttpSeriesCronJob(CronJobBase):
RUN_EVERY_MINS = 1
schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
code = "servers.cron.HttpSeriesCronJob"
def do(self):
for series in HttpSeries.objects.all():
series.create_next_value()
|
Set acquisition freq to 1 minute.
|
Set acquisition freq to 1 minute.
|
Python
|
mit
|
tomaszpiotro/Tracker,tomaszpiotro/Tracker,tomaszpiotro/Tracker
|
from servers.models import HttpSeries
from django_cron import CronJobBase, Schedule
class HttpSeriesCronJob(CronJobBase):
RUN_EVERY_MINS = 2
schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
code = "servers.cron.HttpSeriesCronJob"
def do(self):
for series in HttpSeries.objects.all():
series.create_next_value()
Set acquisition freq to 1 minute.
|
from servers.models import HttpSeries
from django_cron import CronJobBase, Schedule
class HttpSeriesCronJob(CronJobBase):
RUN_EVERY_MINS = 1
schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
code = "servers.cron.HttpSeriesCronJob"
def do(self):
for series in HttpSeries.objects.all():
series.create_next_value()
|
<commit_before>from servers.models import HttpSeries
from django_cron import CronJobBase, Schedule
class HttpSeriesCronJob(CronJobBase):
RUN_EVERY_MINS = 2
schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
code = "servers.cron.HttpSeriesCronJob"
def do(self):
for series in HttpSeries.objects.all():
series.create_next_value()
<commit_msg>Set acquisition freq to 1 minute.<commit_after>
|
from servers.models import HttpSeries
from django_cron import CronJobBase, Schedule
class HttpSeriesCronJob(CronJobBase):
RUN_EVERY_MINS = 1
schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
code = "servers.cron.HttpSeriesCronJob"
def do(self):
for series in HttpSeries.objects.all():
series.create_next_value()
|
from servers.models import HttpSeries
from django_cron import CronJobBase, Schedule
class HttpSeriesCronJob(CronJobBase):
RUN_EVERY_MINS = 2
schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
code = "servers.cron.HttpSeriesCronJob"
def do(self):
for series in HttpSeries.objects.all():
series.create_next_value()
Set acquisition freq to 1 minute.from servers.models import HttpSeries
from django_cron import CronJobBase, Schedule
class HttpSeriesCronJob(CronJobBase):
RUN_EVERY_MINS = 1
schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
code = "servers.cron.HttpSeriesCronJob"
def do(self):
for series in HttpSeries.objects.all():
series.create_next_value()
|
<commit_before>from servers.models import HttpSeries
from django_cron import CronJobBase, Schedule
class HttpSeriesCronJob(CronJobBase):
RUN_EVERY_MINS = 2
schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
code = "servers.cron.HttpSeriesCronJob"
def do(self):
for series in HttpSeries.objects.all():
series.create_next_value()
<commit_msg>Set acquisition freq to 1 minute.<commit_after>from servers.models import HttpSeries
from django_cron import CronJobBase, Schedule
class HttpSeriesCronJob(CronJobBase):
RUN_EVERY_MINS = 1
schedule = Schedule(run_every_mins=RUN_EVERY_MINS)
code = "servers.cron.HttpSeriesCronJob"
def do(self):
for series in HttpSeries.objects.all():
series.create_next_value()
|
ea18ab430e49c5deb5a0c19fbda66cbaca8256c7
|
esipy/__init__.py
|
esipy/__init__.py
|
# -*- encoding: utf-8 -*-
from __future__ import absolute_import
from .client import EsiClient # noqa
from .security import EsiSecurity # noqa
try:
from pyswagger import App # noqa
except Exception: # pragma: no cover
# Not installed or in install (not yet installed) so ignore
pass
__version__ = '0.0.5'
|
# -*- encoding: utf-8 -*-
from __future__ import absolute_import
try:
from .client import EsiClient # noqa
from .security import EsiSecurity # noqa
from pyswagger import App # noqa
except Exception: # pragma: no cover
# Not installed or in install (not yet installed) so ignore
pass
__version__ = '0.0.5'
|
Move import in try except (forgot the others)
|
Move import in try except (forgot the others)
|
Python
|
bsd-3-clause
|
a-tal/EsiPy,Kyria/EsiPy
|
# -*- encoding: utf-8 -*-
from __future__ import absolute_import
from .client import EsiClient # noqa
from .security import EsiSecurity # noqa
try:
from pyswagger import App # noqa
except Exception: # pragma: no cover
# Not installed or in install (not yet installed) so ignore
pass
__version__ = '0.0.5'
Move import in try except (forgot the others)
|
# -*- encoding: utf-8 -*-
from __future__ import absolute_import
try:
from .client import EsiClient # noqa
from .security import EsiSecurity # noqa
from pyswagger import App # noqa
except Exception: # pragma: no cover
# Not installed or in install (not yet installed) so ignore
pass
__version__ = '0.0.5'
|
<commit_before># -*- encoding: utf-8 -*-
from __future__ import absolute_import
from .client import EsiClient # noqa
from .security import EsiSecurity # noqa
try:
from pyswagger import App # noqa
except Exception: # pragma: no cover
# Not installed or in install (not yet installed) so ignore
pass
__version__ = '0.0.5'
<commit_msg>Move import in try except (forgot the others)<commit_after>
|
# -*- encoding: utf-8 -*-
from __future__ import absolute_import
try:
from .client import EsiClient # noqa
from .security import EsiSecurity # noqa
from pyswagger import App # noqa
except Exception: # pragma: no cover
# Not installed or in install (not yet installed) so ignore
pass
__version__ = '0.0.5'
|
# -*- encoding: utf-8 -*-
from __future__ import absolute_import
from .client import EsiClient # noqa
from .security import EsiSecurity # noqa
try:
from pyswagger import App # noqa
except Exception: # pragma: no cover
# Not installed or in install (not yet installed) so ignore
pass
__version__ = '0.0.5'
Move import in try except (forgot the others)# -*- encoding: utf-8 -*-
from __future__ import absolute_import
try:
from .client import EsiClient # noqa
from .security import EsiSecurity # noqa
from pyswagger import App # noqa
except Exception: # pragma: no cover
# Not installed or in install (not yet installed) so ignore
pass
__version__ = '0.0.5'
|
<commit_before># -*- encoding: utf-8 -*-
from __future__ import absolute_import
from .client import EsiClient # noqa
from .security import EsiSecurity # noqa
try:
from pyswagger import App # noqa
except Exception: # pragma: no cover
# Not installed or in install (not yet installed) so ignore
pass
__version__ = '0.0.5'
<commit_msg>Move import in try except (forgot the others)<commit_after># -*- encoding: utf-8 -*-
from __future__ import absolute_import
try:
from .client import EsiClient # noqa
from .security import EsiSecurity # noqa
from pyswagger import App # noqa
except Exception: # pragma: no cover
# Not installed or in install (not yet installed) so ignore
pass
__version__ = '0.0.5'
|
6319303c93af973718c5e26c4d6b1d47310ff804
|
install_deps.py
|
install_deps.py
|
#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return install_deps
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
if dep_name == 'None':
continue
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
|
#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return [dep for dep in install_deps if dep != 'None']
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
|
Correct for None appearing in requirements list
|
Correct for None appearing in requirements list
|
Python
|
bsd-3-clause
|
Neurita/galton
|
#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return install_deps
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
if dep_name == 'None':
continue
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
Correct for None appearing in requirements list
|
#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return [dep for dep in install_deps if dep != 'None']
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
|
<commit_before>#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return install_deps
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
if dep_name == 'None':
continue
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
<commit_msg>Correct for None appearing in requirements list<commit_after>
|
#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return [dep for dep in install_deps if dep != 'None']
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
|
#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return install_deps
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
if dep_name == 'None':
continue
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
Correct for None appearing in requirements list#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return [dep for dep in install_deps if dep != 'None']
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
|
<commit_before>#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return install_deps
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
if dep_name == 'None':
continue
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
<commit_msg>Correct for None appearing in requirements list<commit_after>#!/usr/bin/env python
"""
Install the packages you have listed in the requirements file you input as
first argument.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import sys
import fileinput
import subprocess
from pip.req import parse_requirements
def get_requirements(*args):
"""Parse all requirements files given and return a list of the
dependencies"""
install_deps = []
try:
for fpath in args:
install_deps.extend([str(d.req) for d in parse_requirements(fpath)])
except:
print('Error reading {} file looking for dependencies.'.format(fpath))
return [dep for dep in install_deps if dep != 'None']
if __name__ == '__main__':
for line in fileinput.input():
req_filepaths = sys.argv[1:]
deps = get_requirements(*req_filepaths)
try:
for dep_name in deps:
cmd = "pip install {0}".format(dep_name)
print('#', cmd)
subprocess.check_call(cmd, shell=True)
except:
print('Error installing {}'.format(dep_name))
|
1c60cf7082672335279d5b96e83f3cb2eb57424f
|
purchase_supplier_minimum_order/models/__init__.py
|
purchase_supplier_minimum_order/models/__init__.py
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
Enforce minimum PO value for supplier.
|
Enforce minimum PO value for supplier.
|
Python
|
agpl-3.0
|
OpusVL/odoo-purchase-min-order
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Enforce minimum PO value for supplier.
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>Enforce minimum PO value for supplier.<commit_after>
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
Enforce minimum PO value for supplier.# -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
<commit_before># -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
<commit_msg>Enforce minimum PO value for supplier.<commit_after># -*- coding: utf-8 -*-
##############################################################################
#
# Set minimum order on suppliers
# Copyright (C) 2016 OpusVL (<http://opusvl.com/>)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from . import (
res_partner,
purchase,
)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
f0fad3a710e6c9ec9475fa379521ba1e8e369c02
|
fixlib/channel.py
|
fixlib/channel.py
|
import asyncore
import util
try:
import simplejson as json
except ImportError:
import json
class ChannelServer(asyncore.dispatcher):
    """Listening dispatcher that hands each accepted connection to a
    SideChannel feeding *dest*, and closes the listener when *dest*
    fires its 'close' hook.
    """
    def __init__(self, sock, dest):
        asyncore.dispatcher.__init__(self, sock)
        self.dest = dest
        # Tie this listener's lifetime to the destination: when dest
        # announces 'close', closehook() shuts the listening socket down.
        dest.register('close', self.closehook)
    def handle_accept(self):
        # accept() returns a (socket, address) pair; only the socket is
        # needed.  The SideChannel registers itself with the asyncore
        # loop, so no reference is kept here.
        client = self.accept()
        SideChannel(client[0], self.dest)
    def closehook(self, hook, data):
        # Hook callback signature is (hook_name, payload); Python 2
        # print statement — this module predates Python 3.
        print 'HOOK-CLOSE'
        self.close()
class SideChannel(asyncore.dispatcher):
    """One accepted client connection.

    Reads a single JSON message, queues it on *dest*, then replies with
    a one-off acknowledgement and closes.
    """
    def __init__(self, sock, dest):
        asyncore.dispatcher.__init__(self, sock)
        self.dest = dest
        # Becomes the pending reply once a message has been queued;
        # writable() uses it to decide when to flush.
        self.buffer = None
    def handle_close(self):
        self.close()
    def handle_read(self):
        raw = self.recv(8192)
        if not raw:
            return
        # Decode the wire payload and hand it to the destination queue.
        self.dest.queue(util.json_decode(json.loads(raw)))
        self.buffer = {'result': 'done'}
    def writable(self):
        # Truthy only after handle_read() has staged the acknowledgement.
        return self.buffer
    def handle_write(self):
        self.send(json.dumps(self.buffer))
        self.close()
|
import asyncore
import util
try:
import simplejson as json
except ImportError:
import json
class ChannelServer(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
dest.register('close', lambda x, y: self.close())
def handle_accept(self):
client = self.accept()
SideChannel(client[0], self.dest)
class SideChannel(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
self.buffer = None
def handle_close(self):
self.close()
def handle_read(self):
raw = self.recv(8192)
if raw:
msg = util.json_decode(json.loads(raw))
self.dest.queue(msg)
self.buffer = {'result': 'done'}
def writable(self):
return self.buffer
def handle_write(self):
self.send(json.dumps(self.buffer))
self.close()
|
Use a lambda as a proxy.
|
Use a lambda as a proxy.
|
Python
|
bsd-3-clause
|
djc/fixlib
|
import asyncore
import util
try:
import simplejson as json
except ImportError:
import json
class ChannelServer(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
dest.register('close', self.closehook)
def handle_accept(self):
client = self.accept()
SideChannel(client[0], self.dest)
def closehook(self, hook, data):
print 'HOOK-CLOSE'
self.close()
class SideChannel(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
self.buffer = None
def handle_close(self):
self.close()
def handle_read(self):
raw = self.recv(8192)
if raw:
msg = util.json_decode(json.loads(raw))
self.dest.queue(msg)
self.buffer = {'result': 'done'}
def writable(self):
return self.buffer
def handle_write(self):
self.send(json.dumps(self.buffer))
self.close()
Use a lambda as a proxy.
|
import asyncore
import util
try:
import simplejson as json
except ImportError:
import json
class ChannelServer(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
dest.register('close', lambda x, y: self.close())
def handle_accept(self):
client = self.accept()
SideChannel(client[0], self.dest)
class SideChannel(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
self.buffer = None
def handle_close(self):
self.close()
def handle_read(self):
raw = self.recv(8192)
if raw:
msg = util.json_decode(json.loads(raw))
self.dest.queue(msg)
self.buffer = {'result': 'done'}
def writable(self):
return self.buffer
def handle_write(self):
self.send(json.dumps(self.buffer))
self.close()
|
<commit_before>import asyncore
import util
try:
import simplejson as json
except ImportError:
import json
class ChannelServer(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
dest.register('close', self.closehook)
def handle_accept(self):
client = self.accept()
SideChannel(client[0], self.dest)
def closehook(self, hook, data):
print 'HOOK-CLOSE'
self.close()
class SideChannel(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
self.buffer = None
def handle_close(self):
self.close()
def handle_read(self):
raw = self.recv(8192)
if raw:
msg = util.json_decode(json.loads(raw))
self.dest.queue(msg)
self.buffer = {'result': 'done'}
def writable(self):
return self.buffer
def handle_write(self):
self.send(json.dumps(self.buffer))
self.close()
<commit_msg>Use a lambda as a proxy.<commit_after>
|
import asyncore
import util
try:
import simplejson as json
except ImportError:
import json
class ChannelServer(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
dest.register('close', lambda x, y: self.close())
def handle_accept(self):
client = self.accept()
SideChannel(client[0], self.dest)
class SideChannel(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
self.buffer = None
def handle_close(self):
self.close()
def handle_read(self):
raw = self.recv(8192)
if raw:
msg = util.json_decode(json.loads(raw))
self.dest.queue(msg)
self.buffer = {'result': 'done'}
def writable(self):
return self.buffer
def handle_write(self):
self.send(json.dumps(self.buffer))
self.close()
|
import asyncore
import util
try:
import simplejson as json
except ImportError:
import json
class ChannelServer(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
dest.register('close', self.closehook)
def handle_accept(self):
client = self.accept()
SideChannel(client[0], self.dest)
def closehook(self, hook, data):
print 'HOOK-CLOSE'
self.close()
class SideChannel(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
self.buffer = None
def handle_close(self):
self.close()
def handle_read(self):
raw = self.recv(8192)
if raw:
msg = util.json_decode(json.loads(raw))
self.dest.queue(msg)
self.buffer = {'result': 'done'}
def writable(self):
return self.buffer
def handle_write(self):
self.send(json.dumps(self.buffer))
self.close()
Use a lambda as a proxy.import asyncore
import util
try:
import simplejson as json
except ImportError:
import json
class ChannelServer(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
dest.register('close', lambda x, y: self.close())
def handle_accept(self):
client = self.accept()
SideChannel(client[0], self.dest)
class SideChannel(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
self.buffer = None
def handle_close(self):
self.close()
def handle_read(self):
raw = self.recv(8192)
if raw:
msg = util.json_decode(json.loads(raw))
self.dest.queue(msg)
self.buffer = {'result': 'done'}
def writable(self):
return self.buffer
def handle_write(self):
self.send(json.dumps(self.buffer))
self.close()
|
<commit_before>import asyncore
import util
try:
import simplejson as json
except ImportError:
import json
class ChannelServer(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
dest.register('close', self.closehook)
def handle_accept(self):
client = self.accept()
SideChannel(client[0], self.dest)
def closehook(self, hook, data):
print 'HOOK-CLOSE'
self.close()
class SideChannel(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
self.buffer = None
def handle_close(self):
self.close()
def handle_read(self):
raw = self.recv(8192)
if raw:
msg = util.json_decode(json.loads(raw))
self.dest.queue(msg)
self.buffer = {'result': 'done'}
def writable(self):
return self.buffer
def handle_write(self):
self.send(json.dumps(self.buffer))
self.close()
<commit_msg>Use a lambda as a proxy.<commit_after>import asyncore
import util
try:
import simplejson as json
except ImportError:
import json
class ChannelServer(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
dest.register('close', lambda x, y: self.close())
def handle_accept(self):
client = self.accept()
SideChannel(client[0], self.dest)
class SideChannel(asyncore.dispatcher):
def __init__(self, sock, dest):
asyncore.dispatcher.__init__(self, sock)
self.dest = dest
self.buffer = None
def handle_close(self):
self.close()
def handle_read(self):
raw = self.recv(8192)
if raw:
msg = util.json_decode(json.loads(raw))
self.dest.queue(msg)
self.buffer = {'result': 'done'}
def writable(self):
return self.buffer
def handle_write(self):
self.send(json.dumps(self.buffer))
self.close()
|
485bfd97d1b305ad0944192d4ea8c77a479936ad
|
util/log.py
|
util/log.py
|
import sys
from colors import Colors
class Log:
    """Minimal colored console logger.

    Each level method prefixes the message with a colored title and
    delegates to raw(), which writes to stdout and guarantees at most
    one trailing newline.
    """

    @classmethod
    def print_msg(cls, title, msg, color, new_line = True):
        # Colored "<Title>: <message>" line; NORMAL resets the terminal
        # color after the title.
        text = "{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg)
        Log.raw(text, new_line)

    @classmethod
    def msg(cls, msg, new_line = True):
        Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line)

    @classmethod
    def info(cls, msg, new_line = True):
        Log.print_msg("Info", msg, Colors.CYAN_FG, new_line)

    @classmethod
    def warn(cls, msg, new_line = True):
        Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line)

    @classmethod
    def err(cls, msg, new_line = True):
        Log.print_msg("Error", msg, Colors.RED_FG, new_line)

    @classmethod
    def raw(cls, msg, new_line = True):
        # Append a newline only when requested and not already present.
        missing_newline = new_line and msg[-1:] != "\n"
        if missing_newline:
            msg += "\n"
        sys.stdout.write("{0}".format(msg))
|
import sys
from colors import Colors
class Log:
@classmethod
def print_msg(cls, title, msg, color, new_line = True):
Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line)
@classmethod
def msg(cls, msg, new_line = True):
Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line)
@classmethod
def info(cls, msg, new_line = True):
Log.print_msg("Info", msg, Colors.CYAN_FG, new_line)
@classmethod
def warn(cls, msg, new_line = True):
Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line)
@classmethod
def note(cls, msg, new_line = True):
Log.print_msg("Note", msg, Colors.YELLOW_FG, new_line)
@classmethod
def err(cls, msg, new_line = True):
Log.print_msg("Error", msg, Colors.RED_FG, new_line)
@classmethod
def fatal(cls, msg, new_line = True):
Log.print_msg("Fatal", msg, Colors.RED_FG, new_line)
exit(1)
@classmethod
def raw(cls, msg, new_line = True):
if new_line and msg[-1:] != "\n":
msg += "\n"
sys.stdout.write("{0}".format(msg))
|
Add note and fatal to Log
|
Add note and fatal to Log
|
Python
|
mit
|
JBarberU/strawberry_py
|
import sys
from colors import Colors
class Log:
@classmethod
def print_msg(cls, title, msg, color, new_line = True):
Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line)
@classmethod
def msg(cls, msg, new_line = True):
Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line)
@classmethod
def info(cls, msg, new_line = True):
Log.print_msg("Info", msg, Colors.CYAN_FG, new_line)
@classmethod
def warn(cls, msg, new_line = True):
Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line)
@classmethod
def err(cls, msg, new_line = True):
Log.print_msg("Error", msg, Colors.RED_FG, new_line)
@classmethod
def raw(cls, msg, new_line = True):
if new_line and msg[-1:] != "\n":
msg += "\n"
sys.stdout.write("{0}".format(msg))
Add note and fatal to Log
|
import sys
from colors import Colors
class Log:
@classmethod
def print_msg(cls, title, msg, color, new_line = True):
Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line)
@classmethod
def msg(cls, msg, new_line = True):
Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line)
@classmethod
def info(cls, msg, new_line = True):
Log.print_msg("Info", msg, Colors.CYAN_FG, new_line)
@classmethod
def warn(cls, msg, new_line = True):
Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line)
@classmethod
def note(cls, msg, new_line = True):
Log.print_msg("Note", msg, Colors.YELLOW_FG, new_line)
@classmethod
def err(cls, msg, new_line = True):
Log.print_msg("Error", msg, Colors.RED_FG, new_line)
@classmethod
def fatal(cls, msg, new_line = True):
Log.print_msg("Fatal", msg, Colors.RED_FG, new_line)
exit(1)
@classmethod
def raw(cls, msg, new_line = True):
if new_line and msg[-1:] != "\n":
msg += "\n"
sys.stdout.write("{0}".format(msg))
|
<commit_before>import sys
from colors import Colors
class Log:
@classmethod
def print_msg(cls, title, msg, color, new_line = True):
Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line)
@classmethod
def msg(cls, msg, new_line = True):
Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line)
@classmethod
def info(cls, msg, new_line = True):
Log.print_msg("Info", msg, Colors.CYAN_FG, new_line)
@classmethod
def warn(cls, msg, new_line = True):
Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line)
@classmethod
def err(cls, msg, new_line = True):
Log.print_msg("Error", msg, Colors.RED_FG, new_line)
@classmethod
def raw(cls, msg, new_line = True):
if new_line and msg[-1:] != "\n":
msg += "\n"
sys.stdout.write("{0}".format(msg))
<commit_msg>Add note and fatal to Log<commit_after>
|
import sys
from colors import Colors
class Log:
@classmethod
def print_msg(cls, title, msg, color, new_line = True):
Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line)
@classmethod
def msg(cls, msg, new_line = True):
Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line)
@classmethod
def info(cls, msg, new_line = True):
Log.print_msg("Info", msg, Colors.CYAN_FG, new_line)
@classmethod
def warn(cls, msg, new_line = True):
Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line)
@classmethod
def note(cls, msg, new_line = True):
Log.print_msg("Note", msg, Colors.YELLOW_FG, new_line)
@classmethod
def err(cls, msg, new_line = True):
Log.print_msg("Error", msg, Colors.RED_FG, new_line)
@classmethod
def fatal(cls, msg, new_line = True):
Log.print_msg("Fatal", msg, Colors.RED_FG, new_line)
exit(1)
@classmethod
def raw(cls, msg, new_line = True):
if new_line and msg[-1:] != "\n":
msg += "\n"
sys.stdout.write("{0}".format(msg))
|
import sys
from colors import Colors
class Log:
@classmethod
def print_msg(cls, title, msg, color, new_line = True):
Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line)
@classmethod
def msg(cls, msg, new_line = True):
Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line)
@classmethod
def info(cls, msg, new_line = True):
Log.print_msg("Info", msg, Colors.CYAN_FG, new_line)
@classmethod
def warn(cls, msg, new_line = True):
Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line)
@classmethod
def err(cls, msg, new_line = True):
Log.print_msg("Error", msg, Colors.RED_FG, new_line)
@classmethod
def raw(cls, msg, new_line = True):
if new_line and msg[-1:] != "\n":
msg += "\n"
sys.stdout.write("{0}".format(msg))
Add note and fatal to Logimport sys
from colors import Colors
class Log:
@classmethod
def print_msg(cls, title, msg, color, new_line = True):
Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line)
@classmethod
def msg(cls, msg, new_line = True):
Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line)
@classmethod
def info(cls, msg, new_line = True):
Log.print_msg("Info", msg, Colors.CYAN_FG, new_line)
@classmethod
def warn(cls, msg, new_line = True):
Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line)
@classmethod
def note(cls, msg, new_line = True):
Log.print_msg("Note", msg, Colors.YELLOW_FG, new_line)
@classmethod
def err(cls, msg, new_line = True):
Log.print_msg("Error", msg, Colors.RED_FG, new_line)
@classmethod
def fatal(cls, msg, new_line = True):
Log.print_msg("Fatal", msg, Colors.RED_FG, new_line)
exit(1)
@classmethod
def raw(cls, msg, new_line = True):
if new_line and msg[-1:] != "\n":
msg += "\n"
sys.stdout.write("{0}".format(msg))
|
<commit_before>import sys
from colors import Colors
class Log:
@classmethod
def print_msg(cls, title, msg, color, new_line = True):
Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line)
@classmethod
def msg(cls, msg, new_line = True):
Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line)
@classmethod
def info(cls, msg, new_line = True):
Log.print_msg("Info", msg, Colors.CYAN_FG, new_line)
@classmethod
def warn(cls, msg, new_line = True):
Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line)
@classmethod
def err(cls, msg, new_line = True):
Log.print_msg("Error", msg, Colors.RED_FG, new_line)
@classmethod
def raw(cls, msg, new_line = True):
if new_line and msg[-1:] != "\n":
msg += "\n"
sys.stdout.write("{0}".format(msg))
<commit_msg>Add note and fatal to Log<commit_after>import sys
from colors import Colors
class Log:
@classmethod
def print_msg(cls, title, msg, color, new_line = True):
Log.raw("{0}{1}{2}: {3}".format(color, title, Colors.NORMAL, msg), new_line)
@classmethod
def msg(cls, msg, new_line = True):
Log.print_msg("Message", msg, Colors.MAGENTA_FG, new_line)
@classmethod
def info(cls, msg, new_line = True):
Log.print_msg("Info", msg, Colors.CYAN_FG, new_line)
@classmethod
def warn(cls, msg, new_line = True):
Log.print_msg("Warning", msg, Colors.YELLOW_FG, new_line)
@classmethod
def note(cls, msg, new_line = True):
Log.print_msg("Note", msg, Colors.YELLOW_FG, new_line)
@classmethod
def err(cls, msg, new_line = True):
Log.print_msg("Error", msg, Colors.RED_FG, new_line)
@classmethod
def fatal(cls, msg, new_line = True):
Log.print_msg("Fatal", msg, Colors.RED_FG, new_line)
exit(1)
@classmethod
def raw(cls, msg, new_line = True):
if new_line and msg[-1:] != "\n":
msg += "\n"
sys.stdout.write("{0}".format(msg))
|
f1abc6b9ca132f80c53743dfff31247beabdbd7c
|
astropy/constants/__init__.py
|
astropy/constants/__init__.py
|
"""
Contains astronomical and physical constants for use in Astropy or other
places.
The package contains a `~astropy.constants.cgs` and `~astropy.constants.si`
module that define constants in CGS and SI units, respectively. A typical use
case might be::
from astropy.constants.cgs import c
... define the mass of something you want the rest energy of as m ...
E = m*c**2
"""
from . import cgs
from . import si
from .constant import Constant
# Update the docstring to include a list of units from the si module
__doc__ += """
The following constants are defined in `~astropy.constants.cgs` and
`~astropy.constants.si` .
"""
for nm, val in si.__dict__.iteritems():
if isinstance(val, Constant):
__doc__ += ' * ' + nm + '\n ' + val.name + '\n'
del nm, val
__doc__ += '\n'
|
"""
Contains astronomical and physical constants for use in Astropy or other
places.
The package contains a `~astropy.constants.cgs` and `~astropy.constants.si`
module that define constants in CGS and SI units, respectively. A typical use
case might be::
from astropy.constants.cgs import c
... define the mass of something you want the rest energy of as m ...
E = m*c**2
"""
from . import cgs
from . import si
from .constant import Constant
# Update the docstring to include a list of units from the si module
__doc__ += """
The following constants are defined in `~astropy.constants.cgs` and
`~astropy.constants.si` .
"""
for nm, val in sorted(si.__dict__.iteritems()):
if isinstance(val, Constant):
__doc__ += ' * ' + nm + '\n ' + val.name + '\n'
del nm, val
__doc__ += '\n'
|
Sort constant names alphabetically in docstring
|
Sort constant names alphabetically in docstring
|
Python
|
bsd-3-clause
|
saimn/astropy,tbabej/astropy,bsipocz/astropy,DougBurke/astropy,pllim/astropy,mhvk/astropy,stargaser/astropy,StuartLittlefair/astropy,joergdietrich/astropy,dhomeier/astropy,larrybradley/astropy,mhvk/astropy,larrybradley/astropy,pllim/astropy,stargaser/astropy,dhomeier/astropy,DougBurke/astropy,MSeifert04/astropy,mhvk/astropy,funbaker/astropy,bsipocz/astropy,dhomeier/astropy,joergdietrich/astropy,StuartLittlefair/astropy,astropy/astropy,pllim/astropy,joergdietrich/astropy,bsipocz/astropy,dhomeier/astropy,larrybradley/astropy,tbabej/astropy,saimn/astropy,astropy/astropy,lpsinger/astropy,joergdietrich/astropy,StuartLittlefair/astropy,stargaser/astropy,aleksandr-bakanov/astropy,tbabej/astropy,lpsinger/astropy,saimn/astropy,lpsinger/astropy,AustereCuriosity/astropy,DougBurke/astropy,mhvk/astropy,mhvk/astropy,AustereCuriosity/astropy,stargaser/astropy,saimn/astropy,StuartLittlefair/astropy,larrybradley/astropy,kelle/astropy,aleksandr-bakanov/astropy,pllim/astropy,AustereCuriosity/astropy,astropy/astropy,kelle/astropy,tbabej/astropy,funbaker/astropy,funbaker/astropy,lpsinger/astropy,larrybradley/astropy,MSeifert04/astropy,astropy/astropy,AustereCuriosity/astropy,lpsinger/astropy,aleksandr-bakanov/astropy,astropy/astropy,kelle/astropy,DougBurke/astropy,dhomeier/astropy,MSeifert04/astropy,MSeifert04/astropy,joergdietrich/astropy,tbabej/astropy,AustereCuriosity/astropy,kelle/astropy,kelle/astropy,funbaker/astropy,aleksandr-bakanov/astropy,bsipocz/astropy,StuartLittlefair/astropy,pllim/astropy,saimn/astropy
|
"""
Contains astronomical and physical constants for use in Astropy or other
places.
The package contains a `~astropy.constants.cgs` and `~astropy.constants.si`
module that define constants in CGS and SI units, respectively. A typical use
case might be::
from astropy.constants.cgs import c
... define the mass of something you want the rest energy of as m ...
E = m*c**2
"""
from . import cgs
from . import si
from .constant import Constant
# Update the docstring to include a list of units from the si module
__doc__ += """
The following constants are defined in `~astropy.constants.cgs` and
`~astropy.constants.si` .
"""
for nm, val in si.__dict__.iteritems():
if isinstance(val, Constant):
__doc__ += ' * ' + nm + '\n ' + val.name + '\n'
del nm, val
__doc__ += '\n'
Sort constant names alphabetically in docstring
|
"""
Contains astronomical and physical constants for use in Astropy or other
places.
The package contains a `~astropy.constants.cgs` and `~astropy.constants.si`
module that define constants in CGS and SI units, respectively. A typical use
case might be::
from astropy.constants.cgs import c
... define the mass of something you want the rest energy of as m ...
E = m*c**2
"""
from . import cgs
from . import si
from .constant import Constant
# Update the docstring to include a list of units from the si module
__doc__ += """
The following constants are defined in `~astropy.constants.cgs` and
`~astropy.constants.si` .
"""
for nm, val in sorted(si.__dict__.iteritems()):
if isinstance(val, Constant):
__doc__ += ' * ' + nm + '\n ' + val.name + '\n'
del nm, val
__doc__ += '\n'
|
<commit_before>"""
Contains astronomical and physical constants for use in Astropy or other
places.
The package contains a `~astropy.constants.cgs` and `~astropy.constants.si`
module that define constants in CGS and SI units, respectively. A typical use
case might be::
from astropy.constants.cgs import c
... define the mass of something you want the rest energy of as m ...
E = m*c**2
"""
from . import cgs
from . import si
from .constant import Constant
# Update the docstring to include a list of units from the si module
__doc__ += """
The following constants are defined in `~astropy.constants.cgs` and
`~astropy.constants.si` .
"""
for nm, val in si.__dict__.iteritems():
if isinstance(val, Constant):
__doc__ += ' * ' + nm + '\n ' + val.name + '\n'
del nm, val
__doc__ += '\n'
<commit_msg>Sort constant names alphabetically in docstring<commit_after>
|
"""
Contains astronomical and physical constants for use in Astropy or other
places.
The package contains a `~astropy.constants.cgs` and `~astropy.constants.si`
module that define constants in CGS and SI units, respectively. A typical use
case might be::
from astropy.constants.cgs import c
... define the mass of something you want the rest energy of as m ...
E = m*c**2
"""
from . import cgs
from . import si
from .constant import Constant
# Update the docstring to include a list of units from the si module
__doc__ += """
The following constants are defined in `~astropy.constants.cgs` and
`~astropy.constants.si` .
"""
for nm, val in sorted(si.__dict__.iteritems()):
if isinstance(val, Constant):
__doc__ += ' * ' + nm + '\n ' + val.name + '\n'
del nm, val
__doc__ += '\n'
|
"""
Contains astronomical and physical constants for use in Astropy or other
places.
The package contains a `~astropy.constants.cgs` and `~astropy.constants.si`
module that define constants in CGS and SI units, respectively. A typical use
case might be::
from astropy.constants.cgs import c
... define the mass of something you want the rest energy of as m ...
E = m*c**2
"""
from . import cgs
from . import si
from .constant import Constant
# Update the docstring to include a list of units from the si module
__doc__ += """
The following constants are defined in `~astropy.constants.cgs` and
`~astropy.constants.si` .
"""
for nm, val in si.__dict__.iteritems():
if isinstance(val, Constant):
__doc__ += ' * ' + nm + '\n ' + val.name + '\n'
del nm, val
__doc__ += '\n'
Sort constant names alphabetically in docstring"""
Contains astronomical and physical constants for use in Astropy or other
places.
The package contains a `~astropy.constants.cgs` and `~astropy.constants.si`
module that define constants in CGS and SI units, respectively. A typical use
case might be::
from astropy.constants.cgs import c
... define the mass of something you want the rest energy of as m ...
E = m*c**2
"""
from . import cgs
from . import si
from .constant import Constant
# Update the docstring to include a list of units from the si module
__doc__ += """
The following constants are defined in `~astropy.constants.cgs` and
`~astropy.constants.si` .
"""
for nm, val in sorted(si.__dict__.iteritems()):
if isinstance(val, Constant):
__doc__ += ' * ' + nm + '\n ' + val.name + '\n'
del nm, val
__doc__ += '\n'
|
<commit_before>"""
Contains astronomical and physical constants for use in Astropy or other
places.
The package contains a `~astropy.constants.cgs` and `~astropy.constants.si`
module that define constants in CGS and SI units, respectively. A typical use
case might be::
from astropy.constants.cgs import c
... define the mass of something you want the rest energy of as m ...
E = m*c**2
"""
from . import cgs
from . import si
from .constant import Constant
# Update the docstring to include a list of units from the si module
__doc__ += """
The following constants are defined in `~astropy.constants.cgs` and
`~astropy.constants.si` .
"""
for nm, val in si.__dict__.iteritems():
if isinstance(val, Constant):
__doc__ += ' * ' + nm + '\n ' + val.name + '\n'
del nm, val
__doc__ += '\n'
<commit_msg>Sort constant names alphabetically in docstring<commit_after>"""
Contains astronomical and physical constants for use in Astropy or other
places.
The package contains a `~astropy.constants.cgs` and `~astropy.constants.si`
module that define constants in CGS and SI units, respectively. A typical use
case might be::
from astropy.constants.cgs import c
... define the mass of something you want the rest energy of as m ...
E = m*c**2
"""
from . import cgs
from . import si
from .constant import Constant
# Update the docstring to include a list of units from the si module
__doc__ += """
The following constants are defined in `~astropy.constants.cgs` and
`~astropy.constants.si` .
"""
for nm, val in sorted(si.__dict__.iteritems()):
if isinstance(val, Constant):
__doc__ += ' * ' + nm + '\n ' + val.name + '\n'
del nm, val
__doc__ += '\n'
|
aa370f5eb39b587d71e511cb618951875896e75a
|
ckanext/ckanext-apicatalog_scheming/ckanext/apicatalog_scheming/tests/test_plugin.py
|
ckanext/ckanext-apicatalog_scheming/ckanext/apicatalog_scheming/tests/test_plugin.py
|
import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
|
import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_not_allowed_organization_user_should_not_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
Organization(users=org2_users)
Dataset(private=True, owner_org=organization1['id'], allowed_organizations="")
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == []
|
Add not allowed organization test
|
LIKA-410: Add not allowed organization test
|
Python
|
mit
|
vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog,vrk-kpa/api-catalog
|
import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
LIKA-410: Add not allowed organization test
|
import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_not_allowed_organization_user_should_not_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
Organization(users=org2_users)
Dataset(private=True, owner_org=organization1['id'], allowed_organizations="")
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == []
|
<commit_before>import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
<commit_msg>LIKA-410: Add not allowed organization test<commit_after>
|
import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_not_allowed_organization_user_should_not_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
Organization(users=org2_users)
Dataset(private=True, owner_org=organization1['id'], allowed_organizations="")
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == []
|
import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
LIKA-410: Add not allowed organization testimport pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_not_allowed_organization_user_should_not_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
Organization(users=org2_users)
Dataset(private=True, owner_org=organization1['id'], allowed_organizations="")
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == []
|
<commit_before>import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
<commit_msg>LIKA-410: Add not allowed organization test<commit_after>import pytest
from ckan.tests.factories import User, Dataset, Organization
from ckan.plugins.toolkit import get_action
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_allowed_organization_user_should_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
organization2 = Organization(users=org2_users)
dataset = Dataset(private=True, owner_org=organization1['id'], allowed_organizations=organization2['name'])
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == [dataset['name']]
@pytest.mark.usefixtures('with_plugins', 'clean_db', 'clean_index')
def test_not_allowed_organization_user_should_not_see_subsystem():
organization1 = Organization()
user2 = User()
org2_users = [{"name": user2["name"], "capacity": "admin"}]
Organization(users=org2_users)
Dataset(private=True, owner_org=organization1['id'], allowed_organizations="")
results = get_action(u"package_search")(
{u"user": user2["name"]}, {u"include_private": True}
)["results"]
names = [r["name"] for r in results]
assert names == []
|
0ee42ac3b80893557691d722eda207733289c97c
|
micropsi_core/world/minecraft/spockplugin.py
|
micropsi_core/world/minecraft/spockplugin.py
|
import logging
from spock.mcp import mcdata, mcpacket
from spock.mcmap import smpmap
from micropsi_core.world.minecraft.psidispatcher import PsiDispatcher
from spock.utils import pl_announce
@pl_announce('Micropsi')
class MicropsiPlugin(object):
def __init__(self, ploader, settings):
self.worldadapter = settings['worldadapter']
self.worldadapter.spockplugin = self
self.net = ploader.requires('Net')
self.event = ploader.requires('Event')
self.world = ploader.requires('World')
self.clientinfo = ploader.requires('ClientInfo')
#MicroPsi Datatargets
self.psi_dispatcher = PsiDispatcher(self)
self.move_x = 0
self.move_z = 0
self.move_x_ = 0
self.move_z_ = 0
def move(self, position=None):
if not (self.net.connected and self.net.proto_state == mcdata.PLAY_STATE):
return
if position is None:
position = self.client_info.position
self.net.push(mcpacket.Packet(
ident='PLAY>Player Position and Look',
data=position
))
|
import logging
from spock.mcp import mcdata, mcpacket
from spock.mcmap import smpmap
from micropsi_core.world.minecraft.psidispatcher import PsiDispatcher
from spock.utils import pl_announce
@pl_announce('Micropsi')
class MicropsiPlugin(object):
def __init__(self, ploader, settings):
self.worldadapter = settings['worldadapter']
self.worldadapter.spockplugin = self
self.net = ploader.requires('Net')
self.event = ploader.requires('Event')
self.world = ploader.requires('World')
self.clientinfo = ploader.requires('ClientInfo')
#MicroPsi Datatargets
self.psi_dispatcher = PsiDispatcher(self)
self.move_x = 0
self.move_z = 0
self.move_x_ = 0
self.move_z_ = 0
def move(self, position=None):
if not (self.net.connected and self.net.proto_state == mcdata.PLAY_STATE):
return
self.clientinfo.position = position
|
Move now sets the client position and allows the movement plugin do its thing
|
Move now sets the client position and allows the
movement plugin do its thing
|
Python
|
mit
|
ianupright/micropsi2,ianupright/micropsi2,printedheart/micropsi2,ianupright/micropsi2,printedheart/micropsi2,printedheart/micropsi2
|
import logging
from spock.mcp import mcdata, mcpacket
from spock.mcmap import smpmap
from micropsi_core.world.minecraft.psidispatcher import PsiDispatcher
from spock.utils import pl_announce
@pl_announce('Micropsi')
class MicropsiPlugin(object):
def __init__(self, ploader, settings):
self.worldadapter = settings['worldadapter']
self.worldadapter.spockplugin = self
self.net = ploader.requires('Net')
self.event = ploader.requires('Event')
self.world = ploader.requires('World')
self.clientinfo = ploader.requires('ClientInfo')
#MicroPsi Datatargets
self.psi_dispatcher = PsiDispatcher(self)
self.move_x = 0
self.move_z = 0
self.move_x_ = 0
self.move_z_ = 0
def move(self, position=None):
if not (self.net.connected and self.net.proto_state == mcdata.PLAY_STATE):
return
if position is None:
position = self.client_info.position
self.net.push(mcpacket.Packet(
ident='PLAY>Player Position and Look',
data=position
))Move now sets the client position and allows the
movement plugin do its thing
|
import logging
from spock.mcp import mcdata, mcpacket
from spock.mcmap import smpmap
from micropsi_core.world.minecraft.psidispatcher import PsiDispatcher
from spock.utils import pl_announce
@pl_announce('Micropsi')
class MicropsiPlugin(object):
def __init__(self, ploader, settings):
self.worldadapter = settings['worldadapter']
self.worldadapter.spockplugin = self
self.net = ploader.requires('Net')
self.event = ploader.requires('Event')
self.world = ploader.requires('World')
self.clientinfo = ploader.requires('ClientInfo')
#MicroPsi Datatargets
self.psi_dispatcher = PsiDispatcher(self)
self.move_x = 0
self.move_z = 0
self.move_x_ = 0
self.move_z_ = 0
def move(self, position=None):
if not (self.net.connected and self.net.proto_state == mcdata.PLAY_STATE):
return
self.clientinfo.position = position
|
<commit_before>import logging
from spock.mcp import mcdata, mcpacket
from spock.mcmap import smpmap
from micropsi_core.world.minecraft.psidispatcher import PsiDispatcher
from spock.utils import pl_announce
@pl_announce('Micropsi')
class MicropsiPlugin(object):
def __init__(self, ploader, settings):
self.worldadapter = settings['worldadapter']
self.worldadapter.spockplugin = self
self.net = ploader.requires('Net')
self.event = ploader.requires('Event')
self.world = ploader.requires('World')
self.clientinfo = ploader.requires('ClientInfo')
#MicroPsi Datatargets
self.psi_dispatcher = PsiDispatcher(self)
self.move_x = 0
self.move_z = 0
self.move_x_ = 0
self.move_z_ = 0
def move(self, position=None):
if not (self.net.connected and self.net.proto_state == mcdata.PLAY_STATE):
return
if position is None:
position = self.client_info.position
self.net.push(mcpacket.Packet(
ident='PLAY>Player Position and Look',
data=position
))<commit_msg>Move now sets the client position and allows the
movement plugin do its thing<commit_after>
|
import logging
from spock.mcp import mcdata, mcpacket
from spock.mcmap import smpmap
from micropsi_core.world.minecraft.psidispatcher import PsiDispatcher
from spock.utils import pl_announce
@pl_announce('Micropsi')
class MicropsiPlugin(object):
def __init__(self, ploader, settings):
self.worldadapter = settings['worldadapter']
self.worldadapter.spockplugin = self
self.net = ploader.requires('Net')
self.event = ploader.requires('Event')
self.world = ploader.requires('World')
self.clientinfo = ploader.requires('ClientInfo')
#MicroPsi Datatargets
self.psi_dispatcher = PsiDispatcher(self)
self.move_x = 0
self.move_z = 0
self.move_x_ = 0
self.move_z_ = 0
def move(self, position=None):
if not (self.net.connected and self.net.proto_state == mcdata.PLAY_STATE):
return
self.clientinfo.position = position
|
import logging
from spock.mcp import mcdata, mcpacket
from spock.mcmap import smpmap
from micropsi_core.world.minecraft.psidispatcher import PsiDispatcher
from spock.utils import pl_announce
@pl_announce('Micropsi')
class MicropsiPlugin(object):
def __init__(self, ploader, settings):
self.worldadapter = settings['worldadapter']
self.worldadapter.spockplugin = self
self.net = ploader.requires('Net')
self.event = ploader.requires('Event')
self.world = ploader.requires('World')
self.clientinfo = ploader.requires('ClientInfo')
#MicroPsi Datatargets
self.psi_dispatcher = PsiDispatcher(self)
self.move_x = 0
self.move_z = 0
self.move_x_ = 0
self.move_z_ = 0
def move(self, position=None):
if not (self.net.connected and self.net.proto_state == mcdata.PLAY_STATE):
return
if position is None:
position = self.client_info.position
self.net.push(mcpacket.Packet(
ident='PLAY>Player Position and Look',
data=position
))Move now sets the client position and allows the
movement plugin do its thingimport logging
from spock.mcp import mcdata, mcpacket
from spock.mcmap import smpmap
from micropsi_core.world.minecraft.psidispatcher import PsiDispatcher
from spock.utils import pl_announce
@pl_announce('Micropsi')
class MicropsiPlugin(object):
def __init__(self, ploader, settings):
self.worldadapter = settings['worldadapter']
self.worldadapter.spockplugin = self
self.net = ploader.requires('Net')
self.event = ploader.requires('Event')
self.world = ploader.requires('World')
self.clientinfo = ploader.requires('ClientInfo')
#MicroPsi Datatargets
self.psi_dispatcher = PsiDispatcher(self)
self.move_x = 0
self.move_z = 0
self.move_x_ = 0
self.move_z_ = 0
def move(self, position=None):
if not (self.net.connected and self.net.proto_state == mcdata.PLAY_STATE):
return
self.clientinfo.position = position
|
<commit_before>import logging
from spock.mcp import mcdata, mcpacket
from spock.mcmap import smpmap
from micropsi_core.world.minecraft.psidispatcher import PsiDispatcher
from spock.utils import pl_announce
@pl_announce('Micropsi')
class MicropsiPlugin(object):
def __init__(self, ploader, settings):
self.worldadapter = settings['worldadapter']
self.worldadapter.spockplugin = self
self.net = ploader.requires('Net')
self.event = ploader.requires('Event')
self.world = ploader.requires('World')
self.clientinfo = ploader.requires('ClientInfo')
#MicroPsi Datatargets
self.psi_dispatcher = PsiDispatcher(self)
self.move_x = 0
self.move_z = 0
self.move_x_ = 0
self.move_z_ = 0
def move(self, position=None):
if not (self.net.connected and self.net.proto_state == mcdata.PLAY_STATE):
return
if position is None:
position = self.client_info.position
self.net.push(mcpacket.Packet(
ident='PLAY>Player Position and Look',
data=position
))<commit_msg>Move now sets the client position and allows the
movement plugin do its thing<commit_after>import logging
from spock.mcp import mcdata, mcpacket
from spock.mcmap import smpmap
from micropsi_core.world.minecraft.psidispatcher import PsiDispatcher
from spock.utils import pl_announce
@pl_announce('Micropsi')
class MicropsiPlugin(object):
def __init__(self, ploader, settings):
self.worldadapter = settings['worldadapter']
self.worldadapter.spockplugin = self
self.net = ploader.requires('Net')
self.event = ploader.requires('Event')
self.world = ploader.requires('World')
self.clientinfo = ploader.requires('ClientInfo')
#MicroPsi Datatargets
self.psi_dispatcher = PsiDispatcher(self)
self.move_x = 0
self.move_z = 0
self.move_x_ = 0
self.move_z_ = 0
def move(self, position=None):
if not (self.net.connected and self.net.proto_state == mcdata.PLAY_STATE):
return
self.clientinfo.position = position
|
90d1ff207cce93585b52e5d107efe7221ed37175
|
test_project/djmercadopago_test_app_settings.SAMPLE.py
|
test_project/djmercadopago_test_app_settings.SAMPLE.py
|
DJMERCADOPAGO_CLIENT_ID = 'your-mp-client-id'
DJMERCADOPAGO_CLIENTE_SECRET = 'your-mp-secret'
DJMERCADOPAGO_SANDBOX_MODE = True
DJMERCADOPAGO_CHECKOUT_PREFERENCE_BUILDER = \
'full.path.to.checkout.builder.implementation.function'
|
DJMERCADOPAGO_CLIENT_ID = 'your-mp-client-id'
DJMERCADOPAGO_CLIENTE_SECRET = 'your-mp-secret'
DJMERCADOPAGO_SANDBOX_MODE = True
DJMERCADOPAGO_CHECKOUT_PREFERENCE_UPDATER_FUNCTION = \
'full.path.to.checkout.builder.implementation.function'
|
Test project: update settings names
|
Test project: update settings names
|
Python
|
bsd-3-clause
|
data-tsunami/django-mercadopago,data-tsunami/django-mercadopago
|
DJMERCADOPAGO_CLIENT_ID = 'your-mp-client-id'
DJMERCADOPAGO_CLIENTE_SECRET = 'your-mp-secret'
DJMERCADOPAGO_SANDBOX_MODE = True
DJMERCADOPAGO_CHECKOUT_PREFERENCE_BUILDER = \
'full.path.to.checkout.builder.implementation.function'
Test project: update settings names
|
DJMERCADOPAGO_CLIENT_ID = 'your-mp-client-id'
DJMERCADOPAGO_CLIENTE_SECRET = 'your-mp-secret'
DJMERCADOPAGO_SANDBOX_MODE = True
DJMERCADOPAGO_CHECKOUT_PREFERENCE_UPDATER_FUNCTION = \
'full.path.to.checkout.builder.implementation.function'
|
<commit_before>DJMERCADOPAGO_CLIENT_ID = 'your-mp-client-id'
DJMERCADOPAGO_CLIENTE_SECRET = 'your-mp-secret'
DJMERCADOPAGO_SANDBOX_MODE = True
DJMERCADOPAGO_CHECKOUT_PREFERENCE_BUILDER = \
'full.path.to.checkout.builder.implementation.function'
<commit_msg>Test project: update settings names<commit_after>
|
DJMERCADOPAGO_CLIENT_ID = 'your-mp-client-id'
DJMERCADOPAGO_CLIENTE_SECRET = 'your-mp-secret'
DJMERCADOPAGO_SANDBOX_MODE = True
DJMERCADOPAGO_CHECKOUT_PREFERENCE_UPDATER_FUNCTION = \
'full.path.to.checkout.builder.implementation.function'
|
DJMERCADOPAGO_CLIENT_ID = 'your-mp-client-id'
DJMERCADOPAGO_CLIENTE_SECRET = 'your-mp-secret'
DJMERCADOPAGO_SANDBOX_MODE = True
DJMERCADOPAGO_CHECKOUT_PREFERENCE_BUILDER = \
'full.path.to.checkout.builder.implementation.function'
Test project: update settings namesDJMERCADOPAGO_CLIENT_ID = 'your-mp-client-id'
DJMERCADOPAGO_CLIENTE_SECRET = 'your-mp-secret'
DJMERCADOPAGO_SANDBOX_MODE = True
DJMERCADOPAGO_CHECKOUT_PREFERENCE_UPDATER_FUNCTION = \
'full.path.to.checkout.builder.implementation.function'
|
<commit_before>DJMERCADOPAGO_CLIENT_ID = 'your-mp-client-id'
DJMERCADOPAGO_CLIENTE_SECRET = 'your-mp-secret'
DJMERCADOPAGO_SANDBOX_MODE = True
DJMERCADOPAGO_CHECKOUT_PREFERENCE_BUILDER = \
'full.path.to.checkout.builder.implementation.function'
<commit_msg>Test project: update settings names<commit_after>DJMERCADOPAGO_CLIENT_ID = 'your-mp-client-id'
DJMERCADOPAGO_CLIENTE_SECRET = 'your-mp-secret'
DJMERCADOPAGO_SANDBOX_MODE = True
DJMERCADOPAGO_CHECKOUT_PREFERENCE_UPDATER_FUNCTION = \
'full.path.to.checkout.builder.implementation.function'
|
9185d882dc5fc7131b90d3b93dff8b6603538a3d
|
app/cogs/twitch_emotes.py
|
app/cogs/twitch_emotes.py
|
from io import BytesIO
import requests
from discord.ext import commands
from discord.ext.commands import Bot
TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'
class TwitchEmotes:
def __init__(self, bot: Bot):
self.bot = bot
r = requests.get(TWITCH_EMOTES_API)
emote_data = r.json()
emote_template = emote_data['template']['small']
emote_ids = {name: info['image_id'] for name, info in
emote_data['emotes'].items()}
emote_cache = {}
@bot.listen('on_message')
async def respond(message):
if message.author == bot.user:
return
text = message.content
if text in emote_ids:
if text not in emote_cache:
url = emote_template.replace('{image_id}',
str(emote_ids[text]))
emote_img = requests.get(url).content
emote_cache[text] = emote_img
data = BytesIO(emote_cache[text])
filename = '%s.png' % text
await bot.send_file(message.channel, data, filename=filename)
def setup(bot: Bot):
bot.add_cog(TwitchEmotes(bot))
|
from io import BytesIO
import logging
import requests
from discord.ext import commands
from discord.ext.commands import Bot
TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'
logger = logging.getLogger(__name__)
class TwitchEmotes:
def __init__(self, bot: Bot):
self.bot = bot
r = requests.get(TWITCH_EMOTES_API)
emote_data = r.json()
emote_template = emote_data['template']['small']
emote_ids = {name: info['image_id'] for name, info in
emote_data['emotes'].items()}
emote_cache = {}
logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids))
logger.info('Using template: %s' % emote_template)
@bot.listen('on_message')
async def respond(message):
if message.author == bot.user:
return
text = message.content
if text in emote_ids:
if text not in emote_cache:
url = emote_template.replace('{image_id}',
str(emote_ids[text]))
logger.info('Fetching emote %s from %s' % (text, url))
emote_img = requests.get(url).content
emote_cache[text] = emote_img
data = BytesIO(emote_cache[text])
filename = '%s.png' % text
await bot.send_file(message.channel, data, filename=filename)
def setup(bot: Bot):
bot.add_cog(TwitchEmotes(bot))
|
Add logging to Twitch emotes module
|
Add logging to Twitch emotes module
|
Python
|
mit
|
andrewlin16/duckbot,andrewlin16/duckbot
|
from io import BytesIO
import requests
from discord.ext import commands
from discord.ext.commands import Bot
TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'
class TwitchEmotes:
def __init__(self, bot: Bot):
self.bot = bot
r = requests.get(TWITCH_EMOTES_API)
emote_data = r.json()
emote_template = emote_data['template']['small']
emote_ids = {name: info['image_id'] for name, info in
emote_data['emotes'].items()}
emote_cache = {}
@bot.listen('on_message')
async def respond(message):
if message.author == bot.user:
return
text = message.content
if text in emote_ids:
if text not in emote_cache:
url = emote_template.replace('{image_id}',
str(emote_ids[text]))
emote_img = requests.get(url).content
emote_cache[text] = emote_img
data = BytesIO(emote_cache[text])
filename = '%s.png' % text
await bot.send_file(message.channel, data, filename=filename)
def setup(bot: Bot):
bot.add_cog(TwitchEmotes(bot))
Add logging to Twitch emotes module
|
from io import BytesIO
import logging
import requests
from discord.ext import commands
from discord.ext.commands import Bot
TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'
logger = logging.getLogger(__name__)
class TwitchEmotes:
def __init__(self, bot: Bot):
self.bot = bot
r = requests.get(TWITCH_EMOTES_API)
emote_data = r.json()
emote_template = emote_data['template']['small']
emote_ids = {name: info['image_id'] for name, info in
emote_data['emotes'].items()}
emote_cache = {}
logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids))
logger.info('Using template: %s' % emote_template)
@bot.listen('on_message')
async def respond(message):
if message.author == bot.user:
return
text = message.content
if text in emote_ids:
if text not in emote_cache:
url = emote_template.replace('{image_id}',
str(emote_ids[text]))
logger.info('Fetching emote %s from %s' % (text, url))
emote_img = requests.get(url).content
emote_cache[text] = emote_img
data = BytesIO(emote_cache[text])
filename = '%s.png' % text
await bot.send_file(message.channel, data, filename=filename)
def setup(bot: Bot):
bot.add_cog(TwitchEmotes(bot))
|
<commit_before>from io import BytesIO
import requests
from discord.ext import commands
from discord.ext.commands import Bot
TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'
class TwitchEmotes:
def __init__(self, bot: Bot):
self.bot = bot
r = requests.get(TWITCH_EMOTES_API)
emote_data = r.json()
emote_template = emote_data['template']['small']
emote_ids = {name: info['image_id'] for name, info in
emote_data['emotes'].items()}
emote_cache = {}
@bot.listen('on_message')
async def respond(message):
if message.author == bot.user:
return
text = message.content
if text in emote_ids:
if text not in emote_cache:
url = emote_template.replace('{image_id}',
str(emote_ids[text]))
emote_img = requests.get(url).content
emote_cache[text] = emote_img
data = BytesIO(emote_cache[text])
filename = '%s.png' % text
await bot.send_file(message.channel, data, filename=filename)
def setup(bot: Bot):
bot.add_cog(TwitchEmotes(bot))
<commit_msg>Add logging to Twitch emotes module<commit_after>
|
from io import BytesIO
import logging
import requests
from discord.ext import commands
from discord.ext.commands import Bot
TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'
logger = logging.getLogger(__name__)
class TwitchEmotes:
def __init__(self, bot: Bot):
self.bot = bot
r = requests.get(TWITCH_EMOTES_API)
emote_data = r.json()
emote_template = emote_data['template']['small']
emote_ids = {name: info['image_id'] for name, info in
emote_data['emotes'].items()}
emote_cache = {}
logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids))
logger.info('Using template: %s' % emote_template)
@bot.listen('on_message')
async def respond(message):
if message.author == bot.user:
return
text = message.content
if text in emote_ids:
if text not in emote_cache:
url = emote_template.replace('{image_id}',
str(emote_ids[text]))
logger.info('Fetching emote %s from %s' % (text, url))
emote_img = requests.get(url).content
emote_cache[text] = emote_img
data = BytesIO(emote_cache[text])
filename = '%s.png' % text
await bot.send_file(message.channel, data, filename=filename)
def setup(bot: Bot):
bot.add_cog(TwitchEmotes(bot))
|
from io import BytesIO
import requests
from discord.ext import commands
from discord.ext.commands import Bot
TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'
class TwitchEmotes:
def __init__(self, bot: Bot):
self.bot = bot
r = requests.get(TWITCH_EMOTES_API)
emote_data = r.json()
emote_template = emote_data['template']['small']
emote_ids = {name: info['image_id'] for name, info in
emote_data['emotes'].items()}
emote_cache = {}
@bot.listen('on_message')
async def respond(message):
if message.author == bot.user:
return
text = message.content
if text in emote_ids:
if text not in emote_cache:
url = emote_template.replace('{image_id}',
str(emote_ids[text]))
emote_img = requests.get(url).content
emote_cache[text] = emote_img
data = BytesIO(emote_cache[text])
filename = '%s.png' % text
await bot.send_file(message.channel, data, filename=filename)
def setup(bot: Bot):
bot.add_cog(TwitchEmotes(bot))
Add logging to Twitch emotes modulefrom io import BytesIO
import logging
import requests
from discord.ext import commands
from discord.ext.commands import Bot
TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'
logger = logging.getLogger(__name__)
class TwitchEmotes:
def __init__(self, bot: Bot):
self.bot = bot
r = requests.get(TWITCH_EMOTES_API)
emote_data = r.json()
emote_template = emote_data['template']['small']
emote_ids = {name: info['image_id'] for name, info in
emote_data['emotes'].items()}
emote_cache = {}
logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids))
logger.info('Using template: %s' % emote_template)
@bot.listen('on_message')
async def respond(message):
if message.author == bot.user:
return
text = message.content
if text in emote_ids:
if text not in emote_cache:
url = emote_template.replace('{image_id}',
str(emote_ids[text]))
logger.info('Fetching emote %s from %s' % (text, url))
emote_img = requests.get(url).content
emote_cache[text] = emote_img
data = BytesIO(emote_cache[text])
filename = '%s.png' % text
await bot.send_file(message.channel, data, filename=filename)
def setup(bot: Bot):
bot.add_cog(TwitchEmotes(bot))
|
<commit_before>from io import BytesIO
import requests
from discord.ext import commands
from discord.ext.commands import Bot
TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'
class TwitchEmotes:
def __init__(self, bot: Bot):
self.bot = bot
r = requests.get(TWITCH_EMOTES_API)
emote_data = r.json()
emote_template = emote_data['template']['small']
emote_ids = {name: info['image_id'] for name, info in
emote_data['emotes'].items()}
emote_cache = {}
@bot.listen('on_message')
async def respond(message):
if message.author == bot.user:
return
text = message.content
if text in emote_ids:
if text not in emote_cache:
url = emote_template.replace('{image_id}',
str(emote_ids[text]))
emote_img = requests.get(url).content
emote_cache[text] = emote_img
data = BytesIO(emote_cache[text])
filename = '%s.png' % text
await bot.send_file(message.channel, data, filename=filename)
def setup(bot: Bot):
bot.add_cog(TwitchEmotes(bot))
<commit_msg>Add logging to Twitch emotes module<commit_after>from io import BytesIO
import logging
import requests
from discord.ext import commands
from discord.ext.commands import Bot
TWITCH_EMOTES_API = 'https://twitchemotes.com/api_cache/v2/global.json'
logger = logging.getLogger(__name__)
class TwitchEmotes:
def __init__(self, bot: Bot):
self.bot = bot
r = requests.get(TWITCH_EMOTES_API)
emote_data = r.json()
emote_template = emote_data['template']['small']
emote_ids = {name: info['image_id'] for name, info in
emote_data['emotes'].items()}
emote_cache = {}
logger.info('Got %d emotes from Twitchemotes.com API' % len(emote_ids))
logger.info('Using template: %s' % emote_template)
@bot.listen('on_message')
async def respond(message):
if message.author == bot.user:
return
text = message.content
if text in emote_ids:
if text not in emote_cache:
url = emote_template.replace('{image_id}',
str(emote_ids[text]))
logger.info('Fetching emote %s from %s' % (text, url))
emote_img = requests.get(url).content
emote_cache[text] = emote_img
data = BytesIO(emote_cache[text])
filename = '%s.png' % text
await bot.send_file(message.channel, data, filename=filename)
def setup(bot: Bot):
bot.add_cog(TwitchEmotes(bot))
|
6c0999f2593da49c0fa78d035a049111a5676836
|
chatterbot_corpus/__init__.py
|
chatterbot_corpus/__init__.py
|
"""
A machine readable multilingual dialog corpus.
"""
from .corpus import Corpus
__version__ = '0.0.1'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/chatterbot-corpus'
|
"""
A machine readable multilingual dialog corpus.
"""
from .corpus import Corpus
__version__ = '0.0.2'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/chatterbot-corpus'
|
Update release version to 0.0.2
|
Update release version to 0.0.2
|
Python
|
bsd-3-clause
|
gunthercox/chatterbot-corpus
|
"""
A machine readable multilingual dialog corpus.
"""
from .corpus import Corpus
__version__ = '0.0.1'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/chatterbot-corpus'
Update release version to 0.0.2
|
"""
A machine readable multilingual dialog corpus.
"""
from .corpus import Corpus
__version__ = '0.0.2'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/chatterbot-corpus'
|
<commit_before>"""
A machine readable multilingual dialog corpus.
"""
from .corpus import Corpus
__version__ = '0.0.1'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/chatterbot-corpus'
<commit_msg>Update release version to 0.0.2<commit_after>
|
"""
A machine readable multilingual dialog corpus.
"""
from .corpus import Corpus
__version__ = '0.0.2'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/chatterbot-corpus'
|
"""
A machine readable multilingual dialog corpus.
"""
from .corpus import Corpus
__version__ = '0.0.1'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/chatterbot-corpus'
Update release version to 0.0.2"""
A machine readable multilingual dialog corpus.
"""
from .corpus import Corpus
__version__ = '0.0.2'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/chatterbot-corpus'
|
<commit_before>"""
A machine readable multilingual dialog corpus.
"""
from .corpus import Corpus
__version__ = '0.0.1'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/chatterbot-corpus'
<commit_msg>Update release version to 0.0.2<commit_after>"""
A machine readable multilingual dialog corpus.
"""
from .corpus import Corpus
__version__ = '0.0.2'
__author__ = 'Gunther Cox'
__email__ = 'gunthercx@gmail.com'
__url__ = 'https://github.com/gunthercox/chatterbot-corpus'
|
79d2e089eff2f6bcfd150d3ac6e165bfefa475cb
|
modeltranslation/__init__.py
|
modeltranslation/__init__.py
|
from pathlib import Path
__version__ = (Path(__file__).parent / "VERSION").open().read().strip()
default_app_config = 'modeltranslation.apps.ModeltranslationConfig'
|
from pathlib import Path
from django import VERSION as _django_version
__version__ = (Path(__file__).parent / "VERSION").open().read().strip()
if _django_version < (3, 2):
default_app_config = 'modeltranslation.apps.ModeltranslationConfig'
|
Add django version check for default_app_config
|
fix: Add django version check for default_app_config
|
Python
|
bsd-3-clause
|
deschler/django-modeltranslation,deschler/django-modeltranslation
|
from pathlib import Path
__version__ = (Path(__file__).parent / "VERSION").open().read().strip()
default_app_config = 'modeltranslation.apps.ModeltranslationConfig'
fix: Add django version check for default_app_config
|
from pathlib import Path
from django import VERSION as _django_version
__version__ = (Path(__file__).parent / "VERSION").open().read().strip()
if _django_version < (3, 2):
default_app_config = 'modeltranslation.apps.ModeltranslationConfig'
|
<commit_before>from pathlib import Path
__version__ = (Path(__file__).parent / "VERSION").open().read().strip()
default_app_config = 'modeltranslation.apps.ModeltranslationConfig'
<commit_msg>fix: Add django version check for default_app_config<commit_after>
|
from pathlib import Path
from django import VERSION as _django_version
__version__ = (Path(__file__).parent / "VERSION").open().read().strip()
if _django_version < (3, 2):
default_app_config = 'modeltranslation.apps.ModeltranslationConfig'
|
from pathlib import Path
__version__ = (Path(__file__).parent / "VERSION").open().read().strip()
default_app_config = 'modeltranslation.apps.ModeltranslationConfig'
fix: Add django version check for default_app_configfrom pathlib import Path
from django import VERSION as _django_version
__version__ = (Path(__file__).parent / "VERSION").open().read().strip()
if _django_version < (3, 2):
default_app_config = 'modeltranslation.apps.ModeltranslationConfig'
|
<commit_before>from pathlib import Path
__version__ = (Path(__file__).parent / "VERSION").open().read().strip()
default_app_config = 'modeltranslation.apps.ModeltranslationConfig'
<commit_msg>fix: Add django version check for default_app_config<commit_after>from pathlib import Path
from django import VERSION as _django_version
__version__ = (Path(__file__).parent / "VERSION").open().read().strip()
if _django_version < (3, 2):
default_app_config = 'modeltranslation.apps.ModeltranslationConfig'
|
ac5c03cef0f0b3676b22e66e89f74ec33f69e9c6
|
tests/python/utils.py
|
tests/python/utils.py
|
from pyroute2 import NSPopen
from distutils.spawn import find_executable
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
else:
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
|
from pyroute2 import NSPopen
from distutils.spawn import find_executable
def has_executable(name):
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
return path
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
has_executable(name)
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
|
Add a generic utility to check any binary availability
|
Add a generic utility to check any binary availability
In order to run, some test programs depend on the availability
of binaries in locations that are part of PATH. So, we add a
generic utility to simplify this.
Signed-off-by: Sandipan Das <ae8113f64d9c72812e097938d25a8975da69c074@linux.vnet.ibm.com>
|
Python
|
apache-2.0
|
mcaleavya/bcc,brendangregg/bcc,iovisor/bcc,brendangregg/bcc,tuxology/bcc,mcaleavya/bcc,brendangregg/bcc,iovisor/bcc,iovisor/bcc,tuxology/bcc,tuxology/bcc,iovisor/bcc,brendangregg/bcc,brendangregg/bcc,mcaleavya/bcc,mcaleavya/bcc,iovisor/bcc,mcaleavya/bcc,tuxology/bcc,tuxology/bcc
|
from pyroute2 import NSPopen
from distutils.spawn import find_executable
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
else:
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
Add a generic utility to check any binary availability
In order to run, some test programs depend on the availability
of binaries in locations that are part of PATH. So, we add a
generic utility to simplify this.
Signed-off-by: Sandipan Das <ae8113f64d9c72812e097938d25a8975da69c074@linux.vnet.ibm.com>
|
from pyroute2 import NSPopen
from distutils.spawn import find_executable
def has_executable(name):
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
return path
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
has_executable(name)
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
|
<commit_before>from pyroute2 import NSPopen
from distutils.spawn import find_executable
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
else:
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
<commit_msg>Add a generic utility to check any binary availability
In order to run, some test programs depend on the availability
of binaries in locations that are part of PATH. So, we add a
generic utility to simplify this.
Signed-off-by: Sandipan Das <ae8113f64d9c72812e097938d25a8975da69c074@linux.vnet.ibm.com><commit_after>
|
from pyroute2 import NSPopen
from distutils.spawn import find_executable
def has_executable(name):
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
return path
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
has_executable(name)
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
|
from pyroute2 import NSPopen
from distutils.spawn import find_executable
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
else:
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
Add a generic utility to check any binary availability
In order to run, some test programs depend on the availability
of binaries in locations that are part of PATH. So, we add a
generic utility to simplify this.
Signed-off-by: Sandipan Das <ae8113f64d9c72812e097938d25a8975da69c074@linux.vnet.ibm.com>from pyroute2 import NSPopen
from distutils.spawn import find_executable
def has_executable(name):
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
return path
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
has_executable(name)
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
|
<commit_before>from pyroute2 import NSPopen
from distutils.spawn import find_executable
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
else:
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
<commit_msg>Add a generic utility to check any binary availability
In order to run, some test programs depend on the availability
of binaries in locations that are part of PATH. So, we add a
generic utility to simplify this.
Signed-off-by: Sandipan Das <ae8113f64d9c72812e097938d25a8975da69c074@linux.vnet.ibm.com><commit_after>from pyroute2 import NSPopen
from distutils.spawn import find_executable
def has_executable(name):
path = find_executable(name)
if path is None:
raise Exception(name + ": command not found")
return path
class NSPopenWithCheck(NSPopen):
"""
A wrapper for NSPopen that additionally checks if the program
to be executed is available from the system path or not.
If found, it proceeds with the usual NSPopen() call.
Otherwise, it raises an exception.
"""
def __init__(self, nsname, *argv, **kwarg):
name = list(argv)[0][0]
has_executable(name)
super(NSPopenWithCheck, self).__init__(nsname, *argv, **kwarg)
|
d52c661f65a4618893f676def6f18a226ad52a6c
|
oneflow/settings/snippets/celery_development.py
|
oneflow/settings/snippets/celery_development.py
|
BROKER_URL = 'redis://{0}:6379/1'.format(MAIN_SERVER)
CELERY_RESULT_BACKEND = BROKER_URL
|
BROKER_URL = 'redis://{0}:6379/1'.format(MAIN_SERVER)
CELERY_RESULT_BACKEND = BROKER_URL
# I use these to debug kombu crashes; we get a more informative message.
#CELERY_TASK_SERIALIZER = 'json'
#CELERY_RESULT_SERIALIZER = 'json'
|
Put the JSON serializer in celery development settings, this has helped me a lot.
|
Put the JSON serializer in celery development settings, this has helped me a lot.
|
Python
|
agpl-3.0
|
WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow,WillianPaiva/1flow,1flow/1flow,1flow/1flow,1flow/1flow,WillianPaiva/1flow,WillianPaiva/1flow,1flow/1flow
|
BROKER_URL = 'redis://{0}:6379/1'.format(MAIN_SERVER)
CELERY_RESULT_BACKEND = BROKER_URL
Put the JSON serializer in celery development settings, this has helped me a lot.
|
BROKER_URL = 'redis://{0}:6379/1'.format(MAIN_SERVER)
CELERY_RESULT_BACKEND = BROKER_URL
# I use these to debug kombu crashes; we get a more informative message.
#CELERY_TASK_SERIALIZER = 'json'
#CELERY_RESULT_SERIALIZER = 'json'
|
<commit_before>BROKER_URL = 'redis://{0}:6379/1'.format(MAIN_SERVER)
CELERY_RESULT_BACKEND = BROKER_URL
<commit_msg>Put the JSON serializer in celery development settings, this has helped me a lot.<commit_after>
|
BROKER_URL = 'redis://{0}:6379/1'.format(MAIN_SERVER)
CELERY_RESULT_BACKEND = BROKER_URL
# I use these to debug kombu crashes; we get a more informative message.
#CELERY_TASK_SERIALIZER = 'json'
#CELERY_RESULT_SERIALIZER = 'json'
|
BROKER_URL = 'redis://{0}:6379/1'.format(MAIN_SERVER)
CELERY_RESULT_BACKEND = BROKER_URL
Put the JSON serializer in celery development settings, this has helped me a lot.BROKER_URL = 'redis://{0}:6379/1'.format(MAIN_SERVER)
CELERY_RESULT_BACKEND = BROKER_URL
# I use these to debug kombu crashes; we get a more informative message.
#CELERY_TASK_SERIALIZER = 'json'
#CELERY_RESULT_SERIALIZER = 'json'
|
<commit_before>BROKER_URL = 'redis://{0}:6379/1'.format(MAIN_SERVER)
CELERY_RESULT_BACKEND = BROKER_URL
<commit_msg>Put the JSON serializer in celery development settings, this has helped me a lot.<commit_after>BROKER_URL = 'redis://{0}:6379/1'.format(MAIN_SERVER)
CELERY_RESULT_BACKEND = BROKER_URL
# I use these to debug kombu crashes; we get a more informative message.
#CELERY_TASK_SERIALIZER = 'json'
#CELERY_RESULT_SERIALIZER = 'json'
|
b195d6e7b79b07daf95c36cac2b9f808f5bd8b87
|
appengine-vmruntime/setup.py
|
appengine-vmruntime/setup.py
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import find_packages
from setuptools import setup
setup(
name='appengine-vm-runtime',
version='0.64',
description='Python Managed VMs Runtime',
url='https://github.com/GoogleCloudPlatform/appengine-python-vm-runtime',
author='Google',
license='Apache License 2.0',
include_package_data=True,
packages=find_packages('.'),
install_requires=[
'appengine-compat',
'Werkzeug>=0.10'
]
)
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import find_packages
from setuptools import setup
setup(
name='appengine-vm-runtime',
version='0.65',
description='Python Managed VMs Runtime',
url='https://github.com/GoogleCloudPlatform/appengine-python-vm-runtime',
author='Google',
license='Apache License 2.0',
include_package_data=True,
packages=find_packages('.'),
install_requires=[
'appengine-compat',
'Werkzeug>=0.10'
]
)
|
Increase appengine-vm-runtime version field to 0.65
|
Increase appengine-vm-runtime version field to 0.65
|
Python
|
apache-2.0
|
GoogleCloudPlatform/python-compat-runtime,GoogleCloudPlatform/python-compat-runtime,GoogleCloudPlatform/python-compat-runtime,GoogleCloudPlatform/python-compat-runtime,GoogleCloudPlatform/python-compat-runtime
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import find_packages
from setuptools import setup
setup(
name='appengine-vm-runtime',
version='0.64',
description='Python Managed VMs Runtime',
url='https://github.com/GoogleCloudPlatform/appengine-python-vm-runtime',
author='Google',
license='Apache License 2.0',
include_package_data=True,
packages=find_packages('.'),
install_requires=[
'appengine-compat',
'Werkzeug>=0.10'
]
)
Increase appengine-vm-runtime version field to 0.65
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import find_packages
from setuptools import setup
setup(
name='appengine-vm-runtime',
version='0.65',
description='Python Managed VMs Runtime',
url='https://github.com/GoogleCloudPlatform/appengine-python-vm-runtime',
author='Google',
license='Apache License 2.0',
include_package_data=True,
packages=find_packages('.'),
install_requires=[
'appengine-compat',
'Werkzeug>=0.10'
]
)
|
<commit_before># Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import find_packages
from setuptools import setup
setup(
name='appengine-vm-runtime',
version='0.64',
description='Python Managed VMs Runtime',
url='https://github.com/GoogleCloudPlatform/appengine-python-vm-runtime',
author='Google',
license='Apache License 2.0',
include_package_data=True,
packages=find_packages('.'),
install_requires=[
'appengine-compat',
'Werkzeug>=0.10'
]
)
<commit_msg>Increase appengine-vm-runtime version field to 0.65<commit_after>
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import find_packages
from setuptools import setup
setup(
name='appengine-vm-runtime',
version='0.65',
description='Python Managed VMs Runtime',
url='https://github.com/GoogleCloudPlatform/appengine-python-vm-runtime',
author='Google',
license='Apache License 2.0',
include_package_data=True,
packages=find_packages('.'),
install_requires=[
'appengine-compat',
'Werkzeug>=0.10'
]
)
|
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import find_packages
from setuptools import setup
setup(
name='appengine-vm-runtime',
version='0.64',
description='Python Managed VMs Runtime',
url='https://github.com/GoogleCloudPlatform/appengine-python-vm-runtime',
author='Google',
license='Apache License 2.0',
include_package_data=True,
packages=find_packages('.'),
install_requires=[
'appengine-compat',
'Werkzeug>=0.10'
]
)
Increase appengine-vm-runtime version field to 0.65# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import find_packages
from setuptools import setup
setup(
name='appengine-vm-runtime',
version='0.65',
description='Python Managed VMs Runtime',
url='https://github.com/GoogleCloudPlatform/appengine-python-vm-runtime',
author='Google',
license='Apache License 2.0',
include_package_data=True,
packages=find_packages('.'),
install_requires=[
'appengine-compat',
'Werkzeug>=0.10'
]
)
|
<commit_before># Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import find_packages
from setuptools import setup
setup(
name='appengine-vm-runtime',
version='0.64',
description='Python Managed VMs Runtime',
url='https://github.com/GoogleCloudPlatform/appengine-python-vm-runtime',
author='Google',
license='Apache License 2.0',
include_package_data=True,
packages=find_packages('.'),
install_requires=[
'appengine-compat',
'Werkzeug>=0.10'
]
)
<commit_msg>Increase appengine-vm-runtime version field to 0.65<commit_after># Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from setuptools import find_packages
from setuptools import setup
setup(
name='appengine-vm-runtime',
version='0.65',
description='Python Managed VMs Runtime',
url='https://github.com/GoogleCloudPlatform/appengine-python-vm-runtime',
author='Google',
license='Apache License 2.0',
include_package_data=True,
packages=find_packages('.'),
install_requires=[
'appengine-compat',
'Werkzeug>=0.10'
]
)
|
3024a35626118e5fcf504bde9785992aa7e3eea5
|
apps/members/models.py
|
apps/members/models.py
|
from django.db import models
from bluebottle.bb_accounts.models import BlueBottleBaseUser
from bluebottle.utils.models import Address
from djchoices.choices import DjangoChoices, ChoiceItem
from django.conf import settings
from django.utils.translation import ugettext as _
class Member(BlueBottleBaseUser):
pass
class UserAddress(Address):
class AddressType(DjangoChoices):
primary = ChoiceItem('primary', label=_("Primary"))
secondary = ChoiceItem('secondary', label=_("Secondary"))
address_type = models.CharField(_("address type"),max_length=10, blank=True, choices=AddressType.choices,
default=AddressType.primary)
user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_("user"), related_name="address")
class Meta:
verbose_name = _("user address")
verbose_name_plural = _("user addresses")
#default_serializer = 'members.serializers.UserProfileSerializer'
|
from django.db import models
from bluebottle.bb_accounts.models import BlueBottleBaseUser
from bluebottle.utils.models import Address
from djchoices.choices import DjangoChoices, ChoiceItem
from django.conf import settings
from django.utils.translation import ugettext as _
class Member(BlueBottleBaseUser):
# Create an address if none exists
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
super(Member, self).save(force_insert=False, force_update=False, using=None, update_fields=None)
try:
self.address
except UserAddress.DoesNotExist:
self.address = UserAddress.objects.create(user=self)
self.address.save()
class UserAddress(Address):
class AddressType(DjangoChoices):
primary = ChoiceItem('primary', label=_("Primary"))
secondary = ChoiceItem('secondary', label=_("Secondary"))
address_type = models.CharField(_("address type"),max_length=10, blank=True, choices=AddressType.choices,
default=AddressType.primary)
user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_("user"), related_name="address")
class Meta:
verbose_name = _("user address")
verbose_name_plural = _("user addresses")
#default_serializer = 'members.serializers.UserProfileSerializer'
|
Create an address if none exists for a user
|
Create an address if none exists for a user
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
from django.db import models
from bluebottle.bb_accounts.models import BlueBottleBaseUser
from bluebottle.utils.models import Address
from djchoices.choices import DjangoChoices, ChoiceItem
from django.conf import settings
from django.utils.translation import ugettext as _
class Member(BlueBottleBaseUser):
pass
class UserAddress(Address):
class AddressType(DjangoChoices):
primary = ChoiceItem('primary', label=_("Primary"))
secondary = ChoiceItem('secondary', label=_("Secondary"))
address_type = models.CharField(_("address type"),max_length=10, blank=True, choices=AddressType.choices,
default=AddressType.primary)
user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_("user"), related_name="address")
class Meta:
verbose_name = _("user address")
verbose_name_plural = _("user addresses")
#default_serializer = 'members.serializers.UserProfileSerializer'Create an address if none exists for a user
|
from django.db import models
from bluebottle.bb_accounts.models import BlueBottleBaseUser
from bluebottle.utils.models import Address
from djchoices.choices import DjangoChoices, ChoiceItem
from django.conf import settings
from django.utils.translation import ugettext as _
class Member(BlueBottleBaseUser):
# Create an address if none exists
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
super(Member, self).save(force_insert=False, force_update=False, using=None, update_fields=None)
try:
self.address
except UserAddress.DoesNotExist:
self.address = UserAddress.objects.create(user=self)
self.address.save()
class UserAddress(Address):
class AddressType(DjangoChoices):
primary = ChoiceItem('primary', label=_("Primary"))
secondary = ChoiceItem('secondary', label=_("Secondary"))
address_type = models.CharField(_("address type"),max_length=10, blank=True, choices=AddressType.choices,
default=AddressType.primary)
user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_("user"), related_name="address")
class Meta:
verbose_name = _("user address")
verbose_name_plural = _("user addresses")
#default_serializer = 'members.serializers.UserProfileSerializer'
|
<commit_before>from django.db import models
from bluebottle.bb_accounts.models import BlueBottleBaseUser
from bluebottle.utils.models import Address
from djchoices.choices import DjangoChoices, ChoiceItem
from django.conf import settings
from django.utils.translation import ugettext as _
class Member(BlueBottleBaseUser):
pass
class UserAddress(Address):
class AddressType(DjangoChoices):
primary = ChoiceItem('primary', label=_("Primary"))
secondary = ChoiceItem('secondary', label=_("Secondary"))
address_type = models.CharField(_("address type"),max_length=10, blank=True, choices=AddressType.choices,
default=AddressType.primary)
user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_("user"), related_name="address")
class Meta:
verbose_name = _("user address")
verbose_name_plural = _("user addresses")
#default_serializer = 'members.serializers.UserProfileSerializer'<commit_msg>Create an address if none exists for a user<commit_after>
|
from django.db import models
from bluebottle.bb_accounts.models import BlueBottleBaseUser
from bluebottle.utils.models import Address
from djchoices.choices import DjangoChoices, ChoiceItem
from django.conf import settings
from django.utils.translation import ugettext as _
class Member(BlueBottleBaseUser):
# Create an address if none exists
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
super(Member, self).save(force_insert=False, force_update=False, using=None, update_fields=None)
try:
self.address
except UserAddress.DoesNotExist:
self.address = UserAddress.objects.create(user=self)
self.address.save()
class UserAddress(Address):
class AddressType(DjangoChoices):
primary = ChoiceItem('primary', label=_("Primary"))
secondary = ChoiceItem('secondary', label=_("Secondary"))
address_type = models.CharField(_("address type"),max_length=10, blank=True, choices=AddressType.choices,
default=AddressType.primary)
user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_("user"), related_name="address")
class Meta:
verbose_name = _("user address")
verbose_name_plural = _("user addresses")
#default_serializer = 'members.serializers.UserProfileSerializer'
|
from django.db import models
from bluebottle.bb_accounts.models import BlueBottleBaseUser
from bluebottle.utils.models import Address
from djchoices.choices import DjangoChoices, ChoiceItem
from django.conf import settings
from django.utils.translation import ugettext as _
class Member(BlueBottleBaseUser):
pass
class UserAddress(Address):
class AddressType(DjangoChoices):
primary = ChoiceItem('primary', label=_("Primary"))
secondary = ChoiceItem('secondary', label=_("Secondary"))
address_type = models.CharField(_("address type"),max_length=10, blank=True, choices=AddressType.choices,
default=AddressType.primary)
user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_("user"), related_name="address")
class Meta:
verbose_name = _("user address")
verbose_name_plural = _("user addresses")
#default_serializer = 'members.serializers.UserProfileSerializer'Create an address if none exists for a userfrom django.db import models
from bluebottle.bb_accounts.models import BlueBottleBaseUser
from bluebottle.utils.models import Address
from djchoices.choices import DjangoChoices, ChoiceItem
from django.conf import settings
from django.utils.translation import ugettext as _
class Member(BlueBottleBaseUser):
# Create an address if none exists
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
super(Member, self).save(force_insert=False, force_update=False, using=None, update_fields=None)
try:
self.address
except UserAddress.DoesNotExist:
self.address = UserAddress.objects.create(user=self)
self.address.save()
class UserAddress(Address):
class AddressType(DjangoChoices):
primary = ChoiceItem('primary', label=_("Primary"))
secondary = ChoiceItem('secondary', label=_("Secondary"))
address_type = models.CharField(_("address type"),max_length=10, blank=True, choices=AddressType.choices,
default=AddressType.primary)
user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_("user"), related_name="address")
class Meta:
verbose_name = _("user address")
verbose_name_plural = _("user addresses")
#default_serializer = 'members.serializers.UserProfileSerializer'
|
<commit_before>from django.db import models
from bluebottle.bb_accounts.models import BlueBottleBaseUser
from bluebottle.utils.models import Address
from djchoices.choices import DjangoChoices, ChoiceItem
from django.conf import settings
from django.utils.translation import ugettext as _
class Member(BlueBottleBaseUser):
pass
class UserAddress(Address):
class AddressType(DjangoChoices):
primary = ChoiceItem('primary', label=_("Primary"))
secondary = ChoiceItem('secondary', label=_("Secondary"))
address_type = models.CharField(_("address type"),max_length=10, blank=True, choices=AddressType.choices,
default=AddressType.primary)
user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_("user"), related_name="address")
class Meta:
verbose_name = _("user address")
verbose_name_plural = _("user addresses")
#default_serializer = 'members.serializers.UserProfileSerializer'<commit_msg>Create an address if none exists for a user<commit_after>from django.db import models
from bluebottle.bb_accounts.models import BlueBottleBaseUser
from bluebottle.utils.models import Address
from djchoices.choices import DjangoChoices, ChoiceItem
from django.conf import settings
from django.utils.translation import ugettext as _
class Member(BlueBottleBaseUser):
# Create an address if none exists
def save(self, force_insert=False, force_update=False, using=None, update_fields=None):
super(Member, self).save(force_insert=False, force_update=False, using=None, update_fields=None)
try:
self.address
except UserAddress.DoesNotExist:
self.address = UserAddress.objects.create(user=self)
self.address.save()
class UserAddress(Address):
class AddressType(DjangoChoices):
primary = ChoiceItem('primary', label=_("Primary"))
secondary = ChoiceItem('secondary', label=_("Secondary"))
address_type = models.CharField(_("address type"),max_length=10, blank=True, choices=AddressType.choices,
default=AddressType.primary)
user = models.OneToOneField(settings.AUTH_USER_MODEL, verbose_name=_("user"), related_name="address")
class Meta:
verbose_name = _("user address")
verbose_name_plural = _("user addresses")
#default_serializer = 'members.serializers.UserProfileSerializer'
|
88e7441314fa1f9cc1d23d2d7eac2a10429a2624
|
masters/master.chromium.git/master_source_cfg.py
|
masters/master.chromium.git/master_source_cfg.py
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.chromium_git_poller_bb8 import ChromiumGitPoller
def Update(config, active_master, c):
poller = ChromiumGitPoller(
repourl='http://git.chromium.org/chromium/src.git',
branch='master',
pollinterval=10,
revlinktmpl='http://git.chromium.org/gitweb/?p=chromium/src.git;a=commit;h=%s')
c['change_source'].append(poller)
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.chromium_git_poller_bb8 import ChromiumGitPoller
def Update(config, active_master, c):
poller = ChromiumGitPoller(
repourl='https://chromium.googlesource.com/chromium/src.git',
branch='master',
pollinterval=10,
revlinktmpl='https://chromium.googlesource.com/chromium/src/+/%h')
c['change_source'].append(poller)
|
Switch polling URL to git-on-borg.
|
Switch polling URL to git-on-borg.
TBR=mmoss@chromium.org,cmp@chromium.org
Review URL: https://codereview.chromium.org/12152002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@179999 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.chromium_git_poller_bb8 import ChromiumGitPoller
def Update(config, active_master, c):
poller = ChromiumGitPoller(
repourl='http://git.chromium.org/chromium/src.git',
branch='master',
pollinterval=10,
revlinktmpl='http://git.chromium.org/gitweb/?p=chromium/src.git;a=commit;h=%s')
c['change_source'].append(poller)
Switch polling URL to git-on-borg.
TBR=mmoss@chromium.org,cmp@chromium.org
Review URL: https://codereview.chromium.org/12152002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@179999 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.chromium_git_poller_bb8 import ChromiumGitPoller
def Update(config, active_master, c):
poller = ChromiumGitPoller(
repourl='https://chromium.googlesource.com/chromium/src.git',
branch='master',
pollinterval=10,
revlinktmpl='https://chromium.googlesource.com/chromium/src/+/%h')
c['change_source'].append(poller)
|
<commit_before># Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.chromium_git_poller_bb8 import ChromiumGitPoller
def Update(config, active_master, c):
poller = ChromiumGitPoller(
repourl='http://git.chromium.org/chromium/src.git',
branch='master',
pollinterval=10,
revlinktmpl='http://git.chromium.org/gitweb/?p=chromium/src.git;a=commit;h=%s')
c['change_source'].append(poller)
<commit_msg>Switch polling URL to git-on-borg.
TBR=mmoss@chromium.org,cmp@chromium.org
Review URL: https://codereview.chromium.org/12152002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@179999 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.chromium_git_poller_bb8 import ChromiumGitPoller
def Update(config, active_master, c):
poller = ChromiumGitPoller(
repourl='https://chromium.googlesource.com/chromium/src.git',
branch='master',
pollinterval=10,
revlinktmpl='https://chromium.googlesource.com/chromium/src/+/%h')
c['change_source'].append(poller)
|
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.chromium_git_poller_bb8 import ChromiumGitPoller
def Update(config, active_master, c):
poller = ChromiumGitPoller(
repourl='http://git.chromium.org/chromium/src.git',
branch='master',
pollinterval=10,
revlinktmpl='http://git.chromium.org/gitweb/?p=chromium/src.git;a=commit;h=%s')
c['change_source'].append(poller)
Switch polling URL to git-on-borg.
TBR=mmoss@chromium.org,cmp@chromium.org
Review URL: https://codereview.chromium.org/12152002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@179999 0039d316-1c4b-4281-b951-d872f2087c98# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.chromium_git_poller_bb8 import ChromiumGitPoller
def Update(config, active_master, c):
poller = ChromiumGitPoller(
repourl='https://chromium.googlesource.com/chromium/src.git',
branch='master',
pollinterval=10,
revlinktmpl='https://chromium.googlesource.com/chromium/src/+/%h')
c['change_source'].append(poller)
|
<commit_before># Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.chromium_git_poller_bb8 import ChromiumGitPoller
def Update(config, active_master, c):
poller = ChromiumGitPoller(
repourl='http://git.chromium.org/chromium/src.git',
branch='master',
pollinterval=10,
revlinktmpl='http://git.chromium.org/gitweb/?p=chromium/src.git;a=commit;h=%s')
c['change_source'].append(poller)
<commit_msg>Switch polling URL to git-on-borg.
TBR=mmoss@chromium.org,cmp@chromium.org
Review URL: https://codereview.chromium.org/12152002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@179999 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from master.chromium_git_poller_bb8 import ChromiumGitPoller
def Update(config, active_master, c):
poller = ChromiumGitPoller(
repourl='https://chromium.googlesource.com/chromium/src.git',
branch='master',
pollinterval=10,
revlinktmpl='https://chromium.googlesource.com/chromium/src/+/%h')
c['change_source'].append(poller)
|
cdc43f6f6ee2d040675f10028af6372b0bf42a08
|
msmbuilder/tests/__init__.py
|
msmbuilder/tests/__init__.py
|
import sys
import warnings
from warnings import warn as orig_warn
def my_warn(message, category=None, stacklevel=1):
# taken from warnings module
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
module = globals['__name__']
filename = globals.get('__file__')
m = {
'argspec': 'inspect.getargspec() is deprecated'
}
if module == 'scipy._lib.decorator' and m['argspec'] in message:
return
if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:
return
if module == 'statsmodels.base.wrapper' and m['argspec'] in message:
return
if module == 'nose.util' and m['argspec'] in message:
return
print("Warning: module: ", module)
print("Warning: message: ", message)
return orig_warn(message=message, category=category,
stacklevel=stacklevel + 1)
warnings.warn = my_warn
|
import sys
import warnings
from warnings import warn as orig_warn
def my_warn(message, category=None, stacklevel=1):
# taken from warnings module
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
module = globals['__name__']
filename = globals.get('__file__')
m = {
'argspec': 'inspect.getargspec() is deprecated'
}
if module == 'scipy._lib.decorator' and m['argspec'] in message:
return
if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:
return
if module == 'statsmodels.base.wrapper' and m['argspec'] in message:
return
if module == 'nose.util' and m['argspec'] in message:
return
print("Warning: module: ", module)
print("Warning: message: ", message)
# This explicit check is necessary for python < 3.5 maybe??
if category is None:
category = UserWarning
return orig_warn(message=message, category=category,
stacklevel=stacklevel + 1)
warnings.warn = my_warn
|
Fix for my nefarious `warn` replacement
|
Fix for my nefarious `warn` replacement
|
Python
|
lgpl-2.1
|
dr-nate/msmbuilder,brookehus/msmbuilder,cxhernandez/msmbuilder,msmbuilder/msmbuilder,dr-nate/msmbuilder,rafwiewiora/msmbuilder,Eigenstate/msmbuilder,rafwiewiora/msmbuilder,rafwiewiora/msmbuilder,msultan/msmbuilder,cxhernandez/msmbuilder,dr-nate/msmbuilder,msultan/msmbuilder,msmbuilder/msmbuilder,stephenliu1989/msmbuilder,msultan/msmbuilder,peastman/msmbuilder,brookehus/msmbuilder,peastman/msmbuilder,mpharrigan/mixtape,mpharrigan/mixtape,msmbuilder/msmbuilder,msmbuilder/msmbuilder,Eigenstate/msmbuilder,cxhernandez/msmbuilder,rafwiewiora/msmbuilder,mpharrigan/mixtape,stephenliu1989/msmbuilder,msultan/msmbuilder,peastman/msmbuilder,brookehus/msmbuilder,stephenliu1989/msmbuilder,brookehus/msmbuilder,mpharrigan/mixtape,dr-nate/msmbuilder,cxhernandez/msmbuilder,mpharrigan/mixtape,peastman/msmbuilder,stephenliu1989/msmbuilder,Eigenstate/msmbuilder,msultan/msmbuilder,Eigenstate/msmbuilder,cxhernandez/msmbuilder,brookehus/msmbuilder,peastman/msmbuilder,rafwiewiora/msmbuilder,dr-nate/msmbuilder,msmbuilder/msmbuilder,Eigenstate/msmbuilder
|
import sys
import warnings
from warnings import warn as orig_warn
def my_warn(message, category=None, stacklevel=1):
# taken from warnings module
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
module = globals['__name__']
filename = globals.get('__file__')
m = {
'argspec': 'inspect.getargspec() is deprecated'
}
if module == 'scipy._lib.decorator' and m['argspec'] in message:
return
if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:
return
if module == 'statsmodels.base.wrapper' and m['argspec'] in message:
return
if module == 'nose.util' and m['argspec'] in message:
return
print("Warning: module: ", module)
print("Warning: message: ", message)
return orig_warn(message=message, category=category,
stacklevel=stacklevel + 1)
warnings.warn = my_warn
Fix for my nefarious `warn` replacement
|
import sys
import warnings
from warnings import warn as orig_warn
def my_warn(message, category=None, stacklevel=1):
# taken from warnings module
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
module = globals['__name__']
filename = globals.get('__file__')
m = {
'argspec': 'inspect.getargspec() is deprecated'
}
if module == 'scipy._lib.decorator' and m['argspec'] in message:
return
if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:
return
if module == 'statsmodels.base.wrapper' and m['argspec'] in message:
return
if module == 'nose.util' and m['argspec'] in message:
return
print("Warning: module: ", module)
print("Warning: message: ", message)
# This explicit check is necessary for python < 3.5 maybe??
if category is None:
category = UserWarning
return orig_warn(message=message, category=category,
stacklevel=stacklevel + 1)
warnings.warn = my_warn
|
<commit_before>import sys
import warnings
from warnings import warn as orig_warn
def my_warn(message, category=None, stacklevel=1):
# taken from warnings module
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
module = globals['__name__']
filename = globals.get('__file__')
m = {
'argspec': 'inspect.getargspec() is deprecated'
}
if module == 'scipy._lib.decorator' and m['argspec'] in message:
return
if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:
return
if module == 'statsmodels.base.wrapper' and m['argspec'] in message:
return
if module == 'nose.util' and m['argspec'] in message:
return
print("Warning: module: ", module)
print("Warning: message: ", message)
return orig_warn(message=message, category=category,
stacklevel=stacklevel + 1)
warnings.warn = my_warn
<commit_msg>Fix for my nefarious `warn` replacement<commit_after>
|
import sys
import warnings
from warnings import warn as orig_warn
def my_warn(message, category=None, stacklevel=1):
# taken from warnings module
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
module = globals['__name__']
filename = globals.get('__file__')
m = {
'argspec': 'inspect.getargspec() is deprecated'
}
if module == 'scipy._lib.decorator' and m['argspec'] in message:
return
if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:
return
if module == 'statsmodels.base.wrapper' and m['argspec'] in message:
return
if module == 'nose.util' and m['argspec'] in message:
return
print("Warning: module: ", module)
print("Warning: message: ", message)
# This explicit check is necessary for python < 3.5 maybe??
if category is None:
category = UserWarning
return orig_warn(message=message, category=category,
stacklevel=stacklevel + 1)
warnings.warn = my_warn
|
import sys
import warnings
from warnings import warn as orig_warn
def my_warn(message, category=None, stacklevel=1):
# taken from warnings module
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
module = globals['__name__']
filename = globals.get('__file__')
m = {
'argspec': 'inspect.getargspec() is deprecated'
}
if module == 'scipy._lib.decorator' and m['argspec'] in message:
return
if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:
return
if module == 'statsmodels.base.wrapper' and m['argspec'] in message:
return
if module == 'nose.util' and m['argspec'] in message:
return
print("Warning: module: ", module)
print("Warning: message: ", message)
return orig_warn(message=message, category=category,
stacklevel=stacklevel + 1)
warnings.warn = my_warn
Fix for my nefarious `warn` replacementimport sys
import warnings
from warnings import warn as orig_warn
def my_warn(message, category=None, stacklevel=1):
# taken from warnings module
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
module = globals['__name__']
filename = globals.get('__file__')
m = {
'argspec': 'inspect.getargspec() is deprecated'
}
if module == 'scipy._lib.decorator' and m['argspec'] in message:
return
if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:
return
if module == 'statsmodels.base.wrapper' and m['argspec'] in message:
return
if module == 'nose.util' and m['argspec'] in message:
return
print("Warning: module: ", module)
print("Warning: message: ", message)
# This explicit check is necessary for python < 3.5 maybe??
if category is None:
category = UserWarning
return orig_warn(message=message, category=category,
stacklevel=stacklevel + 1)
warnings.warn = my_warn
|
<commit_before>import sys
import warnings
from warnings import warn as orig_warn
def my_warn(message, category=None, stacklevel=1):
# taken from warnings module
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
module = globals['__name__']
filename = globals.get('__file__')
m = {
'argspec': 'inspect.getargspec() is deprecated'
}
if module == 'scipy._lib.decorator' and m['argspec'] in message:
return
if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:
return
if module == 'statsmodels.base.wrapper' and m['argspec'] in message:
return
if module == 'nose.util' and m['argspec'] in message:
return
print("Warning: module: ", module)
print("Warning: message: ", message)
return orig_warn(message=message, category=category,
stacklevel=stacklevel + 1)
warnings.warn = my_warn
<commit_msg>Fix for my nefarious `warn` replacement<commit_after>import sys
import warnings
from warnings import warn as orig_warn
def my_warn(message, category=None, stacklevel=1):
# taken from warnings module
# Get context information
try:
caller = sys._getframe(stacklevel)
except ValueError:
globals = sys.__dict__
lineno = 1
else:
globals = caller.f_globals
lineno = caller.f_lineno
module = globals['__name__']
filename = globals.get('__file__')
m = {
'argspec': 'inspect.getargspec() is deprecated'
}
if module == 'scipy._lib.decorator' and m['argspec'] in message:
return
if module == 'mdtraj.formats.hdf5' and m['argspec'] in message:
return
if module == 'statsmodels.base.wrapper' and m['argspec'] in message:
return
if module == 'nose.util' and m['argspec'] in message:
return
print("Warning: module: ", module)
print("Warning: message: ", message)
# This explicit check is necessary for python < 3.5 maybe??
if category is None:
category = UserWarning
return orig_warn(message=message, category=category,
stacklevel=stacklevel + 1)
warnings.warn = my_warn
|
44345a82380541e14c1fe099e6ab4ee7dfcd243a
|
massa/domain.py
|
massa/domain.py
|
# -*- coding: utf-8 -*-
from sqlalchemy import (
Column,
Date,
Integer,
MetaData,
Numeric,
String,
Table,
)
def define_tables(metadata):
Table('measurement', metadata,
Column('id', Integer, primary_key=True),
Column('weight', Numeric(4, 1), nullable=False),
Column('code', String(25), nullable=False),
Column('note', String(140), nullable=True),
Column('date_measured', Date(), nullable=False),
)
class Db(object):
def __init__(self, engine):
self._meta = MetaData(engine)
define_tables(self._meta)
def make_tables(self):
self._meta.create_all()
def drop_tables(self):
self._meta.drop_all()
@property
def measurement(self):
return self._meta.tables['measurement']
class MeasurementService(object):
def __init__(self, table):
self._table = table
|
# -*- coding: utf-8 -*-
from sqlalchemy import (
Column,
Date,
Integer,
MetaData,
Numeric,
String,
Table,
)
def define_tables(metadata):
Table('measurement', metadata,
Column('id', Integer, primary_key=True),
Column('weight', Numeric(4, 1), nullable=False),
Column('code', String(25), nullable=False),
Column('note', String(140), nullable=True),
Column('date_measured', Date(), nullable=False),
)
class Db(object):
def __init__(self, engine):
self._meta = MetaData(engine)
define_tables(self._meta)
def make_tables(self):
self._meta.create_all()
def drop_tables(self):
self._meta.drop_all()
@property
def measurement(self):
return self._meta.tables['measurement']
class MeasurementService(object):
def __init__(self, table):
self._table = table
def create(self, **kwargs):
i = self._table.insert()
i.execute(**kwargs)
|
Add a method to create a measurement.
|
Add a method to create a measurement.
|
Python
|
mit
|
jaapverloop/massa
|
# -*- coding: utf-8 -*-
from sqlalchemy import (
Column,
Date,
Integer,
MetaData,
Numeric,
String,
Table,
)
def define_tables(metadata):
Table('measurement', metadata,
Column('id', Integer, primary_key=True),
Column('weight', Numeric(4, 1), nullable=False),
Column('code', String(25), nullable=False),
Column('note', String(140), nullable=True),
Column('date_measured', Date(), nullable=False),
)
class Db(object):
def __init__(self, engine):
self._meta = MetaData(engine)
define_tables(self._meta)
def make_tables(self):
self._meta.create_all()
def drop_tables(self):
self._meta.drop_all()
@property
def measurement(self):
return self._meta.tables['measurement']
class MeasurementService(object):
def __init__(self, table):
self._table = table
Add a method to create a measurement.
|
# -*- coding: utf-8 -*-
from sqlalchemy import (
Column,
Date,
Integer,
MetaData,
Numeric,
String,
Table,
)
def define_tables(metadata):
Table('measurement', metadata,
Column('id', Integer, primary_key=True),
Column('weight', Numeric(4, 1), nullable=False),
Column('code', String(25), nullable=False),
Column('note', String(140), nullable=True),
Column('date_measured', Date(), nullable=False),
)
class Db(object):
def __init__(self, engine):
self._meta = MetaData(engine)
define_tables(self._meta)
def make_tables(self):
self._meta.create_all()
def drop_tables(self):
self._meta.drop_all()
@property
def measurement(self):
return self._meta.tables['measurement']
class MeasurementService(object):
def __init__(self, table):
self._table = table
def create(self, **kwargs):
i = self._table.insert()
i.execute(**kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from sqlalchemy import (
Column,
Date,
Integer,
MetaData,
Numeric,
String,
Table,
)
def define_tables(metadata):
Table('measurement', metadata,
Column('id', Integer, primary_key=True),
Column('weight', Numeric(4, 1), nullable=False),
Column('code', String(25), nullable=False),
Column('note', String(140), nullable=True),
Column('date_measured', Date(), nullable=False),
)
class Db(object):
def __init__(self, engine):
self._meta = MetaData(engine)
define_tables(self._meta)
def make_tables(self):
self._meta.create_all()
def drop_tables(self):
self._meta.drop_all()
@property
def measurement(self):
return self._meta.tables['measurement']
class MeasurementService(object):
def __init__(self, table):
self._table = table
<commit_msg>Add a method to create a measurement.<commit_after>
|
# -*- coding: utf-8 -*-
from sqlalchemy import (
Column,
Date,
Integer,
MetaData,
Numeric,
String,
Table,
)
def define_tables(metadata):
Table('measurement', metadata,
Column('id', Integer, primary_key=True),
Column('weight', Numeric(4, 1), nullable=False),
Column('code', String(25), nullable=False),
Column('note', String(140), nullable=True),
Column('date_measured', Date(), nullable=False),
)
class Db(object):
def __init__(self, engine):
self._meta = MetaData(engine)
define_tables(self._meta)
def make_tables(self):
self._meta.create_all()
def drop_tables(self):
self._meta.drop_all()
@property
def measurement(self):
return self._meta.tables['measurement']
class MeasurementService(object):
def __init__(self, table):
self._table = table
def create(self, **kwargs):
i = self._table.insert()
i.execute(**kwargs)
|
# -*- coding: utf-8 -*-
from sqlalchemy import (
Column,
Date,
Integer,
MetaData,
Numeric,
String,
Table,
)
def define_tables(metadata):
Table('measurement', metadata,
Column('id', Integer, primary_key=True),
Column('weight', Numeric(4, 1), nullable=False),
Column('code', String(25), nullable=False),
Column('note', String(140), nullable=True),
Column('date_measured', Date(), nullable=False),
)
class Db(object):
def __init__(self, engine):
self._meta = MetaData(engine)
define_tables(self._meta)
def make_tables(self):
self._meta.create_all()
def drop_tables(self):
self._meta.drop_all()
@property
def measurement(self):
return self._meta.tables['measurement']
class MeasurementService(object):
def __init__(self, table):
self._table = table
Add a method to create a measurement.# -*- coding: utf-8 -*-
from sqlalchemy import (
Column,
Date,
Integer,
MetaData,
Numeric,
String,
Table,
)
def define_tables(metadata):
Table('measurement', metadata,
Column('id', Integer, primary_key=True),
Column('weight', Numeric(4, 1), nullable=False),
Column('code', String(25), nullable=False),
Column('note', String(140), nullable=True),
Column('date_measured', Date(), nullable=False),
)
class Db(object):
def __init__(self, engine):
self._meta = MetaData(engine)
define_tables(self._meta)
def make_tables(self):
self._meta.create_all()
def drop_tables(self):
self._meta.drop_all()
@property
def measurement(self):
return self._meta.tables['measurement']
class MeasurementService(object):
def __init__(self, table):
self._table = table
def create(self, **kwargs):
i = self._table.insert()
i.execute(**kwargs)
|
<commit_before># -*- coding: utf-8 -*-
from sqlalchemy import (
Column,
Date,
Integer,
MetaData,
Numeric,
String,
Table,
)
def define_tables(metadata):
Table('measurement', metadata,
Column('id', Integer, primary_key=True),
Column('weight', Numeric(4, 1), nullable=False),
Column('code', String(25), nullable=False),
Column('note', String(140), nullable=True),
Column('date_measured', Date(), nullable=False),
)
class Db(object):
def __init__(self, engine):
self._meta = MetaData(engine)
define_tables(self._meta)
def make_tables(self):
self._meta.create_all()
def drop_tables(self):
self._meta.drop_all()
@property
def measurement(self):
return self._meta.tables['measurement']
class MeasurementService(object):
def __init__(self, table):
self._table = table
<commit_msg>Add a method to create a measurement.<commit_after># -*- coding: utf-8 -*-
from sqlalchemy import (
Column,
Date,
Integer,
MetaData,
Numeric,
String,
Table,
)
def define_tables(metadata):
Table('measurement', metadata,
Column('id', Integer, primary_key=True),
Column('weight', Numeric(4, 1), nullable=False),
Column('code', String(25), nullable=False),
Column('note', String(140), nullable=True),
Column('date_measured', Date(), nullable=False),
)
class Db(object):
def __init__(self, engine):
self._meta = MetaData(engine)
define_tables(self._meta)
def make_tables(self):
self._meta.create_all()
def drop_tables(self):
self._meta.drop_all()
@property
def measurement(self):
return self._meta.tables['measurement']
class MeasurementService(object):
def __init__(self, table):
self._table = table
def create(self, **kwargs):
i = self._table.insert()
i.execute(**kwargs)
|
4f497d86f7fedfb19ec910d5f0978f72d260b935
|
begotemp/views/geo_zone.py
|
begotemp/views/geo_zone.py
|
# -*- coding: utf-8 -*-
""" Tools for geographical zones management."""
import logging
from pyramid.view import view_config
from webhelpers import paginate
from anuket.models import DBSession
from begotemp.models.zone import Zone
log = logging.getLogger(__name__)
def includeme(config):
config.add_route('geo.zone_list', '/geo/zone')
@view_config(route_name='geo.zone_list', permission='admin',
renderer='/geo/zone/zone_list.mako')
def zone_list_view(request):
stats=None
# construct the query
zones = DBSession.query(Zone)
zones = zones.order_by(Zone.zone_number)
# paginate results
page_url = paginate.PageURL_WebOb(request)
zones = paginate.Page(zones,
page=int(request.params.get("page", 1)),
items_per_page=20,
url=page_url)
return dict(zones=zones, stats=stats)
|
# -*- coding: utf-8 -*-
""" Tools for geographical zones management."""
import logging
from pyramid.view import view_config
from webhelpers import paginate
from anuket.models import DBSession
from begotemp.models.zone import Zone
log = logging.getLogger(__name__)
def includeme(config):
config.add_route('geo.zone_list', '/geo/zone')
@view_config(route_name='geo.zone_list', permission='admin',
renderer='/geo/zone/zone_list.mako')
def zone_list_view(request):
_ = request.translate
stats=None
# construct the query
zones = DBSession.query(Zone)
zones = zones.order_by(Zone.zone_number)
# add a flash message for empty results
if zones.count() == 0:
request.session.flash(_(u"There is no results!"), 'error')
# paginate results
page_url = paginate.PageURL_WebOb(request)
zones = paginate.Page(zones,
page=int(request.params.get("page", 1)),
items_per_page=20,
url=page_url)
return dict(zones=zones, stats=stats)
|
Add a flash message for empty zone list
|
Add a flash message for empty zone list
|
Python
|
mit
|
miniwark/begotemp
|
# -*- coding: utf-8 -*-
""" Tools for geographical zones management."""
import logging
from pyramid.view import view_config
from webhelpers import paginate
from anuket.models import DBSession
from begotemp.models.zone import Zone
log = logging.getLogger(__name__)
def includeme(config):
config.add_route('geo.zone_list', '/geo/zone')
@view_config(route_name='geo.zone_list', permission='admin',
renderer='/geo/zone/zone_list.mako')
def zone_list_view(request):
stats=None
# construct the query
zones = DBSession.query(Zone)
zones = zones.order_by(Zone.zone_number)
# paginate results
page_url = paginate.PageURL_WebOb(request)
zones = paginate.Page(zones,
page=int(request.params.get("page", 1)),
items_per_page=20,
url=page_url)
return dict(zones=zones, stats=stats)Add a flash message for empty zone list
|
# -*- coding: utf-8 -*-
""" Tools for geographical zones management."""
import logging
from pyramid.view import view_config
from webhelpers import paginate
from anuket.models import DBSession
from begotemp.models.zone import Zone
log = logging.getLogger(__name__)
def includeme(config):
config.add_route('geo.zone_list', '/geo/zone')
@view_config(route_name='geo.zone_list', permission='admin',
renderer='/geo/zone/zone_list.mako')
def zone_list_view(request):
_ = request.translate
stats=None
# construct the query
zones = DBSession.query(Zone)
zones = zones.order_by(Zone.zone_number)
# add a flash message for empty results
if zones.count() == 0:
request.session.flash(_(u"There is no results!"), 'error')
# paginate results
page_url = paginate.PageURL_WebOb(request)
zones = paginate.Page(zones,
page=int(request.params.get("page", 1)),
items_per_page=20,
url=page_url)
return dict(zones=zones, stats=stats)
|
<commit_before># -*- coding: utf-8 -*-
""" Tools for geographical zones management."""
import logging
from pyramid.view import view_config
from webhelpers import paginate
from anuket.models import DBSession
from begotemp.models.zone import Zone
log = logging.getLogger(__name__)
def includeme(config):
config.add_route('geo.zone_list', '/geo/zone')
@view_config(route_name='geo.zone_list', permission='admin',
renderer='/geo/zone/zone_list.mako')
def zone_list_view(request):
stats=None
# construct the query
zones = DBSession.query(Zone)
zones = zones.order_by(Zone.zone_number)
# paginate results
page_url = paginate.PageURL_WebOb(request)
zones = paginate.Page(zones,
page=int(request.params.get("page", 1)),
items_per_page=20,
url=page_url)
return dict(zones=zones, stats=stats)<commit_msg>Add a flash message for empty zone list<commit_after>
|
# -*- coding: utf-8 -*-
""" Tools for geographical zones management."""
import logging
from pyramid.view import view_config
from webhelpers import paginate
from anuket.models import DBSession
from begotemp.models.zone import Zone
log = logging.getLogger(__name__)
def includeme(config):
config.add_route('geo.zone_list', '/geo/zone')
@view_config(route_name='geo.zone_list', permission='admin',
renderer='/geo/zone/zone_list.mako')
def zone_list_view(request):
_ = request.translate
stats=None
# construct the query
zones = DBSession.query(Zone)
zones = zones.order_by(Zone.zone_number)
# add a flash message for empty results
if zones.count() == 0:
request.session.flash(_(u"There is no results!"), 'error')
# paginate results
page_url = paginate.PageURL_WebOb(request)
zones = paginate.Page(zones,
page=int(request.params.get("page", 1)),
items_per_page=20,
url=page_url)
return dict(zones=zones, stats=stats)
|
# -*- coding: utf-8 -*-
""" Tools for geographical zones management."""
import logging
from pyramid.view import view_config
from webhelpers import paginate
from anuket.models import DBSession
from begotemp.models.zone import Zone
log = logging.getLogger(__name__)
def includeme(config):
config.add_route('geo.zone_list', '/geo/zone')
@view_config(route_name='geo.zone_list', permission='admin',
renderer='/geo/zone/zone_list.mako')
def zone_list_view(request):
stats=None
# construct the query
zones = DBSession.query(Zone)
zones = zones.order_by(Zone.zone_number)
# paginate results
page_url = paginate.PageURL_WebOb(request)
zones = paginate.Page(zones,
page=int(request.params.get("page", 1)),
items_per_page=20,
url=page_url)
return dict(zones=zones, stats=stats)Add a flash message for empty zone list# -*- coding: utf-8 -*-
""" Tools for geographical zones management."""
import logging
from pyramid.view import view_config
from webhelpers import paginate
from anuket.models import DBSession
from begotemp.models.zone import Zone
log = logging.getLogger(__name__)
def includeme(config):
config.add_route('geo.zone_list', '/geo/zone')
@view_config(route_name='geo.zone_list', permission='admin',
renderer='/geo/zone/zone_list.mako')
def zone_list_view(request):
_ = request.translate
stats=None
# construct the query
zones = DBSession.query(Zone)
zones = zones.order_by(Zone.zone_number)
# add a flash message for empty results
if zones.count() == 0:
request.session.flash(_(u"There is no results!"), 'error')
# paginate results
page_url = paginate.PageURL_WebOb(request)
zones = paginate.Page(zones,
page=int(request.params.get("page", 1)),
items_per_page=20,
url=page_url)
return dict(zones=zones, stats=stats)
|
<commit_before># -*- coding: utf-8 -*-
""" Tools for geographical zones management."""
import logging
from pyramid.view import view_config
from webhelpers import paginate
from anuket.models import DBSession
from begotemp.models.zone import Zone
log = logging.getLogger(__name__)
def includeme(config):
config.add_route('geo.zone_list', '/geo/zone')
@view_config(route_name='geo.zone_list', permission='admin',
renderer='/geo/zone/zone_list.mako')
def zone_list_view(request):
stats=None
# construct the query
zones = DBSession.query(Zone)
zones = zones.order_by(Zone.zone_number)
# paginate results
page_url = paginate.PageURL_WebOb(request)
zones = paginate.Page(zones,
page=int(request.params.get("page", 1)),
items_per_page=20,
url=page_url)
return dict(zones=zones, stats=stats)<commit_msg>Add a flash message for empty zone list<commit_after># -*- coding: utf-8 -*-
""" Tools for geographical zones management."""
import logging
from pyramid.view import view_config
from webhelpers import paginate
from anuket.models import DBSession
from begotemp.models.zone import Zone
log = logging.getLogger(__name__)
def includeme(config):
config.add_route('geo.zone_list', '/geo/zone')
@view_config(route_name='geo.zone_list', permission='admin',
renderer='/geo/zone/zone_list.mako')
def zone_list_view(request):
_ = request.translate
stats=None
# construct the query
zones = DBSession.query(Zone)
zones = zones.order_by(Zone.zone_number)
# add a flash message for empty results
if zones.count() == 0:
request.session.flash(_(u"There is no results!"), 'error')
# paginate results
page_url = paginate.PageURL_WebOb(request)
zones = paginate.Page(zones,
page=int(request.params.get("page", 1)),
items_per_page=20,
url=page_url)
return dict(zones=zones, stats=stats)
|
4ae9e3f85af82bd1b7d06858757403d94b775f8a
|
stayput/cli.py
|
stayput/cli.py
|
import os
import sys
from importlib import machinery as imp
from stayput import Site
def main():
cwd = os.path.abspath(os.getcwd())
site = Site(root_path=cwd)
site.scan()
# do terrible things
try:
loader = imp.SourceFileLoader('stayput', os.path.join(cwd, 'stayput.py'))
config = loader.load_module()
config.configure(site)
except Exception as e:
print(e)
sys.exit(1)
# TODO clean up this mess and move compilation steps elsewhere
for key, item in site.items.items():
route = site.router(item)
baseroute = os.path.dirname(route)
content = site.templater.template(item)
os.makedirs(os.path.join(site.root_path, 'output', baseroute), exist_ok=True)
with open(os.path.join(site.root_path, 'output', route), 'w') as f:
f.write(content)
print("Compiled %s." % key)
|
import os
import sys
from importlib import machinery as imp
from stayput import Site
def main():
cwd = os.path.abspath(os.getcwd())
# No configuration, no site to build.
if not os.path.exists(os.path.join(cwd, 'stayput.py')):
print("Error: stayput.py not found.")
sys.exit(1)
# Create site object and scan for items
site = Site(root_path=cwd)
site.scan()
# do terrible things
try:
loader = imp.SourceFileLoader('stayput', os.path.join(cwd, 'stayput.py'))
config = loader.load_module()
config.configure(site)
except Exception as e:
print(e)
sys.exit(1)
# TODO clean up this mess and move compilation steps elsewhere
for key, item in site.items.items():
route = site.router(item)
baseroute = os.path.dirname(route)
content = site.templater.template(item)
os.makedirs(os.path.join(site.root_path, 'output', baseroute), exist_ok=True)
with open(os.path.join(site.root_path, 'output', route), 'w') as f:
f.write(content)
print("Compiled %s." % key)
|
Check for site config file
|
Check for site config file
|
Python
|
mit
|
veeti/stayput
|
import os
import sys
from importlib import machinery as imp
from stayput import Site
def main():
cwd = os.path.abspath(os.getcwd())
site = Site(root_path=cwd)
site.scan()
# do terrible things
try:
loader = imp.SourceFileLoader('stayput', os.path.join(cwd, 'stayput.py'))
config = loader.load_module()
config.configure(site)
except Exception as e:
print(e)
sys.exit(1)
# TODO clean up this mess and move compilation steps elsewhere
for key, item in site.items.items():
route = site.router(item)
baseroute = os.path.dirname(route)
content = site.templater.template(item)
os.makedirs(os.path.join(site.root_path, 'output', baseroute), exist_ok=True)
with open(os.path.join(site.root_path, 'output', route), 'w') as f:
f.write(content)
print("Compiled %s." % key)
Check for site config file
|
import os
import sys
from importlib import machinery as imp
from stayput import Site
def main():
cwd = os.path.abspath(os.getcwd())
# No configuration, no site to build.
if not os.path.exists(os.path.join(cwd, 'stayput.py')):
print("Error: stayput.py not found.")
sys.exit(1)
# Create site object and scan for items
site = Site(root_path=cwd)
site.scan()
# do terrible things
try:
loader = imp.SourceFileLoader('stayput', os.path.join(cwd, 'stayput.py'))
config = loader.load_module()
config.configure(site)
except Exception as e:
print(e)
sys.exit(1)
# TODO clean up this mess and move compilation steps elsewhere
for key, item in site.items.items():
route = site.router(item)
baseroute = os.path.dirname(route)
content = site.templater.template(item)
os.makedirs(os.path.join(site.root_path, 'output', baseroute), exist_ok=True)
with open(os.path.join(site.root_path, 'output', route), 'w') as f:
f.write(content)
print("Compiled %s." % key)
|
<commit_before>import os
import sys
from importlib import machinery as imp
from stayput import Site
def main():
cwd = os.path.abspath(os.getcwd())
site = Site(root_path=cwd)
site.scan()
# do terrible things
try:
loader = imp.SourceFileLoader('stayput', os.path.join(cwd, 'stayput.py'))
config = loader.load_module()
config.configure(site)
except Exception as e:
print(e)
sys.exit(1)
# TODO clean up this mess and move compilation steps elsewhere
for key, item in site.items.items():
route = site.router(item)
baseroute = os.path.dirname(route)
content = site.templater.template(item)
os.makedirs(os.path.join(site.root_path, 'output', baseroute), exist_ok=True)
with open(os.path.join(site.root_path, 'output', route), 'w') as f:
f.write(content)
print("Compiled %s." % key)
<commit_msg>Check for site config file<commit_after>
|
import os
import sys
from importlib import machinery as imp
from stayput import Site
def main():
cwd = os.path.abspath(os.getcwd())
# No configuration, no site to build.
if not os.path.exists(os.path.join(cwd, 'stayput.py')):
print("Error: stayput.py not found.")
sys.exit(1)
# Create site object and scan for items
site = Site(root_path=cwd)
site.scan()
# do terrible things
try:
loader = imp.SourceFileLoader('stayput', os.path.join(cwd, 'stayput.py'))
config = loader.load_module()
config.configure(site)
except Exception as e:
print(e)
sys.exit(1)
# TODO clean up this mess and move compilation steps elsewhere
for key, item in site.items.items():
route = site.router(item)
baseroute = os.path.dirname(route)
content = site.templater.template(item)
os.makedirs(os.path.join(site.root_path, 'output', baseroute), exist_ok=True)
with open(os.path.join(site.root_path, 'output', route), 'w') as f:
f.write(content)
print("Compiled %s." % key)
|
import os
import sys
from importlib import machinery as imp
from stayput import Site
def main():
cwd = os.path.abspath(os.getcwd())
site = Site(root_path=cwd)
site.scan()
# do terrible things
try:
loader = imp.SourceFileLoader('stayput', os.path.join(cwd, 'stayput.py'))
config = loader.load_module()
config.configure(site)
except Exception as e:
print(e)
sys.exit(1)
# TODO clean up this mess and move compilation steps elsewhere
for key, item in site.items.items():
route = site.router(item)
baseroute = os.path.dirname(route)
content = site.templater.template(item)
os.makedirs(os.path.join(site.root_path, 'output', baseroute), exist_ok=True)
with open(os.path.join(site.root_path, 'output', route), 'w') as f:
f.write(content)
print("Compiled %s." % key)
Check for site config fileimport os
import sys
from importlib import machinery as imp
from stayput import Site
def main():
cwd = os.path.abspath(os.getcwd())
# No configuration, no site to build.
if not os.path.exists(os.path.join(cwd, 'stayput.py')):
print("Error: stayput.py not found.")
sys.exit(1)
# Create site object and scan for items
site = Site(root_path=cwd)
site.scan()
# do terrible things
try:
loader = imp.SourceFileLoader('stayput', os.path.join(cwd, 'stayput.py'))
config = loader.load_module()
config.configure(site)
except Exception as e:
print(e)
sys.exit(1)
# TODO clean up this mess and move compilation steps elsewhere
for key, item in site.items.items():
route = site.router(item)
baseroute = os.path.dirname(route)
content = site.templater.template(item)
os.makedirs(os.path.join(site.root_path, 'output', baseroute), exist_ok=True)
with open(os.path.join(site.root_path, 'output', route), 'w') as f:
f.write(content)
print("Compiled %s." % key)
|
<commit_before>import os
import sys
from importlib import machinery as imp
from stayput import Site
def main():
cwd = os.path.abspath(os.getcwd())
site = Site(root_path=cwd)
site.scan()
# do terrible things
try:
loader = imp.SourceFileLoader('stayput', os.path.join(cwd, 'stayput.py'))
config = loader.load_module()
config.configure(site)
except Exception as e:
print(e)
sys.exit(1)
# TODO clean up this mess and move compilation steps elsewhere
for key, item in site.items.items():
route = site.router(item)
baseroute = os.path.dirname(route)
content = site.templater.template(item)
os.makedirs(os.path.join(site.root_path, 'output', baseroute), exist_ok=True)
with open(os.path.join(site.root_path, 'output', route), 'w') as f:
f.write(content)
print("Compiled %s." % key)
<commit_msg>Check for site config file<commit_after>import os
import sys
from importlib import machinery as imp
from stayput import Site
def main():
cwd = os.path.abspath(os.getcwd())
# No configuration, no site to build.
if not os.path.exists(os.path.join(cwd, 'stayput.py')):
print("Error: stayput.py not found.")
sys.exit(1)
# Create site object and scan for items
site = Site(root_path=cwd)
site.scan()
# do terrible things
try:
loader = imp.SourceFileLoader('stayput', os.path.join(cwd, 'stayput.py'))
config = loader.load_module()
config.configure(site)
except Exception as e:
print(e)
sys.exit(1)
# TODO clean up this mess and move compilation steps elsewhere
for key, item in site.items.items():
route = site.router(item)
baseroute = os.path.dirname(route)
content = site.templater.template(item)
os.makedirs(os.path.join(site.root_path, 'output', baseroute), exist_ok=True)
with open(os.path.join(site.root_path, 'output', route), 'w') as f:
f.write(content)
print("Compiled %s." % key)
|
42e5db7e254ce46e562993a8abded7e8e3c7102b
|
contrib/migrateticketmodel.py
|
contrib/migrateticketmodel.py
|
#!/usr/bin/env python
#
# This script completely migrates a <= 0.8.x Trac environment to use the new
# default ticket model introduced in Trac 0.9.
#
# In particular, this means that the severity field is removed (or rather
# disabled by removing all possible values), and the priority values are
# changed to the more meaningful new defaults.
#
# Make sure to make a backup of the Trac environment before running this!
import sys
from trac.env import open_environment
from trac.ticket.model import Priority, Severity
priority_mapping = {
'highest': 'blocker',
'high': 'critical',
'normal': 'major',
'low': 'minor',
'lowest': 'trivial'
}
def main():
if len(sys.argv) < 2:
print >> sys.stderr, 'usage: %s /path/to/projenv' \
% os.path.basename(sys.argv[0])
sys.exit(2)
env = open_environment(sys.argv[1])
db = env.get_db_cnx()
for oldprio, newprio in priority_mapping.items():
priority = Priority(env, oldprio, db)
priority.name = newprio
priority.update(db)
for severity in list(Severity.select(env, db)):
severity.delete(db)
db.commit()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
#
# This script completely migrates a <= 0.8.x Trac environment to use the new
# default ticket model introduced in Trac 0.9.
#
# In particular, this means that the severity field is removed (or rather
# disabled by removing all possible values), and the priority values are
# changed to the more meaningful new defaults.
#
# Make sure to make a backup of the Trac environment before running this!
import os
import sys
from trac.env import open_environment
from trac.ticket.model import Priority, Severity
priority_mapping = {
'highest': 'blocker',
'high': 'critical',
'normal': 'major',
'low': 'minor',
'lowest': 'trivial'
}
def main():
if len(sys.argv) < 2:
print >> sys.stderr, 'usage: %s /path/to/projenv' \
% os.path.basename(sys.argv[0])
sys.exit(2)
env = open_environment(sys.argv[1])
db = env.get_db_cnx()
for oldprio, newprio in priority_mapping.items():
priority = Priority(env, oldprio, db)
priority.name = newprio
priority.update(db)
for severity in list(Severity.select(env, db)):
severity.delete(db)
db.commit()
if __name__ == '__main__':
main()
|
Fix missing import in contrib script added in [2630].
|
Fix missing import in contrib script added in [2630].
git-svn-id: f68c6b3b1dcd5d00a2560c384475aaef3bc99487@2631 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
Python
|
bsd-3-clause
|
dokipen/trac,moreati/trac-gitsvn,exocad/exotrac,dafrito/trac-mirror,moreati/trac-gitsvn,dafrito/trac-mirror,exocad/exotrac,dokipen/trac,moreati/trac-gitsvn,moreati/trac-gitsvn,dafrito/trac-mirror,dokipen/trac,exocad/exotrac,dafrito/trac-mirror,exocad/exotrac
|
#!/usr/bin/env python
#
# This script completely migrates a <= 0.8.x Trac environment to use the new
# default ticket model introduced in Trac 0.9.
#
# In particular, this means that the severity field is removed (or rather
# disabled by removing all possible values), and the priority values are
# changed to the more meaningful new defaults.
#
# Make sure to make a backup of the Trac environment before running this!
import sys
from trac.env import open_environment
from trac.ticket.model import Priority, Severity
priority_mapping = {
'highest': 'blocker',
'high': 'critical',
'normal': 'major',
'low': 'minor',
'lowest': 'trivial'
}
def main():
if len(sys.argv) < 2:
print >> sys.stderr, 'usage: %s /path/to/projenv' \
% os.path.basename(sys.argv[0])
sys.exit(2)
env = open_environment(sys.argv[1])
db = env.get_db_cnx()
for oldprio, newprio in priority_mapping.items():
priority = Priority(env, oldprio, db)
priority.name = newprio
priority.update(db)
for severity in list(Severity.select(env, db)):
severity.delete(db)
db.commit()
if __name__ == '__main__':
main()
Fix missing import in contrib script added in [2630].
git-svn-id: f68c6b3b1dcd5d00a2560c384475aaef3bc99487@2631 af82e41b-90c4-0310-8c96-b1721e28e2e2
|
#!/usr/bin/env python
#
# This script completely migrates a <= 0.8.x Trac environment to use the new
# default ticket model introduced in Trac 0.9.
#
# In particular, this means that the severity field is removed (or rather
# disabled by removing all possible values), and the priority values are
# changed to the more meaningful new defaults.
#
# Make sure to make a backup of the Trac environment before running this!
import os
import sys
from trac.env import open_environment
from trac.ticket.model import Priority, Severity
priority_mapping = {
'highest': 'blocker',
'high': 'critical',
'normal': 'major',
'low': 'minor',
'lowest': 'trivial'
}
def main():
if len(sys.argv) < 2:
print >> sys.stderr, 'usage: %s /path/to/projenv' \
% os.path.basename(sys.argv[0])
sys.exit(2)
env = open_environment(sys.argv[1])
db = env.get_db_cnx()
for oldprio, newprio in priority_mapping.items():
priority = Priority(env, oldprio, db)
priority.name = newprio
priority.update(db)
for severity in list(Severity.select(env, db)):
severity.delete(db)
db.commit()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
#
# This script completely migrates a <= 0.8.x Trac environment to use the new
# default ticket model introduced in Trac 0.9.
#
# In particular, this means that the severity field is removed (or rather
# disabled by removing all possible values), and the priority values are
# changed to the more meaningful new defaults.
#
# Make sure to make a backup of the Trac environment before running this!
import sys
from trac.env import open_environment
from trac.ticket.model import Priority, Severity
priority_mapping = {
'highest': 'blocker',
'high': 'critical',
'normal': 'major',
'low': 'minor',
'lowest': 'trivial'
}
def main():
if len(sys.argv) < 2:
print >> sys.stderr, 'usage: %s /path/to/projenv' \
% os.path.basename(sys.argv[0])
sys.exit(2)
env = open_environment(sys.argv[1])
db = env.get_db_cnx()
for oldprio, newprio in priority_mapping.items():
priority = Priority(env, oldprio, db)
priority.name = newprio
priority.update(db)
for severity in list(Severity.select(env, db)):
severity.delete(db)
db.commit()
if __name__ == '__main__':
main()
<commit_msg>Fix missing import in contrib script added in [2630].
git-svn-id: f68c6b3b1dcd5d00a2560c384475aaef3bc99487@2631 af82e41b-90c4-0310-8c96-b1721e28e2e2<commit_after>
|
#!/usr/bin/env python
#
# This script completely migrates a <= 0.8.x Trac environment to use the new
# default ticket model introduced in Trac 0.9.
#
# In particular, this means that the severity field is removed (or rather
# disabled by removing all possible values), and the priority values are
# changed to the more meaningful new defaults.
#
# Make sure to make a backup of the Trac environment before running this!
import os
import sys
from trac.env import open_environment
from trac.ticket.model import Priority, Severity
priority_mapping = {
'highest': 'blocker',
'high': 'critical',
'normal': 'major',
'low': 'minor',
'lowest': 'trivial'
}
def main():
if len(sys.argv) < 2:
print >> sys.stderr, 'usage: %s /path/to/projenv' \
% os.path.basename(sys.argv[0])
sys.exit(2)
env = open_environment(sys.argv[1])
db = env.get_db_cnx()
for oldprio, newprio in priority_mapping.items():
priority = Priority(env, oldprio, db)
priority.name = newprio
priority.update(db)
for severity in list(Severity.select(env, db)):
severity.delete(db)
db.commit()
if __name__ == '__main__':
main()
|
#!/usr/bin/env python
#
# This script completely migrates a <= 0.8.x Trac environment to use the new
# default ticket model introduced in Trac 0.9.
#
# In particular, this means that the severity field is removed (or rather
# disabled by removing all possible values), and the priority values are
# changed to the more meaningful new defaults.
#
# Make sure to make a backup of the Trac environment before running this!
import sys
from trac.env import open_environment
from trac.ticket.model import Priority, Severity
priority_mapping = {
'highest': 'blocker',
'high': 'critical',
'normal': 'major',
'low': 'minor',
'lowest': 'trivial'
}
def main():
if len(sys.argv) < 2:
print >> sys.stderr, 'usage: %s /path/to/projenv' \
% os.path.basename(sys.argv[0])
sys.exit(2)
env = open_environment(sys.argv[1])
db = env.get_db_cnx()
for oldprio, newprio in priority_mapping.items():
priority = Priority(env, oldprio, db)
priority.name = newprio
priority.update(db)
for severity in list(Severity.select(env, db)):
severity.delete(db)
db.commit()
if __name__ == '__main__':
main()
Fix missing import in contrib script added in [2630].
git-svn-id: f68c6b3b1dcd5d00a2560c384475aaef3bc99487@2631 af82e41b-90c4-0310-8c96-b1721e28e2e2#!/usr/bin/env python
#
# This script completely migrates a <= 0.8.x Trac environment to use the new
# default ticket model introduced in Trac 0.9.
#
# In particular, this means that the severity field is removed (or rather
# disabled by removing all possible values), and the priority values are
# changed to the more meaningful new defaults.
#
# Make sure to make a backup of the Trac environment before running this!
import os
import sys
from trac.env import open_environment
from trac.ticket.model import Priority, Severity
priority_mapping = {
'highest': 'blocker',
'high': 'critical',
'normal': 'major',
'low': 'minor',
'lowest': 'trivial'
}
def main():
if len(sys.argv) < 2:
print >> sys.stderr, 'usage: %s /path/to/projenv' \
% os.path.basename(sys.argv[0])
sys.exit(2)
env = open_environment(sys.argv[1])
db = env.get_db_cnx()
for oldprio, newprio in priority_mapping.items():
priority = Priority(env, oldprio, db)
priority.name = newprio
priority.update(db)
for severity in list(Severity.select(env, db)):
severity.delete(db)
db.commit()
if __name__ == '__main__':
main()
|
<commit_before>#!/usr/bin/env python
#
# This script completely migrates a <= 0.8.x Trac environment to use the new
# default ticket model introduced in Trac 0.9.
#
# In particular, this means that the severity field is removed (or rather
# disabled by removing all possible values), and the priority values are
# changed to the more meaningful new defaults.
#
# Make sure to make a backup of the Trac environment before running this!
import sys
from trac.env import open_environment
from trac.ticket.model import Priority, Severity
priority_mapping = {
'highest': 'blocker',
'high': 'critical',
'normal': 'major',
'low': 'minor',
'lowest': 'trivial'
}
def main():
if len(sys.argv) < 2:
print >> sys.stderr, 'usage: %s /path/to/projenv' \
% os.path.basename(sys.argv[0])
sys.exit(2)
env = open_environment(sys.argv[1])
db = env.get_db_cnx()
for oldprio, newprio in priority_mapping.items():
priority = Priority(env, oldprio, db)
priority.name = newprio
priority.update(db)
for severity in list(Severity.select(env, db)):
severity.delete(db)
db.commit()
if __name__ == '__main__':
main()
<commit_msg>Fix missing import in contrib script added in [2630].
git-svn-id: f68c6b3b1dcd5d00a2560c384475aaef3bc99487@2631 af82e41b-90c4-0310-8c96-b1721e28e2e2<commit_after>#!/usr/bin/env python
#
# This script completely migrates a <= 0.8.x Trac environment to use the new
# default ticket model introduced in Trac 0.9.
#
# In particular, this means that the severity field is removed (or rather
# disabled by removing all possible values), and the priority values are
# changed to the more meaningful new defaults.
#
# Make sure to make a backup of the Trac environment before running this!
import os
import sys
from trac.env import open_environment
from trac.ticket.model import Priority, Severity
priority_mapping = {
'highest': 'blocker',
'high': 'critical',
'normal': 'major',
'low': 'minor',
'lowest': 'trivial'
}
def main():
if len(sys.argv) < 2:
print >> sys.stderr, 'usage: %s /path/to/projenv' \
% os.path.basename(sys.argv[0])
sys.exit(2)
env = open_environment(sys.argv[1])
db = env.get_db_cnx()
for oldprio, newprio in priority_mapping.items():
priority = Priority(env, oldprio, db)
priority.name = newprio
priority.update(db)
for severity in list(Severity.select(env, db)):
severity.delete(db)
db.commit()
if __name__ == '__main__':
main()
|
d9226d778a831d6d9f9f8d7645869245d0757754
|
tests/integration/test_cli.py
|
tests/integration/test_cli.py
|
import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
|
import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local', '--no-autoreload'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
|
Disable autoreload in integration tests
|
Disable autoreload in integration tests
|
Python
|
apache-2.0
|
awslabs/chalice
|
import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
Disable autoreload in integration tests
|
import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local', '--no-autoreload'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
|
<commit_before>import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
<commit_msg>Disable autoreload in integration tests<commit_after>
|
import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local', '--no-autoreload'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
|
import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
Disable autoreload in integration testsimport os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local', '--no-autoreload'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
|
<commit_before>import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
<commit_msg>Disable autoreload in integration tests<commit_after>import os
import subprocess
import pytest
from chalice.utils import OSUtils
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
PROJECT_DIR = os.path.join(CURRENT_DIR, 'testapp')
@pytest.fixture
def local_app(tmpdir):
temp_dir_path = str(tmpdir)
OSUtils().copytree(PROJECT_DIR, temp_dir_path)
old_dir = os.getcwd()
try:
os.chdir(temp_dir_path)
yield temp_dir_path
finally:
os.chdir(old_dir)
def test_stack_trace_printed_on_error(local_app):
app_file = os.path.join(local_app, 'app.py')
with open(app_file, 'w') as f:
f.write(
'from chalice import Chalice\n'
'app = Chalice(app_name="test")\n'
'foobarbaz\n'
)
p = subprocess.Popen(['chalice', 'local', '--no-autoreload'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
stderr = p.communicate()[1].decode('ascii')
rc = p.returncode
assert rc == 2
assert 'Traceback' in stderr
assert 'foobarbaz' in stderr
|
ecd5aaa396c5d8ee82cabbb5d95c5c0b6c150270
|
irrigator_pro/irrigator_pro/wsgi.py
|
irrigator_pro/irrigator_pro/wsgi.py
|
"""
WSGI config for irrigator_pro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os, os.path, site, sys, socket
# Add django root dir to python path
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',))
print "PROJECT_ROOT=", PROJECT_ROOT
sys.path.append(PROJECT_ROOT)
# Add virtualenv dirs to python path
if socket.gethostname()=='gregs-mbp':
VIRTUAL_ENV_ROOT = os.path.join( PROJECT_ROOT, 'VirtualEnvs', 'irrigator_pro')
else:
VIRTUAL_ENV_ROOT = '/prod/VirtualEnvs/irrigator_pro/'
print "VIRTUAL_ENV_ROOT='%s'" % VIRTUAL_ENV_ROOT
activate_this = os.path.join(VIRTUAL_ENV_ROOT, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
# Get settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "irrigator_pro.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
"""
WSGI config for irrigator_pro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os, os.path, site, sys, socket
# Add django root dir to python path
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',))
print "PROJECT_ROOT=", PROJECT_ROOT
sys.path.append(PROJECT_ROOT)
# Add virtualenv dirs to python path
host = socket.gethostname()
print "HOSTNAME=%s" % host
if host=='irrigatorpro':
VIRTUAL_ENV_ROOT = '/prod/VirtualEnvs/irrigator_pro/'
else:
VIRTUAL_ENV_ROOT = os.path.join( PROJECT_ROOT, 'VirtualEnvs', 'irrigator_pro')
print "VIRTUAL_ENV_ROOT='%s'" % VIRTUAL_ENV_ROOT
activate_this = os.path.join(VIRTUAL_ENV_ROOT, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
# Get settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "irrigator_pro.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
Change host test to check for 'irrigatorpro' instead of my laptop's name, since the latter changes depending on the (wireless) network.
|
Change host test to check for 'irrigatorpro' instead of my laptop's
name, since the latter changes depending on the (wireless) network.
|
Python
|
mit
|
warnes/irrigatorpro,warnes/irrigatorpro,warnes/irrigatorpro,warnes/irrigatorpro
|
"""
WSGI config for irrigator_pro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os, os.path, site, sys, socket
# Add django root dir to python path
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',))
print "PROJECT_ROOT=", PROJECT_ROOT
sys.path.append(PROJECT_ROOT)
# Add virtualenv dirs to python path
if socket.gethostname()=='gregs-mbp':
VIRTUAL_ENV_ROOT = os.path.join( PROJECT_ROOT, 'VirtualEnvs', 'irrigator_pro')
else:
VIRTUAL_ENV_ROOT = '/prod/VirtualEnvs/irrigator_pro/'
print "VIRTUAL_ENV_ROOT='%s'" % VIRTUAL_ENV_ROOT
activate_this = os.path.join(VIRTUAL_ENV_ROOT, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
# Get settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "irrigator_pro.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
Change host test to check for 'irrigatorpro' instead of my laptop's
name, since the latter changes depending on the (wireless) network.
|
"""
WSGI config for irrigator_pro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os, os.path, site, sys, socket
# Add django root dir to python path
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',))
print "PROJECT_ROOT=", PROJECT_ROOT
sys.path.append(PROJECT_ROOT)
# Add virtualenv dirs to python path
host = socket.gethostname()
print "HOSTNAME=%s" % host
if host=='irrigatorpro':
VIRTUAL_ENV_ROOT = '/prod/VirtualEnvs/irrigator_pro/'
else:
VIRTUAL_ENV_ROOT = os.path.join( PROJECT_ROOT, 'VirtualEnvs', 'irrigator_pro')
print "VIRTUAL_ENV_ROOT='%s'" % VIRTUAL_ENV_ROOT
activate_this = os.path.join(VIRTUAL_ENV_ROOT, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
# Get settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "irrigator_pro.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
<commit_before>"""
WSGI config for irrigator_pro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os, os.path, site, sys, socket
# Add django root dir to python path
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',))
print "PROJECT_ROOT=", PROJECT_ROOT
sys.path.append(PROJECT_ROOT)
# Add virtualenv dirs to python path
if socket.gethostname()=='gregs-mbp':
VIRTUAL_ENV_ROOT = os.path.join( PROJECT_ROOT, 'VirtualEnvs', 'irrigator_pro')
else:
VIRTUAL_ENV_ROOT = '/prod/VirtualEnvs/irrigator_pro/'
print "VIRTUAL_ENV_ROOT='%s'" % VIRTUAL_ENV_ROOT
activate_this = os.path.join(VIRTUAL_ENV_ROOT, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
# Get settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "irrigator_pro.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
<commit_msg>Change host test to check for 'irrigatorpro' instead of my laptop's
name, since the latter changes depending on the (wireless) network.<commit_after>
|
"""
WSGI config for irrigator_pro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os, os.path, site, sys, socket
# Add django root dir to python path
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',))
print "PROJECT_ROOT=", PROJECT_ROOT
sys.path.append(PROJECT_ROOT)
# Add virtualenv dirs to python path
host = socket.gethostname()
print "HOSTNAME=%s" % host
if host=='irrigatorpro':
VIRTUAL_ENV_ROOT = '/prod/VirtualEnvs/irrigator_pro/'
else:
VIRTUAL_ENV_ROOT = os.path.join( PROJECT_ROOT, 'VirtualEnvs', 'irrigator_pro')
print "VIRTUAL_ENV_ROOT='%s'" % VIRTUAL_ENV_ROOT
activate_this = os.path.join(VIRTUAL_ENV_ROOT, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
# Get settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "irrigator_pro.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
"""
WSGI config for irrigator_pro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os, os.path, site, sys, socket
# Add django root dir to python path
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',))
print "PROJECT_ROOT=", PROJECT_ROOT
sys.path.append(PROJECT_ROOT)
# Add virtualenv dirs to python path
if socket.gethostname()=='gregs-mbp':
VIRTUAL_ENV_ROOT = os.path.join( PROJECT_ROOT, 'VirtualEnvs', 'irrigator_pro')
else:
VIRTUAL_ENV_ROOT = '/prod/VirtualEnvs/irrigator_pro/'
print "VIRTUAL_ENV_ROOT='%s'" % VIRTUAL_ENV_ROOT
activate_this = os.path.join(VIRTUAL_ENV_ROOT, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
# Get settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "irrigator_pro.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
Change host test to check for 'irrigatorpro' instead of my laptop's
name, since the latter changes depending on the (wireless) network."""
WSGI config for irrigator_pro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os, os.path, site, sys, socket
# Add django root dir to python path
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',))
print "PROJECT_ROOT=", PROJECT_ROOT
sys.path.append(PROJECT_ROOT)
# Add virtualenv dirs to python path
host = socket.gethostname()
print "HOSTNAME=%s" % host
if host=='irrigatorpro':
VIRTUAL_ENV_ROOT = '/prod/VirtualEnvs/irrigator_pro/'
else:
VIRTUAL_ENV_ROOT = os.path.join( PROJECT_ROOT, 'VirtualEnvs', 'irrigator_pro')
print "VIRTUAL_ENV_ROOT='%s'" % VIRTUAL_ENV_ROOT
activate_this = os.path.join(VIRTUAL_ENV_ROOT, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
# Get settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "irrigator_pro.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
<commit_before>"""
WSGI config for irrigator_pro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os, os.path, site, sys, socket
# Add django root dir to python path
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',))
print "PROJECT_ROOT=", PROJECT_ROOT
sys.path.append(PROJECT_ROOT)
# Add virtualenv dirs to python path
if socket.gethostname()=='gregs-mbp':
VIRTUAL_ENV_ROOT = os.path.join( PROJECT_ROOT, 'VirtualEnvs', 'irrigator_pro')
else:
VIRTUAL_ENV_ROOT = '/prod/VirtualEnvs/irrigator_pro/'
print "VIRTUAL_ENV_ROOT='%s'" % VIRTUAL_ENV_ROOT
activate_this = os.path.join(VIRTUAL_ENV_ROOT, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
# Get settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "irrigator_pro.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
<commit_msg>Change host test to check for 'irrigatorpro' instead of my laptop's
name, since the latter changes depending on the (wireless) network.<commit_after>"""
WSGI config for irrigator_pro project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os, os.path, site, sys, socket
# Add django root dir to python path
PROJECT_ROOT = os.path.abspath(os.path.join(os.path.dirname(__file__), '..',))
print "PROJECT_ROOT=", PROJECT_ROOT
sys.path.append(PROJECT_ROOT)
# Add virtualenv dirs to python path
host = socket.gethostname()
print "HOSTNAME=%s" % host
if host=='irrigatorpro':
VIRTUAL_ENV_ROOT = '/prod/VirtualEnvs/irrigator_pro/'
else:
VIRTUAL_ENV_ROOT = os.path.join( PROJECT_ROOT, 'VirtualEnvs', 'irrigator_pro')
print "VIRTUAL_ENV_ROOT='%s'" % VIRTUAL_ENV_ROOT
activate_this = os.path.join(VIRTUAL_ENV_ROOT, 'bin', 'activate_this.py')
execfile(activate_this, dict(__file__=activate_this))
# Get settings
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "irrigator_pro.settings")
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
|
6cd2f4f1f2f4a4dca74fcfd6484278cc90e6f77a
|
tests/test_security_object.py
|
tests/test_security_object.py
|
from unittest import TestCase
from zipline.assets._securities import Security
class TestSecurityRichCmp(TestCase):
def test_lt(self):
self.assertTrue(Security(3) < Security(4))
self.assertFalse(Security(4) < Security(4))
self.assertFalse(Security(5) < Security(4))
def test_le(self):
self.assertTrue(Security(3) <= Security(4))
self.assertTrue(Security(4) <= Security(4))
self.assertFalse(Security(5) <= Security(4))
def test_eq(self):
self.assertFalse(Security(3) == Security(4))
self.assertTrue(Security(4) == Security(4))
self.assertFalse(Security(5) == Security(4))
def test_ge(self):
self.assertFalse(Security(3) >= Security(4))
self.assertTrue(Security(4) >= Security(4))
self.assertTrue(Security(5) >= Security(4))
def test_gt(self):
self.assertFalse(Security(3) > Security(4))
self.assertFalse(Security(4) > Security(4))
self.assertTrue(Security(5) > Security(4))
def test_type_mismatch(self):
self.assertIsNotNone(Security(3) < 'a')
self.assertIsNotNone('a' < Security(3))
|
import sys
from unittest import TestCase
from zipline.assets._securities import Security
class TestSecurityRichCmp(TestCase):
def test_lt(self):
self.assertTrue(Security(3) < Security(4))
self.assertFalse(Security(4) < Security(4))
self.assertFalse(Security(5) < Security(4))
def test_le(self):
self.assertTrue(Security(3) <= Security(4))
self.assertTrue(Security(4) <= Security(4))
self.assertFalse(Security(5) <= Security(4))
def test_eq(self):
self.assertFalse(Security(3) == Security(4))
self.assertTrue(Security(4) == Security(4))
self.assertFalse(Security(5) == Security(4))
def test_ge(self):
self.assertFalse(Security(3) >= Security(4))
self.assertTrue(Security(4) >= Security(4))
self.assertTrue(Security(5) >= Security(4))
def test_gt(self):
self.assertFalse(Security(3) > Security(4))
self.assertFalse(Security(4) > Security(4))
self.assertTrue(Security(5) > Security(4))
def test_type_mismatch(self):
if sys.version_info.major < 3:
self.assertIsNotNone(Security(3) < 'a')
self.assertIsNotNone('a' < Security(3))
else:
with self.assertRaises(TypeError):
Security(3) < 'a'
with self.assertRaises(TypeError):
'a' < Security(3)
|
Update Security class unit tests for Python3 compatibility
|
TEST: Update Security class unit tests for Python3 compatibility
|
Python
|
apache-2.0
|
sketchytechky/zipline,stkubr/zipline,wubr2000/zipline,michaeljohnbennett/zipline,jimgoo/zipline-fork,kmather73/zipline,morrisonwudi/zipline,cmorgan/zipline,keir-rex/zipline,grundgruen/zipline,umuzungu/zipline,zhoulingjun/zipline,jordancheah/zipline,florentchandelier/zipline,nborggren/zipline,joequant/zipline,ronalcc/zipline,ChinaQuants/zipline,ronalcc/zipline,florentchandelier/zipline,gwulfs/zipline,chrjxj/zipline,jordancheah/zipline,stkubr/zipline,enigmampc/catalyst,dmitriz/zipline,magne-max/zipline-ja,dkushner/zipline,semio/zipline,quantopian/zipline,otmaneJai/Zipline,bartosh/zipline,humdings/zipline,dkushner/zipline,Scapogo/zipline,michaeljohnbennett/zipline,dmitriz/zipline,gwulfs/zipline,otmaneJai/Zipline,StratsOn/zipline,iamkingmaker/zipline,humdings/zipline,iamkingmaker/zipline,magne-max/zipline-ja,joequant/zipline,enigmampc/catalyst,CDSFinance/zipline,zhoulingjun/zipline,YuepengGuo/zipline,AlirezaShahabi/zipline,euri10/zipline,aajtodd/zipline,umuzungu/zipline,AlirezaShahabi/zipline,DVegaCapital/zipline,semio/zipline,wilsonkichoi/zipline,sketchytechky/zipline,jimgoo/zipline-fork,chrjxj/zipline,nborggren/zipline,MonoCloud/zipline,morrisonwudi/zipline,alphaBenj/zipline,CDSFinance/zipline,kmather73/zipline,alphaBenj/zipline,StratsOn/zipline,bartosh/zipline,quantopian/zipline,grundgruen/zipline,cmorgan/zipline,aajtodd/zipline,wilsonkichoi/zipline,ChinaQuants/zipline,Scapogo/zipline,YuepengGuo/zipline,euri10/zipline,DVegaCapital/zipline,MonoCloud/zipline,wubr2000/zipline,keir-rex/zipline
|
from unittest import TestCase
from zipline.assets._securities import Security
class TestSecurityRichCmp(TestCase):
def test_lt(self):
self.assertTrue(Security(3) < Security(4))
self.assertFalse(Security(4) < Security(4))
self.assertFalse(Security(5) < Security(4))
def test_le(self):
self.assertTrue(Security(3) <= Security(4))
self.assertTrue(Security(4) <= Security(4))
self.assertFalse(Security(5) <= Security(4))
def test_eq(self):
self.assertFalse(Security(3) == Security(4))
self.assertTrue(Security(4) == Security(4))
self.assertFalse(Security(5) == Security(4))
def test_ge(self):
self.assertFalse(Security(3) >= Security(4))
self.assertTrue(Security(4) >= Security(4))
self.assertTrue(Security(5) >= Security(4))
def test_gt(self):
self.assertFalse(Security(3) > Security(4))
self.assertFalse(Security(4) > Security(4))
self.assertTrue(Security(5) > Security(4))
def test_type_mismatch(self):
self.assertIsNotNone(Security(3) < 'a')
self.assertIsNotNone('a' < Security(3))
TEST: Update Security class unit tests for Python3 compatibility
|
import sys
from unittest import TestCase
from zipline.assets._securities import Security
class TestSecurityRichCmp(TestCase):
def test_lt(self):
self.assertTrue(Security(3) < Security(4))
self.assertFalse(Security(4) < Security(4))
self.assertFalse(Security(5) < Security(4))
def test_le(self):
self.assertTrue(Security(3) <= Security(4))
self.assertTrue(Security(4) <= Security(4))
self.assertFalse(Security(5) <= Security(4))
def test_eq(self):
self.assertFalse(Security(3) == Security(4))
self.assertTrue(Security(4) == Security(4))
self.assertFalse(Security(5) == Security(4))
def test_ge(self):
self.assertFalse(Security(3) >= Security(4))
self.assertTrue(Security(4) >= Security(4))
self.assertTrue(Security(5) >= Security(4))
def test_gt(self):
self.assertFalse(Security(3) > Security(4))
self.assertFalse(Security(4) > Security(4))
self.assertTrue(Security(5) > Security(4))
def test_type_mismatch(self):
if sys.version_info.major < 3:
self.assertIsNotNone(Security(3) < 'a')
self.assertIsNotNone('a' < Security(3))
else:
with self.assertRaises(TypeError):
Security(3) < 'a'
with self.assertRaises(TypeError):
'a' < Security(3)
|
<commit_before>from unittest import TestCase
from zipline.assets._securities import Security
class TestSecurityRichCmp(TestCase):
def test_lt(self):
self.assertTrue(Security(3) < Security(4))
self.assertFalse(Security(4) < Security(4))
self.assertFalse(Security(5) < Security(4))
def test_le(self):
self.assertTrue(Security(3) <= Security(4))
self.assertTrue(Security(4) <= Security(4))
self.assertFalse(Security(5) <= Security(4))
def test_eq(self):
self.assertFalse(Security(3) == Security(4))
self.assertTrue(Security(4) == Security(4))
self.assertFalse(Security(5) == Security(4))
def test_ge(self):
self.assertFalse(Security(3) >= Security(4))
self.assertTrue(Security(4) >= Security(4))
self.assertTrue(Security(5) >= Security(4))
def test_gt(self):
self.assertFalse(Security(3) > Security(4))
self.assertFalse(Security(4) > Security(4))
self.assertTrue(Security(5) > Security(4))
def test_type_mismatch(self):
self.assertIsNotNone(Security(3) < 'a')
self.assertIsNotNone('a' < Security(3))
<commit_msg>TEST: Update Security class unit tests for Python3 compatibility<commit_after>
|
import sys
from unittest import TestCase
from zipline.assets._securities import Security
class TestSecurityRichCmp(TestCase):
def test_lt(self):
self.assertTrue(Security(3) < Security(4))
self.assertFalse(Security(4) < Security(4))
self.assertFalse(Security(5) < Security(4))
def test_le(self):
self.assertTrue(Security(3) <= Security(4))
self.assertTrue(Security(4) <= Security(4))
self.assertFalse(Security(5) <= Security(4))
def test_eq(self):
self.assertFalse(Security(3) == Security(4))
self.assertTrue(Security(4) == Security(4))
self.assertFalse(Security(5) == Security(4))
def test_ge(self):
self.assertFalse(Security(3) >= Security(4))
self.assertTrue(Security(4) >= Security(4))
self.assertTrue(Security(5) >= Security(4))
def test_gt(self):
self.assertFalse(Security(3) > Security(4))
self.assertFalse(Security(4) > Security(4))
self.assertTrue(Security(5) > Security(4))
def test_type_mismatch(self):
if sys.version_info.major < 3:
self.assertIsNotNone(Security(3) < 'a')
self.assertIsNotNone('a' < Security(3))
else:
with self.assertRaises(TypeError):
Security(3) < 'a'
with self.assertRaises(TypeError):
'a' < Security(3)
|
from unittest import TestCase
from zipline.assets._securities import Security
class TestSecurityRichCmp(TestCase):
def test_lt(self):
self.assertTrue(Security(3) < Security(4))
self.assertFalse(Security(4) < Security(4))
self.assertFalse(Security(5) < Security(4))
def test_le(self):
self.assertTrue(Security(3) <= Security(4))
self.assertTrue(Security(4) <= Security(4))
self.assertFalse(Security(5) <= Security(4))
def test_eq(self):
self.assertFalse(Security(3) == Security(4))
self.assertTrue(Security(4) == Security(4))
self.assertFalse(Security(5) == Security(4))
def test_ge(self):
self.assertFalse(Security(3) >= Security(4))
self.assertTrue(Security(4) >= Security(4))
self.assertTrue(Security(5) >= Security(4))
def test_gt(self):
self.assertFalse(Security(3) > Security(4))
self.assertFalse(Security(4) > Security(4))
self.assertTrue(Security(5) > Security(4))
def test_type_mismatch(self):
self.assertIsNotNone(Security(3) < 'a')
self.assertIsNotNone('a' < Security(3))
TEST: Update Security class unit tests for Python3 compatibilityimport sys
from unittest import TestCase
from zipline.assets._securities import Security
class TestSecurityRichCmp(TestCase):
def test_lt(self):
self.assertTrue(Security(3) < Security(4))
self.assertFalse(Security(4) < Security(4))
self.assertFalse(Security(5) < Security(4))
def test_le(self):
self.assertTrue(Security(3) <= Security(4))
self.assertTrue(Security(4) <= Security(4))
self.assertFalse(Security(5) <= Security(4))
def test_eq(self):
self.assertFalse(Security(3) == Security(4))
self.assertTrue(Security(4) == Security(4))
self.assertFalse(Security(5) == Security(4))
def test_ge(self):
self.assertFalse(Security(3) >= Security(4))
self.assertTrue(Security(4) >= Security(4))
self.assertTrue(Security(5) >= Security(4))
def test_gt(self):
self.assertFalse(Security(3) > Security(4))
self.assertFalse(Security(4) > Security(4))
self.assertTrue(Security(5) > Security(4))
def test_type_mismatch(self):
if sys.version_info.major < 3:
self.assertIsNotNone(Security(3) < 'a')
self.assertIsNotNone('a' < Security(3))
else:
with self.assertRaises(TypeError):
Security(3) < 'a'
with self.assertRaises(TypeError):
'a' < Security(3)
|
<commit_before>from unittest import TestCase
from zipline.assets._securities import Security
class TestSecurityRichCmp(TestCase):
def test_lt(self):
self.assertTrue(Security(3) < Security(4))
self.assertFalse(Security(4) < Security(4))
self.assertFalse(Security(5) < Security(4))
def test_le(self):
self.assertTrue(Security(3) <= Security(4))
self.assertTrue(Security(4) <= Security(4))
self.assertFalse(Security(5) <= Security(4))
def test_eq(self):
self.assertFalse(Security(3) == Security(4))
self.assertTrue(Security(4) == Security(4))
self.assertFalse(Security(5) == Security(4))
def test_ge(self):
self.assertFalse(Security(3) >= Security(4))
self.assertTrue(Security(4) >= Security(4))
self.assertTrue(Security(5) >= Security(4))
def test_gt(self):
self.assertFalse(Security(3) > Security(4))
self.assertFalse(Security(4) > Security(4))
self.assertTrue(Security(5) > Security(4))
def test_type_mismatch(self):
self.assertIsNotNone(Security(3) < 'a')
self.assertIsNotNone('a' < Security(3))
<commit_msg>TEST: Update Security class unit tests for Python3 compatibility<commit_after>import sys
from unittest import TestCase
from zipline.assets._securities import Security
class TestSecurityRichCmp(TestCase):
def test_lt(self):
self.assertTrue(Security(3) < Security(4))
self.assertFalse(Security(4) < Security(4))
self.assertFalse(Security(5) < Security(4))
def test_le(self):
self.assertTrue(Security(3) <= Security(4))
self.assertTrue(Security(4) <= Security(4))
self.assertFalse(Security(5) <= Security(4))
def test_eq(self):
self.assertFalse(Security(3) == Security(4))
self.assertTrue(Security(4) == Security(4))
self.assertFalse(Security(5) == Security(4))
def test_ge(self):
self.assertFalse(Security(3) >= Security(4))
self.assertTrue(Security(4) >= Security(4))
self.assertTrue(Security(5) >= Security(4))
def test_gt(self):
self.assertFalse(Security(3) > Security(4))
self.assertFalse(Security(4) > Security(4))
self.assertTrue(Security(5) > Security(4))
def test_type_mismatch(self):
if sys.version_info.major < 3:
self.assertIsNotNone(Security(3) < 'a')
self.assertIsNotNone('a' < Security(3))
else:
with self.assertRaises(TypeError):
Security(3) < 'a'
with self.assertRaises(TypeError):
'a' < Security(3)
|
f3c0d4f0f64f79377b6f631426d2e78d8faecc60
|
bouncer-plumbing/mlab-to-bouncer/makeconfig.py
|
bouncer-plumbing/mlab-to-bouncer/makeconfig.py
|
#!/usr/bin/env python
import sys
import yaml
def read_parts_from_stdin():
data = sys.stdin.read()
parts_string = data.split("----")
parts_parsed = []
for part in parts_string:
part_parsed = yaml.safe_load(part)
parts_parsed.append(part_parsed)
return parts_parsed
def assemble_bouncer_config(parts):
merged_parts = { }
for part in parts:
merged_parts.update(part)
bouncer_config = { 'collector': merged_parts }
return yaml.dump(bouncer_config)
def write_bouncer_config(path, bouncer_config_contents):
try:
f = open(path, 'w')
f.write(bouncer_config_contents)
f.close()
except IOError:
print "Couldn't write to bouncer config file."
exit(1)
bouncer_config_path = '/home/mlab/data/bouncer.yaml'
if len(sys.argv) >= 2:
bouncer_config_path = sys.argv[1]
# FIXME: Read from the mlab-ns simulator.
parts = read_parts_from_stdin()
bouncer_config = assemble_bouncer_config(parts)
write_bouncer_config(bouncer_config_path, bouncer_config)
|
#!/usr/bin/env python
import sys
import yaml
import json
import subprocess
def read_parts_from_mlabns():
# FIXME: Check wget exit status
MLAB_NS_QUERY_URL = "http://localhost:8585/ooni?match=all"
json_list = subprocess.Popen(["wget", MLAB_NS_QUERY_URL, "-O", "-"], stdout=subprocess.PIPE).communicate()[0]
sliver_list = json.loads(json_list)
part_list = []
for sliver in sliver_list:
part_list.append(sliver['tool_extra'])
return part_list
def assemble_bouncer_config(parts):
merged_parts = { }
for part in parts:
merged_parts.update(part)
bouncer_config = { 'collector': merged_parts }
return yaml.safe_dump(bouncer_config)
def write_bouncer_config(path, bouncer_config_contents):
try:
f = open(path, 'w')
f.write(bouncer_config_contents)
f.close()
except IOError:
print "Couldn't write to bouncer config file."
exit(1)
bouncer_config_path = '/home/mlab/data/bouncer.yaml'
if len(sys.argv) >= 2:
bouncer_config_path = sys.argv[1]
# FIXME: Read from the mlab-ns simulator.
parts = read_parts_from_mlabns()
bouncer_config = assemble_bouncer_config(parts)
write_bouncer_config(bouncer_config_path, bouncer_config)
|
Make bouncer script pull from mlab-ns simulator
|
Make bouncer script pull from mlab-ns simulator
|
Python
|
apache-2.0
|
hellais/ooni-support,m-lab/ooni-support,m-lab/ooni-support,hellais/ooni-support
|
#!/usr/bin/env python
import sys
import yaml
def read_parts_from_stdin():
data = sys.stdin.read()
parts_string = data.split("----")
parts_parsed = []
for part in parts_string:
part_parsed = yaml.safe_load(part)
parts_parsed.append(part_parsed)
return parts_parsed
def assemble_bouncer_config(parts):
merged_parts = { }
for part in parts:
merged_parts.update(part)
bouncer_config = { 'collector': merged_parts }
return yaml.dump(bouncer_config)
def write_bouncer_config(path, bouncer_config_contents):
try:
f = open(path, 'w')
f.write(bouncer_config_contents)
f.close()
except IOError:
print "Couldn't write to bouncer config file."
exit(1)
bouncer_config_path = '/home/mlab/data/bouncer.yaml'
if len(sys.argv) >= 2:
bouncer_config_path = sys.argv[1]
# FIXME: Read from the mlab-ns simulator.
parts = read_parts_from_stdin()
bouncer_config = assemble_bouncer_config(parts)
write_bouncer_config(bouncer_config_path, bouncer_config)
Make bouncer script pull from mlab-ns simulator
|
#!/usr/bin/env python
import sys
import yaml
import json
import subprocess
def read_parts_from_mlabns():
# FIXME: Check wget exit status
MLAB_NS_QUERY_URL = "http://localhost:8585/ooni?match=all"
json_list = subprocess.Popen(["wget", MLAB_NS_QUERY_URL, "-O", "-"], stdout=subprocess.PIPE).communicate()[0]
sliver_list = json.loads(json_list)
part_list = []
for sliver in sliver_list:
part_list.append(sliver['tool_extra'])
return part_list
def assemble_bouncer_config(parts):
merged_parts = { }
for part in parts:
merged_parts.update(part)
bouncer_config = { 'collector': merged_parts }
return yaml.safe_dump(bouncer_config)
def write_bouncer_config(path, bouncer_config_contents):
try:
f = open(path, 'w')
f.write(bouncer_config_contents)
f.close()
except IOError:
print "Couldn't write to bouncer config file."
exit(1)
bouncer_config_path = '/home/mlab/data/bouncer.yaml'
if len(sys.argv) >= 2:
bouncer_config_path = sys.argv[1]
# FIXME: Read from the mlab-ns simulator.
parts = read_parts_from_mlabns()
bouncer_config = assemble_bouncer_config(parts)
write_bouncer_config(bouncer_config_path, bouncer_config)
|
<commit_before>#!/usr/bin/env python
import sys
import yaml
def read_parts_from_stdin():
data = sys.stdin.read()
parts_string = data.split("----")
parts_parsed = []
for part in parts_string:
part_parsed = yaml.safe_load(part)
parts_parsed.append(part_parsed)
return parts_parsed
def assemble_bouncer_config(parts):
merged_parts = { }
for part in parts:
merged_parts.update(part)
bouncer_config = { 'collector': merged_parts }
return yaml.dump(bouncer_config)
def write_bouncer_config(path, bouncer_config_contents):
try:
f = open(path, 'w')
f.write(bouncer_config_contents)
f.close()
except IOError:
print "Couldn't write to bouncer config file."
exit(1)
bouncer_config_path = '/home/mlab/data/bouncer.yaml'
if len(sys.argv) >= 2:
bouncer_config_path = sys.argv[1]
# FIXME: Read from the mlab-ns simulator.
parts = read_parts_from_stdin()
bouncer_config = assemble_bouncer_config(parts)
write_bouncer_config(bouncer_config_path, bouncer_config)
<commit_msg>Make bouncer script pull from mlab-ns simulator<commit_after>
|
#!/usr/bin/env python
import sys
import yaml
import json
import subprocess
def read_parts_from_mlabns():
# FIXME: Check wget exit status
MLAB_NS_QUERY_URL = "http://localhost:8585/ooni?match=all"
json_list = subprocess.Popen(["wget", MLAB_NS_QUERY_URL, "-O", "-"], stdout=subprocess.PIPE).communicate()[0]
sliver_list = json.loads(json_list)
part_list = []
for sliver in sliver_list:
part_list.append(sliver['tool_extra'])
return part_list
def assemble_bouncer_config(parts):
merged_parts = { }
for part in parts:
merged_parts.update(part)
bouncer_config = { 'collector': merged_parts }
return yaml.safe_dump(bouncer_config)
def write_bouncer_config(path, bouncer_config_contents):
try:
f = open(path, 'w')
f.write(bouncer_config_contents)
f.close()
except IOError:
print "Couldn't write to bouncer config file."
exit(1)
bouncer_config_path = '/home/mlab/data/bouncer.yaml'
if len(sys.argv) >= 2:
bouncer_config_path = sys.argv[1]
# FIXME: Read from the mlab-ns simulator.
parts = read_parts_from_mlabns()
bouncer_config = assemble_bouncer_config(parts)
write_bouncer_config(bouncer_config_path, bouncer_config)
|
#!/usr/bin/env python
import sys
import yaml
def read_parts_from_stdin():
data = sys.stdin.read()
parts_string = data.split("----")
parts_parsed = []
for part in parts_string:
part_parsed = yaml.safe_load(part)
parts_parsed.append(part_parsed)
return parts_parsed
def assemble_bouncer_config(parts):
merged_parts = { }
for part in parts:
merged_parts.update(part)
bouncer_config = { 'collector': merged_parts }
return yaml.dump(bouncer_config)
def write_bouncer_config(path, bouncer_config_contents):
try:
f = open(path, 'w')
f.write(bouncer_config_contents)
f.close()
except IOError:
print "Couldn't write to bouncer config file."
exit(1)
bouncer_config_path = '/home/mlab/data/bouncer.yaml'
if len(sys.argv) >= 2:
bouncer_config_path = sys.argv[1]
# FIXME: Read from the mlab-ns simulator.
parts = read_parts_from_stdin()
bouncer_config = assemble_bouncer_config(parts)
write_bouncer_config(bouncer_config_path, bouncer_config)
Make bouncer script pull from mlab-ns simulator#!/usr/bin/env python
import sys
import yaml
import json
import subprocess
def read_parts_from_mlabns():
# FIXME: Check wget exit status
MLAB_NS_QUERY_URL = "http://localhost:8585/ooni?match=all"
json_list = subprocess.Popen(["wget", MLAB_NS_QUERY_URL, "-O", "-"], stdout=subprocess.PIPE).communicate()[0]
sliver_list = json.loads(json_list)
part_list = []
for sliver in sliver_list:
part_list.append(sliver['tool_extra'])
return part_list
def assemble_bouncer_config(parts):
merged_parts = { }
for part in parts:
merged_parts.update(part)
bouncer_config = { 'collector': merged_parts }
return yaml.safe_dump(bouncer_config)
def write_bouncer_config(path, bouncer_config_contents):
try:
f = open(path, 'w')
f.write(bouncer_config_contents)
f.close()
except IOError:
print "Couldn't write to bouncer config file."
exit(1)
bouncer_config_path = '/home/mlab/data/bouncer.yaml'
if len(sys.argv) >= 2:
bouncer_config_path = sys.argv[1]
# FIXME: Read from the mlab-ns simulator.
parts = read_parts_from_mlabns()
bouncer_config = assemble_bouncer_config(parts)
write_bouncer_config(bouncer_config_path, bouncer_config)
|
<commit_before>#!/usr/bin/env python
import sys
import yaml
def read_parts_from_stdin():
data = sys.stdin.read()
parts_string = data.split("----")
parts_parsed = []
for part in parts_string:
part_parsed = yaml.safe_load(part)
parts_parsed.append(part_parsed)
return parts_parsed
def assemble_bouncer_config(parts):
merged_parts = { }
for part in parts:
merged_parts.update(part)
bouncer_config = { 'collector': merged_parts }
return yaml.dump(bouncer_config)
def write_bouncer_config(path, bouncer_config_contents):
try:
f = open(path, 'w')
f.write(bouncer_config_contents)
f.close()
except IOError:
print "Couldn't write to bouncer config file."
exit(1)
bouncer_config_path = '/home/mlab/data/bouncer.yaml'
if len(sys.argv) >= 2:
bouncer_config_path = sys.argv[1]
# FIXME: Read from the mlab-ns simulator.
parts = read_parts_from_stdin()
bouncer_config = assemble_bouncer_config(parts)
write_bouncer_config(bouncer_config_path, bouncer_config)
<commit_msg>Make bouncer script pull from mlab-ns simulator<commit_after>#!/usr/bin/env python
import sys
import yaml
import json
import subprocess
def read_parts_from_mlabns():
# FIXME: Check wget exit status
MLAB_NS_QUERY_URL = "http://localhost:8585/ooni?match=all"
json_list = subprocess.Popen(["wget", MLAB_NS_QUERY_URL, "-O", "-"], stdout=subprocess.PIPE).communicate()[0]
sliver_list = json.loads(json_list)
part_list = []
for sliver in sliver_list:
part_list.append(sliver['tool_extra'])
return part_list
def assemble_bouncer_config(parts):
merged_parts = { }
for part in parts:
merged_parts.update(part)
bouncer_config = { 'collector': merged_parts }
return yaml.safe_dump(bouncer_config)
def write_bouncer_config(path, bouncer_config_contents):
try:
f = open(path, 'w')
f.write(bouncer_config_contents)
f.close()
except IOError:
print "Couldn't write to bouncer config file."
exit(1)
bouncer_config_path = '/home/mlab/data/bouncer.yaml'
if len(sys.argv) >= 2:
bouncer_config_path = sys.argv[1]
# FIXME: Read from the mlab-ns simulator.
parts = read_parts_from_mlabns()
bouncer_config = assemble_bouncer_config(parts)
write_bouncer_config(bouncer_config_path, bouncer_config)
|
e1d527d0676fd0b3a7a1f7b5e9b98ddc23a41cd6
|
storage_service/locations/tests/test_fixity_log.py
|
storage_service/locations/tests/test_fixity_log.py
|
from django.test import TestCase
from locations import models
class TestFixityLog(TestCase):
fixtures = ['base.json', 'fixity_log.json']
def setUp(self):
self.fl_object = models.FixityLog.objects.all()[0]
#self.auth = requests.auth.HTTPBasicAuth(self.ds_object.user, self.ds_object.password)
def test_has_required_attributes(self):
assert self.fl_object.package
assert self.fl_object.success
assert self.fl_object.error_details
assert self.fl_object.datetime_reported
|
from django.test import TestCase
from locations import models
class TestFixityLog(TestCase):
fixtures = ['base.json', 'package.json', 'fixity_log.json']
def setUp(self):
self.fl_object = models.FixityLog.objects.all()[0]
#self.auth = requests.auth.HTTPBasicAuth(self.ds_object.user, self.ds_object.password)
def test_has_required_attributes(self):
assert self.fl_object.package
assert self.fl_object.success
assert self.fl_object.error_details
assert self.fl_object.datetime_reported
|
Fix to storage service test.
|
Fix to storage service test.
|
Python
|
agpl-3.0
|
artefactual/archivematica-storage-service,artefactual/archivematica-storage-service,artefactual/archivematica-storage-service,artefactual/archivematica-storage-service
|
from django.test import TestCase
from locations import models
class TestFixityLog(TestCase):
fixtures = ['base.json', 'fixity_log.json']
def setUp(self):
self.fl_object = models.FixityLog.objects.all()[0]
#self.auth = requests.auth.HTTPBasicAuth(self.ds_object.user, self.ds_object.password)
def test_has_required_attributes(self):
assert self.fl_object.package
assert self.fl_object.success
assert self.fl_object.error_details
assert self.fl_object.datetime_reported
Fix to storage service test.
|
from django.test import TestCase
from locations import models
class TestFixityLog(TestCase):
fixtures = ['base.json', 'package.json', 'fixity_log.json']
def setUp(self):
self.fl_object = models.FixityLog.objects.all()[0]
#self.auth = requests.auth.HTTPBasicAuth(self.ds_object.user, self.ds_object.password)
def test_has_required_attributes(self):
assert self.fl_object.package
assert self.fl_object.success
assert self.fl_object.error_details
assert self.fl_object.datetime_reported
|
<commit_before>from django.test import TestCase
from locations import models
class TestFixityLog(TestCase):
fixtures = ['base.json', 'fixity_log.json']
def setUp(self):
self.fl_object = models.FixityLog.objects.all()[0]
#self.auth = requests.auth.HTTPBasicAuth(self.ds_object.user, self.ds_object.password)
def test_has_required_attributes(self):
assert self.fl_object.package
assert self.fl_object.success
assert self.fl_object.error_details
assert self.fl_object.datetime_reported
<commit_msg>Fix to storage service test.<commit_after>
|
from django.test import TestCase
from locations import models
class TestFixityLog(TestCase):
fixtures = ['base.json', 'package.json', 'fixity_log.json']
def setUp(self):
self.fl_object = models.FixityLog.objects.all()[0]
#self.auth = requests.auth.HTTPBasicAuth(self.ds_object.user, self.ds_object.password)
def test_has_required_attributes(self):
assert self.fl_object.package
assert self.fl_object.success
assert self.fl_object.error_details
assert self.fl_object.datetime_reported
|
from django.test import TestCase
from locations import models
class TestFixityLog(TestCase):
fixtures = ['base.json', 'fixity_log.json']
def setUp(self):
self.fl_object = models.FixityLog.objects.all()[0]
#self.auth = requests.auth.HTTPBasicAuth(self.ds_object.user, self.ds_object.password)
def test_has_required_attributes(self):
assert self.fl_object.package
assert self.fl_object.success
assert self.fl_object.error_details
assert self.fl_object.datetime_reported
Fix to storage service test.from django.test import TestCase
from locations import models
class TestFixityLog(TestCase):
fixtures = ['base.json', 'package.json', 'fixity_log.json']
def setUp(self):
self.fl_object = models.FixityLog.objects.all()[0]
#self.auth = requests.auth.HTTPBasicAuth(self.ds_object.user, self.ds_object.password)
def test_has_required_attributes(self):
assert self.fl_object.package
assert self.fl_object.success
assert self.fl_object.error_details
assert self.fl_object.datetime_reported
|
<commit_before>from django.test import TestCase
from locations import models
class TestFixityLog(TestCase):
fixtures = ['base.json', 'fixity_log.json']
def setUp(self):
self.fl_object = models.FixityLog.objects.all()[0]
#self.auth = requests.auth.HTTPBasicAuth(self.ds_object.user, self.ds_object.password)
def test_has_required_attributes(self):
assert self.fl_object.package
assert self.fl_object.success
assert self.fl_object.error_details
assert self.fl_object.datetime_reported
<commit_msg>Fix to storage service test.<commit_after>from django.test import TestCase
from locations import models
class TestFixityLog(TestCase):
fixtures = ['base.json', 'package.json', 'fixity_log.json']
def setUp(self):
self.fl_object = models.FixityLog.objects.all()[0]
#self.auth = requests.auth.HTTPBasicAuth(self.ds_object.user, self.ds_object.password)
def test_has_required_attributes(self):
assert self.fl_object.package
assert self.fl_object.success
assert self.fl_object.error_details
assert self.fl_object.datetime_reported
|
38524cdfbaef36594a546ba3be8883ba00b2df28
|
tests/_support/docstrings.py
|
tests/_support/docstrings.py
|
from invoke import task
@task
def no_docstring():
pass
@task
def one_line():
""" foo
"""
@task
def two_lines():
""" foo
bar
"""
@task
def leading_whitespace():
"""
foo
"""
@task(aliases=('a', 'b'))
def with_aliases():
""" foo
"""
|
from invoke import task
@task
def no_docstring():
pass
@task
def one_line():
"""foo
"""
@task
def two_lines():
"""foo
bar
"""
@task
def leading_whitespace():
"""
foo
"""
@task(aliases=('a', 'b'))
def with_aliases():
"""foo
"""
|
Remove leading whitespace from non leading whitespace tests
|
Remove leading whitespace from non leading whitespace tests
|
Python
|
bsd-2-clause
|
tyewang/invoke,mkusz/invoke,mkusz/invoke,singingwolfboy/invoke,frol/invoke,pyinvoke/invoke,kejbaly2/invoke,mattrobenolt/invoke,mattrobenolt/invoke,frol/invoke,pfmoore/invoke,pfmoore/invoke,sophacles/invoke,pyinvoke/invoke,kejbaly2/invoke
|
from invoke import task
@task
def no_docstring():
pass
@task
def one_line():
""" foo
"""
@task
def two_lines():
""" foo
bar
"""
@task
def leading_whitespace():
"""
foo
"""
@task(aliases=('a', 'b'))
def with_aliases():
""" foo
"""
Remove leading whitespace from non leading whitespace tests
|
from invoke import task
@task
def no_docstring():
pass
@task
def one_line():
"""foo
"""
@task
def two_lines():
"""foo
bar
"""
@task
def leading_whitespace():
"""
foo
"""
@task(aliases=('a', 'b'))
def with_aliases():
"""foo
"""
|
<commit_before>from invoke import task
@task
def no_docstring():
pass
@task
def one_line():
""" foo
"""
@task
def two_lines():
""" foo
bar
"""
@task
def leading_whitespace():
"""
foo
"""
@task(aliases=('a', 'b'))
def with_aliases():
""" foo
"""
<commit_msg>Remove leading whitespace from non leading whitespace tests<commit_after>
|
from invoke import task
@task
def no_docstring():
pass
@task
def one_line():
"""foo
"""
@task
def two_lines():
"""foo
bar
"""
@task
def leading_whitespace():
"""
foo
"""
@task(aliases=('a', 'b'))
def with_aliases():
"""foo
"""
|
from invoke import task
@task
def no_docstring():
pass
@task
def one_line():
""" foo
"""
@task
def two_lines():
""" foo
bar
"""
@task
def leading_whitespace():
"""
foo
"""
@task(aliases=('a', 'b'))
def with_aliases():
""" foo
"""
Remove leading whitespace from non leading whitespace testsfrom invoke import task
@task
def no_docstring():
pass
@task
def one_line():
"""foo
"""
@task
def two_lines():
"""foo
bar
"""
@task
def leading_whitespace():
"""
foo
"""
@task(aliases=('a', 'b'))
def with_aliases():
"""foo
"""
|
<commit_before>from invoke import task
@task
def no_docstring():
pass
@task
def one_line():
""" foo
"""
@task
def two_lines():
""" foo
bar
"""
@task
def leading_whitespace():
"""
foo
"""
@task(aliases=('a', 'b'))
def with_aliases():
""" foo
"""
<commit_msg>Remove leading whitespace from non leading whitespace tests<commit_after>from invoke import task
@task
def no_docstring():
pass
@task
def one_line():
"""foo
"""
@task
def two_lines():
"""foo
bar
"""
@task
def leading_whitespace():
"""
foo
"""
@task(aliases=('a', 'b'))
def with_aliases():
"""foo
"""
|
708571834a7a0c92273e628c2854b4ebefad3f4f
|
nrf/examples/ssd1306_mod.py
|
nrf/examples/ssd1306_mod.py
|
# NOTE: Modified version to align with implemented I2C API in nrf port.
#
# Examples usage of SSD1306_SPI on pca10040
#
# from machine import Pin, SPI
# from ssd1306 import SSD1306_SPI
# spi = SPI(0, baudrate=40000000)
# dc = Pin.board.PA11
# res = Pin.board.PA12
# cs = Pin.board.PA13
# disp = SSD1306_SPI(128, 64, spi, dc, res, cs)
#
#
# Example usage of SSD1306_I2C on pca10040
#
# from machine import Pin, I2C
# from ssd1306 import SSD1306_I2C
# i2c = I2C(0, Pin.board.PA3, Pin.board.PA4)
# disp = SSD1306_I2C_Mod(128, 64, i2c)
from ssd1306 import SSD1306_I2C
class SSD1306_I2C_Mod(SSD1306_I2C):
def write_data(self, buf):
buffer = bytearray([0x40]) + buf # Co=0, D/C#=1
self.i2c.writeto(self.addr, buffer)
|
# NOTE: Modified version to align with implemented I2C API in nrf port.
#
# Examples usage of SSD1306_SPI on pca10040
#
# from machine import Pin, SPI
# from ssd1306 import SSD1306_SPI
# spi = SPI(0, baudrate=40000000)
# dc = Pin.board.PA11
# res = Pin.board.PA12
# cs = Pin.board.PA13
# disp = SSD1306_SPI(128, 64, spi, dc, res, cs)
#
#
# Example usage of SSD1306_I2C on pca10040
#
# from machine import Pin, I2C
# from ssd1306_mod import SSD1306_I2C_Mod
# i2c = I2C(0, Pin.board.PA3, Pin.board.PA4)
# disp = SSD1306_I2C_Mod(128, 64, i2c)
from ssd1306 import SSD1306_I2C
class SSD1306_I2C_Mod(SSD1306_I2C):
def write_data(self, buf):
buffer = bytearray([0x40]) + buf # Co=0, D/C#=1
self.i2c.writeto(self.addr, buffer)
|
Update ssd1306 modification example to import correct class.
|
nrf/examples: Update ssd1306 modification example to import correct class.
|
Python
|
mit
|
tralamazza/micropython,tralamazza/micropython,adafruit/micropython,adafruit/micropython,adafruit/circuitpython,adafruit/circuitpython,adafruit/micropython,adafruit/circuitpython,tralamazza/micropython,adafruit/circuitpython,adafruit/micropython,tralamazza/micropython,adafruit/micropython,adafruit/circuitpython,adafruit/circuitpython
|
# NOTE: Modified version to align with implemented I2C API in nrf port.
#
# Examples usage of SSD1306_SPI on pca10040
#
# from machine import Pin, SPI
# from ssd1306 import SSD1306_SPI
# spi = SPI(0, baudrate=40000000)
# dc = Pin.board.PA11
# res = Pin.board.PA12
# cs = Pin.board.PA13
# disp = SSD1306_SPI(128, 64, spi, dc, res, cs)
#
#
# Example usage of SSD1306_I2C on pca10040
#
# from machine import Pin, I2C
# from ssd1306 import SSD1306_I2C
# i2c = I2C(0, Pin.board.PA3, Pin.board.PA4)
# disp = SSD1306_I2C_Mod(128, 64, i2c)
from ssd1306 import SSD1306_I2C
class SSD1306_I2C_Mod(SSD1306_I2C):
def write_data(self, buf):
buffer = bytearray([0x40]) + buf # Co=0, D/C#=1
self.i2c.writeto(self.addr, buffer)
nrf/examples: Update ssd1306 modification example to import correct class.
|
# NOTE: Modified version to align with implemented I2C API in nrf port.
#
# Examples usage of SSD1306_SPI on pca10040
#
# from machine import Pin, SPI
# from ssd1306 import SSD1306_SPI
# spi = SPI(0, baudrate=40000000)
# dc = Pin.board.PA11
# res = Pin.board.PA12
# cs = Pin.board.PA13
# disp = SSD1306_SPI(128, 64, spi, dc, res, cs)
#
#
# Example usage of SSD1306_I2C on pca10040
#
# from machine import Pin, I2C
# from ssd1306_mod import SSD1306_I2C_Mod
# i2c = I2C(0, Pin.board.PA3, Pin.board.PA4)
# disp = SSD1306_I2C_Mod(128, 64, i2c)
from ssd1306 import SSD1306_I2C
class SSD1306_I2C_Mod(SSD1306_I2C):
def write_data(self, buf):
buffer = bytearray([0x40]) + buf # Co=0, D/C#=1
self.i2c.writeto(self.addr, buffer)
|
<commit_before># NOTE: Modified version to align with implemented I2C API in nrf port.
#
# Examples usage of SSD1306_SPI on pca10040
#
# from machine import Pin, SPI
# from ssd1306 import SSD1306_SPI
# spi = SPI(0, baudrate=40000000)
# dc = Pin.board.PA11
# res = Pin.board.PA12
# cs = Pin.board.PA13
# disp = SSD1306_SPI(128, 64, spi, dc, res, cs)
#
#
# Example usage of SSD1306_I2C on pca10040
#
# from machine import Pin, I2C
# from ssd1306 import SSD1306_I2C
# i2c = I2C(0, Pin.board.PA3, Pin.board.PA4)
# disp = SSD1306_I2C_Mod(128, 64, i2c)
from ssd1306 import SSD1306_I2C
class SSD1306_I2C_Mod(SSD1306_I2C):
def write_data(self, buf):
buffer = bytearray([0x40]) + buf # Co=0, D/C#=1
self.i2c.writeto(self.addr, buffer)
<commit_msg>nrf/examples: Update ssd1306 modification example to import correct class.<commit_after>
|
# NOTE: Modified version to align with implemented I2C API in nrf port.
#
# Examples usage of SSD1306_SPI on pca10040
#
# from machine import Pin, SPI
# from ssd1306 import SSD1306_SPI
# spi = SPI(0, baudrate=40000000)
# dc = Pin.board.PA11
# res = Pin.board.PA12
# cs = Pin.board.PA13
# disp = SSD1306_SPI(128, 64, spi, dc, res, cs)
#
#
# Example usage of SSD1306_I2C on pca10040
#
# from machine import Pin, I2C
# from ssd1306_mod import SSD1306_I2C_Mod
# i2c = I2C(0, Pin.board.PA3, Pin.board.PA4)
# disp = SSD1306_I2C_Mod(128, 64, i2c)
from ssd1306 import SSD1306_I2C
class SSD1306_I2C_Mod(SSD1306_I2C):
def write_data(self, buf):
buffer = bytearray([0x40]) + buf # Co=0, D/C#=1
self.i2c.writeto(self.addr, buffer)
|
# NOTE: Modified version to align with implemented I2C API in nrf port.
#
# Examples usage of SSD1306_SPI on pca10040
#
# from machine import Pin, SPI
# from ssd1306 import SSD1306_SPI
# spi = SPI(0, baudrate=40000000)
# dc = Pin.board.PA11
# res = Pin.board.PA12
# cs = Pin.board.PA13
# disp = SSD1306_SPI(128, 64, spi, dc, res, cs)
#
#
# Example usage of SSD1306_I2C on pca10040
#
# from machine import Pin, I2C
# from ssd1306 import SSD1306_I2C
# i2c = I2C(0, Pin.board.PA3, Pin.board.PA4)
# disp = SSD1306_I2C_Mod(128, 64, i2c)
from ssd1306 import SSD1306_I2C
class SSD1306_I2C_Mod(SSD1306_I2C):
def write_data(self, buf):
buffer = bytearray([0x40]) + buf # Co=0, D/C#=1
self.i2c.writeto(self.addr, buffer)
nrf/examples: Update ssd1306 modification example to import correct class.# NOTE: Modified version to align with implemented I2C API in nrf port.
#
# Examples usage of SSD1306_SPI on pca10040
#
# from machine import Pin, SPI
# from ssd1306 import SSD1306_SPI
# spi = SPI(0, baudrate=40000000)
# dc = Pin.board.PA11
# res = Pin.board.PA12
# cs = Pin.board.PA13
# disp = SSD1306_SPI(128, 64, spi, dc, res, cs)
#
#
# Example usage of SSD1306_I2C on pca10040
#
# from machine import Pin, I2C
# from ssd1306_mod import SSD1306_I2C_Mod
# i2c = I2C(0, Pin.board.PA3, Pin.board.PA4)
# disp = SSD1306_I2C_Mod(128, 64, i2c)
from ssd1306 import SSD1306_I2C
class SSD1306_I2C_Mod(SSD1306_I2C):
def write_data(self, buf):
buffer = bytearray([0x40]) + buf # Co=0, D/C#=1
self.i2c.writeto(self.addr, buffer)
|
<commit_before># NOTE: Modified version to align with implemented I2C API in nrf port.
#
# Examples usage of SSD1306_SPI on pca10040
#
# from machine import Pin, SPI
# from ssd1306 import SSD1306_SPI
# spi = SPI(0, baudrate=40000000)
# dc = Pin.board.PA11
# res = Pin.board.PA12
# cs = Pin.board.PA13
# disp = SSD1306_SPI(128, 64, spi, dc, res, cs)
#
#
# Example usage of SSD1306_I2C on pca10040
#
# from machine import Pin, I2C
# from ssd1306 import SSD1306_I2C
# i2c = I2C(0, Pin.board.PA3, Pin.board.PA4)
# disp = SSD1306_I2C_Mod(128, 64, i2c)
from ssd1306 import SSD1306_I2C
class SSD1306_I2C_Mod(SSD1306_I2C):
def write_data(self, buf):
buffer = bytearray([0x40]) + buf # Co=0, D/C#=1
self.i2c.writeto(self.addr, buffer)
<commit_msg>nrf/examples: Update ssd1306 modification example to import correct class.<commit_after># NOTE: Modified version to align with implemented I2C API in nrf port.
#
# Examples usage of SSD1306_SPI on pca10040
#
# from machine import Pin, SPI
# from ssd1306 import SSD1306_SPI
# spi = SPI(0, baudrate=40000000)
# dc = Pin.board.PA11
# res = Pin.board.PA12
# cs = Pin.board.PA13
# disp = SSD1306_SPI(128, 64, spi, dc, res, cs)
#
#
# Example usage of SSD1306_I2C on pca10040
#
# from machine import Pin, I2C
# from ssd1306_mod import SSD1306_I2C_Mod
# i2c = I2C(0, Pin.board.PA3, Pin.board.PA4)
# disp = SSD1306_I2C_Mod(128, 64, i2c)
from ssd1306 import SSD1306_I2C
class SSD1306_I2C_Mod(SSD1306_I2C):
def write_data(self, buf):
buffer = bytearray([0x40]) + buf # Co=0, D/C#=1
self.i2c.writeto(self.addr, buffer)
|
707a6016a3023fe423ede53db707c55273b0f6d0
|
oauth2_provider/backends.py
|
oauth2_provider/backends.py
|
from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
oauthlib_core = get_oauthlib_core()
valid, r = oauthlib_core.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
|
from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
valid, r = OAuthLibCore.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
|
Use the OAuthLibCore object defined at the module level.
|
Use the OAuthLibCore object defined at the module level.
|
Python
|
bsd-2-clause
|
bleib1dj/django-oauth-toolkit,StepicOrg/django-oauth-toolkit,JensTimmerman/django-oauth-toolkit,JensTimmerman/django-oauth-toolkit,StepicOrg/django-oauth-toolkit,DeskConnect/django-oauth-toolkit,bleib1dj/django-oauth-toolkit,DeskConnect/django-oauth-toolkit
|
from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
oauthlib_core = get_oauthlib_core()
valid, r = oauthlib_core.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
Use the OAuthLibCore object defined at the module level.
|
from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
valid, r = OAuthLibCore.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
|
<commit_before>from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
oauthlib_core = get_oauthlib_core()
valid, r = oauthlib_core.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
<commit_msg>Use the OAuthLibCore object defined at the module level.<commit_after>
|
from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
    """Django authentication backend that validates OAuth2 access tokens."""

    def authenticate(self, **credentials):
        """Return the token's user for a valid request, otherwise None."""
        request = credentials.get('request')
        if request is None:
            return None
        valid, oauth_request = OAuthLibCore.verify_request(request, scopes=[])
        return oauth_request.user if valid else None

    def get_user(self, user_id):
        """Look up a user by primary key; None when no such user exists."""
        try:
            return UserModel.objects.get(pk=user_id)
        except UserModel.DoesNotExist:
            return None
|
from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
oauthlib_core = get_oauthlib_core()
valid, r = oauthlib_core.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
Use the OAuthLibCore object defined at the module level.from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
valid, r = OAuthLibCore.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
|
<commit_before>from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
oauthlib_core = get_oauthlib_core()
valid, r = oauthlib_core.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
<commit_msg>Use the OAuthLibCore object defined at the module level.<commit_after>from django.contrib.auth import get_user_model
from .oauth2_backends import get_oauthlib_core
UserModel = get_user_model()
OAuthLibCore = get_oauthlib_core()
class OAuth2Backend(object):
"""
Authenticate against an OAuth2 access token
"""
def authenticate(self, **credentials):
request = credentials.get('request')
if request is not None:
valid, r = OAuthLibCore.verify_request(request, scopes=[])
if valid:
return r.user
return None
def get_user(self, user_id):
try:
return UserModel.objects.get(pk=user_id)
except UserModel.DoesNotExist:
return None
|
6874ebd9e1d1ad3cd9a0babc0797558ef76527a9
|
picoCTF-web/api/api/setup.py
|
picoCTF-web/api/api/setup.py
|
"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
|
"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
db.cache.ensure_index("function", name="function")
|
Index on the cache collection. More work could be done here
|
Index on the cache collection. More work could be done here
|
Python
|
mit
|
picoCTF/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF,picoCTF/picoCTF,picoCTF/picoCTF
|
"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
Index on the cache collection. More work could be done here
|
"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
db.cache.ensure_index("function", name="function")
|
<commit_before>"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
<commit_msg>Index on the cache collection. More work could be done here<commit_after>
|
"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
    """
    Ensure the mongo collections are indexed.
    """
    db = api.common.get_conn()
    log.debug("Ensuring mongo is indexed.")

    # Unique-identifier indexes for the primary collections.
    for collection, key in ((db.users, "uid"),
                            (db.groups, "gid"),
                            (db.problems, "pid")):
        collection.ensure_index(key, unique=True, name="unique " + key)

    db.submissions.ensure_index("tid", name="submission tids")

    # TTL index: mongo drops cache documents once their expireAt passes.
    db.cache.ensure_index("expireAt", expireAfterSeconds=0)
    db.cache.ensure_index("function", name="function")
|
"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
Index on the cache collection. More work could be done here"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
db.cache.ensure_index("function", name="function")
|
<commit_before>"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
<commit_msg>Index on the cache collection. More work could be done here<commit_after>"""
Setup for the API
"""
import api
log = api.logger.use(__name__)
def index_mongo():
"""
Ensure the mongo collections are indexed.
"""
db = api.common.get_conn()
log.debug("Ensuring mongo is indexed.")
db.users.ensure_index("uid", unique=True, name="unique uid")
db.groups.ensure_index("gid", unique=True, name="unique gid")
db.problems.ensure_index("pid", unique=True, name="unique pid")
db.submissions.ensure_index("tid", name="submission tids")
db.cache.ensure_index("expireAt", expireAfterSeconds=0)
db.cache.ensure_index("function", name="function")
|
922df036fdb2642f5a9fc0c126e36169315242db
|
direnaj/direnaj_api/config/server_celeryconfig.py
|
direnaj/direnaj_api/config/server_celeryconfig.py
|
__author__ = 'onur'
from kombu import Queue
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
BROKER_URL = 'amqp://%s' % "direnaj-staging.cmpe.boun.edu.tr"
CELERY_DEFAULT_QUEUE = 'control'
CELERY_DEFAULT_EXCHANGE = 'campaigns'
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
CELERY_QUEUES = (
Queue('timelines', routing_key='*.timeline.*'),
Queue('streamings', routing_key='*.streaming.*'),
Queue('control', routing_key='control'),
)
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
'dispatch_timeline_harvester_tasks_every_three_minutes': {
'task': 'check_watchlist_and_dispatch_tasks',
'schedule': timedelta(seconds=60*3),
},
}
|
__author__ = 'onur'
from kombu import Queue
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
BROKER_URL = 'amqp://%s' % "direnaj-staging.cmpe.boun.edu.tr"
CELERY_DEFAULT_QUEUE = 'control'
CELERY_DEFAULT_EXCHANGE = 'campaigns'
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
CELERY_QUEUES = (
#Queue('timelines', routing_key='*.timeline.*'),
#Queue('streamings', routing_key='*.streaming.*'),
Queue('control', routing_key='control'),
)
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
'dispatch_timeline_harvester_tasks_every_three_minutes': {
'task': 'check_watchlist_and_dispatch_tasks',
'schedule': timedelta(seconds=60*3),
},
}
|
Fix for periodic task scheduler (5)
|
Fix for periodic task scheduler (5)
|
Python
|
mit
|
boun-cmpe-soslab/drenaj,boun-cmpe-soslab/drenaj,boun-cmpe-soslab/drenaj,boun-cmpe-soslab/drenaj
|
__author__ = 'onur'
from kombu import Queue
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
BROKER_URL = 'amqp://%s' % "direnaj-staging.cmpe.boun.edu.tr"
CELERY_DEFAULT_QUEUE = 'control'
CELERY_DEFAULT_EXCHANGE = 'campaigns'
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
CELERY_QUEUES = (
Queue('timelines', routing_key='*.timeline.*'),
Queue('streamings', routing_key='*.streaming.*'),
Queue('control', routing_key='control'),
)
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
'dispatch_timeline_harvester_tasks_every_three_minutes': {
'task': 'check_watchlist_and_dispatch_tasks',
'schedule': timedelta(seconds=60*3),
},
}Fix for periodic task scheduler (5)
|
__author__ = 'onur'
from kombu import Queue
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
BROKER_URL = 'amqp://%s' % "direnaj-staging.cmpe.boun.edu.tr"
CELERY_DEFAULT_QUEUE = 'control'
CELERY_DEFAULT_EXCHANGE = 'campaigns'
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
CELERY_QUEUES = (
#Queue('timelines', routing_key='*.timeline.*'),
#Queue('streamings', routing_key='*.streaming.*'),
Queue('control', routing_key='control'),
)
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
'dispatch_timeline_harvester_tasks_every_three_minutes': {
'task': 'check_watchlist_and_dispatch_tasks',
'schedule': timedelta(seconds=60*3),
},
}
|
<commit_before>__author__ = 'onur'
from kombu import Queue
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
BROKER_URL = 'amqp://%s' % "direnaj-staging.cmpe.boun.edu.tr"
CELERY_DEFAULT_QUEUE = 'control'
CELERY_DEFAULT_EXCHANGE = 'campaigns'
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
CELERY_QUEUES = (
Queue('timelines', routing_key='*.timeline.*'),
Queue('streamings', routing_key='*.streaming.*'),
Queue('control', routing_key='control'),
)
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
'dispatch_timeline_harvester_tasks_every_three_minutes': {
'task': 'check_watchlist_and_dispatch_tasks',
'schedule': timedelta(seconds=60*3),
},
}<commit_msg>Fix for periodic task scheduler (5)<commit_after>
|
# Celery configuration for the direnaj API server workers.
__author__ = 'onur'
from kombu import Queue
# Ack only after a task completes and prefetch one task at a time, so a
# crashed worker's task is redelivered instead of silently lost.
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
BROKER_URL = 'amqp://%s' % "direnaj-staging.cmpe.boun.edu.tr"
CELERY_DEFAULT_QUEUE = 'control'
CELERY_DEFAULT_EXCHANGE = 'campaigns'
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
# Only the control queue is consumed here; the harvesting queues are
# deliberately disabled (kept commented for reference).
CELERY_QUEUES = (
    #Queue('timelines', routing_key='*.timeline.*'),
    #Queue('streamings', routing_key='*.streaming.*'),
    Queue('control', routing_key='control'),
)
from datetime import timedelta
# Periodic beat schedule: dispatch timeline-harvester tasks every 3 minutes.
CELERYBEAT_SCHEDULE = {
    'dispatch_timeline_harvester_tasks_every_three_minutes': {
        'task': 'check_watchlist_and_dispatch_tasks',
        'schedule': timedelta(seconds=60*3),
    },
}
|
__author__ = 'onur'
from kombu import Queue
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
BROKER_URL = 'amqp://%s' % "direnaj-staging.cmpe.boun.edu.tr"
CELERY_DEFAULT_QUEUE = 'control'
CELERY_DEFAULT_EXCHANGE = 'campaigns'
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
CELERY_QUEUES = (
Queue('timelines', routing_key='*.timeline.*'),
Queue('streamings', routing_key='*.streaming.*'),
Queue('control', routing_key='control'),
)
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
'dispatch_timeline_harvester_tasks_every_three_minutes': {
'task': 'check_watchlist_and_dispatch_tasks',
'schedule': timedelta(seconds=60*3),
},
}Fix for periodic task scheduler (5)__author__ = 'onur'
from kombu import Queue
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
BROKER_URL = 'amqp://%s' % "direnaj-staging.cmpe.boun.edu.tr"
CELERY_DEFAULT_QUEUE = 'control'
CELERY_DEFAULT_EXCHANGE = 'campaigns'
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
CELERY_QUEUES = (
#Queue('timelines', routing_key='*.timeline.*'),
#Queue('streamings', routing_key='*.streaming.*'),
Queue('control', routing_key='control'),
)
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
'dispatch_timeline_harvester_tasks_every_three_minutes': {
'task': 'check_watchlist_and_dispatch_tasks',
'schedule': timedelta(seconds=60*3),
},
}
|
<commit_before>__author__ = 'onur'
from kombu import Queue
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
BROKER_URL = 'amqp://%s' % "direnaj-staging.cmpe.boun.edu.tr"
CELERY_DEFAULT_QUEUE = 'control'
CELERY_DEFAULT_EXCHANGE = 'campaigns'
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
CELERY_QUEUES = (
Queue('timelines', routing_key='*.timeline.*'),
Queue('streamings', routing_key='*.streaming.*'),
Queue('control', routing_key='control'),
)
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
'dispatch_timeline_harvester_tasks_every_three_minutes': {
'task': 'check_watchlist_and_dispatch_tasks',
'schedule': timedelta(seconds=60*3),
},
}<commit_msg>Fix for periodic task scheduler (5)<commit_after>__author__ = 'onur'
from kombu import Queue
CELERY_ACKS_LATE = True
CELERYD_PREFETCH_MULTIPLIER = 1
BROKER_URL = 'amqp://%s' % "direnaj-staging.cmpe.boun.edu.tr"
CELERY_DEFAULT_QUEUE = 'control'
CELERY_DEFAULT_EXCHANGE = 'campaigns'
CELERY_DEFAULT_EXCHANGE_TYPE = 'topic'
CELERY_QUEUES = (
#Queue('timelines', routing_key='*.timeline.*'),
#Queue('streamings', routing_key='*.streaming.*'),
Queue('control', routing_key='control'),
)
from datetime import timedelta
CELERYBEAT_SCHEDULE = {
'dispatch_timeline_harvester_tasks_every_three_minutes': {
'task': 'check_watchlist_and_dispatch_tasks',
'schedule': timedelta(seconds=60*3),
},
}
|
d2f31bf45b7dfcf7dc9e62e442c99a1c1eb98e89
|
docs/examples/api/accessing_values_test.py
|
docs/examples/api/accessing_values_test.py
|
import mappyfile
def test():
# START OF API EXAMPLE
# load will accept a filename (loads will accept a string)
mapfile = mappyfile.open("./docs/examples/raster.map")
# print the map name
print(mapfile["name"]) # "MyMap"
# access layers
layers = mapfile["layers"]
layer2 = layers[1] # access by index
# access classes in a layer
classes = layer2["classes"]
for c in classes:
print(c["name"])
# END OF API EXAMPLE
assert(mapfile["name"] == 'MyMap')
if __name__ == "__main__":
test()
|
import mappyfile
def test():
# START OF API EXAMPLE
# open will accept a filename (loads will accept a string)
mapfile = mappyfile.open("./docs/examples/raster.map")
# print the map name
print(mapfile["name"]) # "MyMap"
# access layers
layers = mapfile["layers"]
layer2 = layers[1] # access by index
# access classes in a layer
classes = layer2["classes"]
for c in classes:
print(c["name"])
# END OF API EXAMPLE
assert(mapfile["name"] == 'MyMap')
if __name__ == "__main__":
test()
|
Update example to match new API
|
Update example to match new API
|
Python
|
mit
|
geographika/mappyfile,geographika/mappyfile
|
import mappyfile
def test():
# START OF API EXAMPLE
# load will accept a filename (loads will accept a string)
mapfile = mappyfile.open("./docs/examples/raster.map")
# print the map name
print(mapfile["name"]) # "MyMap"
# access layers
layers = mapfile["layers"]
layer2 = layers[1] # access by index
# access classes in a layer
classes = layer2["classes"]
for c in classes:
print(c["name"])
# END OF API EXAMPLE
assert(mapfile["name"] == 'MyMap')
if __name__ == "__main__":
test()Update example to match new API
|
import mappyfile
def test():
# START OF API EXAMPLE
# open will accept a filename (loads will accept a string)
mapfile = mappyfile.open("./docs/examples/raster.map")
# print the map name
print(mapfile["name"]) # "MyMap"
# access layers
layers = mapfile["layers"]
layer2 = layers[1] # access by index
# access classes in a layer
classes = layer2["classes"]
for c in classes:
print(c["name"])
# END OF API EXAMPLE
assert(mapfile["name"] == 'MyMap')
if __name__ == "__main__":
test()
|
<commit_before>import mappyfile
def test():
# START OF API EXAMPLE
# load will accept a filename (loads will accept a string)
mapfile = mappyfile.open("./docs/examples/raster.map")
# print the map name
print(mapfile["name"]) # "MyMap"
# access layers
layers = mapfile["layers"]
layer2 = layers[1] # access by index
# access classes in a layer
classes = layer2["classes"]
for c in classes:
print(c["name"])
# END OF API EXAMPLE
assert(mapfile["name"] == 'MyMap')
if __name__ == "__main__":
test()<commit_msg>Update example to match new API<commit_after>
|
import mappyfile
def test():
    """Exercise the documented quick-start example against raster.map.

    The code between the START/END markers below is extracted verbatim
    into the docs, so it is kept exactly as published.
    """
    # START OF API EXAMPLE
    # open will accept a filename (loads will accept a string)
    mapfile = mappyfile.open("./docs/examples/raster.map")
    # print the map name
    print(mapfile["name"]) # "MyMap"
    # access layers
    layers = mapfile["layers"]
    layer2 = layers[1] # access by index
    # access classes in a layer
    classes = layer2["classes"]
    for c in classes:
        print(c["name"])
    # END OF API EXAMPLE
    assert(mapfile["name"] == 'MyMap')
# Allow running this example file directly as a script.
if __name__ == "__main__":
    test()
|
import mappyfile
def test():
# START OF API EXAMPLE
# load will accept a filename (loads will accept a string)
mapfile = mappyfile.open("./docs/examples/raster.map")
# print the map name
print(mapfile["name"]) # "MyMap"
# access layers
layers = mapfile["layers"]
layer2 = layers[1] # access by index
# access classes in a layer
classes = layer2["classes"]
for c in classes:
print(c["name"])
# END OF API EXAMPLE
assert(mapfile["name"] == 'MyMap')
if __name__ == "__main__":
test()Update example to match new APIimport mappyfile
def test():
# START OF API EXAMPLE
# open will accept a filename (loads will accept a string)
mapfile = mappyfile.open("./docs/examples/raster.map")
# print the map name
print(mapfile["name"]) # "MyMap"
# access layers
layers = mapfile["layers"]
layer2 = layers[1] # access by index
# access classes in a layer
classes = layer2["classes"]
for c in classes:
print(c["name"])
# END OF API EXAMPLE
assert(mapfile["name"] == 'MyMap')
if __name__ == "__main__":
test()
|
<commit_before>import mappyfile
def test():
# START OF API EXAMPLE
# load will accept a filename (loads will accept a string)
mapfile = mappyfile.open("./docs/examples/raster.map")
# print the map name
print(mapfile["name"]) # "MyMap"
# access layers
layers = mapfile["layers"]
layer2 = layers[1] # access by index
# access classes in a layer
classes = layer2["classes"]
for c in classes:
print(c["name"])
# END OF API EXAMPLE
assert(mapfile["name"] == 'MyMap')
if __name__ == "__main__":
test()<commit_msg>Update example to match new API<commit_after>import mappyfile
def test():
# START OF API EXAMPLE
# open will accept a filename (loads will accept a string)
mapfile = mappyfile.open("./docs/examples/raster.map")
# print the map name
print(mapfile["name"]) # "MyMap"
# access layers
layers = mapfile["layers"]
layer2 = layers[1] # access by index
# access classes in a layer
classes = layer2["classes"]
for c in classes:
print(c["name"])
# END OF API EXAMPLE
assert(mapfile["name"] == 'MyMap')
if __name__ == "__main__":
test()
|
51533420b6422515ea10fb323cb318c104a99650
|
pypi/models.py
|
pypi/models.py
|
from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
|
from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
ordering = ('-version',)
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
|
Order by version instead, it should mostly be what we want.
|
Order by version instead, it should mostly be what we want.
|
Python
|
mit
|
kitsunde/django-pypi
|
from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
Order by version instead, it should mostly be what we want.
|
from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
ordering = ('-version',)
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
|
<commit_before>from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
<commit_msg>Order by version instead, it should mostly be what we want.<commit_after>
|
from django.db import models
class Package(models.Model):
    """A single released version of a PyPI package."""
    # name + version together identify a release (see unique_together).
    name = models.CharField(max_length=100)
    version = models.CharField(max_length=100)
    released_at = models.DateTimeField()
    class Meta:
        get_latest_by = 'released_at'
        # NOTE(review): this orders lexicographically on the version
        # string, so e.g. "10.0" sorts before "9.0" — confirm acceptable.
        ordering = ('-version',)
        unique_together = ('name', 'version')
    def __unicode__(self):
        return "%s %s" % (self.name, self.version)
|
from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
Order by version instead, it should mostly be what we want.from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
ordering = ('-version',)
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
|
<commit_before>from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
<commit_msg>Order by version instead, it should mostly be what we want.<commit_after>from django.db import models
class Package(models.Model):
name = models.CharField(max_length=100)
version = models.CharField(max_length=100)
released_at = models.DateTimeField()
class Meta:
get_latest_by = 'released_at'
ordering = ('-version',)
unique_together = ('name', 'version')
def __unicode__(self):
return "%s %s" % (self.name, self.version)
|
7b6125c0af688ec1b6b4e0baf667e71064dbb0cf
|
test/unit/Algorithms/OrdinaryPercolationTest.py
|
test/unit/Algorithms/OrdinaryPercolationTest.py
|
import OpenPNM
mgr = OpenPNM.Base.Workspace()
mgr.loglevel = 60
class OrdinaryPercolationTest:
def setup_test(self):
self.net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
self.geo = OpenPNM.Geometry.Toray090(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.phase = OpenPNM.Phases.Water(network=self.net)
self.phys = OpenPNM.Physics.Standard(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
|
import OpenPNM as op
import scipy as sp
mgr = op.Base.Workspace()
mgr.loglevel = 60
class OrdinaryPercolationTest:
def setup_test(self):
self.net = op.Network.Cubic(shape=[5, 5, 5])
self.geo = op.Geometry.Toray090(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.phase = op.Phases.Water(network=self.net)
self.phys = op.Physics.Standard(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.OP = op.Algorithms.OrdinaryPercolation(network=self.net,
invading_phase=self.phase)
Ps = self.net.pores(labels=['bottom_boundary'])
self.OP.run(inlets=Ps)
self.OP.return_results(Pc=7000)
lpf = self.OP.evaluate_late_pore_filling(Pc=8000)
assert sp.size(lpf) == self.net.Np
|
Add unit test for late pore filling
|
Add unit test for late pore filling
|
Python
|
mit
|
TomTranter/OpenPNM,PMEAL/OpenPNM
|
import OpenPNM
mgr = OpenPNM.Base.Workspace()
mgr.loglevel = 60
class OrdinaryPercolationTest:
def setup_test(self):
self.net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
self.geo = OpenPNM.Geometry.Toray090(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.phase = OpenPNM.Phases.Water(network=self.net)
self.phys = OpenPNM.Physics.Standard(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
Add unit test for late pore filling
|
import OpenPNM as op
import scipy as sp
mgr = op.Base.Workspace()
mgr.loglevel = 60
class OrdinaryPercolationTest:
def setup_test(self):
self.net = op.Network.Cubic(shape=[5, 5, 5])
self.geo = op.Geometry.Toray090(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.phase = op.Phases.Water(network=self.net)
self.phys = op.Physics.Standard(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.OP = op.Algorithms.OrdinaryPercolation(network=self.net,
invading_phase=self.phase)
Ps = self.net.pores(labels=['bottom_boundary'])
self.OP.run(inlets=Ps)
self.OP.return_results(Pc=7000)
lpf = self.OP.evaluate_late_pore_filling(Pc=8000)
assert sp.size(lpf) == self.net.Np
|
<commit_before>import OpenPNM
mgr = OpenPNM.Base.Workspace()
mgr.loglevel = 60
class OrdinaryPercolationTest:
def setup_test(self):
self.net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
self.geo = OpenPNM.Geometry.Toray090(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.phase = OpenPNM.Phases.Water(network=self.net)
self.phys = OpenPNM.Physics.Standard(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
<commit_msg>Add unit test for late pore filling<commit_after>
|
import OpenPNM as op
import scipy as sp
mgr = op.Base.Workspace()
mgr.loglevel = 60
class OrdinaryPercolationTest:
def setup_test(self):
self.net = op.Network.Cubic(shape=[5, 5, 5])
self.geo = op.Geometry.Toray090(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.phase = op.Phases.Water(network=self.net)
self.phys = op.Physics.Standard(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.OP = op.Algorithms.OrdinaryPercolation(network=self.net,
invading_phase=self.phase)
Ps = self.net.pores(labels=['bottom_boundary'])
self.OP.run(inlets=Ps)
self.OP.return_results(Pc=7000)
lpf = self.OP.evaluate_late_pore_filling(Pc=8000)
assert sp.size(lpf) == self.net.Np
|
import OpenPNM
mgr = OpenPNM.Base.Workspace()
mgr.loglevel = 60
class OrdinaryPercolationTest:
def setup_test(self):
self.net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
self.geo = OpenPNM.Geometry.Toray090(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.phase = OpenPNM.Phases.Water(network=self.net)
self.phys = OpenPNM.Physics.Standard(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
Add unit test for late pore fillingimport OpenPNM as op
import scipy as sp
mgr = op.Base.Workspace()
mgr.loglevel = 60
class OrdinaryPercolationTest:
def setup_test(self):
self.net = op.Network.Cubic(shape=[5, 5, 5])
self.geo = op.Geometry.Toray090(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.phase = op.Phases.Water(network=self.net)
self.phys = op.Physics.Standard(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.OP = op.Algorithms.OrdinaryPercolation(network=self.net,
invading_phase=self.phase)
Ps = self.net.pores(labels=['bottom_boundary'])
self.OP.run(inlets=Ps)
self.OP.return_results(Pc=7000)
lpf = self.OP.evaluate_late_pore_filling(Pc=8000)
assert sp.size(lpf) == self.net.Np
|
<commit_before>import OpenPNM
mgr = OpenPNM.Base.Workspace()
mgr.loglevel = 60
class OrdinaryPercolationTest:
def setup_test(self):
self.net = OpenPNM.Network.Cubic(shape=[5, 5, 5])
self.geo = OpenPNM.Geometry.Toray090(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.phase = OpenPNM.Phases.Water(network=self.net)
self.phys = OpenPNM.Physics.Standard(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
<commit_msg>Add unit test for late pore filling<commit_after>import OpenPNM as op
import scipy as sp
mgr = op.Base.Workspace()
mgr.loglevel = 60
class OrdinaryPercolationTest:
def setup_test(self):
self.net = op.Network.Cubic(shape=[5, 5, 5])
self.geo = op.Geometry.Toray090(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.phase = op.Phases.Water(network=self.net)
self.phys = op.Physics.Standard(network=self.net,
pores=self.net.Ps,
throats=self.net.Ts)
self.OP = op.Algorithms.OrdinaryPercolation(network=self.net,
invading_phase=self.phase)
Ps = self.net.pores(labels=['bottom_boundary'])
self.OP.run(inlets=Ps)
self.OP.return_results(Pc=7000)
lpf = self.OP.evaluate_late_pore_filling(Pc=8000)
assert sp.size(lpf) == self.net.Np
|
fdf33278f66028a932dbecb999f66445ab0a3cd1
|
shuup/admin/modules/product_types/views/edit.py
|
shuup/admin/modules/product_types/views/edit.py
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django import forms
from shuup.admin.utils.views import CreateOrUpdateView
from shuup.core.models import ProductType
from shuup.utils.multilanguage_model_form import MultiLanguageModelForm
class ProductTypeForm(MultiLanguageModelForm):
class Meta:
model = ProductType
exclude = () # All the fields!
widgets = {
"attributes": forms.CheckboxSelectMultiple
}
class ProductTypeEditView(CreateOrUpdateView):
model = ProductType
form_class = ProductTypeForm
template_name = "shuup/admin/product_types/edit.jinja"
context_object_name = "product_type"
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from shuup.admin.forms.fields import Select2MultipleField
from shuup.admin.utils.views import CreateOrUpdateView
from shuup.core.models import Attribute, ProductType
from shuup.utils.multilanguage_model_form import MultiLanguageModelForm
class ProductTypeForm(MultiLanguageModelForm):
attributes = Select2MultipleField(model=Attribute, required=False)
class Meta:
model = ProductType
exclude = ()
def __init__(self, **kwargs):
super(ProductTypeForm, self).__init__(**kwargs)
if self.instance.pk:
choices = [(a.pk, a.name) for a in self.instance.attributes.all()]
self.fields["attributes"].widget.choices = choices
self.fields["attributes"].initial = [pk for pk, name in choices]
def clean_attributes(self):
attributes = [int(a_id) for a_id in self.cleaned_data.get("attributes", [])]
return Attribute.objects.filter(pk__in=attributes).all()
def save(self, commit=True):
obj = super(ProductTypeForm, self).save(commit=commit)
obj.attributes.clear()
obj.attributes = self.cleaned_data["attributes"]
return self.instance
class ProductTypeEditView(CreateOrUpdateView):
model = ProductType
form_class = ProductTypeForm
template_name = "shuup/admin/product_types/edit.jinja"
context_object_name = "product_type"
|
Use Select2 in attribute selection
|
Use Select2 in attribute selection
With large amounts of attributes product type creation was really slow
Refs SH-73
|
Python
|
agpl-3.0
|
shawnadelic/shuup,suutari-ai/shoop,shoopio/shoop,suutari-ai/shoop,shawnadelic/shuup,hrayr-artunyan/shuup,suutari/shoop,suutari/shoop,hrayr-artunyan/shuup,suutari/shoop,suutari-ai/shoop,hrayr-artunyan/shuup,shoopio/shoop,shawnadelic/shuup,shoopio/shoop
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django import forms
from shuup.admin.utils.views import CreateOrUpdateView
from shuup.core.models import ProductType
from shuup.utils.multilanguage_model_form import MultiLanguageModelForm
class ProductTypeForm(MultiLanguageModelForm):
class Meta:
model = ProductType
exclude = () # All the fields!
widgets = {
"attributes": forms.CheckboxSelectMultiple
}
class ProductTypeEditView(CreateOrUpdateView):
model = ProductType
form_class = ProductTypeForm
template_name = "shuup/admin/product_types/edit.jinja"
context_object_name = "product_type"
Use Select2 in attribute selection
With large amounts of attributes product type creation was really slow
Refs SH-73
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from shuup.admin.forms.fields import Select2MultipleField
from shuup.admin.utils.views import CreateOrUpdateView
from shuup.core.models import Attribute, ProductType
from shuup.utils.multilanguage_model_form import MultiLanguageModelForm
class ProductTypeForm(MultiLanguageModelForm):
attributes = Select2MultipleField(model=Attribute, required=False)
class Meta:
model = ProductType
exclude = ()
def __init__(self, **kwargs):
super(ProductTypeForm, self).__init__(**kwargs)
if self.instance.pk:
choices = [(a.pk, a.name) for a in self.instance.attributes.all()]
self.fields["attributes"].widget.choices = choices
self.fields["attributes"].initial = [pk for pk, name in choices]
def clean_attributes(self):
attributes = [int(a_id) for a_id in self.cleaned_data.get("attributes", [])]
return Attribute.objects.filter(pk__in=attributes).all()
def save(self, commit=True):
obj = super(ProductTypeForm, self).save(commit=commit)
obj.attributes.clear()
obj.attributes = self.cleaned_data["attributes"]
return self.instance
class ProductTypeEditView(CreateOrUpdateView):
model = ProductType
form_class = ProductTypeForm
template_name = "shuup/admin/product_types/edit.jinja"
context_object_name = "product_type"
|
<commit_before># -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django import forms
from shuup.admin.utils.views import CreateOrUpdateView
from shuup.core.models import ProductType
from shuup.utils.multilanguage_model_form import MultiLanguageModelForm
class ProductTypeForm(MultiLanguageModelForm):
class Meta:
model = ProductType
exclude = () # All the fields!
widgets = {
"attributes": forms.CheckboxSelectMultiple
}
class ProductTypeEditView(CreateOrUpdateView):
model = ProductType
form_class = ProductTypeForm
template_name = "shuup/admin/product_types/edit.jinja"
context_object_name = "product_type"
<commit_msg>Use Select2 in attribute selection
With large amounts of attributes product type creation was really slow
Refs SH-73<commit_after>
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from shuup.admin.forms.fields import Select2MultipleField
from shuup.admin.utils.views import CreateOrUpdateView
from shuup.core.models import Attribute, ProductType
from shuup.utils.multilanguage_model_form import MultiLanguageModelForm
class ProductTypeForm(MultiLanguageModelForm):
attributes = Select2MultipleField(model=Attribute, required=False)
class Meta:
model = ProductType
exclude = ()
def __init__(self, **kwargs):
super(ProductTypeForm, self).__init__(**kwargs)
if self.instance.pk:
choices = [(a.pk, a.name) for a in self.instance.attributes.all()]
self.fields["attributes"].widget.choices = choices
self.fields["attributes"].initial = [pk for pk, name in choices]
def clean_attributes(self):
attributes = [int(a_id) for a_id in self.cleaned_data.get("attributes", [])]
return Attribute.objects.filter(pk__in=attributes).all()
def save(self, commit=True):
obj = super(ProductTypeForm, self).save(commit=commit)
obj.attributes.clear()
obj.attributes = self.cleaned_data["attributes"]
return self.instance
class ProductTypeEditView(CreateOrUpdateView):
model = ProductType
form_class = ProductTypeForm
template_name = "shuup/admin/product_types/edit.jinja"
context_object_name = "product_type"
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django import forms
from shuup.admin.utils.views import CreateOrUpdateView
from shuup.core.models import ProductType
from shuup.utils.multilanguage_model_form import MultiLanguageModelForm
class ProductTypeForm(MultiLanguageModelForm):
class Meta:
model = ProductType
exclude = () # All the fields!
widgets = {
"attributes": forms.CheckboxSelectMultiple
}
class ProductTypeEditView(CreateOrUpdateView):
model = ProductType
form_class = ProductTypeForm
template_name = "shuup/admin/product_types/edit.jinja"
context_object_name = "product_type"
Use Select2 in attribute selection
With large amounts of attributes product type creation was really slow
Refs SH-73# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from shuup.admin.forms.fields import Select2MultipleField
from shuup.admin.utils.views import CreateOrUpdateView
from shuup.core.models import Attribute, ProductType
from shuup.utils.multilanguage_model_form import MultiLanguageModelForm
class ProductTypeForm(MultiLanguageModelForm):
attributes = Select2MultipleField(model=Attribute, required=False)
class Meta:
model = ProductType
exclude = ()
def __init__(self, **kwargs):
super(ProductTypeForm, self).__init__(**kwargs)
if self.instance.pk:
choices = [(a.pk, a.name) for a in self.instance.attributes.all()]
self.fields["attributes"].widget.choices = choices
self.fields["attributes"].initial = [pk for pk, name in choices]
def clean_attributes(self):
attributes = [int(a_id) for a_id in self.cleaned_data.get("attributes", [])]
return Attribute.objects.filter(pk__in=attributes).all()
def save(self, commit=True):
obj = super(ProductTypeForm, self).save(commit=commit)
obj.attributes.clear()
obj.attributes = self.cleaned_data["attributes"]
return self.instance
class ProductTypeEditView(CreateOrUpdateView):
model = ProductType
form_class = ProductTypeForm
template_name = "shuup/admin/product_types/edit.jinja"
context_object_name = "product_type"
|
<commit_before># -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from django import forms
from shuup.admin.utils.views import CreateOrUpdateView
from shuup.core.models import ProductType
from shuup.utils.multilanguage_model_form import MultiLanguageModelForm
class ProductTypeForm(MultiLanguageModelForm):
class Meta:
model = ProductType
exclude = () # All the fields!
widgets = {
"attributes": forms.CheckboxSelectMultiple
}
class ProductTypeEditView(CreateOrUpdateView):
model = ProductType
form_class = ProductTypeForm
template_name = "shuup/admin/product_types/edit.jinja"
context_object_name = "product_type"
<commit_msg>Use Select2 in attribute selection
With large amounts of attributes product type creation was really slow
Refs SH-73<commit_after># -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
from __future__ import unicode_literals
from shuup.admin.forms.fields import Select2MultipleField
from shuup.admin.utils.views import CreateOrUpdateView
from shuup.core.models import Attribute, ProductType
from shuup.utils.multilanguage_model_form import MultiLanguageModelForm
class ProductTypeForm(MultiLanguageModelForm):
attributes = Select2MultipleField(model=Attribute, required=False)
class Meta:
model = ProductType
exclude = ()
def __init__(self, **kwargs):
super(ProductTypeForm, self).__init__(**kwargs)
if self.instance.pk:
choices = [(a.pk, a.name) for a in self.instance.attributes.all()]
self.fields["attributes"].widget.choices = choices
self.fields["attributes"].initial = [pk for pk, name in choices]
def clean_attributes(self):
attributes = [int(a_id) for a_id in self.cleaned_data.get("attributes", [])]
return Attribute.objects.filter(pk__in=attributes).all()
def save(self, commit=True):
obj = super(ProductTypeForm, self).save(commit=commit)
obj.attributes.clear()
obj.attributes = self.cleaned_data["attributes"]
return self.instance
class ProductTypeEditView(CreateOrUpdateView):
model = ProductType
form_class = ProductTypeForm
template_name = "shuup/admin/product_types/edit.jinja"
context_object_name = "product_type"
|
6d6709b0df05cccfd44bd68cea9fb30c4b6bd41f
|
asymmetric_jwt_auth/models.py
|
asymmetric_jwt_auth/models.py
|
from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key")
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
|
from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from cryptography.hazmat.primitives.serialization import load_ssh_public_key
from cryptography.hazmat.backends import default_backend
def validate_public_key(value):
try:
load_ssh_public_key(value.encode('utf-8'), default_backend())
except Exception as e:
raise ValidationError('Public key is invalid: %s' % e)
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key", validators=[validate_public_key])
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
|
Validate a public key before saving it
|
Validate a public key before saving it
|
Python
|
isc
|
crgwbr/asymmetric_jwt_auth,crgwbr/asymmetric_jwt_auth
|
from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key")
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
Validate a public key before saving it
|
from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from cryptography.hazmat.primitives.serialization import load_ssh_public_key
from cryptography.hazmat.backends import default_backend
def validate_public_key(value):
try:
load_ssh_public_key(value.encode('utf-8'), default_backend())
except Exception as e:
raise ValidationError('Public key is invalid: %s' % e)
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key", validators=[validate_public_key])
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
|
<commit_before>from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key")
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
<commit_msg>Validate a public key before saving it<commit_after>
|
from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from cryptography.hazmat.primitives.serialization import load_ssh_public_key
from cryptography.hazmat.backends import default_backend
def validate_public_key(value):
try:
load_ssh_public_key(value.encode('utf-8'), default_backend())
except Exception as e:
raise ValidationError('Public key is invalid: %s' % e)
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key", validators=[validate_public_key])
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
|
from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key")
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
Validate a public key before saving itfrom django.conf import settings
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from cryptography.hazmat.primitives.serialization import load_ssh_public_key
from cryptography.hazmat.backends import default_backend
def validate_public_key(value):
try:
load_ssh_public_key(value.encode('utf-8'), default_backend())
except Exception as e:
raise ValidationError('Public key is invalid: %s' % e)
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key", validators=[validate_public_key])
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
|
<commit_before>from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key")
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
<commit_msg>Validate a public key before saving it<commit_after>from django.conf import settings
from django.db import models
from django.contrib.auth.models import User
from django.core.exceptions import ValidationError
from cryptography.hazmat.primitives.serialization import load_ssh_public_key
from cryptography.hazmat.backends import default_backend
def validate_public_key(value):
try:
load_ssh_public_key(value.encode('utf-8'), default_backend())
except Exception as e:
raise ValidationError('Public key is invalid: %s' % e)
class PublicKey(models.Model):
user = models.ForeignKey(settings.AUTH_USER_MODEL, related_name='public_keys')
key = models.TextField(help_text="The user's RSA public key", validators=[validate_public_key])
comment = models.CharField(max_length=100, help_text="Comment describing this key", blank=True)
def save(self, *args, **kwargs):
key_parts = self.key.split(' ')
if len(key_parts) == 3 and not self.comment:
self.comment = key_parts.pop()
super(PublicKey, self).save(*args, **kwargs)
|
eab3b3417f649e06c5d3f09b6c3369ef92da2e7d
|
users/admin.py
|
users/admin.py
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import AdminPasswordChangeForm
from tastypie.admin import ApiKeyInline
from tastypie.models import ApiKey
from .forms import UserCreationForm, UserChangeForm
from .models import User, Membership
class MembershipInline(admin.StackedInline):
model = Membership
extra = 0
readonly_fields = ('created', 'updated')
class UserAdmin(BaseUserAdmin):
form = UserChangeForm
add_form = UserCreationForm
change_password_form = AdminPasswordChangeForm
inlines = BaseUserAdmin.inlines + [ApiKeyInline, MembershipInline]
class MembershipAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'created',
'updated'
)
date_hierarchy = 'created'
search_fields = ['creator__username']
class ApiKeyAdmin(admin.ModelAdmin):
list_display = ('user', 'created', )
date_hierarchy = 'created'
admin.site.register(User, UserAdmin)
admin.site.register(Membership, MembershipAdmin)
try:
admin.site.unregister(ApiKey)
except admin.sites.NotRegistered:
pass
finally:
admin.site.register(ApiKey, ApiKeyAdmin)
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import AdminPasswordChangeForm
from tastypie.admin import ApiKeyInline
from tastypie.models import ApiKey
from .forms import UserCreationForm, UserChangeForm
from .models import User, Membership
class MembershipInline(admin.StackedInline):
model = Membership
extra = 0
readonly_fields = ('created', 'updated')
class UserAdmin(BaseUserAdmin):
form = UserChangeForm
add_form = UserCreationForm
change_password_form = AdminPasswordChangeForm
inlines = BaseUserAdmin.inlines + [ApiKeyInline, MembershipInline]
class MembershipAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'created',
'updated'
)
date_hierarchy = 'created'
search_fields = ['creator__username']
list_filter = ['membership_type']
class ApiKeyAdmin(admin.ModelAdmin):
list_display = ('user', 'created', )
date_hierarchy = 'created'
admin.site.register(User, UserAdmin)
admin.site.register(Membership, MembershipAdmin)
try:
admin.site.unregister(ApiKey)
except admin.sites.NotRegistered:
pass
finally:
admin.site.register(ApiKey, ApiKeyAdmin)
|
Add a filter for membership_type.
|
Add a filter for membership_type.
|
Python
|
apache-2.0
|
malemburg/pythondotorg,Mariatta/pythondotorg,lebronhkh/pythondotorg,lebronhkh/pythondotorg,fe11x/pythondotorg,lepture/pythondotorg,proevo/pythondotorg,willingc/pythondotorg,ahua/pythondotorg,SujaySKumar/pythondotorg,ahua/pythondotorg,berkerpeksag/pythondotorg,python/pythondotorg,SujaySKumar/pythondotorg,fe11x/pythondotorg,Mariatta/pythondotorg,ahua/pythondotorg,lepture/pythondotorg,manhhomienbienthuy/pythondotorg,fe11x/pythondotorg,manhhomienbienthuy/pythondotorg,willingc/pythondotorg,malemburg/pythondotorg,proevo/pythondotorg,manhhomienbienthuy/pythondotorg,berkerpeksag/pythondotorg,SujaySKumar/pythondotorg,fe11x/pythondotorg,manhhomienbienthuy/pythondotorg,python/pythondotorg,malemburg/pythondotorg,SujaySKumar/pythondotorg,lebronhkh/pythondotorg,berkerpeksag/pythondotorg,python/pythondotorg,lepture/pythondotorg,lepture/pythondotorg,willingc/pythondotorg,ahua/pythondotorg,fe11x/pythondotorg,Mariatta/pythondotorg,proevo/pythondotorg,berkerpeksag/pythondotorg,lebronhkh/pythondotorg,berkerpeksag/pythondotorg,lepture/pythondotorg,SujaySKumar/pythondotorg,python/pythondotorg,willingc/pythondotorg,Mariatta/pythondotorg,proevo/pythondotorg,malemburg/pythondotorg,ahua/pythondotorg,lebronhkh/pythondotorg
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import AdminPasswordChangeForm
from tastypie.admin import ApiKeyInline
from tastypie.models import ApiKey
from .forms import UserCreationForm, UserChangeForm
from .models import User, Membership
class MembershipInline(admin.StackedInline):
model = Membership
extra = 0
readonly_fields = ('created', 'updated')
class UserAdmin(BaseUserAdmin):
form = UserChangeForm
add_form = UserCreationForm
change_password_form = AdminPasswordChangeForm
inlines = BaseUserAdmin.inlines + [ApiKeyInline, MembershipInline]
class MembershipAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'created',
'updated'
)
date_hierarchy = 'created'
search_fields = ['creator__username']
class ApiKeyAdmin(admin.ModelAdmin):
list_display = ('user', 'created', )
date_hierarchy = 'created'
admin.site.register(User, UserAdmin)
admin.site.register(Membership, MembershipAdmin)
try:
admin.site.unregister(ApiKey)
except admin.sites.NotRegistered:
pass
finally:
admin.site.register(ApiKey, ApiKeyAdmin)
Add a filter for membership_type.
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import AdminPasswordChangeForm
from tastypie.admin import ApiKeyInline
from tastypie.models import ApiKey
from .forms import UserCreationForm, UserChangeForm
from .models import User, Membership
class MembershipInline(admin.StackedInline):
model = Membership
extra = 0
readonly_fields = ('created', 'updated')
class UserAdmin(BaseUserAdmin):
form = UserChangeForm
add_form = UserCreationForm
change_password_form = AdminPasswordChangeForm
inlines = BaseUserAdmin.inlines + [ApiKeyInline, MembershipInline]
class MembershipAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'created',
'updated'
)
date_hierarchy = 'created'
search_fields = ['creator__username']
list_filter = ['membership_type']
class ApiKeyAdmin(admin.ModelAdmin):
list_display = ('user', 'created', )
date_hierarchy = 'created'
admin.site.register(User, UserAdmin)
admin.site.register(Membership, MembershipAdmin)
try:
admin.site.unregister(ApiKey)
except admin.sites.NotRegistered:
pass
finally:
admin.site.register(ApiKey, ApiKeyAdmin)
|
<commit_before>from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import AdminPasswordChangeForm
from tastypie.admin import ApiKeyInline
from tastypie.models import ApiKey
from .forms import UserCreationForm, UserChangeForm
from .models import User, Membership
class MembershipInline(admin.StackedInline):
model = Membership
extra = 0
readonly_fields = ('created', 'updated')
class UserAdmin(BaseUserAdmin):
form = UserChangeForm
add_form = UserCreationForm
change_password_form = AdminPasswordChangeForm
inlines = BaseUserAdmin.inlines + [ApiKeyInline, MembershipInline]
class MembershipAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'created',
'updated'
)
date_hierarchy = 'created'
search_fields = ['creator__username']
class ApiKeyAdmin(admin.ModelAdmin):
list_display = ('user', 'created', )
date_hierarchy = 'created'
admin.site.register(User, UserAdmin)
admin.site.register(Membership, MembershipAdmin)
try:
admin.site.unregister(ApiKey)
except admin.sites.NotRegistered:
pass
finally:
admin.site.register(ApiKey, ApiKeyAdmin)
<commit_msg>Add a filter for membership_type.<commit_after>
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import AdminPasswordChangeForm
from tastypie.admin import ApiKeyInline
from tastypie.models import ApiKey
from .forms import UserCreationForm, UserChangeForm
from .models import User, Membership
class MembershipInline(admin.StackedInline):
model = Membership
extra = 0
readonly_fields = ('created', 'updated')
class UserAdmin(BaseUserAdmin):
form = UserChangeForm
add_form = UserCreationForm
change_password_form = AdminPasswordChangeForm
inlines = BaseUserAdmin.inlines + [ApiKeyInline, MembershipInline]
class MembershipAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'created',
'updated'
)
date_hierarchy = 'created'
search_fields = ['creator__username']
list_filter = ['membership_type']
class ApiKeyAdmin(admin.ModelAdmin):
list_display = ('user', 'created', )
date_hierarchy = 'created'
admin.site.register(User, UserAdmin)
admin.site.register(Membership, MembershipAdmin)
try:
admin.site.unregister(ApiKey)
except admin.sites.NotRegistered:
pass
finally:
admin.site.register(ApiKey, ApiKeyAdmin)
|
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import AdminPasswordChangeForm
from tastypie.admin import ApiKeyInline
from tastypie.models import ApiKey
from .forms import UserCreationForm, UserChangeForm
from .models import User, Membership
class MembershipInline(admin.StackedInline):
model = Membership
extra = 0
readonly_fields = ('created', 'updated')
class UserAdmin(BaseUserAdmin):
form = UserChangeForm
add_form = UserCreationForm
change_password_form = AdminPasswordChangeForm
inlines = BaseUserAdmin.inlines + [ApiKeyInline, MembershipInline]
class MembershipAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'created',
'updated'
)
date_hierarchy = 'created'
search_fields = ['creator__username']
class ApiKeyAdmin(admin.ModelAdmin):
list_display = ('user', 'created', )
date_hierarchy = 'created'
admin.site.register(User, UserAdmin)
admin.site.register(Membership, MembershipAdmin)
try:
admin.site.unregister(ApiKey)
except admin.sites.NotRegistered:
pass
finally:
admin.site.register(ApiKey, ApiKeyAdmin)
Add a filter for membership_type.from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import AdminPasswordChangeForm
from tastypie.admin import ApiKeyInline
from tastypie.models import ApiKey
from .forms import UserCreationForm, UserChangeForm
from .models import User, Membership
class MembershipInline(admin.StackedInline):
model = Membership
extra = 0
readonly_fields = ('created', 'updated')
class UserAdmin(BaseUserAdmin):
form = UserChangeForm
add_form = UserCreationForm
change_password_form = AdminPasswordChangeForm
inlines = BaseUserAdmin.inlines + [ApiKeyInline, MembershipInline]
class MembershipAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'created',
'updated'
)
date_hierarchy = 'created'
search_fields = ['creator__username']
list_filter = ['membership_type']
class ApiKeyAdmin(admin.ModelAdmin):
list_display = ('user', 'created', )
date_hierarchy = 'created'
admin.site.register(User, UserAdmin)
admin.site.register(Membership, MembershipAdmin)
try:
admin.site.unregister(ApiKey)
except admin.sites.NotRegistered:
pass
finally:
admin.site.register(ApiKey, ApiKeyAdmin)
|
<commit_before>from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import AdminPasswordChangeForm
from tastypie.admin import ApiKeyInline
from tastypie.models import ApiKey
from .forms import UserCreationForm, UserChangeForm
from .models import User, Membership
class MembershipInline(admin.StackedInline):
model = Membership
extra = 0
readonly_fields = ('created', 'updated')
class UserAdmin(BaseUserAdmin):
form = UserChangeForm
add_form = UserCreationForm
change_password_form = AdminPasswordChangeForm
inlines = BaseUserAdmin.inlines + [ApiKeyInline, MembershipInline]
class MembershipAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'created',
'updated'
)
date_hierarchy = 'created'
search_fields = ['creator__username']
class ApiKeyAdmin(admin.ModelAdmin):
list_display = ('user', 'created', )
date_hierarchy = 'created'
admin.site.register(User, UserAdmin)
admin.site.register(Membership, MembershipAdmin)
try:
admin.site.unregister(ApiKey)
except admin.sites.NotRegistered:
pass
finally:
admin.site.register(ApiKey, ApiKeyAdmin)
<commit_msg>Add a filter for membership_type.<commit_after>from django.contrib import admin
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.forms import AdminPasswordChangeForm
from tastypie.admin import ApiKeyInline
from tastypie.models import ApiKey
from .forms import UserCreationForm, UserChangeForm
from .models import User, Membership
class MembershipInline(admin.StackedInline):
model = Membership
extra = 0
readonly_fields = ('created', 'updated')
class UserAdmin(BaseUserAdmin):
form = UserChangeForm
add_form = UserCreationForm
change_password_form = AdminPasswordChangeForm
inlines = BaseUserAdmin.inlines + [ApiKeyInline, MembershipInline]
class MembershipAdmin(admin.ModelAdmin):
list_display = (
'__str__',
'created',
'updated'
)
date_hierarchy = 'created'
search_fields = ['creator__username']
list_filter = ['membership_type']
class ApiKeyAdmin(admin.ModelAdmin):
list_display = ('user', 'created', )
date_hierarchy = 'created'
admin.site.register(User, UserAdmin)
admin.site.register(Membership, MembershipAdmin)
try:
admin.site.unregister(ApiKey)
except admin.sites.NotRegistered:
pass
finally:
admin.site.register(ApiKey, ApiKeyAdmin)
|
5657fb5cb8d5abbc8f6ab8cf59208a97e8104f34
|
utils/graph.py
|
utils/graph.py
|
"""
This module serves as an interface to
matplotlib.
"""
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_x_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset)
def scale_y_plot(max_y, stepsize):
offset = max_y/stepsize * OFFSET
plt.axis(ymin=-offset, ymax=max_y+offset)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
if output == 'stdout':
plt.savefig(sys.stdout, format='png')
else:
plt.savefig(output)
else:
plt.show()
|
"""
This module serves as an interface to
matplotlib.
"""
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_x_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset)
def scale_y_plot(max_y, stepsize):
offset = max_y/stepsize * OFFSET
plt.axis(ymin=-offset, ymax=max_y+offset)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
plt.savefig(output)
else:
plt.show()
|
Remove unused `--output stdout` option
|
Remove unused `--output stdout` option
|
Python
|
mit
|
wei2912/bce-simulation,wei2912/bce-simulation,wei2912/bce-simulation,wei2912/bce-simulation
|
"""
This module serves as an interface to
matplotlib.
"""
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_x_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset)
def scale_y_plot(max_y, stepsize):
offset = max_y/stepsize * OFFSET
plt.axis(ymin=-offset, ymax=max_y+offset)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
if output == 'stdout':
plt.savefig(sys.stdout, format='png')
else:
plt.savefig(output)
else:
plt.show()
Remove unused `--output stdout` option
|
"""
This module serves as an interface to
matplotlib.
"""
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_x_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset)
def scale_y_plot(max_y, stepsize):
offset = max_y/stepsize * OFFSET
plt.axis(ymin=-offset, ymax=max_y+offset)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
plt.savefig(output)
else:
plt.show()
|
<commit_before>"""
This module serves as an interface to
matplotlib.
"""
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_x_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset)
def scale_y_plot(max_y, stepsize):
offset = max_y/stepsize * OFFSET
plt.axis(ymin=-offset, ymax=max_y+offset)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
if output == 'stdout':
plt.savefig(sys.stdout, format='png')
else:
plt.savefig(output)
else:
plt.show()
<commit_msg>Remove unused `--output stdout` option<commit_after>
|
"""
This module serves as an interface to
matplotlib.
"""
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_x_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset)
def scale_y_plot(max_y, stepsize):
offset = max_y/stepsize * OFFSET
plt.axis(ymin=-offset, ymax=max_y+offset)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
plt.savefig(output)
else:
plt.show()
|
"""
This module serves as an interface to
matplotlib.
"""
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_x_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset)
def scale_y_plot(max_y, stepsize):
offset = max_y/stepsize * OFFSET
plt.axis(ymin=-offset, ymax=max_y+offset)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
if output == 'stdout':
plt.savefig(sys.stdout, format='png')
else:
plt.savefig(output)
else:
plt.show()
Remove unused `--output stdout` option"""
This module serves as an interface to
matplotlib.
"""
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_x_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset)
def scale_y_plot(max_y, stepsize):
offset = max_y/stepsize * OFFSET
plt.axis(ymin=-offset, ymax=max_y+offset)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
plt.savefig(output)
else:
plt.show()
|
<commit_before>"""
This module serves as an interface to
matplotlib.
"""
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_x_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset)
def scale_y_plot(max_y, stepsize):
offset = max_y/stepsize * OFFSET
plt.axis(ymin=-offset, ymax=max_y+offset)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
if output == 'stdout':
plt.savefig(sys.stdout, format='png')
else:
plt.savefig(output)
else:
plt.show()
<commit_msg>Remove unused `--output stdout` option<commit_after>"""
This module serves as an interface to
matplotlib.
"""
from utils import config
OFFSET = 2 # offset = max_x/stepsize * OFFSET
def init(output):
import matplotlib
config.mpl(matplotlib, bool(output))
from matplotlib import pyplot
globals()['plt'] = pyplot
def line_plot(xs, ys, color='red'):
plt.plot(
xs,
ys,
color=color,
linewidth=2.0
)
def legend(*args):
plt.legend(args, loc='best')
def scatter_plot(x, y, color='blue'):
plt.scatter(x, y, color=color)
def scale_x_plot(max_x, stepsize):
offset = max_x/stepsize * OFFSET
plt.axis(xmin=-offset, xmax=max_x+offset)
def scale_y_plot(max_y, stepsize):
offset = max_y/stepsize * OFFSET
plt.axis(ymin=-offset, ymax=max_y+offset)
def prepare_plot(xlabel, ylabel, title):
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.title(title)
plt.grid(True)
def display_plot(output):
if output:
plt.savefig(output)
else:
plt.show()
|
0e301d3ee54366187b2e12fa5c6927f27e907347
|
tools/python/frame_processor/frame_processor.py
|
tools/python/frame_processor/frame_processor.py
|
from frame_receiver.ipc_channel import IpcChannel, IpcChannelException
from frame_receiver.ipc_message import IpcMessage, IpcMessageException
from frame_processor_config import FrameProcessorConfig
import time
class FrameProcessor(object):
def __init__(self):
# Instantiate a configuration container object, which will be populated
# with sensible default values
self.config = FrameProcessorConfig("FrameProcessor - test harness to simulate operation of FrameProcessor application")
# Create the appropriate IPC channels
self.ctrl_channel = IpcChannel(IpcChannel.CHANNEL_TYPE_REQ)
def run(self):
self.ctrl_channel.connect(self.config.ctrl_endpoint)
for i in range(10):
msg = "HELLO " + str(i)
print "Sending message", msg
self.ctrl_channel.send(msg)
reply = self.ctrl_channel.recv()
print "Got reply:", reply
time.sleep(1)
if __name__ == "__main__":
fp = FrameProcessor()
fp.run()
|
from frame_receiver.ipc_channel import IpcChannel, IpcChannelException
from frame_receiver.ipc_message import IpcMessage, IpcMessageException
from frame_processor_config import FrameProcessorConfig
import time
class FrameProcessor(object):
def __init__(self):
# Instantiate a configuration container object, which will be populated
# with sensible default values
self.config = FrameProcessorConfig("FrameProcessor - test harness to simulate operation of FrameProcessor application")
# Create the appropriate IPC channels
self.ctrl_channel = IpcChannel(IpcChannel.CHANNEL_TYPE_REQ)
def run(self):
self.ctrl_channel.connect(self.config.ctrl_endpoint)
for i in range(10):
#msg = "HELLO " + str(i)
msg = IpcMessage(msg_type='cmd', msg_val='status')
print "Sending message", msg
self.ctrl_channel.send(msg.encode())
reply = self.ctrl_channel.recv()
print "Got reply:", reply
time.sleep(0.01)
if __name__ == "__main__":
fp = FrameProcessor()
fp.run()
|
Update python frame processor test harness to send IPC JSON messages to frame receiver for testing of control path and channel multiplexing
|
Update python frame processor test harness to send IPC JSON messages to
frame receiver for testing of control path and channel multiplexing
|
Python
|
apache-2.0
|
odin-detector/odin-data,percival-detector/odin-data,odin-detector/odin-data,odin-detector/odin-data,percival-detector/odin-data,odin-detector/odin-data,odin-detector/odin-data,odin-detector/odin-data,percival-detector/odin-data,percival-detector/odin-data,percival-detector/odin-data,percival-detector/odin-data,odin-detector/odin-data,percival-detector/odin-data
|
from frame_receiver.ipc_channel import IpcChannel, IpcChannelException
from frame_receiver.ipc_message import IpcMessage, IpcMessageException
from frame_processor_config import FrameProcessorConfig
import time
class FrameProcessor(object):
def __init__(self):
# Instantiate a configuration container object, which will be populated
# with sensible default values
self.config = FrameProcessorConfig("FrameProcessor - test harness to simulate operation of FrameProcessor application")
# Create the appropriate IPC channels
self.ctrl_channel = IpcChannel(IpcChannel.CHANNEL_TYPE_REQ)
def run(self):
self.ctrl_channel.connect(self.config.ctrl_endpoint)
for i in range(10):
msg = "HELLO " + str(i)
print "Sending message", msg
self.ctrl_channel.send(msg)
reply = self.ctrl_channel.recv()
print "Got reply:", reply
time.sleep(1)
if __name__ == "__main__":
fp = FrameProcessor()
fp.run()
Update python frame processor test harness to send IPC JSON messages to
frame receiver for testing of control path and channel multiplexing
|
from frame_receiver.ipc_channel import IpcChannel, IpcChannelException
from frame_receiver.ipc_message import IpcMessage, IpcMessageException
from frame_processor_config import FrameProcessorConfig
import time
class FrameProcessor(object):
def __init__(self):
# Instantiate a configuration container object, which will be populated
# with sensible default values
self.config = FrameProcessorConfig("FrameProcessor - test harness to simulate operation of FrameProcessor application")
# Create the appropriate IPC channels
self.ctrl_channel = IpcChannel(IpcChannel.CHANNEL_TYPE_REQ)
def run(self):
self.ctrl_channel.connect(self.config.ctrl_endpoint)
for i in range(10):
#msg = "HELLO " + str(i)
msg = IpcMessage(msg_type='cmd', msg_val='status')
print "Sending message", msg
self.ctrl_channel.send(msg.encode())
reply = self.ctrl_channel.recv()
print "Got reply:", reply
time.sleep(0.01)
if __name__ == "__main__":
fp = FrameProcessor()
fp.run()
|
<commit_before>from frame_receiver.ipc_channel import IpcChannel, IpcChannelException
from frame_receiver.ipc_message import IpcMessage, IpcMessageException
from frame_processor_config import FrameProcessorConfig
import time
class FrameProcessor(object):
def __init__(self):
# Instantiate a configuration container object, which will be populated
# with sensible default values
self.config = FrameProcessorConfig("FrameProcessor - test harness to simulate operation of FrameProcessor application")
# Create the appropriate IPC channels
self.ctrl_channel = IpcChannel(IpcChannel.CHANNEL_TYPE_REQ)
def run(self):
self.ctrl_channel.connect(self.config.ctrl_endpoint)
for i in range(10):
msg = "HELLO " + str(i)
print "Sending message", msg
self.ctrl_channel.send(msg)
reply = self.ctrl_channel.recv()
print "Got reply:", reply
time.sleep(1)
if __name__ == "__main__":
fp = FrameProcessor()
fp.run()
<commit_msg>Update python frame processor test harness to send IPC JSON messages to
frame receiver for testing of control path and channel multiplexing<commit_after>
|
from frame_receiver.ipc_channel import IpcChannel, IpcChannelException
from frame_receiver.ipc_message import IpcMessage, IpcMessageException
from frame_processor_config import FrameProcessorConfig
import time
class FrameProcessor(object):
def __init__(self):
# Instantiate a configuration container object, which will be populated
# with sensible default values
self.config = FrameProcessorConfig("FrameProcessor - test harness to simulate operation of FrameProcessor application")
# Create the appropriate IPC channels
self.ctrl_channel = IpcChannel(IpcChannel.CHANNEL_TYPE_REQ)
def run(self):
self.ctrl_channel.connect(self.config.ctrl_endpoint)
for i in range(10):
#msg = "HELLO " + str(i)
msg = IpcMessage(msg_type='cmd', msg_val='status')
print "Sending message", msg
self.ctrl_channel.send(msg.encode())
reply = self.ctrl_channel.recv()
print "Got reply:", reply
time.sleep(0.01)
if __name__ == "__main__":
fp = FrameProcessor()
fp.run()
|
from frame_receiver.ipc_channel import IpcChannel, IpcChannelException
from frame_receiver.ipc_message import IpcMessage, IpcMessageException
from frame_processor_config import FrameProcessorConfig
import time
class FrameProcessor(object):
def __init__(self):
# Instantiate a configuration container object, which will be populated
# with sensible default values
self.config = FrameProcessorConfig("FrameProcessor - test harness to simulate operation of FrameProcessor application")
# Create the appropriate IPC channels
self.ctrl_channel = IpcChannel(IpcChannel.CHANNEL_TYPE_REQ)
def run(self):
self.ctrl_channel.connect(self.config.ctrl_endpoint)
for i in range(10):
msg = "HELLO " + str(i)
print "Sending message", msg
self.ctrl_channel.send(msg)
reply = self.ctrl_channel.recv()
print "Got reply:", reply
time.sleep(1)
if __name__ == "__main__":
fp = FrameProcessor()
fp.run()
Update python frame processor test harness to send IPC JSON messages to
frame receiver for testing of control path and channel multiplexingfrom frame_receiver.ipc_channel import IpcChannel, IpcChannelException
from frame_receiver.ipc_message import IpcMessage, IpcMessageException
from frame_processor_config import FrameProcessorConfig
import time
class FrameProcessor(object):
def __init__(self):
# Instantiate a configuration container object, which will be populated
# with sensible default values
self.config = FrameProcessorConfig("FrameProcessor - test harness to simulate operation of FrameProcessor application")
# Create the appropriate IPC channels
self.ctrl_channel = IpcChannel(IpcChannel.CHANNEL_TYPE_REQ)
def run(self):
self.ctrl_channel.connect(self.config.ctrl_endpoint)
for i in range(10):
#msg = "HELLO " + str(i)
msg = IpcMessage(msg_type='cmd', msg_val='status')
print "Sending message", msg
self.ctrl_channel.send(msg.encode())
reply = self.ctrl_channel.recv()
print "Got reply:", reply
time.sleep(0.01)
if __name__ == "__main__":
fp = FrameProcessor()
fp.run()
|
<commit_before>from frame_receiver.ipc_channel import IpcChannel, IpcChannelException
from frame_receiver.ipc_message import IpcMessage, IpcMessageException
from frame_processor_config import FrameProcessorConfig
import time
class FrameProcessor(object):
def __init__(self):
# Instantiate a configuration container object, which will be populated
# with sensible default values
self.config = FrameProcessorConfig("FrameProcessor - test harness to simulate operation of FrameProcessor application")
# Create the appropriate IPC channels
self.ctrl_channel = IpcChannel(IpcChannel.CHANNEL_TYPE_REQ)
def run(self):
self.ctrl_channel.connect(self.config.ctrl_endpoint)
for i in range(10):
msg = "HELLO " + str(i)
print "Sending message", msg
self.ctrl_channel.send(msg)
reply = self.ctrl_channel.recv()
print "Got reply:", reply
time.sleep(1)
if __name__ == "__main__":
fp = FrameProcessor()
fp.run()
<commit_msg>Update python frame processor test harness to send IPC JSON messages to
frame receiver for testing of control path and channel multiplexing<commit_after>from frame_receiver.ipc_channel import IpcChannel, IpcChannelException
from frame_receiver.ipc_message import IpcMessage, IpcMessageException
from frame_processor_config import FrameProcessorConfig
import time
class FrameProcessor(object):
def __init__(self):
# Instantiate a configuration container object, which will be populated
# with sensible default values
self.config = FrameProcessorConfig("FrameProcessor - test harness to simulate operation of FrameProcessor application")
# Create the appropriate IPC channels
self.ctrl_channel = IpcChannel(IpcChannel.CHANNEL_TYPE_REQ)
def run(self):
self.ctrl_channel.connect(self.config.ctrl_endpoint)
for i in range(10):
#msg = "HELLO " + str(i)
msg = IpcMessage(msg_type='cmd', msg_val='status')
print "Sending message", msg
self.ctrl_channel.send(msg.encode())
reply = self.ctrl_channel.recv()
print "Got reply:", reply
time.sleep(0.01)
if __name__ == "__main__":
fp = FrameProcessor()
fp.run()
|
90506789edab1afb58ecebb90218f2654498a754
|
regserver/regserver/urls.py
|
regserver/regserver/urls.py
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns(
'',
url(r'', include('regulations.urls')),
url(r'^eregulations/', include('regulations.urls')),
)
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns(
'',
url(r'', include('regulations.urls')),
)
|
Remove eregulations url as reversing is not consistent
|
Remove eregulations url as reversing is not consistent
|
Python
|
cc0-1.0
|
18F/regulations-site,adderall/regulations-site,eregs/regulations-site,EricSchles/regulations-site,grapesmoker/regulations-site,tadhg-ohiggins/regulations-site,ascott1/regulations-site,jeremiak/regulations-site,ascott1/regulations-site,willbarton/regulations-site,EricSchles/regulations-site,jeremiak/regulations-site,grapesmoker/regulations-site,tadhg-ohiggins/regulations-site,18F/regulations-site,adderall/regulations-site,grapesmoker/regulations-site,willbarton/regulations-site,ascott1/regulations-site,eregs/regulations-site,adderall/regulations-site,willbarton/regulations-site,tadhg-ohiggins/regulations-site,eregs/regulations-site,eregs/regulations-site,EricSchles/regulations-site,grapesmoker/regulations-site,willbarton/regulations-site,18F/regulations-site,ascott1/regulations-site,18F/regulations-site,jeremiak/regulations-site,tadhg-ohiggins/regulations-site,EricSchles/regulations-site,jeremiak/regulations-site,adderall/regulations-site
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns(
'',
url(r'', include('regulations.urls')),
url(r'^eregulations/', include('regulations.urls')),
)
Remove eregulations url as reversing is not consistent
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns(
'',
url(r'', include('regulations.urls')),
)
|
<commit_before>from django.conf.urls import patterns, include, url
urlpatterns = patterns(
'',
url(r'', include('regulations.urls')),
url(r'^eregulations/', include('regulations.urls')),
)
<commit_msg>Remove eregulations url as reversing is not consistent<commit_after>
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns(
'',
url(r'', include('regulations.urls')),
)
|
from django.conf.urls import patterns, include, url
urlpatterns = patterns(
'',
url(r'', include('regulations.urls')),
url(r'^eregulations/', include('regulations.urls')),
)
Remove eregulations url as reversing is not consistentfrom django.conf.urls import patterns, include, url
urlpatterns = patterns(
'',
url(r'', include('regulations.urls')),
)
|
<commit_before>from django.conf.urls import patterns, include, url
urlpatterns = patterns(
'',
url(r'', include('regulations.urls')),
url(r'^eregulations/', include('regulations.urls')),
)
<commit_msg>Remove eregulations url as reversing is not consistent<commit_after>from django.conf.urls import patterns, include, url
urlpatterns = patterns(
'',
url(r'', include('regulations.urls')),
)
|
a2be69f45ffa351c43a60d3eca0414788e9c9737
|
cherrypy/test/test_native.py
|
cherrypy/test/test_native.py
|
"""Test the native server."""
import pytest
from requests_toolbelt import sessions
import cherrypy._cpnative_server
pytestmark = pytest.mark.skipif(
'sys.platform == "win32"',
reason="tests fail on Windows",
)
@pytest.fixture
def cp_native_server(request):
"""A native server."""
class Root(object):
@cherrypy.expose
def index(self):
return 'Hello World!'
cls = cherrypy._cpnative_server.CPHTTPServer
cherrypy.server.httpserver = cls(cherrypy.server)
cherrypy.tree.mount(Root(), '/')
cherrypy.engine.start()
request.addfinalizer(cherrypy.engine.stop)
url = 'http://localhost:{cherrypy.server.socket_port}'.format(**globals())
return sessions.BaseUrlSession(url)
def test_basic_request(cp_native_server):
"""A request to a native server should succeed."""
cp_native_server.get('/')
|
"""Test the native server."""
import pytest
from requests_toolbelt import sessions
import cherrypy._cpnative_server
pytestmark = pytest.mark.skipif(
'sys.platform == "win32"',
reason='tests fail on Windows',
)
@pytest.fixture
def cp_native_server(request):
"""A native server."""
class Root(object):
@cherrypy.expose
def index(self):
return 'Hello World!'
cls = cherrypy._cpnative_server.CPHTTPServer
cherrypy.server.httpserver = cls(cherrypy.server)
cherrypy.tree.mount(Root(), '/')
cherrypy.engine.start()
request.addfinalizer(cherrypy.engine.stop)
url = 'http://localhost:{cherrypy.server.socket_port}'.format(**globals())
return sessions.BaseUrlSession(url)
def test_basic_request(cp_native_server):
"""A request to a native server should succeed."""
cp_native_server.get('/')
|
Replace double quotes with single ones
|
Replace double quotes with single ones
|
Python
|
bsd-3-clause
|
cherrypy/cherrypy,Safihre/cherrypy,cherrypy/cherrypy,Safihre/cherrypy
|
"""Test the native server."""
import pytest
from requests_toolbelt import sessions
import cherrypy._cpnative_server
pytestmark = pytest.mark.skipif(
'sys.platform == "win32"',
reason="tests fail on Windows",
)
@pytest.fixture
def cp_native_server(request):
"""A native server."""
class Root(object):
@cherrypy.expose
def index(self):
return 'Hello World!'
cls = cherrypy._cpnative_server.CPHTTPServer
cherrypy.server.httpserver = cls(cherrypy.server)
cherrypy.tree.mount(Root(), '/')
cherrypy.engine.start()
request.addfinalizer(cherrypy.engine.stop)
url = 'http://localhost:{cherrypy.server.socket_port}'.format(**globals())
return sessions.BaseUrlSession(url)
def test_basic_request(cp_native_server):
"""A request to a native server should succeed."""
cp_native_server.get('/')
Replace double quotes with single ones
|
"""Test the native server."""
import pytest
from requests_toolbelt import sessions
import cherrypy._cpnative_server
pytestmark = pytest.mark.skipif(
'sys.platform == "win32"',
reason='tests fail on Windows',
)
@pytest.fixture
def cp_native_server(request):
"""A native server."""
class Root(object):
@cherrypy.expose
def index(self):
return 'Hello World!'
cls = cherrypy._cpnative_server.CPHTTPServer
cherrypy.server.httpserver = cls(cherrypy.server)
cherrypy.tree.mount(Root(), '/')
cherrypy.engine.start()
request.addfinalizer(cherrypy.engine.stop)
url = 'http://localhost:{cherrypy.server.socket_port}'.format(**globals())
return sessions.BaseUrlSession(url)
def test_basic_request(cp_native_server):
"""A request to a native server should succeed."""
cp_native_server.get('/')
|
<commit_before>"""Test the native server."""
import pytest
from requests_toolbelt import sessions
import cherrypy._cpnative_server
pytestmark = pytest.mark.skipif(
'sys.platform == "win32"',
reason="tests fail on Windows",
)
@pytest.fixture
def cp_native_server(request):
"""A native server."""
class Root(object):
@cherrypy.expose
def index(self):
return 'Hello World!'
cls = cherrypy._cpnative_server.CPHTTPServer
cherrypy.server.httpserver = cls(cherrypy.server)
cherrypy.tree.mount(Root(), '/')
cherrypy.engine.start()
request.addfinalizer(cherrypy.engine.stop)
url = 'http://localhost:{cherrypy.server.socket_port}'.format(**globals())
return sessions.BaseUrlSession(url)
def test_basic_request(cp_native_server):
"""A request to a native server should succeed."""
cp_native_server.get('/')
<commit_msg>Replace double quotes with single ones<commit_after>
|
"""Test the native server."""
import pytest
from requests_toolbelt import sessions
import cherrypy._cpnative_server
pytestmark = pytest.mark.skipif(
'sys.platform == "win32"',
reason='tests fail on Windows',
)
@pytest.fixture
def cp_native_server(request):
"""A native server."""
class Root(object):
@cherrypy.expose
def index(self):
return 'Hello World!'
cls = cherrypy._cpnative_server.CPHTTPServer
cherrypy.server.httpserver = cls(cherrypy.server)
cherrypy.tree.mount(Root(), '/')
cherrypy.engine.start()
request.addfinalizer(cherrypy.engine.stop)
url = 'http://localhost:{cherrypy.server.socket_port}'.format(**globals())
return sessions.BaseUrlSession(url)
def test_basic_request(cp_native_server):
"""A request to a native server should succeed."""
cp_native_server.get('/')
|
"""Test the native server."""
import pytest
from requests_toolbelt import sessions
import cherrypy._cpnative_server
pytestmark = pytest.mark.skipif(
'sys.platform == "win32"',
reason="tests fail on Windows",
)
@pytest.fixture
def cp_native_server(request):
"""A native server."""
class Root(object):
@cherrypy.expose
def index(self):
return 'Hello World!'
cls = cherrypy._cpnative_server.CPHTTPServer
cherrypy.server.httpserver = cls(cherrypy.server)
cherrypy.tree.mount(Root(), '/')
cherrypy.engine.start()
request.addfinalizer(cherrypy.engine.stop)
url = 'http://localhost:{cherrypy.server.socket_port}'.format(**globals())
return sessions.BaseUrlSession(url)
def test_basic_request(cp_native_server):
"""A request to a native server should succeed."""
cp_native_server.get('/')
Replace double quotes with single ones"""Test the native server."""
import pytest
from requests_toolbelt import sessions
import cherrypy._cpnative_server
pytestmark = pytest.mark.skipif(
'sys.platform == "win32"',
reason='tests fail on Windows',
)
@pytest.fixture
def cp_native_server(request):
"""A native server."""
class Root(object):
@cherrypy.expose
def index(self):
return 'Hello World!'
cls = cherrypy._cpnative_server.CPHTTPServer
cherrypy.server.httpserver = cls(cherrypy.server)
cherrypy.tree.mount(Root(), '/')
cherrypy.engine.start()
request.addfinalizer(cherrypy.engine.stop)
url = 'http://localhost:{cherrypy.server.socket_port}'.format(**globals())
return sessions.BaseUrlSession(url)
def test_basic_request(cp_native_server):
"""A request to a native server should succeed."""
cp_native_server.get('/')
|
<commit_before>"""Test the native server."""
import pytest
from requests_toolbelt import sessions
import cherrypy._cpnative_server
pytestmark = pytest.mark.skipif(
'sys.platform == "win32"',
reason="tests fail on Windows",
)
@pytest.fixture
def cp_native_server(request):
"""A native server."""
class Root(object):
@cherrypy.expose
def index(self):
return 'Hello World!'
cls = cherrypy._cpnative_server.CPHTTPServer
cherrypy.server.httpserver = cls(cherrypy.server)
cherrypy.tree.mount(Root(), '/')
cherrypy.engine.start()
request.addfinalizer(cherrypy.engine.stop)
url = 'http://localhost:{cherrypy.server.socket_port}'.format(**globals())
return sessions.BaseUrlSession(url)
def test_basic_request(cp_native_server):
"""A request to a native server should succeed."""
cp_native_server.get('/')
<commit_msg>Replace double quotes with single ones<commit_after>"""Test the native server."""
import pytest
from requests_toolbelt import sessions
import cherrypy._cpnative_server
pytestmark = pytest.mark.skipif(
'sys.platform == "win32"',
reason='tests fail on Windows',
)
@pytest.fixture
def cp_native_server(request):
"""A native server."""
class Root(object):
@cherrypy.expose
def index(self):
return 'Hello World!'
cls = cherrypy._cpnative_server.CPHTTPServer
cherrypy.server.httpserver = cls(cherrypy.server)
cherrypy.tree.mount(Root(), '/')
cherrypy.engine.start()
request.addfinalizer(cherrypy.engine.stop)
url = 'http://localhost:{cherrypy.server.socket_port}'.format(**globals())
return sessions.BaseUrlSession(url)
def test_basic_request(cp_native_server):
"""A request to a native server should succeed."""
cp_native_server.get('/')
|
c4dc76587a5021de30e8811332869fa4cc6f9ed0
|
bucketeer/test/test_commit.py
|
bucketeer/test/test_commit.py
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket, test_dir
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
def setUp(self):
# Create a bucket to test on existing bucket
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
return
def tearDown(self):
# Remove bucket created to test on existing bucket
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
# Create a bucket to test on existing bucket
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
# Remove bucket created to test on existing bucket
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
|
Add create and remove file for setUp and tearDown
|
Add create and remove file for setUp and tearDown
|
Python
|
mit
|
mgarbacz/bucketeer
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket, test_dir
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
def setUp(self):
# Create a bucket to test on existing bucket
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
return
def tearDown(self):
# Remove bucket created to test on existing bucket
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
Add create and remove file for setUp and tearDown
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
# Create a bucket to test on existing bucket
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
# Remove bucket created to test on existing bucket
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket, test_dir
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
def setUp(self):
# Create a bucket to test on existing bucket
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
return
def tearDown(self):
# Remove bucket created to test on existing bucket
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add create and remove file for setUp and tearDown<commit_after>
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
# Create a bucket to test on existing bucket
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
# Remove bucket created to test on existing bucket
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
|
import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket, test_dir
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
def setUp(self):
# Create a bucket to test on existing bucket
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
return
def tearDown(self):
# Remove bucket created to test on existing bucket
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
Add create and remove file for setUp and tearDownimport unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
# Create a bucket to test on existing bucket
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
# Remove bucket created to test on existing bucket
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket, test_dir
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
def setUp(self):
# Create a bucket to test on existing bucket
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
return
def tearDown(self):
# Remove bucket created to test on existing bucket
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
<commit_msg>Add create and remove file for setUp and tearDown<commit_after>import unittest, boto, os
from bucketeer import commit
class BuckeeterTest(unittest.TestCase):
global existing_bucket, test_dir, test_file
existing_bucket = 'bucket.exists'
test_dir = 'bucketeer_test_dir'
test_file = 'bucketeer_test_file'
def setUp(self):
# Create a bucket to test on existing bucket
connection = boto.connect_s3()
bucket = connection.create_bucket(existing_bucket)
# Create directory to house test files
os.makedirs(test_dir)
# Create test file
open(test_dir + '/' + test_file, 'w').close()
return
def tearDown(self):
# Remove bucket created to test on existing bucket
connection = boto.connect_s3()
bucket = connection.delete_bucket(existing_bucket)
# Remove test file
os.remove(test_dir + '/' + test_file)
# Remove directory created to house test files
os.rmdir(test_dir)
return
def testMain(self):
self.assertTrue(commit)
if __name__ == '__main__':
unittest.main()
|
236f10e790757db0cc563f5f19ca5863877b1e7f
|
busstops/management/tests/test_import_singapore.py
|
busstops/management/tests/test_import_singapore.py
|
import os
import vcr
from django.test import TestCase, override_settings
from django.core.management import call_command
from ...models import StopPoint, Service, Place
FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures')
class ImportSingaporeTest(TestCase):
@classmethod
def setUpTestData(cls):
with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')):
call_command('import_singapore')
call_command('import_singapore_places')
def test_import_stops(self):
self.assertEqual(499, StopPoint.objects.all().count())
stop = StopPoint.objects.first()
self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A')
def test_import_services(self):
service = Service.objects.get()
self.assertEqual(service.operator.get().name, 'SBS Transit')
self.assertEqual(service.slug, 'sg-sbst-10')
def test_import_places(self):
self.assertEqual(307, Place.objects.count())
place = Place.objects.get(name='Central Singapore')
response = self.client.get(place.get_absolute_url())
self.assertContains(response, '<h1>Central Singapore</h1>')
self.assertContains(response, 'Fort Canning')
self.assertContains(response, 'Bayfront Subzone')
|
import os
import vcr
from django.test import TestCase
from django.core.management import call_command
from ...models import StopPoint, Service, Place
FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures')
class ImportSingaporeTest(TestCase):
@classmethod
def setUpTestData(cls):
with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')):
call_command('import_singapore')
call_command('import_singapore_places')
def test_import_stops(self):
self.assertEqual(499, StopPoint.objects.all().count())
stop = StopPoint.objects.first()
self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A')
def test_import_services(self):
service = Service.objects.get()
self.assertEqual(service.operator.get().name, 'SBS Transit')
self.assertEqual(service.slug, 'sg-sbst-10')
def test_import_places(self):
self.assertEqual(307, Place.objects.count())
place = Place.objects.get(name='Central Singapore')
response = self.client.get(place.get_absolute_url())
self.assertContains(response, '<h1>Central Singapore</h1>')
self.assertContains(response, 'Fort Canning')
self.assertContains(response, 'Bayfront Subzone')
|
Remove unused import to fix flake8
|
Remove unused import to fix flake8
|
Python
|
mpl-2.0
|
jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk,jclgoodwin/bustimes.org.uk
|
import os
import vcr
from django.test import TestCase, override_settings
from django.core.management import call_command
from ...models import StopPoint, Service, Place
FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures')
class ImportSingaporeTest(TestCase):
@classmethod
def setUpTestData(cls):
with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')):
call_command('import_singapore')
call_command('import_singapore_places')
def test_import_stops(self):
self.assertEqual(499, StopPoint.objects.all().count())
stop = StopPoint.objects.first()
self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A')
def test_import_services(self):
service = Service.objects.get()
self.assertEqual(service.operator.get().name, 'SBS Transit')
self.assertEqual(service.slug, 'sg-sbst-10')
def test_import_places(self):
self.assertEqual(307, Place.objects.count())
place = Place.objects.get(name='Central Singapore')
response = self.client.get(place.get_absolute_url())
self.assertContains(response, '<h1>Central Singapore</h1>')
self.assertContains(response, 'Fort Canning')
self.assertContains(response, 'Bayfront Subzone')
Remove unused import to fix flake8
|
import os
import vcr
from django.test import TestCase
from django.core.management import call_command
from ...models import StopPoint, Service, Place
FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures')
class ImportSingaporeTest(TestCase):
@classmethod
def setUpTestData(cls):
with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')):
call_command('import_singapore')
call_command('import_singapore_places')
def test_import_stops(self):
self.assertEqual(499, StopPoint.objects.all().count())
stop = StopPoint.objects.first()
self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A')
def test_import_services(self):
service = Service.objects.get()
self.assertEqual(service.operator.get().name, 'SBS Transit')
self.assertEqual(service.slug, 'sg-sbst-10')
def test_import_places(self):
self.assertEqual(307, Place.objects.count())
place = Place.objects.get(name='Central Singapore')
response = self.client.get(place.get_absolute_url())
self.assertContains(response, '<h1>Central Singapore</h1>')
self.assertContains(response, 'Fort Canning')
self.assertContains(response, 'Bayfront Subzone')
|
<commit_before>import os
import vcr
from django.test import TestCase, override_settings
from django.core.management import call_command
from ...models import StopPoint, Service, Place
FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures')
class ImportSingaporeTest(TestCase):
@classmethod
def setUpTestData(cls):
with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')):
call_command('import_singapore')
call_command('import_singapore_places')
def test_import_stops(self):
self.assertEqual(499, StopPoint.objects.all().count())
stop = StopPoint.objects.first()
self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A')
def test_import_services(self):
service = Service.objects.get()
self.assertEqual(service.operator.get().name, 'SBS Transit')
self.assertEqual(service.slug, 'sg-sbst-10')
def test_import_places(self):
self.assertEqual(307, Place.objects.count())
place = Place.objects.get(name='Central Singapore')
response = self.client.get(place.get_absolute_url())
self.assertContains(response, '<h1>Central Singapore</h1>')
self.assertContains(response, 'Fort Canning')
self.assertContains(response, 'Bayfront Subzone')
<commit_msg>Remove unused import to fix flake8<commit_after>
|
import os
import vcr
from django.test import TestCase
from django.core.management import call_command
from ...models import StopPoint, Service, Place
FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures')
class ImportSingaporeTest(TestCase):
@classmethod
def setUpTestData(cls):
with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')):
call_command('import_singapore')
call_command('import_singapore_places')
def test_import_stops(self):
self.assertEqual(499, StopPoint.objects.all().count())
stop = StopPoint.objects.first()
self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A')
def test_import_services(self):
service = Service.objects.get()
self.assertEqual(service.operator.get().name, 'SBS Transit')
self.assertEqual(service.slug, 'sg-sbst-10')
def test_import_places(self):
self.assertEqual(307, Place.objects.count())
place = Place.objects.get(name='Central Singapore')
response = self.client.get(place.get_absolute_url())
self.assertContains(response, '<h1>Central Singapore</h1>')
self.assertContains(response, 'Fort Canning')
self.assertContains(response, 'Bayfront Subzone')
|
import os
import vcr
from django.test import TestCase, override_settings
from django.core.management import call_command
from ...models import StopPoint, Service, Place
FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures')
class ImportSingaporeTest(TestCase):
@classmethod
def setUpTestData(cls):
with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')):
call_command('import_singapore')
call_command('import_singapore_places')
def test_import_stops(self):
self.assertEqual(499, StopPoint.objects.all().count())
stop = StopPoint.objects.first()
self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A')
def test_import_services(self):
service = Service.objects.get()
self.assertEqual(service.operator.get().name, 'SBS Transit')
self.assertEqual(service.slug, 'sg-sbst-10')
def test_import_places(self):
self.assertEqual(307, Place.objects.count())
place = Place.objects.get(name='Central Singapore')
response = self.client.get(place.get_absolute_url())
self.assertContains(response, '<h1>Central Singapore</h1>')
self.assertContains(response, 'Fort Canning')
self.assertContains(response, 'Bayfront Subzone')
Remove unused import to fix flake8import os
import vcr
from django.test import TestCase
from django.core.management import call_command
from ...models import StopPoint, Service, Place
FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures')
class ImportSingaporeTest(TestCase):
@classmethod
def setUpTestData(cls):
with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')):
call_command('import_singapore')
call_command('import_singapore_places')
def test_import_stops(self):
self.assertEqual(499, StopPoint.objects.all().count())
stop = StopPoint.objects.first()
self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A')
def test_import_services(self):
service = Service.objects.get()
self.assertEqual(service.operator.get().name, 'SBS Transit')
self.assertEqual(service.slug, 'sg-sbst-10')
def test_import_places(self):
self.assertEqual(307, Place.objects.count())
place = Place.objects.get(name='Central Singapore')
response = self.client.get(place.get_absolute_url())
self.assertContains(response, '<h1>Central Singapore</h1>')
self.assertContains(response, 'Fort Canning')
self.assertContains(response, 'Bayfront Subzone')
|
<commit_before>import os
import vcr
from django.test import TestCase, override_settings
from django.core.management import call_command
from ...models import StopPoint, Service, Place
FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures')
class ImportSingaporeTest(TestCase):
@classmethod
def setUpTestData(cls):
with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')):
call_command('import_singapore')
call_command('import_singapore_places')
def test_import_stops(self):
self.assertEqual(499, StopPoint.objects.all().count())
stop = StopPoint.objects.first()
self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A')
def test_import_services(self):
service = Service.objects.get()
self.assertEqual(service.operator.get().name, 'SBS Transit')
self.assertEqual(service.slug, 'sg-sbst-10')
def test_import_places(self):
self.assertEqual(307, Place.objects.count())
place = Place.objects.get(name='Central Singapore')
response = self.client.get(place.get_absolute_url())
self.assertContains(response, '<h1>Central Singapore</h1>')
self.assertContains(response, 'Fort Canning')
self.assertContains(response, 'Bayfront Subzone')
<commit_msg>Remove unused import to fix flake8<commit_after>import os
import vcr
from django.test import TestCase
from django.core.management import call_command
from ...models import StopPoint, Service, Place
FIXTURES_DIR = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'fixtures')
class ImportSingaporeTest(TestCase):
@classmethod
def setUpTestData(cls):
with vcr.use_cassette(os.path.join(FIXTURES_DIR, 'singapore.yaml')):
call_command('import_singapore')
call_command('import_singapore_places')
def test_import_stops(self):
self.assertEqual(499, StopPoint.objects.all().count())
stop = StopPoint.objects.first()
self.assertEqual(str(stop), 'AFT BRAS BASAH STN EXIT A')
def test_import_services(self):
service = Service.objects.get()
self.assertEqual(service.operator.get().name, 'SBS Transit')
self.assertEqual(service.slug, 'sg-sbst-10')
def test_import_places(self):
self.assertEqual(307, Place.objects.count())
place = Place.objects.get(name='Central Singapore')
response = self.client.get(place.get_absolute_url())
self.assertContains(response, '<h1>Central Singapore</h1>')
self.assertContains(response, 'Fort Canning')
self.assertContains(response, 'Bayfront Subzone')
|
34ab7f1090d878bf8328f25f0fa4be4e575e7f43
|
numba/sigutils.py
|
numba/sigutils.py
|
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a potentially valid signature
specification (for user-facing APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def _parse_signature_string(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(return_type, args)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
parsed = _parse_signature_string(sig)
else:
parsed = sig
if isinstance(parsed, tuple):
args, return_type = parsed, None
elif isinstance(parsed, typing.Signature):
args, return_type = parsed.args, parsed.return_type
else:
raise TypeError("invalid signature: %r (type: %r) evaluates to %r "
"instead of tuple or Signature" % (
sig, sig.__class__.__name__,
parsed.__class__.__name__
))
def check_type(ty):
if not isinstance(ty, types.Type):
raise TypeError("invalid type in signature: expected a type "
"instance, got %r" % (ty,))
if return_type is not None:
check_type(return_type)
for ty in args:
check_type(ty)
return args, return_type
|
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a potentially valid signature
specification (for user-facing APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def _parse_signature_string(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(args, return_type)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
parsed = _parse_signature_string(sig)
else:
parsed = sig
if isinstance(parsed, tuple):
args, return_type = parsed, None
elif isinstance(parsed, typing.Signature):
args, return_type = parsed.args, parsed.return_type
else:
raise TypeError("invalid signature: %r (type: %r) evaluates to %r "
"instead of tuple or Signature" % (
sig, sig.__class__.__name__,
parsed.__class__.__name__
))
def check_type(ty):
if not isinstance(ty, types.Type):
raise TypeError("invalid type in signature: expected a type "
"instance, got %r" % (ty,))
if return_type is not None:
check_type(return_type)
for ty in args:
check_type(ty)
return args, return_type
|
Update return value order in normalize_signature docstring
|
Update return value order in normalize_signature docstring [skip ci]
|
Python
|
bsd-2-clause
|
IntelLabs/numba,gmarkall/numba,jriehl/numba,jriehl/numba,seibert/numba,numba/numba,IntelLabs/numba,stonebig/numba,stonebig/numba,stonebig/numba,stonebig/numba,sklam/numba,stuartarchibald/numba,seibert/numba,stuartarchibald/numba,jriehl/numba,seibert/numba,cpcloud/numba,cpcloud/numba,cpcloud/numba,numba/numba,IntelLabs/numba,stuartarchibald/numba,seibert/numba,gmarkall/numba,numba/numba,numba/numba,IntelLabs/numba,stuartarchibald/numba,cpcloud/numba,gmarkall/numba,seibert/numba,cpcloud/numba,numba/numba,stuartarchibald/numba,jriehl/numba,sklam/numba,gmarkall/numba,IntelLabs/numba,sklam/numba,stonebig/numba,jriehl/numba,sklam/numba,gmarkall/numba,sklam/numba
|
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
    """Tell whether *sig* may be a valid signature specification.

    User-facing APIs accept a string, a tuple of argument types, or a
    ``typing.Signature`` object; anything else is rejected up front.
    """
    accepted_forms = (str, tuple, typing.Signature)
    return isinstance(sig, accepted_forms)
def _parse_signature_string(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
def normalize_signature(sig):
    """
    From *sig* (a signature specification), return an ``(args, return_type)``
    tuple, where ``args`` itself is a tuple of types, and ``return_type``
    can be None if not specified.
    """
    # Strings are evaluated into a tuple or Signature first.
    if isinstance(sig, str):
        parsed = _parse_signature_string(sig)
    else:
        parsed = sig
    if isinstance(parsed, tuple):
        # A bare tuple gives only the argument types; no return type.
        args, return_type = parsed, None
    elif isinstance(parsed, typing.Signature):
        args, return_type = parsed.args, parsed.return_type
    else:
        raise TypeError("invalid signature: %r (type: %r) evaluates to %r "
                        "instead of tuple or Signature" % (
                            sig, sig.__class__.__name__,
                            parsed.__class__.__name__
                        ))
    def check_type(ty):
        # Every signature element must be a numba type instance.
        if not isinstance(ty, types.Type):
            raise TypeError("invalid type in signature: expected a type "
                            "instance, got %r" % (ty,))
    if return_type is not None:
        check_type(return_type)
    for ty in args:
        check_type(ty)
    return args, return_type
Update return value order in normalize_signature docstring [skip ci]
|
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a potentially valid signature
specification (for user-facing APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def _parse_signature_string(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(args, return_type)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
parsed = _parse_signature_string(sig)
else:
parsed = sig
if isinstance(parsed, tuple):
args, return_type = parsed, None
elif isinstance(parsed, typing.Signature):
args, return_type = parsed.args, parsed.return_type
else:
raise TypeError("invalid signature: %r (type: %r) evaluates to %r "
"instead of tuple or Signature" % (
sig, sig.__class__.__name__,
parsed.__class__.__name__
))
def check_type(ty):
if not isinstance(ty, types.Type):
raise TypeError("invalid type in signature: expected a type "
"instance, got %r" % (ty,))
if return_type is not None:
check_type(return_type)
for ty in args:
check_type(ty)
return args, return_type
|
<commit_before>from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a potentially valid signature
specification (for user-facing APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def _parse_signature_string(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
def normalize_signature(sig):
    """
    From *sig* (a signature specification), return an ``(args, return_type)``
    tuple, where ``args`` itself is a tuple of types, and ``return_type``
    can be None if not specified.
    """
    # Strings are evaluated into a tuple or Signature first.
    if isinstance(sig, str):
        parsed = _parse_signature_string(sig)
    else:
        parsed = sig
    if isinstance(parsed, tuple):
        # A bare tuple gives only the argument types; no return type.
        args, return_type = parsed, None
    elif isinstance(parsed, typing.Signature):
        args, return_type = parsed.args, parsed.return_type
    else:
        raise TypeError("invalid signature: %r (type: %r) evaluates to %r "
                        "instead of tuple or Signature" % (
                            sig, sig.__class__.__name__,
                            parsed.__class__.__name__
                        ))
    def check_type(ty):
        # Every signature element must be a numba type instance.
        if not isinstance(ty, types.Type):
            raise TypeError("invalid type in signature: expected a type "
                            "instance, got %r" % (ty,))
    if return_type is not None:
        check_type(return_type)
    for ty in args:
        check_type(ty)
    return args, return_type
<commit_msg>Update return value order in normalize_signature docstring [skip ci]<commit_after>
|
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a potentially valid signature
specification (for user-facing APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def _parse_signature_string(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(args, return_type)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
parsed = _parse_signature_string(sig)
else:
parsed = sig
if isinstance(parsed, tuple):
args, return_type = parsed, None
elif isinstance(parsed, typing.Signature):
args, return_type = parsed.args, parsed.return_type
else:
raise TypeError("invalid signature: %r (type: %r) evaluates to %r "
"instead of tuple or Signature" % (
sig, sig.__class__.__name__,
parsed.__class__.__name__
))
def check_type(ty):
if not isinstance(ty, types.Type):
raise TypeError("invalid type in signature: expected a type "
"instance, got %r" % (ty,))
if return_type is not None:
check_type(return_type)
for ty in args:
check_type(ty)
return args, return_type
|
from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a potentially valid signature
specification (for user-facing APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def _parse_signature_string(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(return_type, args)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
parsed = _parse_signature_string(sig)
else:
parsed = sig
if isinstance(parsed, tuple):
args, return_type = parsed, None
elif isinstance(parsed, typing.Signature):
args, return_type = parsed.args, parsed.return_type
else:
raise TypeError("invalid signature: %r (type: %r) evaluates to %r "
"instead of tuple or Signature" % (
sig, sig.__class__.__name__,
parsed.__class__.__name__
))
def check_type(ty):
if not isinstance(ty, types.Type):
raise TypeError("invalid type in signature: expected a type "
"instance, got %r" % (ty,))
if return_type is not None:
check_type(return_type)
for ty in args:
check_type(ty)
return args, return_type
Update return value order in normalize_signature docstring [skip ci]from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a potentially valid signature
specification (for user-facing APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def _parse_signature_string(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(args, return_type)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
parsed = _parse_signature_string(sig)
else:
parsed = sig
if isinstance(parsed, tuple):
args, return_type = parsed, None
elif isinstance(parsed, typing.Signature):
args, return_type = parsed.args, parsed.return_type
else:
raise TypeError("invalid signature: %r (type: %r) evaluates to %r "
"instead of tuple or Signature" % (
sig, sig.__class__.__name__,
parsed.__class__.__name__
))
def check_type(ty):
if not isinstance(ty, types.Type):
raise TypeError("invalid type in signature: expected a type "
"instance, got %r" % (ty,))
if return_type is not None:
check_type(return_type)
for ty in args:
check_type(ty)
return args, return_type
|
<commit_before>from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a potentially valid signature
specification (for user-facing APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def _parse_signature_string(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(return_type, args)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
parsed = _parse_signature_string(sig)
else:
parsed = sig
if isinstance(parsed, tuple):
args, return_type = parsed, None
elif isinstance(parsed, typing.Signature):
args, return_type = parsed.args, parsed.return_type
else:
raise TypeError("invalid signature: %r (type: %r) evaluates to %r "
"instead of tuple or Signature" % (
sig, sig.__class__.__name__,
parsed.__class__.__name__
))
def check_type(ty):
if not isinstance(ty, types.Type):
raise TypeError("invalid type in signature: expected a type "
"instance, got %r" % (ty,))
if return_type is not None:
check_type(return_type)
for ty in args:
check_type(ty)
return args, return_type
<commit_msg>Update return value order in normalize_signature docstring [skip ci]<commit_after>from __future__ import print_function, division, absolute_import
from numba import types, typing
def is_signature(sig):
"""
Return whether *sig* is a potentially valid signature
specification (for user-facing APIs).
"""
return isinstance(sig, (str, tuple, typing.Signature))
def _parse_signature_string(signature_str):
# Just eval signature_str using the types submodules as globals
return eval(signature_str, {}, types.__dict__)
def normalize_signature(sig):
"""
From *sig* (a signature specification), return a ``(args, return_type)``
tuple, where ``args`` itself is a tuple of types, and ``return_type``
can be None if not specified.
"""
if isinstance(sig, str):
parsed = _parse_signature_string(sig)
else:
parsed = sig
if isinstance(parsed, tuple):
args, return_type = parsed, None
elif isinstance(parsed, typing.Signature):
args, return_type = parsed.args, parsed.return_type
else:
raise TypeError("invalid signature: %r (type: %r) evaluates to %r "
"instead of tuple or Signature" % (
sig, sig.__class__.__name__,
parsed.__class__.__name__
))
def check_type(ty):
if not isinstance(ty, types.Type):
raise TypeError("invalid type in signature: expected a type "
"instance, got %r" % (ty,))
if return_type is not None:
check_type(return_type)
for ty in args:
check_type(ty)
return args, return_type
|
b1d889dc4207af08e8c1ee3f75006fa6b4051376
|
vitrage/rpc.py
|
vitrage/rpc.py
|
# Copyright 2015 - Alcatel-Lucent
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import oslo_messaging as messaging
# Configuration options contributed by this module; 'rpc_topic' names
# the messaging topic the vitrage RPC server listens on.
OPTS = [
    cfg.StrOpt('rpc_topic',
               default='rpcapiv1',
               help='The topic vitrage listens on'),
]
def set_defaults(control_exchange):
    # Thin wrapper: forward the control exchange name to oslo.messaging's
    # transport defaults so all clients and servers share one exchange.
    messaging.set_transport_defaults(control_exchange)
def get_client(transport, target, version_cap=None, serializer=None):
    """Return an oslo.messaging RPC client bound to *target*.

    *version_cap* and *serializer* are forwarded unchanged to
    ``messaging.RPCClient``.
    """
    assert transport is not None
    client = messaging.RPCClient(
        transport, target,
        version_cap=version_cap,
        serializer=serializer)
    return client
def get_server(target, endpoints, transport, serializer=None):
    """Return an oslo.messaging RPC server for *target*.

    Incoming calls are dispatched to the *endpoints* objects; the server
    runs with the 'eventlet' executor.
    """
    # NOTE(review): ``assert`` is stripped under ``python -O``; an
    # explicit check may be safer if transport can legitimately be None.
    assert transport is not None
    return messaging.get_rpc_server(transport,
                                    target,
                                    endpoints,
                                    executor='eventlet',
                                    serializer=serializer)
|
# Copyright 2015 - Alcatel-Lucent
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_messaging.rpc import dispatcher
OPTS = [
cfg.StrOpt('rpc_topic',
default='rpcapiv1',
help='The topic vitrage listens on'),
]
def set_defaults(control_exchange):
messaging.set_transport_defaults(control_exchange)
def get_client(transport, target, version_cap=None, serializer=None):
assert transport is not None
return messaging.RPCClient(transport,
target,
version_cap=version_cap,
serializer=serializer)
def get_server(target, endpoints, transport, serializer=None):
    """Return an oslo.messaging RPC server for *target*.

    Incoming calls are dispatched to the *endpoints* objects with the
    'eventlet' executor.  DefaultRPCAccessPolicy is passed explicitly
    because the legacy default policy exposes private endpoint methods.
    """
    assert transport is not None
    return messaging.get_rpc_server(
        transport, target, endpoints,
        executor='eventlet',
        serializer=serializer,
        access_policy=dispatcher.DefaultRPCAccessPolicy)
|
Set access_policy for messaging's dispatcher
|
Set access_policy for messaging's dispatcher
oslo.messaging allow dispatcher to restrict endpoint methods since
5.11.0 in d3a8f280ebd6fd12865fd20c4d772774e39aefa2, set with
DefaultRPCAccessPolicy to fix FutureWarning like:
"The access_policy argument is changing its default value to <class
'oslo_messaging.rpc.dispatcher.DefaultRPCAccessPolicy'> in version '?',
please update the code to explicitly set None as the value:
access_policy defaults to LegacyRPCAccessPolicy which exposes private
methods. Explicitly set access_policy to DefaultRPCAccessPolicy or
ExplicitRPCAccessPolicy.
Change-Id: Ib90013909237816cf906d584778f5e0ce7152fab
|
Python
|
apache-2.0
|
openstack/vitrage,openstack/vitrage,openstack/vitrage
|
# Copyright 2015 - Alcatel-Lucent
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import oslo_messaging as messaging
OPTS = [
cfg.StrOpt('rpc_topic',
default='rpcapiv1',
help='The topic vitrage listens on'),
]
def set_defaults(control_exchange):
messaging.set_transport_defaults(control_exchange)
def get_client(transport, target, version_cap=None, serializer=None):
assert transport is not None
return messaging.RPCClient(transport,
target,
version_cap=version_cap,
serializer=serializer)
def get_server(target, endpoints, transport, serializer=None):
assert transport is not None
return messaging.get_rpc_server(transport,
target,
endpoints,
executor='eventlet',
serializer=serializer)
Set access_policy for messaging's dispatcher
oslo.messaging allow dispatcher to restrict endpoint methods since
5.11.0 in d3a8f280ebd6fd12865fd20c4d772774e39aefa2, set with
DefaultRPCAccessPolicy to fix FutureWarning like:
"The access_policy argument is changing its default value to <class
'oslo_messaging.rpc.dispatcher.DefaultRPCAccessPolicy'> in version '?',
please update the code to explicitly set None as the value:
access_policy defaults to LegacyRPCAccessPolicy which exposes private
methods. Explicitly set access_policy to DefaultRPCAccessPolicy or
ExplicitRPCAccessPolicy.
Change-Id: Ib90013909237816cf906d584778f5e0ce7152fab
|
# Copyright 2015 - Alcatel-Lucent
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_messaging.rpc import dispatcher
OPTS = [
cfg.StrOpt('rpc_topic',
default='rpcapiv1',
help='The topic vitrage listens on'),
]
def set_defaults(control_exchange):
messaging.set_transport_defaults(control_exchange)
def get_client(transport, target, version_cap=None, serializer=None):
assert transport is not None
return messaging.RPCClient(transport,
target,
version_cap=version_cap,
serializer=serializer)
def get_server(target, endpoints, transport, serializer=None):
assert transport is not None
access_policy = dispatcher.DefaultRPCAccessPolicy
return messaging.get_rpc_server(transport,
target,
endpoints,
executor='eventlet',
serializer=serializer,
access_policy=access_policy)
|
<commit_before># Copyright 2015 - Alcatel-Lucent
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import oslo_messaging as messaging
OPTS = [
cfg.StrOpt('rpc_topic',
default='rpcapiv1',
help='The topic vitrage listens on'),
]
def set_defaults(control_exchange):
messaging.set_transport_defaults(control_exchange)
def get_client(transport, target, version_cap=None, serializer=None):
assert transport is not None
return messaging.RPCClient(transport,
target,
version_cap=version_cap,
serializer=serializer)
def get_server(target, endpoints, transport, serializer=None):
assert transport is not None
return messaging.get_rpc_server(transport,
target,
endpoints,
executor='eventlet',
serializer=serializer)
<commit_msg>Set access_policy for messaging's dispatcher
oslo.messaging allow dispatcher to restrict endpoint methods since
5.11.0 in d3a8f280ebd6fd12865fd20c4d772774e39aefa2, set with
DefaultRPCAccessPolicy to fix FutureWarning like:
"The access_policy argument is changing its default value to <class
'oslo_messaging.rpc.dispatcher.DefaultRPCAccessPolicy'> in version '?',
please update the code to explicitly set None as the value:
access_policy defaults to LegacyRPCAccessPolicy which exposes private
methods. Explicitly set access_policy to DefaultRPCAccessPolicy or
ExplicitRPCAccessPolicy.
Change-Id: Ib90013909237816cf906d584778f5e0ce7152fab<commit_after>
|
# Copyright 2015 - Alcatel-Lucent
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_messaging.rpc import dispatcher
OPTS = [
cfg.StrOpt('rpc_topic',
default='rpcapiv1',
help='The topic vitrage listens on'),
]
def set_defaults(control_exchange):
messaging.set_transport_defaults(control_exchange)
def get_client(transport, target, version_cap=None, serializer=None):
assert transport is not None
return messaging.RPCClient(transport,
target,
version_cap=version_cap,
serializer=serializer)
def get_server(target, endpoints, transport, serializer=None):
assert transport is not None
access_policy = dispatcher.DefaultRPCAccessPolicy
return messaging.get_rpc_server(transport,
target,
endpoints,
executor='eventlet',
serializer=serializer,
access_policy=access_policy)
|
# Copyright 2015 - Alcatel-Lucent
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import oslo_messaging as messaging
OPTS = [
cfg.StrOpt('rpc_topic',
default='rpcapiv1',
help='The topic vitrage listens on'),
]
def set_defaults(control_exchange):
messaging.set_transport_defaults(control_exchange)
def get_client(transport, target, version_cap=None, serializer=None):
assert transport is not None
return messaging.RPCClient(transport,
target,
version_cap=version_cap,
serializer=serializer)
def get_server(target, endpoints, transport, serializer=None):
assert transport is not None
return messaging.get_rpc_server(transport,
target,
endpoints,
executor='eventlet',
serializer=serializer)
Set access_policy for messaging's dispatcher
oslo.messaging allow dispatcher to restrict endpoint methods since
5.11.0 in d3a8f280ebd6fd12865fd20c4d772774e39aefa2, set with
DefaultRPCAccessPolicy to fix FutureWarning like:
"The access_policy argument is changing its default value to <class
'oslo_messaging.rpc.dispatcher.DefaultRPCAccessPolicy'> in version '?',
please update the code to explicitly set None as the value:
access_policy defaults to LegacyRPCAccessPolicy which exposes private
methods. Explicitly set access_policy to DefaultRPCAccessPolicy or
ExplicitRPCAccessPolicy.
Change-Id: Ib90013909237816cf906d584778f5e0ce7152fab# Copyright 2015 - Alcatel-Lucent
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_messaging.rpc import dispatcher
OPTS = [
cfg.StrOpt('rpc_topic',
default='rpcapiv1',
help='The topic vitrage listens on'),
]
def set_defaults(control_exchange):
messaging.set_transport_defaults(control_exchange)
def get_client(transport, target, version_cap=None, serializer=None):
assert transport is not None
return messaging.RPCClient(transport,
target,
version_cap=version_cap,
serializer=serializer)
def get_server(target, endpoints, transport, serializer=None):
assert transport is not None
access_policy = dispatcher.DefaultRPCAccessPolicy
return messaging.get_rpc_server(transport,
target,
endpoints,
executor='eventlet',
serializer=serializer,
access_policy=access_policy)
|
<commit_before># Copyright 2015 - Alcatel-Lucent
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import oslo_messaging as messaging
OPTS = [
cfg.StrOpt('rpc_topic',
default='rpcapiv1',
help='The topic vitrage listens on'),
]
def set_defaults(control_exchange):
messaging.set_transport_defaults(control_exchange)
def get_client(transport, target, version_cap=None, serializer=None):
assert transport is not None
return messaging.RPCClient(transport,
target,
version_cap=version_cap,
serializer=serializer)
def get_server(target, endpoints, transport, serializer=None):
assert transport is not None
return messaging.get_rpc_server(transport,
target,
endpoints,
executor='eventlet',
serializer=serializer)
<commit_msg>Set access_policy for messaging's dispatcher
oslo.messaging allow dispatcher to restrict endpoint methods since
5.11.0 in d3a8f280ebd6fd12865fd20c4d772774e39aefa2, set with
DefaultRPCAccessPolicy to fix FutureWarning like:
"The access_policy argument is changing its default value to <class
'oslo_messaging.rpc.dispatcher.DefaultRPCAccessPolicy'> in version '?',
please update the code to explicitly set None as the value:
access_policy defaults to LegacyRPCAccessPolicy which exposes private
methods. Explicitly set access_policy to DefaultRPCAccessPolicy or
ExplicitRPCAccessPolicy.
Change-Id: Ib90013909237816cf906d584778f5e0ce7152fab<commit_after># Copyright 2015 - Alcatel-Lucent
# Copyright 2016 - Nokia
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_config import cfg
import oslo_messaging as messaging
from oslo_messaging.rpc import dispatcher
OPTS = [
cfg.StrOpt('rpc_topic',
default='rpcapiv1',
help='The topic vitrage listens on'),
]
def set_defaults(control_exchange):
messaging.set_transport_defaults(control_exchange)
def get_client(transport, target, version_cap=None, serializer=None):
assert transport is not None
return messaging.RPCClient(transport,
target,
version_cap=version_cap,
serializer=serializer)
def get_server(target, endpoints, transport, serializer=None):
assert transport is not None
access_policy = dispatcher.DefaultRPCAccessPolicy
return messaging.get_rpc_server(transport,
target,
endpoints,
executor='eventlet',
serializer=serializer,
access_policy=access_policy)
|
b521c3a23d2802419fa2e15839aaceb27794ab64
|
nikola/md.py
|
nikola/md.py
|
"""Implementation of compile_html based on markdown."""
__all__ = ['compile_html']
import codecs
from markdown import markdown
def compile_html(source, dest):
    """Compile the markdown file *source* and write the HTML to *dest*."""
    with codecs.open(source, "r", "utf8") as src:
        text = src.read()
    html = markdown(text)
    with codecs.open(dest, "w+", "utf8") as dst:
        dst.write(html)
|
"""Implementation of compile_html based on markdown."""
__all__ = ['compile_html']
import codecs
import re
from markdown import markdown
def compile_html(source, dest):
    """Compile the markdown file *source* and write the HTML to *dest*.

    The 'fenced_code' and 'codehilite' markdown extensions are enabled
    so fenced code blocks get syntax highlighting.
    """
    with codecs.open(source, "r", "utf8") as in_file:
        data = in_file.read()
    output = markdown(data, ['fenced_code', 'codehilite'])
    # python-markdown's highlighter uses the class 'codehilite' to wrap code,
    # instead of the standard 'code'. None of the standard pygments
    # stylesheets use this class, so swap it to be 'code'
    output = re.sub(r'(<div[^>]+class="[^"]*)codehilite([^>]+)', r'\1code\2',
                    output)
    with codecs.open(dest, "w+", "utf8") as out_file:
        out_file.write(output)
|
Make python-markdown play well with pygments.
|
Make python-markdown play well with pygments.
Use the codehilite and fenced_code extensions, and add a regexp substitution
to make the codehilite extension match pygments' css "api".
|
Python
|
mit
|
damianavila/nikola,x1101/nikola,wcmckee/nikola,jjconti/nikola,knowsuchagency/nikola,TyberiusPrime/nikola,pluser/nikola,yamila-moreno/nikola,getnikola/nikola,immanetize/nikola,Proteus-tech/nikola,okin/nikola,lucacerone/nikola,techdragon/nikola,TyberiusPrime/nikola,Proteus-tech/nikola,xuhdev/nikola,schettino72/nikola,techdragon/nikola,andredias/nikola,yamila-moreno/nikola,andredias/nikola,JohnTroony/nikola,masayuko/nikola,servalproject/nikola,getnikola/nikola,immanetize/nikola,servalproject/nikola,jjconti/nikola,okin/nikola,berezovskyi/nikola,kotnik/nikola,okin/nikola,damianavila/nikola,schettino72/nikola,Proteus-tech/nikola,getnikola/nikola,wcmckee/nikola,berezovskyi/nikola,xuhdev/nikola,okin/nikola,s2hc-johan/nikola,xuhdev/nikola,s2hc-johan/nikola,gwax/nikola,xuhdev/nikola,atiro/nikola,kotnik/nikola,masayuko/nikola,atiro/nikola,knowsuchagency/nikola,Proteus-tech/nikola,immanetize/nikola,lucacerone/nikola,x1101/nikola,kotnik/nikola,knowsuchagency/nikola,andredias/nikola,techdragon/nikola,servalproject/nikola,JohnTroony/nikola,s2hc-johan/nikola,pluser/nikola,pluser/nikola,atiro/nikola,TyberiusPrime/nikola,x1101/nikola,damianavila/nikola,getnikola/nikola,gwax/nikola,berezovskyi/nikola,wcmckee/nikola,jjconti/nikola,masayuko/nikola,yamila-moreno/nikola,schettino72/nikola,gwax/nikola,JohnTroony/nikola,lucacerone/nikola
|
"""Implementation of compile_html based on markdown."""
__all__ = ['compile_html']
import codecs
from markdown import markdown
def compile_html(source, dest):
with codecs.open(source, "r", "utf8") as in_file:
data = in_file.read()
output = markdown(data)
with codecs.open(dest, "w+", "utf8") as out_file:
out_file.write(output)
Make python-markdown play well with pygments.
Use the codehilite and fenced_code extensions, and add a regexp substitution
to make the codehilite extension match pygments' css "api".
|
"""Implementation of compile_html based on markdown."""
__all__ = ['compile_html']
import codecs
import re
from markdown import markdown
def compile_html(source, dest):
with codecs.open(source, "r", "utf8") as in_file:
data = in_file.read()
output = markdown(data, ['fenced_code', 'codehilite'])
# python-markdown's highlighter uses the class 'codehilite' to wrap code,
# instead of the standard 'code'. None of the standard pygments
# stylesheets use this class, so swap it to be 'code'
output = re.sub(r'(<div[^>]+class="[^"]*)codehilite([^>]+)', r'\1code\2',
output)
with codecs.open(dest, "w+", "utf8") as out_file:
out_file.write(output)
|
<commit_before>"""Implementation of compile_html based on markdown."""
__all__ = ['compile_html']
import codecs
from markdown import markdown
def compile_html(source, dest):
with codecs.open(source, "r", "utf8") as in_file:
data = in_file.read()
output = markdown(data)
with codecs.open(dest, "w+", "utf8") as out_file:
out_file.write(output)
<commit_msg>Make python-markdown play well with pygments.
Use the codehilite and fenced_code extensions, and add a regexp substitution
to make the codehilite extension match pygments' css "api".<commit_after>
|
"""Implementation of compile_html based on markdown."""
__all__ = ['compile_html']
import codecs
import re
from markdown import markdown
def compile_html(source, dest):
    """Render the Markdown file at *source* to HTML and write it to *dest*."""
    # Read the whole input document as UTF-8 text.
    with codecs.open(source, "r", "utf8") as in_file:
        text = in_file.read()
    html = markdown(text, ['fenced_code', 'codehilite'])
    # python-markdown's codehilite extension wraps highlighted code in
    # <div class="codehilite">, but the standard pygments stylesheets only
    # know the class "code" -- rewrite the class attribute accordingly.
    html = re.sub(r'(<div[^>]+class="[^"]*)codehilite([^>]+)',
                  r'\1code\2', html)
    with codecs.open(dest, "w+", "utf8") as out_file:
        out_file.write(html)
|
"""Implementation of compile_html based on markdown."""
__all__ = ['compile_html']
import codecs
from markdown import markdown
def compile_html(source, dest):
with codecs.open(source, "r", "utf8") as in_file:
data = in_file.read()
output = markdown(data)
with codecs.open(dest, "w+", "utf8") as out_file:
out_file.write(output)
Make python-markdown play well with pygments.
Use the codehilite and fenced_code extensions, and add a regexp substitution
to make the codehilite extension match pygments' css "api"."""Implementation of compile_html based on markdown."""
__all__ = ['compile_html']
import codecs
import re
from markdown import markdown
def compile_html(source, dest):
with codecs.open(source, "r", "utf8") as in_file:
data = in_file.read()
output = markdown(data, ['fenced_code', 'codehilite'])
# python-markdown's highlighter uses the class 'codehilite' to wrap code,
# instead of the standard 'code'. None of the standard pygments
# stylesheets use this class, so swap it to be 'code'
output = re.sub(r'(<div[^>]+class="[^"]*)codehilite([^>]+)', r'\1code\2',
output)
with codecs.open(dest, "w+", "utf8") as out_file:
out_file.write(output)
|
<commit_before>"""Implementation of compile_html based on markdown."""
__all__ = ['compile_html']
import codecs
from markdown import markdown
def compile_html(source, dest):
with codecs.open(source, "r", "utf8") as in_file:
data = in_file.read()
output = markdown(data)
with codecs.open(dest, "w+", "utf8") as out_file:
out_file.write(output)
<commit_msg>Make python-markdown play well with pygments.
Use the codehilite and fenced_code extensions, and add a regexp substitution
to make the codehilite extension match pygments' css "api".<commit_after>"""Implementation of compile_html based on markdown."""
__all__ = ['compile_html']
import codecs
import re
from markdown import markdown
def compile_html(source, dest):
with codecs.open(source, "r", "utf8") as in_file:
data = in_file.read()
output = markdown(data, ['fenced_code', 'codehilite'])
# python-markdown's highlighter uses the class 'codehilite' to wrap code,
# instead of the standard 'code'. None of the standard pygments
# stylesheets use this class, so swap it to be 'code'
output = re.sub(r'(<div[^>]+class="[^"]*)codehilite([^>]+)', r'\1code\2',
output)
with codecs.open(dest, "w+", "utf8") as out_file:
out_file.write(output)
|
bcd7f8f3d7313538ab1c04da9c42e774350ccdfe
|
ui/widgets/histogram/TrackingHistogramWidget.py
|
ui/widgets/histogram/TrackingHistogramWidget.py
|
"""
TrackingHistogramWidget
:Authors:
Berend Klein Haneveld
"""
from PySide.QtGui import *
from PySide.QtCore import *
from HistogramWidget import HistogramWidget
from TrackingNodeItem import TrackingNodeItem
class TrackingHistogramWidget(HistogramWidget):
"""
TrackingHistogramWidget
"""
updatePosition = Signal(float)
def __init__(self):
super(TrackingHistogramWidget, self).__init__()
self.nodeItem = None
def update(self):
super(TrackingHistogramWidget, self).update()
if not self.nodeItem:
return
self.nodeItem.update()
def setHistogram(self, histogram):
super(TrackingHistogramWidget, self).setHistogram(histogram)
if not self.nodeItem:
self.nodeItem = TrackingNodeItem()
self.scene().addItem(self.nodeItem)
self.nodeItem.setHistogramItem(self._histogramItem)
self.nodeItem.setPos(QPoint(0, 0))
self.nodeItem.setZValue(300)
self.nodeItem.delegate = self
def updatePos(self, position):
self.updatePosition.emit(position)
|
"""
TrackingHistogramWidget
:Authors:
Berend Klein Haneveld
"""
from PySide.QtGui import *
from PySide.QtCore import *
from HistogramWidget import HistogramWidget
from TrackingNodeItem import TrackingNodeItem
from ui.widgets import Style
class TrackingHistogramWidget(HistogramWidget):
"""
TrackingHistogramWidget
"""
updatePosition = Signal(float)
def __init__(self):
super(TrackingHistogramWidget, self).__init__()
self.nodeItem = None
Style.styleWidgetForTab(self)
def update(self):
super(TrackingHistogramWidget, self).update()
if not self.nodeItem:
return
self.nodeItem.update()
def setHistogram(self, histogram):
super(TrackingHistogramWidget, self).setHistogram(histogram)
if not self.nodeItem:
self.nodeItem = TrackingNodeItem()
self.scene().addItem(self.nodeItem)
self.nodeItem.setHistogramItem(self._histogramItem)
self.nodeItem.setPos(QPoint(0, 0))
self.nodeItem.setZValue(300)
self.nodeItem.delegate = self
def updatePos(self, position):
self.updatePosition.emit(position)
|
Fix background color on OS X for histogram widget of ray.
|
Fix background color on OS X for histogram widget of ray.
|
Python
|
mit
|
berendkleinhaneveld/Registrationshop,berendkleinhaneveld/Registrationshop
|
"""
TrackingHistogramWidget
:Authors:
Berend Klein Haneveld
"""
from PySide.QtGui import *
from PySide.QtCore import *
from HistogramWidget import HistogramWidget
from TrackingNodeItem import TrackingNodeItem
class TrackingHistogramWidget(HistogramWidget):
"""
TrackingHistogramWidget
"""
updatePosition = Signal(float)
def __init__(self):
super(TrackingHistogramWidget, self).__init__()
self.nodeItem = None
def update(self):
super(TrackingHistogramWidget, self).update()
if not self.nodeItem:
return
self.nodeItem.update()
def setHistogram(self, histogram):
super(TrackingHistogramWidget, self).setHistogram(histogram)
if not self.nodeItem:
self.nodeItem = TrackingNodeItem()
self.scene().addItem(self.nodeItem)
self.nodeItem.setHistogramItem(self._histogramItem)
self.nodeItem.setPos(QPoint(0, 0))
self.nodeItem.setZValue(300)
self.nodeItem.delegate = self
def updatePos(self, position):
self.updatePosition.emit(position)
Fix background color on OS X for histogram widget of ray.
|
"""
TrackingHistogramWidget
:Authors:
Berend Klein Haneveld
"""
from PySide.QtGui import *
from PySide.QtCore import *
from HistogramWidget import HistogramWidget
from TrackingNodeItem import TrackingNodeItem
from ui.widgets import Style
class TrackingHistogramWidget(HistogramWidget):
"""
TrackingHistogramWidget
"""
updatePosition = Signal(float)
def __init__(self):
super(TrackingHistogramWidget, self).__init__()
self.nodeItem = None
Style.styleWidgetForTab(self)
def update(self):
super(TrackingHistogramWidget, self).update()
if not self.nodeItem:
return
self.nodeItem.update()
def setHistogram(self, histogram):
super(TrackingHistogramWidget, self).setHistogram(histogram)
if not self.nodeItem:
self.nodeItem = TrackingNodeItem()
self.scene().addItem(self.nodeItem)
self.nodeItem.setHistogramItem(self._histogramItem)
self.nodeItem.setPos(QPoint(0, 0))
self.nodeItem.setZValue(300)
self.nodeItem.delegate = self
def updatePos(self, position):
self.updatePosition.emit(position)
|
<commit_before>"""
TrackingHistogramWidget
:Authors:
Berend Klein Haneveld
"""
from PySide.QtGui import *
from PySide.QtCore import *
from HistogramWidget import HistogramWidget
from TrackingNodeItem import TrackingNodeItem
class TrackingHistogramWidget(HistogramWidget):
"""
TrackingHistogramWidget
"""
updatePosition = Signal(float)
def __init__(self):
super(TrackingHistogramWidget, self).__init__()
self.nodeItem = None
def update(self):
super(TrackingHistogramWidget, self).update()
if not self.nodeItem:
return
self.nodeItem.update()
def setHistogram(self, histogram):
super(TrackingHistogramWidget, self).setHistogram(histogram)
if not self.nodeItem:
self.nodeItem = TrackingNodeItem()
self.scene().addItem(self.nodeItem)
self.nodeItem.setHistogramItem(self._histogramItem)
self.nodeItem.setPos(QPoint(0, 0))
self.nodeItem.setZValue(300)
self.nodeItem.delegate = self
def updatePos(self, position):
self.updatePosition.emit(position)
<commit_msg>Fix background color on OS X for histogram widget of ray.<commit_after>
|
"""
TrackingHistogramWidget
:Authors:
Berend Klein Haneveld
"""
from PySide.QtGui import *
from PySide.QtCore import *
from HistogramWidget import HistogramWidget
from TrackingNodeItem import TrackingNodeItem
from ui.widgets import Style
class TrackingHistogramWidget(HistogramWidget):
    """Histogram widget with a tracking node item overlaid on the plot.

    The node reports its position back through :meth:`updatePos`, which is
    re-emitted as the :attr:`updatePosition` Qt signal.
    """

    # Emitted with the tracking node's position along the histogram.
    updatePosition = Signal(float)

    def __init__(self):
        super(TrackingHistogramWidget, self).__init__()
        self.nodeItem = None
        # Match the tab-style background (fixes background color on OS X).
        Style.styleWidgetForTab(self)

    def update(self):
        super(TrackingHistogramWidget, self).update()
        if self.nodeItem:
            self.nodeItem.update()

    def setHistogram(self, histogram):
        super(TrackingHistogramWidget, self).setHistogram(histogram)
        if self.nodeItem is None:
            # Create and configure the tracking node the first time a
            # histogram is set; subsequent calls reuse the same item.
            node = TrackingNodeItem()
            self.scene().addItem(node)
            node.setHistogramItem(self._histogramItem)
            node.setPos(QPoint(0, 0))
            node.setZValue(300)
            node.delegate = self
            self.nodeItem = node

    def updatePos(self, position):
        # Delegate callback from the node item; forward as a Qt signal.
        self.updatePosition.emit(position)
|
"""
TrackingHistogramWidget
:Authors:
Berend Klein Haneveld
"""
from PySide.QtGui import *
from PySide.QtCore import *
from HistogramWidget import HistogramWidget
from TrackingNodeItem import TrackingNodeItem
class TrackingHistogramWidget(HistogramWidget):
"""
TrackingHistogramWidget
"""
updatePosition = Signal(float)
def __init__(self):
super(TrackingHistogramWidget, self).__init__()
self.nodeItem = None
def update(self):
super(TrackingHistogramWidget, self).update()
if not self.nodeItem:
return
self.nodeItem.update()
def setHistogram(self, histogram):
super(TrackingHistogramWidget, self).setHistogram(histogram)
if not self.nodeItem:
self.nodeItem = TrackingNodeItem()
self.scene().addItem(self.nodeItem)
self.nodeItem.setHistogramItem(self._histogramItem)
self.nodeItem.setPos(QPoint(0, 0))
self.nodeItem.setZValue(300)
self.nodeItem.delegate = self
def updatePos(self, position):
self.updatePosition.emit(position)
Fix background color on OS X for histogram widget of ray."""
TrackingHistogramWidget
:Authors:
Berend Klein Haneveld
"""
from PySide.QtGui import *
from PySide.QtCore import *
from HistogramWidget import HistogramWidget
from TrackingNodeItem import TrackingNodeItem
from ui.widgets import Style
class TrackingHistogramWidget(HistogramWidget):
"""
TrackingHistogramWidget
"""
updatePosition = Signal(float)
def __init__(self):
super(TrackingHistogramWidget, self).__init__()
self.nodeItem = None
Style.styleWidgetForTab(self)
def update(self):
super(TrackingHistogramWidget, self).update()
if not self.nodeItem:
return
self.nodeItem.update()
def setHistogram(self, histogram):
super(TrackingHistogramWidget, self).setHistogram(histogram)
if not self.nodeItem:
self.nodeItem = TrackingNodeItem()
self.scene().addItem(self.nodeItem)
self.nodeItem.setHistogramItem(self._histogramItem)
self.nodeItem.setPos(QPoint(0, 0))
self.nodeItem.setZValue(300)
self.nodeItem.delegate = self
def updatePos(self, position):
self.updatePosition.emit(position)
|
<commit_before>"""
TrackingHistogramWidget
:Authors:
Berend Klein Haneveld
"""
from PySide.QtGui import *
from PySide.QtCore import *
from HistogramWidget import HistogramWidget
from TrackingNodeItem import TrackingNodeItem
class TrackingHistogramWidget(HistogramWidget):
"""
TrackingHistogramWidget
"""
updatePosition = Signal(float)
def __init__(self):
super(TrackingHistogramWidget, self).__init__()
self.nodeItem = None
def update(self):
super(TrackingHistogramWidget, self).update()
if not self.nodeItem:
return
self.nodeItem.update()
def setHistogram(self, histogram):
super(TrackingHistogramWidget, self).setHistogram(histogram)
if not self.nodeItem:
self.nodeItem = TrackingNodeItem()
self.scene().addItem(self.nodeItem)
self.nodeItem.setHistogramItem(self._histogramItem)
self.nodeItem.setPos(QPoint(0, 0))
self.nodeItem.setZValue(300)
self.nodeItem.delegate = self
def updatePos(self, position):
self.updatePosition.emit(position)
<commit_msg>Fix background color on OS X for histogram widget of ray.<commit_after>"""
TrackingHistogramWidget
:Authors:
Berend Klein Haneveld
"""
from PySide.QtGui import *
from PySide.QtCore import *
from HistogramWidget import HistogramWidget
from TrackingNodeItem import TrackingNodeItem
from ui.widgets import Style
class TrackingHistogramWidget(HistogramWidget):
"""
TrackingHistogramWidget
"""
updatePosition = Signal(float)
def __init__(self):
super(TrackingHistogramWidget, self).__init__()
self.nodeItem = None
Style.styleWidgetForTab(self)
def update(self):
super(TrackingHistogramWidget, self).update()
if not self.nodeItem:
return
self.nodeItem.update()
def setHistogram(self, histogram):
super(TrackingHistogramWidget, self).setHistogram(histogram)
if not self.nodeItem:
self.nodeItem = TrackingNodeItem()
self.scene().addItem(self.nodeItem)
self.nodeItem.setHistogramItem(self._histogramItem)
self.nodeItem.setPos(QPoint(0, 0))
self.nodeItem.setZValue(300)
self.nodeItem.delegate = self
def updatePos(self, position):
self.updatePosition.emit(position)
|
7c7319590e5deaed36365f91fb0aebdf93407f07
|
__init__.py
|
__init__.py
|
from ._jute import Interface, Dynamic
__all__ = [
'Interface',
'Dynamic',
]
|
from ._jute import Interface, Dynamic, InterfaceConformanceError
__all__ = [
'Interface',
'Dynamic',
'InterfaceConformanceError',
]
|
Add InterfaceConformanceError to exported names.
|
Add InterfaceConformanceError to exported names.
|
Python
|
mit
|
jongiddy/jute,jongiddy/jute
|
from ._jute import Interface, Dynamic
__all__ = [
'Interface',
'Dynamic',
]
Add InterfaceConformanceError to exported names.
|
from ._jute import Interface, Dynamic, InterfaceConformanceError
__all__ = [
'Interface',
'Dynamic',
'InterfaceConformanceError',
]
|
<commit_before>from ._jute import Interface, Dynamic
__all__ = [
'Interface',
'Dynamic',
]
<commit_msg>Add InterfaceConformanceError to exported names.<commit_after>
|
# Public package interface: re-export the names clients are expected to use
# from the private ``_jute`` implementation module.
from ._jute import Interface, Dynamic, InterfaceConformanceError

# Names exported by ``from jute import *``.
__all__ = [
    'Interface',
    'Dynamic',
    'InterfaceConformanceError',
]
|
from ._jute import Interface, Dynamic
__all__ = [
'Interface',
'Dynamic',
]
Add InterfaceConformanceError to exported names.from ._jute import Interface, Dynamic, InterfaceConformanceError
__all__ = [
'Interface',
'Dynamic',
'InterfaceConformanceError',
]
|
<commit_before>from ._jute import Interface, Dynamic
__all__ = [
'Interface',
'Dynamic',
]
<commit_msg>Add InterfaceConformanceError to exported names.<commit_after>from ._jute import Interface, Dynamic, InterfaceConformanceError
__all__ = [
'Interface',
'Dynamic',
'InterfaceConformanceError',
]
|
87cd4025aed62d76e3c64ba939f5241307b4478f
|
CascadeCount.py
|
CascadeCount.py
|
from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
MRJobNetworkX.run()
|
from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
CascadeCount.run()
|
Load from the preprocessed data
|
Load from the pre processed data
|
Python
|
mit
|
danjamker/DiffusionSimulation
|
from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
MRJobNetworkX.run()
Load from the pre processed data
|
from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
CascadeCount.run()
|
<commit_before>from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
MRJobNetworkX.run()
<commit_msg>Load from the pre processed data<commit_after>
|
from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
CascadeCount.run()
|
from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
MRJobNetworkX.run()
Load from the pre processed datafrom __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
CascadeCount.run()
|
<commit_before>from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
OUTPUT_PROTOCOL = JSONValueProtocol
def configure_options(self):
super(CascadeCount, self).configure_options()
def mapper(self, _, line):
client = hdfs.client.Client("http://" + urlparse(line).netloc)
if line[-1] != "#":
with client.read(urlparse(line).path) as r:
# with open(urlparse(line).path) as r:
buf = BytesIO(r.read())
# If the data is in a GZipped file.
if ".gz" in line:
gzip_f = gzip.GzipFile(fileobj=buf)
content = gzip_f.read()
buf = StringIO.StringIO(content)
dtf = pd.read_csv(buf, index_col=False, header=None, sep="\t", engine="python",
compression=None).drop_duplicates(subset=[2], keep='last')
yield "apple", len(dft.index)
def steps(self):
return [
MRStep(mapper_init=self.mapper_init,
mapper=self.mapper
)
]
if __name__ == '__main__':
MRJobNetworkX.run()
<commit_msg>Load from the pre processed data<commit_after>from __future__ import division
import gzip
try:
from BytesIO import BytesIO
except ImportError:
from io import BytesIO
try:
from urlparse import urlparse
except ImportError:
from urllib.parse import urlparse
import hdfs
import pandas as pd
from mrjob.job import MRJob
from mrjob.protocol import JSONValueProtocol
from mrjob.step import MRStep
class CascadeCount(MRJob):
    """MapReduce job that counts cascade entries per input file.

    Each input line is a URL naming a (possibly gzipped) tab-separated file
    on HDFS; the mapper reads the file, drops duplicate entries on column 2
    (keeping the last occurrence) and yields the resulting row count.
    """

    OUTPUT_PROTOCOL = JSONValueProtocol

    def configure_options(self):
        # No extra options yet; override kept for symmetry with sibling jobs.
        super(CascadeCount, self).configure_options()

    def mapper(self, _, line):
        """Yield ``("apple", row_count)`` for the HDFS file named by *line*.

        Lines ending in ``#`` are treated as commented-out and skipped.
        """
        client = hdfs.client.Client("http://" + urlparse(line).netloc)
        if line[-1] != "#":
            with client.read(urlparse(line).path) as r:
                buf = BytesIO(r.read())
                # If the data is in a GZipped file, decompress it first.
                # (Fixed: the original wrapped the decompressed *bytes* in an
                # unimported StringIO.StringIO, which raised NameError.)
                if ".gz" in line:
                    gzip_f = gzip.GzipFile(fileobj=buf)
                    buf = BytesIO(gzip_f.read())
                # Fixed NameError: the frame was bound as ``dtf`` but read
                # back as ``dft``.
                dtf = pd.read_csv(buf, index_col=False, header=None,
                                  sep="\t", engine="python",
                                  compression=None
                                  ).drop_duplicates(subset=[2], keep='last')
                yield "apple", len(dtf.index)

    def steps(self):
        # Single map-only step.  The original passed
        # ``mapper_init=self.mapper_init`` but no such method exists, which
        # raised AttributeError when the job was built.
        return [
            MRStep(mapper=self.mapper)
        ]
if __name__ == '__main__':
CascadeCount.run()
|
caab96114964a1c9154df67d97c66c701cede8d9
|
waterbutler/core/__init__.py
|
waterbutler/core/__init__.py
|
from waterbutler.core.utils import async_retry
from waterbutler.core.utils import backgrounded
__all__ = [
'backgrounded',
'async_retry'
]
|
from waterbutler.core.utils import async_retry
from waterbutler.core.utils import make_provider
__all__ = [
'async_retry',
'make_provider',
]
|
Allow make_provider to be imported from waterbutler core
|
Allow make_provider to be imported from waterbutler core
|
Python
|
apache-2.0
|
chrisseto/waterbutler,Johnetordoff/waterbutler,rafaeldelucena/waterbutler,cosenal/waterbutler,CenterForOpenScience/waterbutler,rdhyee/waterbutler,Ghalko/waterbutler,kwierman/waterbutler,icereval/waterbutler,felliott/waterbutler,hmoco/waterbutler,TomBaxter/waterbutler,RCOSDP/waterbutler
|
from waterbutler.core.utils import async_retry
from waterbutler.core.utils import backgrounded
__all__ = [
'backgrounded',
'async_retry'
]
Allow make_provider to be imported from waterbutler core
|
from waterbutler.core.utils import async_retry
from waterbutler.core.utils import make_provider
__all__ = [
'async_retry',
'make_provider',
]
|
<commit_before>from waterbutler.core.utils import async_retry
from waterbutler.core.utils import backgrounded
__all__ = [
'backgrounded',
'async_retry'
]
<commit_msg>Allow make_provider to be imported from waterbutler core<commit_after>
|
# Convenience re-exports from ``waterbutler.core.utils`` so callers can
# import these helpers directly from ``waterbutler.core``.
from waterbutler.core.utils import async_retry
from waterbutler.core.utils import make_provider

# Names exported by ``from waterbutler.core import *``.
__all__ = [
    'async_retry',
    'make_provider',
]
|
from waterbutler.core.utils import async_retry
from waterbutler.core.utils import backgrounded
__all__ = [
'backgrounded',
'async_retry'
]
Allow make_provider to be imported from waterbutler corefrom waterbutler.core.utils import async_retry
from waterbutler.core.utils import make_provider
__all__ = [
'async_retry',
'make_provider',
]
|
<commit_before>from waterbutler.core.utils import async_retry
from waterbutler.core.utils import backgrounded
__all__ = [
'backgrounded',
'async_retry'
]
<commit_msg>Allow make_provider to be imported from waterbutler core<commit_after>from waterbutler.core.utils import async_retry
from waterbutler.core.utils import make_provider
__all__ = [
'async_retry',
'make_provider',
]
|
44f92d7c96b074054b11876d208494da1acef7e7
|
Lib/tempfile.py
|
Lib/tempfile.py
|
# Temporary file name allocation
import posix
import path
# Changeable parameters (by clients!)...
# XXX Should the environment variable $TMPDIR override tempdir?
tempdir = '/usr/tmp'
template = '@'
# Kludge to hold mutable state
class Struct: pass
G = Struct()
G.i = 0
# User-callable function
# XXX Should this have a parameter, like C's mktemp()?
# XXX Should we instead use the model of Standard C's tempnam()?
# XXX By all means, avoid a mess with four different functions like C...
def mktemp():
while 1:
G.i = G.i+1
file = tempdir +'/'+ template + `posix.getpid()` +'.'+ `G.i`
if not path.exists(file):
break
return file
|
# Temporary file name allocation
import posix
import path
# Changeable parameters (by clients!)...
# XXX Should the environment variable $TMPDIR override tempdir?
tempdir = '/usr/tmp'
template = '@'
# Counter for generating unique names
counter = 0
# User-callable function
# XXX Should this have a parameter, like C's mktemp()?
# XXX Should we instead use the model of Standard C's tempnam()?
# XXX By all means, avoid a mess with four different functions like C...
def mktemp():
global counter
while 1:
counter = counter+1
file = tempdir+'/'+template+`posix.getpid()`+'.'+`counter`
if not path.exists(file):
break
return file
|
Use 'global' instead of struct kludge.
|
Use 'global' instead of struct kludge.
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
# Temporary file name allocation
import posix
import path
# Changeable parameters (by clients!)...
# XXX Should the environment variable $TMPDIR override tempdir?
tempdir = '/usr/tmp'
template = '@'
# Kludge to hold mutable state
class Struct: pass
G = Struct()
G.i = 0
# User-callable function
# XXX Should this have a parameter, like C's mktemp()?
# XXX Should we instead use the model of Standard C's tempnam()?
# XXX By all means, avoid a mess with four different functions like C...
def mktemp():
while 1:
G.i = G.i+1
file = tempdir +'/'+ template + `posix.getpid()` +'.'+ `G.i`
if not path.exists(file):
break
return file
Use 'global' instead of struct kludge.
|
# Temporary file name allocation
import posix
import path
# Changeable parameters (by clients!)...
# XXX Should the environment variable $TMPDIR override tempdir?
tempdir = '/usr/tmp'
template = '@'
# Counter for generating unique names
counter = 0
# User-callable function
# XXX Should this have a parameter, like C's mktemp()?
# XXX Should we instead use the model of Standard C's tempnam()?
# XXX By all means, avoid a mess with four different functions like C...
def mktemp():
global counter
while 1:
counter = counter+1
file = tempdir+'/'+template+`posix.getpid()`+'.'+`counter`
if not path.exists(file):
break
return file
|
<commit_before># Temporary file name allocation
import posix
import path
# Changeable parameters (by clients!)...
# XXX Should the environment variable $TMPDIR override tempdir?
tempdir = '/usr/tmp'
template = '@'
# Kludge to hold mutable state
class Struct: pass
G = Struct()
G.i = 0
# User-callable function
# XXX Should this have a parameter, like C's mktemp()?
# XXX Should we instead use the model of Standard C's tempnam()?
# XXX By all means, avoid a mess with four different functions like C...
def mktemp():
while 1:
G.i = G.i+1
file = tempdir +'/'+ template + `posix.getpid()` +'.'+ `G.i`
if not path.exists(file):
break
return file
<commit_msg>Use 'global' instead of struct kludge.<commit_after>
|
# Temporary file name allocation
import posix
import path
# Changeable parameters (by clients!)...
# XXX Should the environment variable $TMPDIR override tempdir?
tempdir = '/usr/tmp'
template = '@'
# Counter for generating unique names
counter = 0
# User-callable function
# XXX Should this have a parameter, like C's mktemp()?
# XXX Should we instead use the model of Standard C's tempnam()?
# XXX By all means, avoid a mess with four different functions like C...
def mktemp():
global counter
while 1:
counter = counter+1
file = tempdir+'/'+template+`posix.getpid()`+'.'+`counter`
if not path.exists(file):
break
return file
|
# Temporary file name allocation
import posix
import path
# Changeable parameters (by clients!)...
# XXX Should the environment variable $TMPDIR override tempdir?
tempdir = '/usr/tmp'
template = '@'
# Kludge to hold mutable state
class Struct: pass
G = Struct()
G.i = 0
# User-callable function
# XXX Should this have a parameter, like C's mktemp()?
# XXX Should we instead use the model of Standard C's tempnam()?
# XXX By all means, avoid a mess with four different functions like C...
def mktemp():
while 1:
G.i = G.i+1
file = tempdir +'/'+ template + `posix.getpid()` +'.'+ `G.i`
if not path.exists(file):
break
return file
Use 'global' instead of struct kludge.# Temporary file name allocation
import posix
import path
# Changeable parameters (by clients!)...
# XXX Should the environment variable $TMPDIR override tempdir?
tempdir = '/usr/tmp'
template = '@'
# Counter for generating unique names
counter = 0
# User-callable function
# XXX Should this have a parameter, like C's mktemp()?
# XXX Should we instead use the model of Standard C's tempnam()?
# XXX By all means, avoid a mess with four different functions like C...
def mktemp():
global counter
while 1:
counter = counter+1
file = tempdir+'/'+template+`posix.getpid()`+'.'+`counter`
if not path.exists(file):
break
return file
|
<commit_before># Temporary file name allocation
import posix
import path
# Changeable parameters (by clients!)...
# XXX Should the environment variable $TMPDIR override tempdir?
tempdir = '/usr/tmp'
template = '@'
# Kludge to hold mutable state
class Struct: pass
G = Struct()
G.i = 0
# User-callable function
# XXX Should this have a parameter, like C's mktemp()?
# XXX Should we instead use the model of Standard C's tempnam()?
# XXX By all means, avoid a mess with four different functions like C...
def mktemp():
while 1:
G.i = G.i+1
file = tempdir +'/'+ template + `posix.getpid()` +'.'+ `G.i`
if not path.exists(file):
break
return file
<commit_msg>Use 'global' instead of struct kludge.<commit_after># Temporary file name allocation
import posix
import path
# Changeable parameters (by clients!)...
# XXX Should the environment variable $TMPDIR override tempdir?
tempdir = '/usr/tmp'
template = '@'
# Counter for generating unique names
counter = 0
# User-callable function
# XXX Should this have a parameter, like C's mktemp()?
# XXX Should we instead use the model of Standard C's tempnam()?
# XXX By all means, avoid a mess with four different functions like C...
def mktemp():
global counter
while 1:
counter = counter+1
file = tempdir+'/'+template+`posix.getpid()`+'.'+`counter`
if not path.exists(file):
break
return file
|
70323d2cc7c568fecda66adb0e8ace1922b15b8f
|
recipes/graphviz/run_test.py
|
recipes/graphviz/run_test.py
|
#!/usr/bin/env python
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
|
#!/usr/bin/env python
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tsvg", "-o", "sample.svg", "sample.dot"], shell=True)
|
Add tests for svg and pdf on Windows
|
Add tests for svg and pdf on Windows
|
Python
|
bsd-3-clause
|
cpaulik/staged-recipes,jerowe/staged-recipes,cpaulik/staged-recipes,asmeurer/staged-recipes,hajapy/staged-recipes,guillochon/staged-recipes,richardotis/staged-recipes,glemaitre/staged-recipes,kwilcox/staged-recipes,patricksnape/staged-recipes,pstjohn/staged-recipes,johannesring/staged-recipes,caspervdw/staged-recipes,petrushy/staged-recipes,sannykr/staged-recipes,isuruf/staged-recipes,birdsarah/staged-recipes,vamega/staged-recipes,sodre/staged-recipes,Cashalow/staged-recipes,khallock/staged-recipes,rmcgibbo/staged-recipes,Juanlu001/staged-recipes,tylere/staged-recipes,scopatz/staged-recipes,vamega/staged-recipes,Savvysherpa/staged-recipes,tylere/staged-recipes,dharhas/staged-recipes,dfroger/staged-recipes,nicoddemus/staged-recipes,JohnGreeley/staged-recipes,shadowwalkersb/staged-recipes,jerowe/staged-recipes,OpenPIV/staged-recipes,jjhelmus/staged-recipes,NOAA-ORR-ERD/staged-recipes,jakirkham/staged-recipes,JohnGreeley/staged-recipes,richardotis/staged-recipes,jakirkham/staged-recipes,data-exp-lab/staged-recipes,ceholden/staged-recipes,sodre/staged-recipes,NOAA-ORR-ERD/staged-recipes,igortg/staged-recipes,hadim/staged-recipes,grlee77/staged-recipes,nicoddemus/staged-recipes,dschreij/staged-recipes,chohner/staged-recipes,rvalieris/staged-recipes,jcb91/staged-recipes,valgur/staged-recipes,larray-project/staged-recipes,birdsarah/staged-recipes,rvalieris/staged-recipes,benvandyke/staged-recipes,goanpeca/staged-recipes,isuruf/staged-recipes,mcernak/staged-recipes,jochym/staged-recipes,sannykr/staged-recipes,data-exp-lab/staged-recipes,petrushy/staged-recipes,sodre/staged-recipes,Juanlu001/staged-recipes,stuertz/staged-recipes,planetarypy/staged-recipes,chrisburr/staged-recipes,mariusvniekerk/staged-recipes,jochym/staged-recipes,chrisburr/staged-recipes,stuertz/staged-recipes,gqmelo/staged-recipes,ocefpaf/staged-recipes,mcernak/staged-recipes,atedstone/staged-recipes,johannesring/staged-recipes,basnijholt/staged-recipes,mariusvniekerk/staged-recipes,patricksnape/staged-rec
ipes,ReimarBauer/staged-recipes,bmabey/staged-recipes,dschreij/staged-recipes,mcs07/staged-recipes,kwilcox/staged-recipes,basnijholt/staged-recipes,koverholt/staged-recipes,atedstone/staged-recipes,SylvainCorlay/staged-recipes,dharhas/staged-recipes,pmlandwehr/staged-recipes,shadowwalkersb/staged-recipes,valgur/staged-recipes,khallock/staged-recipes,barkls/staged-recipes,glemaitre/staged-recipes,dfroger/staged-recipes,hbredin/staged-recipes,benvandyke/staged-recipes,gqmelo/staged-recipes,synapticarbors/staged-recipes,ReimarBauer/staged-recipes,koverholt/staged-recipes,conda-forge/staged-recipes,Cashalow/staged-recipes,caspervdw/staged-recipes,johanneskoester/staged-recipes,ceholden/staged-recipes,rolando-contrib/staged-recipes,hbredin/staged-recipes,rmcgibbo/staged-recipes,conda-forge/staged-recipes,bmabey/staged-recipes,scopatz/staged-recipes,SylvainCorlay/staged-recipes,blowekamp/staged-recipes,ocefpaf/staged-recipes,guillochon/staged-recipes,chohner/staged-recipes,planetarypy/staged-recipes,blowekamp/staged-recipes,asmeurer/staged-recipes,pmlandwehr/staged-recipes,jcb91/staged-recipes,hadim/staged-recipes,barkls/staged-recipes,pstjohn/staged-recipes,igortg/staged-recipes,mcs07/staged-recipes,Savvysherpa/staged-recipes,larray-project/staged-recipes,rolando-contrib/staged-recipes,hajapy/staged-recipes,jjhelmus/staged-recipes,OpenPIV/staged-recipes,johanneskoester/staged-recipes,grlee77/staged-recipes,synapticarbors/staged-recipes,goanpeca/staged-recipes
|
#!/usr/bin/env python
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
Add tests for svg and pdf on Windows
|
#!/usr/bin/env python
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tsvg", "-o", "sample.svg", "sample.dot"], shell=True)
|
<commit_before>#!/usr/bin/env python
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
<commit_msg>Add tests for svg and pdf on Windows<commit_after>
|
#!/usr/bin/env python
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tsvg", "-o", "sample.svg", "sample.dot"], shell=True)
|
#!/usr/bin/env python
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
Add tests for svg and pdf on Windows#!/usr/bin/env python
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tsvg", "-o", "sample.svg", "sample.dot"], shell=True)
|
<commit_before>#!/usr/bin/env python
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
<commit_msg>Add tests for svg and pdf on Windows<commit_after>#!/usr/bin/env python
import os
# This is failing for now on Windows. We need to submit
# a patch to the graphviz package to fix it
if not os.name == 'nt':
# Install graphviz Python package
import pip
pip.main(['install', 'graphviz'])
# Dask test
import dask.array as da
x = da.ones(4, chunks=(2,))
for fmt in ['pdf', 'png', 'dot', 'svg']:
(x + 1).sum().visualize(filename='graph.%s' % fmt)
else:
import subprocess
subprocess.call(["dot", "-Tpng", "-o", "sample.png", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tpdf", "-o", "sample.pdf", "sample.dot"], shell=True)
subprocess.call(["dot", "-Tsvg", "-o", "sample.svg", "sample.dot"], shell=True)
|
18550958c5c6d3a2d56074d53aa4f0b73e510163
|
AT0206/Analysis/signal_mask.py
|
AT0206/Analysis/signal_mask.py
|
'''
Make a mask of the emission
'''
cube = SpectralCube.read("M33_206_b_c_HI.fits")
cube = cube.with_mask(cube != 0*u.Jy)
noise_cube = Noise(cube)
new_noise = noise_cube.scale
cube = cube.with_mask(cube > new_noise*u.Jy)
# Load in the broad clean mask used
clean_mask = fits.getdata("../../../Arecibo/M33_newmask.fits")
# Need to match the dims
clean_mask = clean_mask.squeeze()
clean_mask = clean_mask[11:195, ::-1, ::-1]
clean_mask = clean_mask[:, 595:3504, 1065:3033]
old_mask = RadioMask(old_cube)
old_mask.intersection(old_arecibo_mask)
|
'''
Make a mask of the emission
'''
from astropy.io import fits
from spectral_cube import SpectralCube, BooleanArrayMask
from signal_id import RadioMask, Noise
from astropy import units as u
make_mask = True
save_mask = False
cube = SpectralCube.read("M33_206_b_c_HI.fits")
cube = cube.with_mask(cube != 0*u.Jy)
if make_mask:
# noise_cube = Noise(cube)
# new_noise = noise_cube.scale
new_noise = 0.0010592446454102172
cube = cube.with_mask(cube > new_noise*u.Jy)
# Load in the broad clean mask used
clean_mask = fits.getdata("../../../Arecibo/M33_newmask.fits")
# Need to match the dims
clean_mask = clean_mask.squeeze()
clean_mask = clean_mask[11:195, ::-1, ::-1]
clean_mask = clean_mask[:, 595:3504, 1065:3033]
from signal_id.utils import get_pixel_scales
pixscale = get_pixel_scales(cube.wcs)
beam_struct = cube.beam.as_tophat_kernel(pixscale)
mask = RadioMask(cube)
mask.intersection(clean_mask)
# mask.remove_small_regions()
# mask.open(iterations=3)
# mask.close(iterations=3)
# mask.dilate(iterations=6)
if save_mask:
mask.write('M33_206_b_c.source_mask.fits')
else:
# Try loading the mask in
mask_fits = fits.getdata('m33_206_b_c.source_mask.fits')
mask = BooleanArrayMask(mask_fits.astype(bool), cube.wcs)
cube = cube.with_mask(mask)
|
Choose whether to make new mask
|
Choose whether to make new mask
|
Python
|
mit
|
e-koch/VLA_Lband,e-koch/VLA_Lband
|
'''
Make a mask of the emission
'''
cube = SpectralCube.read("M33_206_b_c_HI.fits")
cube = cube.with_mask(cube != 0*u.Jy)
noise_cube = Noise(cube)
new_noise = noise_cube.scale
cube = cube.with_mask(cube > new_noise*u.Jy)
# Load in the broad clean mask used
clean_mask = fits.getdata("../../../Arecibo/M33_newmask.fits")
# Need to match the dims
clean_mask = clean_mask.squeeze()
clean_mask = clean_mask[11:195, ::-1, ::-1]
clean_mask = clean_mask[:, 595:3504, 1065:3033]
old_mask = RadioMask(old_cube)
old_mask.intersection(old_arecibo_mask)
Choose whether to make new mask
|
'''
Make a mask of the emission
'''
from astropy.io import fits
from spectral_cube import SpectralCube, BooleanArrayMask
from signal_id import RadioMask, Noise
from astropy import units as u
make_mask = True
save_mask = False
cube = SpectralCube.read("M33_206_b_c_HI.fits")
cube = cube.with_mask(cube != 0*u.Jy)
if make_mask:
# noise_cube = Noise(cube)
# new_noise = noise_cube.scale
new_noise = 0.0010592446454102172
cube = cube.with_mask(cube > new_noise*u.Jy)
# Load in the broad clean mask used
clean_mask = fits.getdata("../../../Arecibo/M33_newmask.fits")
# Need to match the dims
clean_mask = clean_mask.squeeze()
clean_mask = clean_mask[11:195, ::-1, ::-1]
clean_mask = clean_mask[:, 595:3504, 1065:3033]
from signal_id.utils import get_pixel_scales
pixscale = get_pixel_scales(cube.wcs)
beam_struct = cube.beam.as_tophat_kernel(pixscale)
mask = RadioMask(cube)
mask.intersection(clean_mask)
# mask.remove_small_regions()
# mask.open(iterations=3)
# mask.close(iterations=3)
# mask.dilate(iterations=6)
if save_mask:
mask.write('M33_206_b_c.source_mask.fits')
else:
# Try loading the mask in
mask_fits = fits.getdata('m33_206_b_c.source_mask.fits')
mask = BooleanArrayMask(mask_fits.astype(bool), cube.wcs)
cube = cube.with_mask(mask)
|
<commit_before>
'''
Make a mask of the emission
'''
cube = SpectralCube.read("M33_206_b_c_HI.fits")
cube = cube.with_mask(cube != 0*u.Jy)
noise_cube = Noise(cube)
new_noise = noise_cube.scale
cube = cube.with_mask(cube > new_noise*u.Jy)
# Load in the broad clean mask used
clean_mask = fits.getdata("../../../Arecibo/M33_newmask.fits")
# Need to match the dims
clean_mask = clean_mask.squeeze()
clean_mask = clean_mask[11:195, ::-1, ::-1]
clean_mask = clean_mask[:, 595:3504, 1065:3033]
old_mask = RadioMask(old_cube)
old_mask.intersection(old_arecibo_mask)
<commit_msg>Choose whether to make new mask<commit_after>
|
'''
Make a mask of the emission
'''
from astropy.io import fits
from spectral_cube import SpectralCube, BooleanArrayMask
from signal_id import RadioMask, Noise
from astropy import units as u
make_mask = True
save_mask = False
cube = SpectralCube.read("M33_206_b_c_HI.fits")
cube = cube.with_mask(cube != 0*u.Jy)
if make_mask:
# noise_cube = Noise(cube)
# new_noise = noise_cube.scale
new_noise = 0.0010592446454102172
cube = cube.with_mask(cube > new_noise*u.Jy)
# Load in the broad clean mask used
clean_mask = fits.getdata("../../../Arecibo/M33_newmask.fits")
# Need to match the dims
clean_mask = clean_mask.squeeze()
clean_mask = clean_mask[11:195, ::-1, ::-1]
clean_mask = clean_mask[:, 595:3504, 1065:3033]
from signal_id.utils import get_pixel_scales
pixscale = get_pixel_scales(cube.wcs)
beam_struct = cube.beam.as_tophat_kernel(pixscale)
mask = RadioMask(cube)
mask.intersection(clean_mask)
# mask.remove_small_regions()
# mask.open(iterations=3)
# mask.close(iterations=3)
# mask.dilate(iterations=6)
if save_mask:
mask.write('M33_206_b_c.source_mask.fits')
else:
# Try loading the mask in
mask_fits = fits.getdata('m33_206_b_c.source_mask.fits')
mask = BooleanArrayMask(mask_fits.astype(bool), cube.wcs)
cube = cube.with_mask(mask)
|
'''
Make a mask of the emission
'''
cube = SpectralCube.read("M33_206_b_c_HI.fits")
cube = cube.with_mask(cube != 0*u.Jy)
noise_cube = Noise(cube)
new_noise = noise_cube.scale
cube = cube.with_mask(cube > new_noise*u.Jy)
# Load in the broad clean mask used
clean_mask = fits.getdata("../../../Arecibo/M33_newmask.fits")
# Need to match the dims
clean_mask = clean_mask.squeeze()
clean_mask = clean_mask[11:195, ::-1, ::-1]
clean_mask = clean_mask[:, 595:3504, 1065:3033]
old_mask = RadioMask(old_cube)
old_mask.intersection(old_arecibo_mask)
Choose whether to make new mask
'''
Make a mask of the emission
'''
from astropy.io import fits
from spectral_cube import SpectralCube, BooleanArrayMask
from signal_id import RadioMask, Noise
from astropy import units as u
make_mask = True
save_mask = False
cube = SpectralCube.read("M33_206_b_c_HI.fits")
cube = cube.with_mask(cube != 0*u.Jy)
if make_mask:
# noise_cube = Noise(cube)
# new_noise = noise_cube.scale
new_noise = 0.0010592446454102172
cube = cube.with_mask(cube > new_noise*u.Jy)
# Load in the broad clean mask used
clean_mask = fits.getdata("../../../Arecibo/M33_newmask.fits")
# Need to match the dims
clean_mask = clean_mask.squeeze()
clean_mask = clean_mask[11:195, ::-1, ::-1]
clean_mask = clean_mask[:, 595:3504, 1065:3033]
from signal_id.utils import get_pixel_scales
pixscale = get_pixel_scales(cube.wcs)
beam_struct = cube.beam.as_tophat_kernel(pixscale)
mask = RadioMask(cube)
mask.intersection(clean_mask)
# mask.remove_small_regions()
# mask.open(iterations=3)
# mask.close(iterations=3)
# mask.dilate(iterations=6)
if save_mask:
mask.write('M33_206_b_c.source_mask.fits')
else:
# Try loading the mask in
mask_fits = fits.getdata('m33_206_b_c.source_mask.fits')
mask = BooleanArrayMask(mask_fits.astype(bool), cube.wcs)
cube = cube.with_mask(mask)
|
<commit_before>
'''
Make a mask of the emission
'''
cube = SpectralCube.read("M33_206_b_c_HI.fits")
cube = cube.with_mask(cube != 0*u.Jy)
noise_cube = Noise(cube)
new_noise = noise_cube.scale
cube = cube.with_mask(cube > new_noise*u.Jy)
# Load in the broad clean mask used
clean_mask = fits.getdata("../../../Arecibo/M33_newmask.fits")
# Need to match the dims
clean_mask = clean_mask.squeeze()
clean_mask = clean_mask[11:195, ::-1, ::-1]
clean_mask = clean_mask[:, 595:3504, 1065:3033]
old_mask = RadioMask(old_cube)
old_mask.intersection(old_arecibo_mask)
<commit_msg>Choose whether to make new mask<commit_after>
'''
Make a mask of the emission
'''
from astropy.io import fits
from spectral_cube import SpectralCube, BooleanArrayMask
from signal_id import RadioMask, Noise
from astropy import units as u
make_mask = True
save_mask = False
cube = SpectralCube.read("M33_206_b_c_HI.fits")
cube = cube.with_mask(cube != 0*u.Jy)
if make_mask:
# noise_cube = Noise(cube)
# new_noise = noise_cube.scale
new_noise = 0.0010592446454102172
cube = cube.with_mask(cube > new_noise*u.Jy)
# Load in the broad clean mask used
clean_mask = fits.getdata("../../../Arecibo/M33_newmask.fits")
# Need to match the dims
clean_mask = clean_mask.squeeze()
clean_mask = clean_mask[11:195, ::-1, ::-1]
clean_mask = clean_mask[:, 595:3504, 1065:3033]
from signal_id.utils import get_pixel_scales
pixscale = get_pixel_scales(cube.wcs)
beam_struct = cube.beam.as_tophat_kernel(pixscale)
mask = RadioMask(cube)
mask.intersection(clean_mask)
# mask.remove_small_regions()
# mask.open(iterations=3)
# mask.close(iterations=3)
# mask.dilate(iterations=6)
if save_mask:
mask.write('M33_206_b_c.source_mask.fits')
else:
# Try loading the mask in
mask_fits = fits.getdata('m33_206_b_c.source_mask.fits')
mask = BooleanArrayMask(mask_fits.astype(bool), cube.wcs)
cube = cube.with_mask(mask)
|
62400c15ceafe1af9a41d526deb94591ebdd3bcb
|
ideascube/conf/idb_irq_bardarash.py
|
ideascube/conf/idb_irq_bardarash.py
|
"""Bardarash in Kurdistan, Iraq"""
from .azraq import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'latin_name', 'birth_year', 'gender', 'country_of_origin_occupation', 'school_level']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Language skills'), ['ar_level', 'ku_level', 'sdb_level', 'en_level']),
)
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'birth_year', 'gender']
ENTRY_ACTIVITY_CHOICES = []
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'khanacademy',
}
]
|
"""Bardarash in Kurdistan, Iraq"""
from .idb_jor_azraq import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'latin_name', 'birth_year', 'gender', 'country_of_origin_occupation', 'school_level']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Language skills'), ['ar_level', 'ku_level', 'sdb_level', 'en_level']),
)
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'birth_year', 'gender']
ENTRY_ACTIVITY_CHOICES = []
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'khanacademy',
}
]
|
Fix import bardarash conf import path
|
Fix import bardarash conf import path
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
"""Bardarash in Kurdistan, Iraq"""
from .azraq import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'latin_name', 'birth_year', 'gender', 'country_of_origin_occupation', 'school_level']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Language skills'), ['ar_level', 'ku_level', 'sdb_level', 'en_level']),
)
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'birth_year', 'gender']
ENTRY_ACTIVITY_CHOICES = []
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'khanacademy',
}
]
Fix import bardarash conf import path
|
"""Bardarash in Kurdistan, Iraq"""
from .idb_jor_azraq import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'latin_name', 'birth_year', 'gender', 'country_of_origin_occupation', 'school_level']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Language skills'), ['ar_level', 'ku_level', 'sdb_level', 'en_level']),
)
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'birth_year', 'gender']
ENTRY_ACTIVITY_CHOICES = []
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'khanacademy',
}
]
|
<commit_before>"""Bardarash in Kurdistan, Iraq"""
from .azraq import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'latin_name', 'birth_year', 'gender', 'country_of_origin_occupation', 'school_level']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Language skills'), ['ar_level', 'ku_level', 'sdb_level', 'en_level']),
)
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'birth_year', 'gender']
ENTRY_ACTIVITY_CHOICES = []
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'khanacademy',
}
]
<commit_msg>Fix import bardarash conf import path<commit_after>
|
"""Bardarash in Kurdistan, Iraq"""
from .idb_jor_azraq import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'latin_name', 'birth_year', 'gender', 'country_of_origin_occupation', 'school_level']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Language skills'), ['ar_level', 'ku_level', 'sdb_level', 'en_level']),
)
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'birth_year', 'gender']
ENTRY_ACTIVITY_CHOICES = []
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'khanacademy',
}
]
|
"""Bardarash in Kurdistan, Iraq"""
from .azraq import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'latin_name', 'birth_year', 'gender', 'country_of_origin_occupation', 'school_level']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Language skills'), ['ar_level', 'ku_level', 'sdb_level', 'en_level']),
)
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'birth_year', 'gender']
ENTRY_ACTIVITY_CHOICES = []
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'khanacademy',
}
]
Fix import bardarash conf import path"""Bardarash in Kurdistan, Iraq"""
from .idb_jor_azraq import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'latin_name', 'birth_year', 'gender', 'country_of_origin_occupation', 'school_level']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Language skills'), ['ar_level', 'ku_level', 'sdb_level', 'en_level']),
)
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'birth_year', 'gender']
ENTRY_ACTIVITY_CHOICES = []
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'khanacademy',
}
]
|
<commit_before>"""Bardarash in Kurdistan, Iraq"""
from .azraq import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'latin_name', 'birth_year', 'gender', 'country_of_origin_occupation', 'school_level']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Language skills'), ['ar_level', 'ku_level', 'sdb_level', 'en_level']),
)
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'birth_year', 'gender']
ENTRY_ACTIVITY_CHOICES = []
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'khanacademy',
}
]
<commit_msg>Fix import bardarash conf import path<commit_after>"""Bardarash in Kurdistan, Iraq"""
from .idb_jor_azraq import * # noqa
from django.utils.translation import ugettext_lazy as _
USER_FORM_FIELDS = (
('Ideasbox', ['serial', 'box_awareness']),
(_('Personal informations'), ['short_name', 'full_name', 'latin_name', 'birth_year', 'gender', 'country_of_origin_occupation', 'school_level']), # noqa
(_('Family'), ['marital_status', 'family_status', 'children_under_12', 'children_under_18', 'children_above_18']), # noqa
(_('In the camp'), ['camp_entry_date', 'camp_activities', 'current_occupation', 'camp_address']), # noqa
(_('Language skills'), ['ar_level', 'ku_level', 'sdb_level', 'en_level']),
)
MONITORING_ENTRY_EXPORT_FIELDS = ['serial', 'birth_year', 'gender']
ENTRY_ACTIVITY_CHOICES = []
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'library',
},
{
'id': 'mediacenter',
},
{
'id': 'khanacademy',
}
]
|
a03d34cbfd4f9fcf98ba0cb4584a24f0632897cf
|
Simulator/src/sim_tools/text_box.py
|
Simulator/src/sim_tools/text_box.py
|
import pygame
class Text_Box(object):
'''Text_Box()
You never have to initialize this! Just call Text_Box.draw(display, pos, color, text)
It draws the same way a pygame primitive would.
'''
pygame.font.init()
font = pygame.font.SysFont("monospace", 15)
@classmethod
def draw(self, display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
''' draw(display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
pos: In pygame coordinates
color: [0, 255]
text: Can by multiline, of arbitrary length
To change text during operation, use the "set_text method"
Ex:
>>> tb = Text_Box()
>>> tb.draw(display, text='hello')
or in a draw loop,
>>> tb.draw(display, pos, color, text)
'''
lines = text.splitlines()
width = height = 0
for l in lines:
width = max(width, self.font.size(l)[0])
height += self.font.get_linesize()
height = 0
for l in lines:
t = self.font.render(l, 0, color)
display.blit(
t,
(pos[0], pos[1] + height)
)
height += self.font.get_linesize()
|
import pygame
#WRITEN BY JACOB PANIKULAM
class Text_Box(object):
'''Text_Box()
You never have to initialize this! Just call Text_Box.draw(display, pos, color, text)
It draws the same way a pygame primitive would.
'''
pygame.font.init()
font = pygame.font.SysFont("monospace", 15)
@classmethod
def draw(self, display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
''' draw(display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
pos: In pygame coordinates
color: [0, 255]
text: Can by multiline, of arbitrary length
To change text during operation, use the "set_text method"
Ex:
>>> tb = Text_Box()
>>> tb.draw(display, text='hello')
or in a draw loop,
>>> tb.draw(display, pos, color, text)
'''
lines = text.splitlines()
width = height = 0
for l in lines:
width = max(width, self.font.size(l)[0])
height += self.font.get_linesize()
height = 0
for l in lines:
t = self.font.render(l, 0, color)
display.blit(
t,
(pos[0], pos[1] + height)
)
height += self.font.get_linesize()
|
Add Jake's name to top of file
|
Add Jake's name to top of file
|
Python
|
mit
|
ufieeehw/IEEE2016,ufieeehw/IEEE2016,ufieeehw/IEEE2016,ufieeehw/IEEE2016
|
import pygame
class Text_Box(object):
'''Text_Box()
You never have to initialize this! Just call Text_Box.draw(display, pos, color, text)
It draws the same way a pygame primitive would.
'''
pygame.font.init()
font = pygame.font.SysFont("monospace", 15)
@classmethod
def draw(self, display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
''' draw(display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
pos: In pygame coordinates
color: [0, 255]
text: Can by multiline, of arbitrary length
To change text during operation, use the "set_text method"
Ex:
>>> tb = Text_Box()
>>> tb.draw(display, text='hello')
or in a draw loop,
>>> tb.draw(display, pos, color, text)
'''
lines = text.splitlines()
width = height = 0
for l in lines:
width = max(width, self.font.size(l)[0])
height += self.font.get_linesize()
height = 0
for l in lines:
t = self.font.render(l, 0, color)
display.blit(
t,
(pos[0], pos[1] + height)
)
height += self.font.get_linesize()
Add Jake's name to top of file
|
import pygame
#WRITEN BY JACOB PANIKULAM
class Text_Box(object):
'''Text_Box()
You never have to initialize this! Just call Text_Box.draw(display, pos, color, text)
It draws the same way a pygame primitive would.
'''
pygame.font.init()
font = pygame.font.SysFont("monospace", 15)
@classmethod
def draw(self, display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
''' draw(display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
pos: In pygame coordinates
color: [0, 255]
text: Can by multiline, of arbitrary length
To change text during operation, use the "set_text method"
Ex:
>>> tb = Text_Box()
>>> tb.draw(display, text='hello')
or in a draw loop,
>>> tb.draw(display, pos, color, text)
'''
lines = text.splitlines()
width = height = 0
for l in lines:
width = max(width, self.font.size(l)[0])
height += self.font.get_linesize()
height = 0
for l in lines:
t = self.font.render(l, 0, color)
display.blit(
t,
(pos[0], pos[1] + height)
)
height += self.font.get_linesize()
|
<commit_before>import pygame
class Text_Box(object):
'''Text_Box()
You never have to initialize this! Just call Text_Box.draw(display, pos, color, text)
It draws the same way a pygame primitive would.
'''
pygame.font.init()
font = pygame.font.SysFont("monospace", 15)
@classmethod
def draw(self, display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
''' draw(display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
pos: In pygame coordinates
color: [0, 255]
text: Can by multiline, of arbitrary length
To change text during operation, use the "set_text method"
Ex:
>>> tb = Text_Box()
>>> tb.draw(display, text='hello')
or in a draw loop,
>>> tb.draw(display, pos, color, text)
'''
lines = text.splitlines()
width = height = 0
for l in lines:
width = max(width, self.font.size(l)[0])
height += self.font.get_linesize()
height = 0
for l in lines:
t = self.font.render(l, 0, color)
display.blit(
t,
(pos[0], pos[1] + height)
)
height += self.font.get_linesize()
<commit_msg>Add Jake's name to top of file<commit_after>
|
import pygame
#WRITEN BY JACOB PANIKULAM
class Text_Box(object):
'''Text_Box()
You never have to initialize this! Just call Text_Box.draw(display, pos, color, text)
It draws the same way a pygame primitive would.
'''
pygame.font.init()
font = pygame.font.SysFont("monospace", 15)
@classmethod
def draw(self, display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
''' draw(display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
pos: In pygame coordinates
color: [0, 255]
text: Can by multiline, of arbitrary length
To change text during operation, use the "set_text method"
Ex:
>>> tb = Text_Box()
>>> tb.draw(display, text='hello')
or in a draw loop,
>>> tb.draw(display, pos, color, text)
'''
lines = text.splitlines()
width = height = 0
for l in lines:
width = max(width, self.font.size(l)[0])
height += self.font.get_linesize()
height = 0
for l in lines:
t = self.font.render(l, 0, color)
display.blit(
t,
(pos[0], pos[1] + height)
)
height += self.font.get_linesize()
|
import pygame
class Text_Box(object):
'''Text_Box()
You never have to initialize this! Just call Text_Box.draw(display, pos, color, text)
It draws the same way a pygame primitive would.
'''
pygame.font.init()
font = pygame.font.SysFont("monospace", 15)
@classmethod
def draw(self, display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
''' draw(display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
pos: In pygame coordinates
color: [0, 255]
text: Can by multiline, of arbitrary length
To change text during operation, use the "set_text method"
Ex:
>>> tb = Text_Box()
>>> tb.draw(display, text='hello')
or in a draw loop,
>>> tb.draw(display, pos, color, text)
'''
lines = text.splitlines()
width = height = 0
for l in lines:
width = max(width, self.font.size(l)[0])
height += self.font.get_linesize()
height = 0
for l in lines:
t = self.font.render(l, 0, color)
display.blit(
t,
(pos[0], pos[1] + height)
)
height += self.font.get_linesize()
Add Jake's name to top of fileimport pygame
#WRITEN BY JACOB PANIKULAM
class Text_Box(object):
'''Text_Box()
You never have to initialize this! Just call Text_Box.draw(display, pos, color, text)
It draws the same way a pygame primitive would.
'''
pygame.font.init()
font = pygame.font.SysFont("monospace", 15)
@classmethod
def draw(self, display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
''' draw(display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
pos: In pygame coordinates
color: [0, 255]
text: Can by multiline, of arbitrary length
To change text during operation, use the "set_text method"
Ex:
>>> tb = Text_Box()
>>> tb.draw(display, text='hello')
or in a draw loop,
>>> tb.draw(display, pos, color, text)
'''
lines = text.splitlines()
width = height = 0
for l in lines:
width = max(width, self.font.size(l)[0])
height += self.font.get_linesize()
height = 0
for l in lines:
t = self.font.render(l, 0, color)
display.blit(
t,
(pos[0], pos[1] + height)
)
height += self.font.get_linesize()
|
<commit_before>import pygame
class Text_Box(object):
'''Text_Box()
You never have to initialize this! Just call Text_Box.draw(display, pos, color, text)
It draws the same way a pygame primitive would.
'''
pygame.font.init()
font = pygame.font.SysFont("monospace", 15)
@classmethod
def draw(self, display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
''' draw(display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
pos: In pygame coordinates
color: [0, 255]
text: Can by multiline, of arbitrary length
To change text during operation, use the "set_text method"
Ex:
>>> tb = Text_Box()
>>> tb.draw(display, text='hello')
or in a draw loop,
>>> tb.draw(display, pos, color, text)
'''
lines = text.splitlines()
width = height = 0
for l in lines:
width = max(width, self.font.size(l)[0])
height += self.font.get_linesize()
height = 0
for l in lines:
t = self.font.render(l, 0, color)
display.blit(
t,
(pos[0], pos[1] + height)
)
height += self.font.get_linesize()
<commit_msg>Add Jake's name to top of file<commit_after>import pygame
#WRITEN BY JACOB PANIKULAM
class Text_Box(object):
'''Text_Box()
You never have to initialize this! Just call Text_Box.draw(display, pos, color, text)
It draws the same way a pygame primitive would.
'''
pygame.font.init()
font = pygame.font.SysFont("monospace", 15)
@classmethod
def draw(self, display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
''' draw(display, pos=(0, 0), color=(255, 255, 255), text="Empty!"):
pos: In pygame coordinates
color: [0, 255]
text: Can by multiline, of arbitrary length
To change text during operation, use the "set_text method"
Ex:
>>> tb = Text_Box()
>>> tb.draw(display, text='hello')
or in a draw loop,
>>> tb.draw(display, pos, color, text)
'''
lines = text.splitlines()
width = height = 0
for l in lines:
width = max(width, self.font.size(l)[0])
height += self.font.get_linesize()
height = 0
for l in lines:
t = self.font.render(l, 0, color)
display.blit(
t,
(pos[0], pos[1] + height)
)
height += self.font.get_linesize()
|
29fa35d8ff6fa26e676c693ea17faeb03440d116
|
scripts/3b-show-features.py
|
scripts/3b-show-features.py
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
args = parser.parse_args()
print args
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
proj.load_features()
proj.show_features()
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--image', help='show specific image')
args = parser.parse_args()
print args
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
proj.load_features()
if args.image:
image = proj.findImageByName(args.image)
proj.show_features_image(image)
else:
proj.show_features_images()
|
Support showing features for a single image.
|
Support showing features for a single image.
Former-commit-id: 4143a53dae02b9ece391f65a47c060bbcfd0b7a8
|
Python
|
mit
|
UASLab/ImageAnalysis
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
args = parser.parse_args()
print args
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
proj.load_features()
proj.show_features()
Support showing features for a single image.
Former-commit-id: 4143a53dae02b9ece391f65a47c060bbcfd0b7a8
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--image', help='show specific image')
args = parser.parse_args()
print args
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
proj.load_features()
if args.image:
image = proj.findImageByName(args.image)
proj.show_features_image(image)
else:
proj.show_features_images()
|
<commit_before>#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
args = parser.parse_args()
print args
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
proj.load_features()
proj.show_features()
<commit_msg>Support showing features for a single image.
Former-commit-id: 4143a53dae02b9ece391f65a47c060bbcfd0b7a8<commit_after>
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--image', help='show specific image')
args = parser.parse_args()
print args
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
proj.load_features()
if args.image:
image = proj.findImageByName(args.image)
proj.show_features_image(image)
else:
proj.show_features_images()
|
#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
args = parser.parse_args()
print args
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
proj.load_features()
proj.show_features()
Support showing features for a single image.
Former-commit-id: 4143a53dae02b9ece391f65a47c060bbcfd0b7a8#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--image', help='show specific image')
args = parser.parse_args()
print args
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
proj.load_features()
if args.image:
image = proj.findImageByName(args.image)
proj.show_features_image(image)
else:
proj.show_features_images()
|
<commit_before>#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
args = parser.parse_args()
print args
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
proj.load_features()
proj.show_features()
<commit_msg>Support showing features for a single image.
Former-commit-id: 4143a53dae02b9ece391f65a47c060bbcfd0b7a8<commit_after>#!/usr/bin/python
import sys
sys.path.insert(0, "/usr/local/opencv-2.4.11/lib/python2.7/site-packages/")
import argparse
import commands
import cv2
import fnmatch
import os.path
sys.path.append('../lib')
import ProjectMgr
# for all the images in the project image_dir, detect features using the
# specified method and parameters
parser = argparse.ArgumentParser(description='Load the project\'s images.')
parser.add_argument('--project', required=True, help='project directory')
parser.add_argument('--image', help='show specific image')
args = parser.parse_args()
print args
proj = ProjectMgr.ProjectMgr(args.project)
proj.load_image_info()
proj.load_features()
if args.image:
image = proj.findImageByName(args.image)
proj.show_features_image(image)
else:
proj.show_features_images()
|
0fc7a7962579cef554b30028e56923e2d05903c1
|
custom/icds_reports/models/manager.py
|
custom/icds_reports/models/manager.py
|
from __future__ import absolute_import, unicode_literals
import uuid
from django.db.models.manager import BaseManager
from django.db.models.query import QuerySet
from corehq.toggles import ICDS_COMPARE_QUERIES_AGAINST_CITUS, NAMESPACE_OTHER
class CitusComparisonQuerySet(QuerySet):
def _fetch_all(self):
from custom.icds_reports.tasks import run_citus_experiment_raw_sql
if ICDS_COMPARE_QUERIES_AGAINST_CITUS.enabled(uuid.uuid4().hex, NAMESPACE_OTHER):
query, params = self.query.sql_with_params()
run_citus_experiment_raw_sql.delay(query, params, data_source=self.model.__name__)
super(CitusComparisonQuerySet, self)._fetch_all()
class CitusComparisonManager(BaseManager.from_queryset(CitusComparisonQuerySet)):
pass
|
from __future__ import absolute_import, unicode_literals
import json
import uuid
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models.manager import BaseManager
from django.db.models.query import QuerySet
from corehq.toggles import ICDS_COMPARE_QUERIES_AGAINST_CITUS, NAMESPACE_OTHER
class CitusComparisonQuerySet(QuerySet):
def _fetch_all(self):
from custom.icds_reports.tasks import run_citus_experiment_raw_sql
if ICDS_COMPARE_QUERIES_AGAINST_CITUS.enabled(uuid.uuid4().hex, NAMESPACE_OTHER):
query, params = self.query.sql_with_params()
params = json.loads(json.dumps(params, cls=DjangoJSONEncoder))
run_citus_experiment_raw_sql.delay(query, params, data_source=self.model.__name__)
super(CitusComparisonQuerySet, self)._fetch_all()
class CitusComparisonManager(BaseManager.from_queryset(CitusComparisonQuerySet)):
pass
|
Use django json encoder for models manger params
|
Use django json encoder for models manger params
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from __future__ import absolute_import, unicode_literals
import uuid
from django.db.models.manager import BaseManager
from django.db.models.query import QuerySet
from corehq.toggles import ICDS_COMPARE_QUERIES_AGAINST_CITUS, NAMESPACE_OTHER
class CitusComparisonQuerySet(QuerySet):
def _fetch_all(self):
from custom.icds_reports.tasks import run_citus_experiment_raw_sql
if ICDS_COMPARE_QUERIES_AGAINST_CITUS.enabled(uuid.uuid4().hex, NAMESPACE_OTHER):
query, params = self.query.sql_with_params()
run_citus_experiment_raw_sql.delay(query, params, data_source=self.model.__name__)
super(CitusComparisonQuerySet, self)._fetch_all()
class CitusComparisonManager(BaseManager.from_queryset(CitusComparisonQuerySet)):
pass
Use django json encoder for models manger params
|
from __future__ import absolute_import, unicode_literals
import json
import uuid
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models.manager import BaseManager
from django.db.models.query import QuerySet
from corehq.toggles import ICDS_COMPARE_QUERIES_AGAINST_CITUS, NAMESPACE_OTHER
class CitusComparisonQuerySet(QuerySet):
def _fetch_all(self):
from custom.icds_reports.tasks import run_citus_experiment_raw_sql
if ICDS_COMPARE_QUERIES_AGAINST_CITUS.enabled(uuid.uuid4().hex, NAMESPACE_OTHER):
query, params = self.query.sql_with_params()
params = json.loads(json.dumps(params, cls=DjangoJSONEncoder))
run_citus_experiment_raw_sql.delay(query, params, data_source=self.model.__name__)
super(CitusComparisonQuerySet, self)._fetch_all()
class CitusComparisonManager(BaseManager.from_queryset(CitusComparisonQuerySet)):
pass
|
<commit_before>from __future__ import absolute_import, unicode_literals
import uuid
from django.db.models.manager import BaseManager
from django.db.models.query import QuerySet
from corehq.toggles import ICDS_COMPARE_QUERIES_AGAINST_CITUS, NAMESPACE_OTHER
class CitusComparisonQuerySet(QuerySet):
def _fetch_all(self):
from custom.icds_reports.tasks import run_citus_experiment_raw_sql
if ICDS_COMPARE_QUERIES_AGAINST_CITUS.enabled(uuid.uuid4().hex, NAMESPACE_OTHER):
query, params = self.query.sql_with_params()
run_citus_experiment_raw_sql.delay(query, params, data_source=self.model.__name__)
super(CitusComparisonQuerySet, self)._fetch_all()
class CitusComparisonManager(BaseManager.from_queryset(CitusComparisonQuerySet)):
pass
<commit_msg>Use django json encoder for models manger params<commit_after>
|
from __future__ import absolute_import, unicode_literals
import json
import uuid
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models.manager import BaseManager
from django.db.models.query import QuerySet
from corehq.toggles import ICDS_COMPARE_QUERIES_AGAINST_CITUS, NAMESPACE_OTHER
class CitusComparisonQuerySet(QuerySet):
def _fetch_all(self):
from custom.icds_reports.tasks import run_citus_experiment_raw_sql
if ICDS_COMPARE_QUERIES_AGAINST_CITUS.enabled(uuid.uuid4().hex, NAMESPACE_OTHER):
query, params = self.query.sql_with_params()
params = json.loads(json.dumps(params, cls=DjangoJSONEncoder))
run_citus_experiment_raw_sql.delay(query, params, data_source=self.model.__name__)
super(CitusComparisonQuerySet, self)._fetch_all()
class CitusComparisonManager(BaseManager.from_queryset(CitusComparisonQuerySet)):
pass
|
from __future__ import absolute_import, unicode_literals
import uuid
from django.db.models.manager import BaseManager
from django.db.models.query import QuerySet
from corehq.toggles import ICDS_COMPARE_QUERIES_AGAINST_CITUS, NAMESPACE_OTHER
class CitusComparisonQuerySet(QuerySet):
def _fetch_all(self):
from custom.icds_reports.tasks import run_citus_experiment_raw_sql
if ICDS_COMPARE_QUERIES_AGAINST_CITUS.enabled(uuid.uuid4().hex, NAMESPACE_OTHER):
query, params = self.query.sql_with_params()
run_citus_experiment_raw_sql.delay(query, params, data_source=self.model.__name__)
super(CitusComparisonQuerySet, self)._fetch_all()
class CitusComparisonManager(BaseManager.from_queryset(CitusComparisonQuerySet)):
pass
Use django json encoder for models manger paramsfrom __future__ import absolute_import, unicode_literals
import json
import uuid
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models.manager import BaseManager
from django.db.models.query import QuerySet
from corehq.toggles import ICDS_COMPARE_QUERIES_AGAINST_CITUS, NAMESPACE_OTHER
class CitusComparisonQuerySet(QuerySet):
def _fetch_all(self):
from custom.icds_reports.tasks import run_citus_experiment_raw_sql
if ICDS_COMPARE_QUERIES_AGAINST_CITUS.enabled(uuid.uuid4().hex, NAMESPACE_OTHER):
query, params = self.query.sql_with_params()
params = json.loads(json.dumps(params, cls=DjangoJSONEncoder))
run_citus_experiment_raw_sql.delay(query, params, data_source=self.model.__name__)
super(CitusComparisonQuerySet, self)._fetch_all()
class CitusComparisonManager(BaseManager.from_queryset(CitusComparisonQuerySet)):
pass
|
<commit_before>from __future__ import absolute_import, unicode_literals
import uuid
from django.db.models.manager import BaseManager
from django.db.models.query import QuerySet
from corehq.toggles import ICDS_COMPARE_QUERIES_AGAINST_CITUS, NAMESPACE_OTHER
class CitusComparisonQuerySet(QuerySet):
def _fetch_all(self):
from custom.icds_reports.tasks import run_citus_experiment_raw_sql
if ICDS_COMPARE_QUERIES_AGAINST_CITUS.enabled(uuid.uuid4().hex, NAMESPACE_OTHER):
query, params = self.query.sql_with_params()
run_citus_experiment_raw_sql.delay(query, params, data_source=self.model.__name__)
super(CitusComparisonQuerySet, self)._fetch_all()
class CitusComparisonManager(BaseManager.from_queryset(CitusComparisonQuerySet)):
pass
<commit_msg>Use django json encoder for models manger params<commit_after>from __future__ import absolute_import, unicode_literals
import json
import uuid
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models.manager import BaseManager
from django.db.models.query import QuerySet
from corehq.toggles import ICDS_COMPARE_QUERIES_AGAINST_CITUS, NAMESPACE_OTHER
class CitusComparisonQuerySet(QuerySet):
def _fetch_all(self):
from custom.icds_reports.tasks import run_citus_experiment_raw_sql
if ICDS_COMPARE_QUERIES_AGAINST_CITUS.enabled(uuid.uuid4().hex, NAMESPACE_OTHER):
query, params = self.query.sql_with_params()
params = json.loads(json.dumps(params, cls=DjangoJSONEncoder))
run_citus_experiment_raw_sql.delay(query, params, data_source=self.model.__name__)
super(CitusComparisonQuerySet, self)._fetch_all()
class CitusComparisonManager(BaseManager.from_queryset(CitusComparisonQuerySet)):
pass
|
307b728c36adafab3832a03fffc6edef67828ca5
|
scripts/launch.py
|
scripts/launch.py
|
import logging
from os import getenv
import sys
from flash import app
logging.basicConfig(
datefmt='%Y/%m/%d %H.%M.%S',
format='%(levelname)s:%(name)s:%(message)s',
level=logging.DEBUG,
stream=sys.stdout,
)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=int(getenv('PORT', 5000)), debug=False)
|
import logging
from os import getenv
import sys
logging.basicConfig(
datefmt='%Y/%m/%d %H.%M.%S',
format='%(levelname)s:%(name)s:%(message)s',
level=logging.DEBUG,
stream=sys.stdout,
)
from flash import app
if __name__ == '__main__':
app.run(host='0.0.0.0', port=int(getenv('PORT', 5000)), debug=False)
|
Configure logging before importing application
|
Configure logging before importing application
|
Python
|
isc
|
textbook/flash,textbook/flash,textbook/flash
|
import logging
from os import getenv
import sys
from flash import app
logging.basicConfig(
datefmt='%Y/%m/%d %H.%M.%S',
format='%(levelname)s:%(name)s:%(message)s',
level=logging.DEBUG,
stream=sys.stdout,
)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=int(getenv('PORT', 5000)), debug=False)
Configure logging before importing application
|
import logging
from os import getenv
import sys
logging.basicConfig(
datefmt='%Y/%m/%d %H.%M.%S',
format='%(levelname)s:%(name)s:%(message)s',
level=logging.DEBUG,
stream=sys.stdout,
)
from flash import app
if __name__ == '__main__':
app.run(host='0.0.0.0', port=int(getenv('PORT', 5000)), debug=False)
|
<commit_before>import logging
from os import getenv
import sys
from flash import app
logging.basicConfig(
datefmt='%Y/%m/%d %H.%M.%S',
format='%(levelname)s:%(name)s:%(message)s',
level=logging.DEBUG,
stream=sys.stdout,
)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=int(getenv('PORT', 5000)), debug=False)
<commit_msg>Configure logging before importing application<commit_after>
|
import logging
from os import getenv
import sys
logging.basicConfig(
datefmt='%Y/%m/%d %H.%M.%S',
format='%(levelname)s:%(name)s:%(message)s',
level=logging.DEBUG,
stream=sys.stdout,
)
from flash import app
if __name__ == '__main__':
app.run(host='0.0.0.0', port=int(getenv('PORT', 5000)), debug=False)
|
import logging
from os import getenv
import sys
from flash import app
logging.basicConfig(
datefmt='%Y/%m/%d %H.%M.%S',
format='%(levelname)s:%(name)s:%(message)s',
level=logging.DEBUG,
stream=sys.stdout,
)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=int(getenv('PORT', 5000)), debug=False)
Configure logging before importing applicationimport logging
from os import getenv
import sys
logging.basicConfig(
datefmt='%Y/%m/%d %H.%M.%S',
format='%(levelname)s:%(name)s:%(message)s',
level=logging.DEBUG,
stream=sys.stdout,
)
from flash import app
if __name__ == '__main__':
app.run(host='0.0.0.0', port=int(getenv('PORT', 5000)), debug=False)
|
<commit_before>import logging
from os import getenv
import sys
from flash import app
logging.basicConfig(
datefmt='%Y/%m/%d %H.%M.%S',
format='%(levelname)s:%(name)s:%(message)s',
level=logging.DEBUG,
stream=sys.stdout,
)
if __name__ == '__main__':
app.run(host='0.0.0.0', port=int(getenv('PORT', 5000)), debug=False)
<commit_msg>Configure logging before importing application<commit_after>import logging
from os import getenv
import sys
logging.basicConfig(
datefmt='%Y/%m/%d %H.%M.%S',
format='%(levelname)s:%(name)s:%(message)s',
level=logging.DEBUG,
stream=sys.stdout,
)
from flash import app
if __name__ == '__main__':
app.run(host='0.0.0.0', port=int(getenv('PORT', 5000)), debug=False)
|
66ce0f3ce263804a032e399f8019fb481d4c4adc
|
manchester_traffic_offences/context_processors.py
|
manchester_traffic_offences/context_processors.py
|
from django.conf import settings
from django.utils.translation import ugettext as _, get_language
from django.utils.safestring import mark_safe
def globals(request):
return {
# Application Title (Populates <title>)
'app_title': _('Make a Plea: Traffic offences') + ' - GOV.UK',
# Proposition Title (Populates proposition header)
'proposition_title': _('Make a Plea: Traffic offences'),
# Current Phase (Sets the current phase and the colour of phase tags). Presumed values: alpha, beta, live
'phase': 'alpha',
# Product Type (Adds class to body based on service type). Presumed values: information, service
'product_type': 'service',
# Google Analytics ID (Tracking ID for the service)
'ga_id': 'UA-53811587-1',
# Version number
'version': settings.VERSION,
'html_lang': get_language,
'skip_link_message': _('Skip to main content'),
'logo_link_title': _('Go to the GOV.UK homepage'),
'crown_copyright_message': mark_safe(_('© Crown copyright'))
}
|
from django.conf import settings
from django.utils.translation import ugettext as _, get_language
from django.utils.safestring import mark_safe
def globals(request):
return {
# Application Title (Populates <title>)
'app_title': _('Make a Plea: Traffic offences') + ' - GOV.UK',
# Proposition Title (Populates proposition header)
'proposition_title': _('Make a Plea: Traffic offences'),
# Current Phase (Sets the current phase and the colour of phase tags). Presumed values: alpha, beta, live
'phase': 'beta',
# Product Type (Adds class to body based on service type). Presumed values: information, service
'product_type': 'service',
# Google Analytics ID (Tracking ID for the service)
'ga_id': 'UA-53811587-1',
# Version number
'version': settings.VERSION,
'html_lang': get_language,
'skip_link_message': _('Skip to main content'),
'logo_link_title': _('Go to the GOV.UK homepage'),
'crown_copyright_message': mark_safe(_('© Crown copyright'))
}
|
Switch phase tag to BETA
|
Switch phase tag to BETA
|
Python
|
mit
|
ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas,ministryofjustice/manchester_traffic_offences_pleas
|
from django.conf import settings
from django.utils.translation import ugettext as _, get_language
from django.utils.safestring import mark_safe
def globals(request):
return {
# Application Title (Populates <title>)
'app_title': _('Make a Plea: Traffic offences') + ' - GOV.UK',
# Proposition Title (Populates proposition header)
'proposition_title': _('Make a Plea: Traffic offences'),
# Current Phase (Sets the current phase and the colour of phase tags). Presumed values: alpha, beta, live
'phase': 'alpha',
# Product Type (Adds class to body based on service type). Presumed values: information, service
'product_type': 'service',
# Google Analytics ID (Tracking ID for the service)
'ga_id': 'UA-53811587-1',
# Version number
'version': settings.VERSION,
'html_lang': get_language,
'skip_link_message': _('Skip to main content'),
'logo_link_title': _('Go to the GOV.UK homepage'),
'crown_copyright_message': mark_safe(_('© Crown copyright'))
}
Switch phase tag to BETA
|
from django.conf import settings
from django.utils.translation import ugettext as _, get_language
from django.utils.safestring import mark_safe
def globals(request):
return {
# Application Title (Populates <title>)
'app_title': _('Make a Plea: Traffic offences') + ' - GOV.UK',
# Proposition Title (Populates proposition header)
'proposition_title': _('Make a Plea: Traffic offences'),
# Current Phase (Sets the current phase and the colour of phase tags). Presumed values: alpha, beta, live
'phase': 'beta',
# Product Type (Adds class to body based on service type). Presumed values: information, service
'product_type': 'service',
# Google Analytics ID (Tracking ID for the service)
'ga_id': 'UA-53811587-1',
# Version number
'version': settings.VERSION,
'html_lang': get_language,
'skip_link_message': _('Skip to main content'),
'logo_link_title': _('Go to the GOV.UK homepage'),
'crown_copyright_message': mark_safe(_('© Crown copyright'))
}
|
<commit_before>from django.conf import settings
from django.utils.translation import ugettext as _, get_language
from django.utils.safestring import mark_safe
def globals(request):
return {
# Application Title (Populates <title>)
'app_title': _('Make a Plea: Traffic offences') + ' - GOV.UK',
# Proposition Title (Populates proposition header)
'proposition_title': _('Make a Plea: Traffic offences'),
# Current Phase (Sets the current phase and the colour of phase tags). Presumed values: alpha, beta, live
'phase': 'alpha',
# Product Type (Adds class to body based on service type). Presumed values: information, service
'product_type': 'service',
# Google Analytics ID (Tracking ID for the service)
'ga_id': 'UA-53811587-1',
# Version number
'version': settings.VERSION,
'html_lang': get_language,
'skip_link_message': _('Skip to main content'),
'logo_link_title': _('Go to the GOV.UK homepage'),
'crown_copyright_message': mark_safe(_('© Crown copyright'))
}
<commit_msg>Switch phase tag to BETA<commit_after>
|
from django.conf import settings
from django.utils.translation import ugettext as _, get_language
from django.utils.safestring import mark_safe
def globals(request):
return {
# Application Title (Populates <title>)
'app_title': _('Make a Plea: Traffic offences') + ' - GOV.UK',
# Proposition Title (Populates proposition header)
'proposition_title': _('Make a Plea: Traffic offences'),
# Current Phase (Sets the current phase and the colour of phase tags). Presumed values: alpha, beta, live
'phase': 'beta',
# Product Type (Adds class to body based on service type). Presumed values: information, service
'product_type': 'service',
# Google Analytics ID (Tracking ID for the service)
'ga_id': 'UA-53811587-1',
# Version number
'version': settings.VERSION,
'html_lang': get_language,
'skip_link_message': _('Skip to main content'),
'logo_link_title': _('Go to the GOV.UK homepage'),
'crown_copyright_message': mark_safe(_('© Crown copyright'))
}
|
from django.conf import settings
from django.utils.translation import ugettext as _, get_language
from django.utils.safestring import mark_safe
def globals(request):
return {
# Application Title (Populates <title>)
'app_title': _('Make a Plea: Traffic offences') + ' - GOV.UK',
# Proposition Title (Populates proposition header)
'proposition_title': _('Make a Plea: Traffic offences'),
# Current Phase (Sets the current phase and the colour of phase tags). Presumed values: alpha, beta, live
'phase': 'alpha',
# Product Type (Adds class to body based on service type). Presumed values: information, service
'product_type': 'service',
# Google Analytics ID (Tracking ID for the service)
'ga_id': 'UA-53811587-1',
# Version number
'version': settings.VERSION,
'html_lang': get_language,
'skip_link_message': _('Skip to main content'),
'logo_link_title': _('Go to the GOV.UK homepage'),
'crown_copyright_message': mark_safe(_('© Crown copyright'))
}
Switch phase tag to BETAfrom django.conf import settings
from django.utils.translation import ugettext as _, get_language
from django.utils.safestring import mark_safe
def globals(request):
return {
# Application Title (Populates <title>)
'app_title': _('Make a Plea: Traffic offences') + ' - GOV.UK',
# Proposition Title (Populates proposition header)
'proposition_title': _('Make a Plea: Traffic offences'),
# Current Phase (Sets the current phase and the colour of phase tags). Presumed values: alpha, beta, live
'phase': 'beta',
# Product Type (Adds class to body based on service type). Presumed values: information, service
'product_type': 'service',
# Google Analytics ID (Tracking ID for the service)
'ga_id': 'UA-53811587-1',
# Version number
'version': settings.VERSION,
'html_lang': get_language,
'skip_link_message': _('Skip to main content'),
'logo_link_title': _('Go to the GOV.UK homepage'),
'crown_copyright_message': mark_safe(_('© Crown copyright'))
}
|
<commit_before>from django.conf import settings
from django.utils.translation import ugettext as _, get_language
from django.utils.safestring import mark_safe
def globals(request):
return {
# Application Title (Populates <title>)
'app_title': _('Make a Plea: Traffic offences') + ' - GOV.UK',
# Proposition Title (Populates proposition header)
'proposition_title': _('Make a Plea: Traffic offences'),
# Current Phase (Sets the current phase and the colour of phase tags). Presumed values: alpha, beta, live
'phase': 'alpha',
# Product Type (Adds class to body based on service type). Presumed values: information, service
'product_type': 'service',
# Google Analytics ID (Tracking ID for the service)
'ga_id': 'UA-53811587-1',
# Version number
'version': settings.VERSION,
'html_lang': get_language,
'skip_link_message': _('Skip to main content'),
'logo_link_title': _('Go to the GOV.UK homepage'),
'crown_copyright_message': mark_safe(_('© Crown copyright'))
}
<commit_msg>Switch phase tag to BETA<commit_after>from django.conf import settings
from django.utils.translation import ugettext as _, get_language
from django.utils.safestring import mark_safe
def globals(request):
return {
# Application Title (Populates <title>)
'app_title': _('Make a Plea: Traffic offences') + ' - GOV.UK',
# Proposition Title (Populates proposition header)
'proposition_title': _('Make a Plea: Traffic offences'),
# Current Phase (Sets the current phase and the colour of phase tags). Presumed values: alpha, beta, live
'phase': 'beta',
# Product Type (Adds class to body based on service type). Presumed values: information, service
'product_type': 'service',
# Google Analytics ID (Tracking ID for the service)
'ga_id': 'UA-53811587-1',
# Version number
'version': settings.VERSION,
'html_lang': get_language,
'skip_link_message': _('Skip to main content'),
'logo_link_title': _('Go to the GOV.UK homepage'),
'crown_copyright_message': mark_safe(_('© Crown copyright'))
}
|
cf550ac3a00531f2f964fbbb7e27c37071983d26
|
utils/aiohttp_wrap.py
|
utils/aiohttp_wrap.py
|
#!/bin/env python
import aiohttp
async def aio_get(url: str):
async with aiohttp.ClientSession() as session:
<<<<<<< HEAD
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
=======
async with session.get(url) as r:
if r.status == 200:
return r
else:
return None
>>>>>>> parent of 6b6d243... progress on DDG cog & aiohttp wrapper
|
#!/bin/env python
import aiohttp
async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
|
Revert "Revert "progress on DDG cog & aiohttp wrapper""
|
Revert "Revert "progress on DDG cog & aiohttp wrapper""
This reverts commit 85d3b1203d9861f986356e593a2b79d96c38c1b3.
|
Python
|
mit
|
Naught0/qtbot
|
#!/bin/env python
import aiohttp
async def aio_get(url: str):
async with aiohttp.ClientSession() as session:
<<<<<<< HEAD
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
=======
async with session.get(url) as r:
if r.status == 200:
return r
else:
return None
>>>>>>> parent of 6b6d243... progress on DDG cog & aiohttp wrapper
Revert "Revert "progress on DDG cog & aiohttp wrapper""
This reverts commit 85d3b1203d9861f986356e593a2b79d96c38c1b3.
|
#!/bin/env python
import aiohttp
async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
|
<commit_before>#!/bin/env python
import aiohttp
async def aio_get(url: str):
async with aiohttp.ClientSession() as session:
<<<<<<< HEAD
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
=======
async with session.get(url) as r:
if r.status == 200:
return r
else:
return None
>>>>>>> parent of 6b6d243... progress on DDG cog & aiohttp wrapper
<commit_msg>Revert "Revert "progress on DDG cog & aiohttp wrapper""
This reverts commit 85d3b1203d9861f986356e593a2b79d96c38c1b3.<commit_after>
|
#!/bin/env python
import aiohttp
async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
|
#!/bin/env python
import aiohttp
async def aio_get(url: str):
async with aiohttp.ClientSession() as session:
<<<<<<< HEAD
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
=======
async with session.get(url) as r:
if r.status == 200:
return r
else:
return None
>>>>>>> parent of 6b6d243... progress on DDG cog & aiohttp wrapper
Revert "Revert "progress on DDG cog & aiohttp wrapper""
This reverts commit 85d3b1203d9861f986356e593a2b79d96c38c1b3.#!/bin/env python
import aiohttp
async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
|
<commit_before>#!/bin/env python
import aiohttp
async def aio_get(url: str):
async with aiohttp.ClientSession() as session:
<<<<<<< HEAD
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
=======
async with session.get(url) as r:
if r.status == 200:
return r
else:
return None
>>>>>>> parent of 6b6d243... progress on DDG cog & aiohttp wrapper
<commit_msg>Revert "Revert "progress on DDG cog & aiohttp wrapper""
This reverts commit 85d3b1203d9861f986356e593a2b79d96c38c1b3.<commit_after>#!/bin/env python
import aiohttp
async def aio_get_text(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.text()
else:
return None
async def aio_get_json(url, headers=None):
async with aiohttp.ClientSession() as session:
async with session.get(url, headers=headers) as r:
if r.status == 200:
return r.json()
else:
return None
|
a0d614591ff472ca2904de5c562931d50f5654f3
|
publishconf.py
|
publishconf.py
|
#!/usr/bin/env python
# This file is only used if you use `make publish` or explicitly specify it as your config file
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.edwinksl.com'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = 'edwinksl'
# GOOGLE_ANALYTICS = ''
GOOGLE_ANALYTICS_UNIVERSAL = 'UA-72798337-1'
GOOGLE_ANALYTICS_UNIVERSAL_PROPERTY = 'auto'
|
#!/usr/bin/env python
# This file is only used if you use `make publish` or explicitly specify it as your config file
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.edwinksl.com'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = 'edwinksl'
# GOOGLE_ANALYTICS = ''
GOOGLE_ANALYTICS_UNIVERSAL = 'UA-100873462-1'
GOOGLE_ANALYTICS_UNIVERSAL_PROPERTY = 'auto'
|
Update Google Analytics tracking ID
|
Update Google Analytics tracking ID
|
Python
|
mit
|
edwinksl/edwinksl.github.io,edwinksl/edwinksl.github.io,edwinksl/edwinksl.github.io,edwinksl/edwinksl.github.io
|
#!/usr/bin/env python
# This file is only used if you use `make publish` or explicitly specify it as your config file
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.edwinksl.com'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = 'edwinksl'
# GOOGLE_ANALYTICS = ''
GOOGLE_ANALYTICS_UNIVERSAL = 'UA-72798337-1'
GOOGLE_ANALYTICS_UNIVERSAL_PROPERTY = 'auto'
Update Google Analytics tracking ID
|
#!/usr/bin/env python
# This file is only used if you use `make publish` or explicitly specify it as your config file
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.edwinksl.com'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = 'edwinksl'
# GOOGLE_ANALYTICS = ''
GOOGLE_ANALYTICS_UNIVERSAL = 'UA-100873462-1'
GOOGLE_ANALYTICS_UNIVERSAL_PROPERTY = 'auto'
|
<commit_before>#!/usr/bin/env python
# This file is only used if you use `make publish` or explicitly specify it as your config file
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.edwinksl.com'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = 'edwinksl'
# GOOGLE_ANALYTICS = ''
GOOGLE_ANALYTICS_UNIVERSAL = 'UA-72798337-1'
GOOGLE_ANALYTICS_UNIVERSAL_PROPERTY = 'auto'
<commit_msg>Update Google Analytics tracking ID<commit_after>
|
#!/usr/bin/env python
# This file is only used if you use `make publish` or explicitly specify it as your config file
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.edwinksl.com'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = 'edwinksl'
# GOOGLE_ANALYTICS = ''
GOOGLE_ANALYTICS_UNIVERSAL = 'UA-100873462-1'
GOOGLE_ANALYTICS_UNIVERSAL_PROPERTY = 'auto'
|
#!/usr/bin/env python
# This file is only used if you use `make publish` or explicitly specify it as your config file
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.edwinksl.com'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = 'edwinksl'
# GOOGLE_ANALYTICS = ''
GOOGLE_ANALYTICS_UNIVERSAL = 'UA-72798337-1'
GOOGLE_ANALYTICS_UNIVERSAL_PROPERTY = 'auto'
Update Google Analytics tracking ID#!/usr/bin/env python
# This file is only used if you use `make publish` or explicitly specify it as your config file
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.edwinksl.com'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = 'edwinksl'
# GOOGLE_ANALYTICS = ''
GOOGLE_ANALYTICS_UNIVERSAL = 'UA-100873462-1'
GOOGLE_ANALYTICS_UNIVERSAL_PROPERTY = 'auto'
|
<commit_before>#!/usr/bin/env python
# This file is only used if you use `make publish` or explicitly specify it as your config file
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.edwinksl.com'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = 'edwinksl'
# GOOGLE_ANALYTICS = ''
GOOGLE_ANALYTICS_UNIVERSAL = 'UA-72798337-1'
GOOGLE_ANALYTICS_UNIVERSAL_PROPERTY = 'auto'
<commit_msg>Update Google Analytics tracking ID<commit_after>#!/usr/bin/env python
# This file is only used if you use `make publish` or explicitly specify it as your config file
import os
import sys
sys.path.append(os.curdir)
from pelicanconf import *
SITEURL = 'https://www.edwinksl.com'
RELATIVE_URLS = False
FEED_ALL_ATOM = 'feeds/all.atom.xml'
CATEGORY_FEED_ATOM = 'feeds/%s.atom.xml'
DELETE_OUTPUT_DIRECTORY = True
# Following items are often useful when publishing
DISQUS_SITENAME = 'edwinksl'
# GOOGLE_ANALYTICS = ''
GOOGLE_ANALYTICS_UNIVERSAL = 'UA-100873462-1'
GOOGLE_ANALYTICS_UNIVERSAL_PROPERTY = 'auto'
|
b278eef6eb0f92694e70118e1e07855ea92dec5b
|
pulsar/client/transport/requests.py
|
pulsar/client/transport/requests.py
|
from __future__ import absolute_import
try:
from galaxy import eggs
eggs.require("requets")
except ImportError:
pass
try:
import requests
except ImportError:
requests = None
requests_multipart_post_available = False
try:
import requests_toolbelt
requests_multipart_post_available = True
except ImportError:
requests_toolbelt = None
REQUESTS_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests module - but it is unavailable. Please install requests."
REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests_toolbelt module - but it is unavailable. Please install requests_toolbelt."
import logging
log = logging.getLogger(__name__)
def post_file(url, path):
if requests_toolbelt is None:
raise ImportError(REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE)
__ensure_requests()
m = requests_toolbelt.MultipartEncoder(
fields={'file': ('filename', open(path, 'rb'))}
)
requests.post(url, data=m, headers={'Content-Type': m.content_type})
def get_file(url, path):
__ensure_requests()
r = requests.get(url, stream=True)
with open(path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
def __ensure_requests():
if requests is None:
raise ImportError(REQUESTS_UNAVAILABLE_MESSAGE)
|
from __future__ import absolute_import
try:
from galaxy import eggs
eggs.require("requests")
except ImportError:
pass
try:
import requests
except ImportError:
requests = None
requests_multipart_post_available = False
try:
import requests_toolbelt
requests_multipart_post_available = True
except ImportError:
requests_toolbelt = None
REQUESTS_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests module - but it is unavailable. Please install requests."
REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests_toolbelt module - but it is unavailable. Please install requests_toolbelt."
import logging
log = logging.getLogger(__name__)
def post_file(url, path):
if requests_toolbelt is None:
raise ImportError(REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE)
__ensure_requests()
m = requests_toolbelt.MultipartEncoder(
fields={'file': ('filename', open(path, 'rb'))}
)
requests.post(url, data=m, headers={'Content-Type': m.content_type})
def get_file(url, path):
__ensure_requests()
r = requests.get(url, stream=True)
with open(path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
def __ensure_requests():
if requests is None:
raise ImportError(REQUESTS_UNAVAILABLE_MESSAGE)
|
Fix typo Nate caught in Galaxy.
|
Fix typo Nate caught in Galaxy.
|
Python
|
apache-2.0
|
natefoo/pulsar,galaxyproject/pulsar,ssorgatem/pulsar,galaxyproject/pulsar,jmchilton/pulsar,natefoo/pulsar,ssorgatem/pulsar,jmchilton/pulsar
|
from __future__ import absolute_import
try:
from galaxy import eggs
eggs.require("requets")
except ImportError:
pass
try:
import requests
except ImportError:
requests = None
requests_multipart_post_available = False
try:
import requests_toolbelt
requests_multipart_post_available = True
except ImportError:
requests_toolbelt = None
REQUESTS_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests module - but it is unavailable. Please install requests."
REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests_toolbelt module - but it is unavailable. Please install requests_toolbelt."
import logging
log = logging.getLogger(__name__)
def post_file(url, path):
if requests_toolbelt is None:
raise ImportError(REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE)
__ensure_requests()
m = requests_toolbelt.MultipartEncoder(
fields={'file': ('filename', open(path, 'rb'))}
)
requests.post(url, data=m, headers={'Content-Type': m.content_type})
def get_file(url, path):
__ensure_requests()
r = requests.get(url, stream=True)
with open(path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
def __ensure_requests():
if requests is None:
raise ImportError(REQUESTS_UNAVAILABLE_MESSAGE)
Fix typo Nate caught in Galaxy.
|
from __future__ import absolute_import
try:
from galaxy import eggs
eggs.require("requests")
except ImportError:
pass
try:
import requests
except ImportError:
requests = None
requests_multipart_post_available = False
try:
import requests_toolbelt
requests_multipart_post_available = True
except ImportError:
requests_toolbelt = None
REQUESTS_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests module - but it is unavailable. Please install requests."
REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests_toolbelt module - but it is unavailable. Please install requests_toolbelt."
import logging
log = logging.getLogger(__name__)
def post_file(url, path):
if requests_toolbelt is None:
raise ImportError(REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE)
__ensure_requests()
m = requests_toolbelt.MultipartEncoder(
fields={'file': ('filename', open(path, 'rb'))}
)
requests.post(url, data=m, headers={'Content-Type': m.content_type})
def get_file(url, path):
__ensure_requests()
r = requests.get(url, stream=True)
with open(path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
def __ensure_requests():
if requests is None:
raise ImportError(REQUESTS_UNAVAILABLE_MESSAGE)
|
<commit_before>from __future__ import absolute_import
try:
from galaxy import eggs
eggs.require("requets")
except ImportError:
pass
try:
import requests
except ImportError:
requests = None
requests_multipart_post_available = False
try:
import requests_toolbelt
requests_multipart_post_available = True
except ImportError:
requests_toolbelt = None
REQUESTS_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests module - but it is unavailable. Please install requests."
REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests_toolbelt module - but it is unavailable. Please install requests_toolbelt."
import logging
log = logging.getLogger(__name__)
def post_file(url, path):
if requests_toolbelt is None:
raise ImportError(REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE)
__ensure_requests()
m = requests_toolbelt.MultipartEncoder(
fields={'file': ('filename', open(path, 'rb'))}
)
requests.post(url, data=m, headers={'Content-Type': m.content_type})
def get_file(url, path):
__ensure_requests()
r = requests.get(url, stream=True)
with open(path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
def __ensure_requests():
if requests is None:
raise ImportError(REQUESTS_UNAVAILABLE_MESSAGE)
<commit_msg>Fix typo Nate caught in Galaxy.<commit_after>
|
from __future__ import absolute_import
try:
from galaxy import eggs
eggs.require("requests")
except ImportError:
pass
try:
import requests
except ImportError:
requests = None
requests_multipart_post_available = False
try:
import requests_toolbelt
requests_multipart_post_available = True
except ImportError:
requests_toolbelt = None
REQUESTS_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests module - but it is unavailable. Please install requests."
REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests_toolbelt module - but it is unavailable. Please install requests_toolbelt."
import logging
log = logging.getLogger(__name__)
def post_file(url, path):
if requests_toolbelt is None:
raise ImportError(REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE)
__ensure_requests()
m = requests_toolbelt.MultipartEncoder(
fields={'file': ('filename', open(path, 'rb'))}
)
requests.post(url, data=m, headers={'Content-Type': m.content_type})
def get_file(url, path):
__ensure_requests()
r = requests.get(url, stream=True)
with open(path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
def __ensure_requests():
if requests is None:
raise ImportError(REQUESTS_UNAVAILABLE_MESSAGE)
|
from __future__ import absolute_import
try:
from galaxy import eggs
eggs.require("requets")
except ImportError:
pass
try:
import requests
except ImportError:
requests = None
requests_multipart_post_available = False
try:
import requests_toolbelt
requests_multipart_post_available = True
except ImportError:
requests_toolbelt = None
REQUESTS_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests module - but it is unavailable. Please install requests."
REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests_toolbelt module - but it is unavailable. Please install requests_toolbelt."
import logging
log = logging.getLogger(__name__)
def post_file(url, path):
if requests_toolbelt is None:
raise ImportError(REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE)
__ensure_requests()
m = requests_toolbelt.MultipartEncoder(
fields={'file': ('filename', open(path, 'rb'))}
)
requests.post(url, data=m, headers={'Content-Type': m.content_type})
def get_file(url, path):
__ensure_requests()
r = requests.get(url, stream=True)
with open(path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
def __ensure_requests():
if requests is None:
raise ImportError(REQUESTS_UNAVAILABLE_MESSAGE)
Fix typo Nate caught in Galaxy.from __future__ import absolute_import
try:
from galaxy import eggs
eggs.require("requests")
except ImportError:
pass
try:
import requests
except ImportError:
requests = None
requests_multipart_post_available = False
try:
import requests_toolbelt
requests_multipart_post_available = True
except ImportError:
requests_toolbelt = None
REQUESTS_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests module - but it is unavailable. Please install requests."
REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests_toolbelt module - but it is unavailable. Please install requests_toolbelt."
import logging
log = logging.getLogger(__name__)
def post_file(url, path):
if requests_toolbelt is None:
raise ImportError(REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE)
__ensure_requests()
m = requests_toolbelt.MultipartEncoder(
fields={'file': ('filename', open(path, 'rb'))}
)
requests.post(url, data=m, headers={'Content-Type': m.content_type})
def get_file(url, path):
__ensure_requests()
r = requests.get(url, stream=True)
with open(path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
def __ensure_requests():
if requests is None:
raise ImportError(REQUESTS_UNAVAILABLE_MESSAGE)
|
<commit_before>from __future__ import absolute_import
try:
from galaxy import eggs
eggs.require("requets")
except ImportError:
pass
try:
import requests
except ImportError:
requests = None
requests_multipart_post_available = False
try:
import requests_toolbelt
requests_multipart_post_available = True
except ImportError:
requests_toolbelt = None
REQUESTS_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests module - but it is unavailable. Please install requests."
REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests_toolbelt module - but it is unavailable. Please install requests_toolbelt."
import logging
log = logging.getLogger(__name__)
def post_file(url, path):
if requests_toolbelt is None:
raise ImportError(REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE)
__ensure_requests()
m = requests_toolbelt.MultipartEncoder(
fields={'file': ('filename', open(path, 'rb'))}
)
requests.post(url, data=m, headers={'Content-Type': m.content_type})
def get_file(url, path):
__ensure_requests()
r = requests.get(url, stream=True)
with open(path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
def __ensure_requests():
if requests is None:
raise ImportError(REQUESTS_UNAVAILABLE_MESSAGE)
<commit_msg>Fix typo Nate caught in Galaxy.<commit_after>from __future__ import absolute_import
try:
from galaxy import eggs
eggs.require("requests")
except ImportError:
pass
try:
import requests
except ImportError:
requests = None
requests_multipart_post_available = False
try:
import requests_toolbelt
requests_multipart_post_available = True
except ImportError:
requests_toolbelt = None
REQUESTS_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests module - but it is unavailable. Please install requests."
REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE = "Pulsar configured to use requests_toolbelt module - but it is unavailable. Please install requests_toolbelt."
import logging
log = logging.getLogger(__name__)
def post_file(url, path):
if requests_toolbelt is None:
raise ImportError(REQUESTS_TOOLBELT_UNAVAILABLE_MESSAGE)
__ensure_requests()
m = requests_toolbelt.MultipartEncoder(
fields={'file': ('filename', open(path, 'rb'))}
)
requests.post(url, data=m, headers={'Content-Type': m.content_type})
def get_file(url, path):
__ensure_requests()
r = requests.get(url, stream=True)
with open(path, 'wb') as f:
for chunk in r.iter_content(chunk_size=1024):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
f.flush()
def __ensure_requests():
if requests is None:
raise ImportError(REQUESTS_UNAVAILABLE_MESSAGE)
|
6ce6f22837b9e6a1dc8423038b6e2eb3d0a8de89
|
rxet/helper.py
|
rxet/helper.py
|
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
|
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
# read int as a big endian number
def read_uint32_BE(fileobj):
return unpack(">I", fileobj.read(4))[0]
|
Add big endian integer reading
|
Add big endian integer reading
|
Python
|
mit
|
RenolY2/battalion-tools
|
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]Add big endian integer reading
|
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
# read int as a big endian number
def read_uint32_BE(fileobj):
return unpack(">I", fileobj.read(4))[0]
|
<commit_before>from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]<commit_msg>Add big endian integer reading<commit_after>
|
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
# read int as a big endian number
def read_uint32_BE(fileobj):
return unpack(">I", fileobj.read(4))[0]
|
from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]Add big endian integer readingfrom struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
# read int as a big endian number
def read_uint32_BE(fileobj):
return unpack(">I", fileobj.read(4))[0]
|
<commit_before>from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]<commit_msg>Add big endian integer reading<commit_after>from struct import unpack
def read_uint32(fileobj):
return unpack("I", fileobj.read(4))[0]
# read int as a big endian number
def read_uint32_BE(fileobj):
return unpack(">I", fileobj.read(4))[0]
|
0d33cf650480ea7b71e13ef67b566fc6ec1c93ee
|
demo/demo/todos/models.py
|
demo/demo/todos/models.py
|
from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
complete = models.BooleanField()
|
from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
|
Remove "complete" boolean from demo todo model.
|
Remove "complete" boolean from demo todo model.
|
Python
|
bsd-3-clause
|
jgerigmeyer/jquery-django-superformset,jgerigmeyer/jquery-django-superformset
|
from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
complete = models.BooleanField()
Remove "complete" boolean from demo todo model.
|
from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
|
<commit_before>from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
complete = models.BooleanField()
<commit_msg>Remove "complete" boolean from demo todo model.<commit_after>
|
from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
|
from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
complete = models.BooleanField()
Remove "complete" boolean from demo todo model.from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
|
<commit_before>from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
complete = models.BooleanField()
<commit_msg>Remove "complete" boolean from demo todo model.<commit_after>from django.db import models
class Todo(models.Model):
name = models.CharField(max_length=200)
|
4f273d56ece230909094d8851d6f79695fb13d88
|
diffanalysis/exporters.py
|
diffanalysis/exporters.py
|
from django.conf import settings
from osmdata.exporters import CSVExporter
from .models import ActionReport
class AnalyzedCSVExporter(CSVExporter):
""" Enhance CSVExporter adding some fields from diffanalysis module
"""
def get_header_row(self):
return super().get_header_row() + (
'main_tag', 'is_geometric_action', 'is_tag_action',
'added_tags', 'removed_tags', 'modified_tags')
@staticmethod
def str_list(l):
""" Produce a list of str given a list of anything
"""
return [str(i) for i in l]
def get_row(self, action):
ar = ActionReport.objects.get_or_create_for_action(action)
return super().get_row(action) + (
ar.main_tag,
ar.is_geometric_action,
ar.is_tag_action,
self.str_list(ar.added_tags.all()),
self.str_list(ar.removed_tags.all()),
[self.str_list(i.all())
for i in [ar.modified_tags_old, ar.modified_tags_new]],
)
|
from django.conf import settings
from osmdata.exporters import CSVExporter
from .models import ActionReport
class AnalyzedCSVExporter(CSVExporter):
""" Enhance CSVExporter adding some fields from diffanalysis module
"""
def get_header_row(self):
return super().get_header_row() + (
'main_tag', 'is_geometric_action', 'is_tag_action',
'added_tags', 'removed_tags', 'modified_tags', 'version_delta')
@staticmethod
def str_list(l):
""" Produce a list of str given a list of anything
"""
return [str(i) for i in l]
def get_row(self, action):
ar = ActionReport.objects.get_or_create_for_action(action)
return super().get_row(action) + (
ar.main_tag,
ar.is_geometric_action,
ar.is_tag_action,
self.str_list(ar.added_tags.all()),
self.str_list(ar.removed_tags.all()),
[self.str_list(i.all())
for i in [ar.modified_tags_old, ar.modified_tags_new]],
ar.version_delta,
)
|
Include the version delta field in AnalyzedCSVExporter
|
Include the version delta field in AnalyzedCSVExporter
Fix #11
|
Python
|
agpl-3.0
|
Cartocite/osmada
|
from django.conf import settings
from osmdata.exporters import CSVExporter
from .models import ActionReport
class AnalyzedCSVExporter(CSVExporter):
""" Enhance CSVExporter adding some fields from diffanalysis module
"""
def get_header_row(self):
return super().get_header_row() + (
'main_tag', 'is_geometric_action', 'is_tag_action',
'added_tags', 'removed_tags', 'modified_tags')
@staticmethod
def str_list(l):
""" Produce a list of str given a list of anything
"""
return [str(i) for i in l]
def get_row(self, action):
ar = ActionReport.objects.get_or_create_for_action(action)
return super().get_row(action) + (
ar.main_tag,
ar.is_geometric_action,
ar.is_tag_action,
self.str_list(ar.added_tags.all()),
self.str_list(ar.removed_tags.all()),
[self.str_list(i.all())
for i in [ar.modified_tags_old, ar.modified_tags_new]],
)
Include the version delta field in AnalyzedCSVExporter
Fix #11
|
from django.conf import settings
from osmdata.exporters import CSVExporter
from .models import ActionReport
class AnalyzedCSVExporter(CSVExporter):
""" Enhance CSVExporter adding some fields from diffanalysis module
"""
def get_header_row(self):
return super().get_header_row() + (
'main_tag', 'is_geometric_action', 'is_tag_action',
'added_tags', 'removed_tags', 'modified_tags', 'version_delta')
@staticmethod
def str_list(l):
""" Produce a list of str given a list of anything
"""
return [str(i) for i in l]
def get_row(self, action):
ar = ActionReport.objects.get_or_create_for_action(action)
return super().get_row(action) + (
ar.main_tag,
ar.is_geometric_action,
ar.is_tag_action,
self.str_list(ar.added_tags.all()),
self.str_list(ar.removed_tags.all()),
[self.str_list(i.all())
for i in [ar.modified_tags_old, ar.modified_tags_new]],
ar.version_delta,
)
|
<commit_before>from django.conf import settings
from osmdata.exporters import CSVExporter
from .models import ActionReport
class AnalyzedCSVExporter(CSVExporter):
""" Enhance CSVExporter adding some fields from diffanalysis module
"""
def get_header_row(self):
return super().get_header_row() + (
'main_tag', 'is_geometric_action', 'is_tag_action',
'added_tags', 'removed_tags', 'modified_tags')
@staticmethod
def str_list(l):
""" Produce a list of str given a list of anything
"""
return [str(i) for i in l]
def get_row(self, action):
ar = ActionReport.objects.get_or_create_for_action(action)
return super().get_row(action) + (
ar.main_tag,
ar.is_geometric_action,
ar.is_tag_action,
self.str_list(ar.added_tags.all()),
self.str_list(ar.removed_tags.all()),
[self.str_list(i.all())
for i in [ar.modified_tags_old, ar.modified_tags_new]],
)
<commit_msg>Include the version delta field in AnalyzedCSVExporter
Fix #11<commit_after>
|
from django.conf import settings
from osmdata.exporters import CSVExporter
from .models import ActionReport
class AnalyzedCSVExporter(CSVExporter):
""" Enhance CSVExporter adding some fields from diffanalysis module
"""
def get_header_row(self):
return super().get_header_row() + (
'main_tag', 'is_geometric_action', 'is_tag_action',
'added_tags', 'removed_tags', 'modified_tags', 'version_delta')
@staticmethod
def str_list(l):
""" Produce a list of str given a list of anything
"""
return [str(i) for i in l]
def get_row(self, action):
ar = ActionReport.objects.get_or_create_for_action(action)
return super().get_row(action) + (
ar.main_tag,
ar.is_geometric_action,
ar.is_tag_action,
self.str_list(ar.added_tags.all()),
self.str_list(ar.removed_tags.all()),
[self.str_list(i.all())
for i in [ar.modified_tags_old, ar.modified_tags_new]],
ar.version_delta,
)
|
from django.conf import settings
from osmdata.exporters import CSVExporter
from .models import ActionReport
class AnalyzedCSVExporter(CSVExporter):
""" Enhance CSVExporter adding some fields from diffanalysis module
"""
def get_header_row(self):
return super().get_header_row() + (
'main_tag', 'is_geometric_action', 'is_tag_action',
'added_tags', 'removed_tags', 'modified_tags')
@staticmethod
def str_list(l):
""" Produce a list of str given a list of anything
"""
return [str(i) for i in l]
def get_row(self, action):
ar = ActionReport.objects.get_or_create_for_action(action)
return super().get_row(action) + (
ar.main_tag,
ar.is_geometric_action,
ar.is_tag_action,
self.str_list(ar.added_tags.all()),
self.str_list(ar.removed_tags.all()),
[self.str_list(i.all())
for i in [ar.modified_tags_old, ar.modified_tags_new]],
)
Include the version delta field in AnalyzedCSVExporter
Fix #11from django.conf import settings
from osmdata.exporters import CSVExporter
from .models import ActionReport
class AnalyzedCSVExporter(CSVExporter):
""" Enhance CSVExporter adding some fields from diffanalysis module
"""
def get_header_row(self):
return super().get_header_row() + (
'main_tag', 'is_geometric_action', 'is_tag_action',
'added_tags', 'removed_tags', 'modified_tags', 'version_delta')
@staticmethod
def str_list(l):
""" Produce a list of str given a list of anything
"""
return [str(i) for i in l]
def get_row(self, action):
ar = ActionReport.objects.get_or_create_for_action(action)
return super().get_row(action) + (
ar.main_tag,
ar.is_geometric_action,
ar.is_tag_action,
self.str_list(ar.added_tags.all()),
self.str_list(ar.removed_tags.all()),
[self.str_list(i.all())
for i in [ar.modified_tags_old, ar.modified_tags_new]],
ar.version_delta,
)
|
<commit_before>from django.conf import settings
from osmdata.exporters import CSVExporter
from .models import ActionReport
class AnalyzedCSVExporter(CSVExporter):
""" Enhance CSVExporter adding some fields from diffanalysis module
"""
def get_header_row(self):
return super().get_header_row() + (
'main_tag', 'is_geometric_action', 'is_tag_action',
'added_tags', 'removed_tags', 'modified_tags')
@staticmethod
def str_list(l):
""" Produce a list of str given a list of anything
"""
return [str(i) for i in l]
def get_row(self, action):
ar = ActionReport.objects.get_or_create_for_action(action)
return super().get_row(action) + (
ar.main_tag,
ar.is_geometric_action,
ar.is_tag_action,
self.str_list(ar.added_tags.all()),
self.str_list(ar.removed_tags.all()),
[self.str_list(i.all())
for i in [ar.modified_tags_old, ar.modified_tags_new]],
)
<commit_msg>Include the version delta field in AnalyzedCSVExporter
Fix #11<commit_after>from django.conf import settings
from osmdata.exporters import CSVExporter
from .models import ActionReport
class AnalyzedCSVExporter(CSVExporter):
""" Enhance CSVExporter adding some fields from diffanalysis module
"""
def get_header_row(self):
return super().get_header_row() + (
'main_tag', 'is_geometric_action', 'is_tag_action',
'added_tags', 'removed_tags', 'modified_tags', 'version_delta')
@staticmethod
def str_list(l):
""" Produce a list of str given a list of anything
"""
return [str(i) for i in l]
def get_row(self, action):
ar = ActionReport.objects.get_or_create_for_action(action)
return super().get_row(action) + (
ar.main_tag,
ar.is_geometric_action,
ar.is_tag_action,
self.str_list(ar.added_tags.all()),
self.str_list(ar.removed_tags.all()),
[self.str_list(i.all())
for i in [ar.modified_tags_old, ar.modified_tags_new]],
ar.version_delta,
)
|
2b7ea531846b43946afee2c4f1d9ed89c3cd947c
|
ForgeHg/forgehg/tests/functional/test_controllers.py
|
ForgeHg/forgehg/tests/functional/test_controllers.py
|
import os
import pkg_resources
from pylons import c
from ming.orm import ThreadLocalORMSession
from pyforge.lib import helpers as h
from forgehg.tests import TestController
class TestRootController(TestController):
def setUp(self):
TestController.setUp(self)
h.set_context('test', 'src_hg')
repo_dir = pkg_resources.resource_filename(
'forgehg', 'tests/data')
c.app.repo.fs_path = repo_dir
c.app.repo.status = 'ready'
c.app.repo.name = 'testrepo.hg'
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_index(self):
resp = self.app.get('/src_hg/')
assert 'hg://' in resp
assert 'ready' in resp
def test_commit(self):
resp = self.app.get('/src_hg/tip/')
assert '<ins>' in resp
|
import os
import pkg_resources
from pylons import c
from ming.orm import ThreadLocalORMSession
from pyforge.lib import helpers as h
from forgehg.tests import TestController
class TestRootController(TestController):
def setUp(self):
TestController.setUp(self)
h.set_context('test', 'src_hg')
repo_dir = pkg_resources.resource_filename(
'forgehg', 'tests/data')
c.app.repo.fs_path = repo_dir
c.app.repo.status = 'ready'
c.app.repo.name = 'testrepo.hg'
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_index(self):
resp = self.app.get('/src_hg/')
assert 'hg clone http://' in resp, resp
assert 'ready' in resp
def test_commit(self):
resp = self.app.get('/src_hg/tip/')
assert '<ins>' in resp
|
Update test to reflect changing hg codebase
|
Update test to reflect changing hg codebase
|
Python
|
apache-2.0
|
lym/allura-git,apache/allura,heiths/allura,lym/allura-git,leotrubach/sourceforge-allura,leotrubach/sourceforge-allura,apache/incubator-allura,Bitergia/allura,lym/allura-git,apache/allura,leotrubach/sourceforge-allura,heiths/allura,apache/allura,Bitergia/allura,apache/allura,lym/allura-git,Bitergia/allura,apache/incubator-allura,Bitergia/allura,leotrubach/sourceforge-allura,apache/incubator-allura,heiths/allura,lym/allura-git,Bitergia/allura,heiths/allura,apache/incubator-allura,heiths/allura,apache/allura
|
import os
import pkg_resources
from pylons import c
from ming.orm import ThreadLocalORMSession
from pyforge.lib import helpers as h
from forgehg.tests import TestController
class TestRootController(TestController):
def setUp(self):
TestController.setUp(self)
h.set_context('test', 'src_hg')
repo_dir = pkg_resources.resource_filename(
'forgehg', 'tests/data')
c.app.repo.fs_path = repo_dir
c.app.repo.status = 'ready'
c.app.repo.name = 'testrepo.hg'
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_index(self):
resp = self.app.get('/src_hg/')
assert 'hg://' in resp
assert 'ready' in resp
def test_commit(self):
resp = self.app.get('/src_hg/tip/')
assert '<ins>' in resp
Update test to reflect changing hg codebase
|
import os
import pkg_resources
from pylons import c
from ming.orm import ThreadLocalORMSession
from pyforge.lib import helpers as h
from forgehg.tests import TestController
class TestRootController(TestController):
def setUp(self):
TestController.setUp(self)
h.set_context('test', 'src_hg')
repo_dir = pkg_resources.resource_filename(
'forgehg', 'tests/data')
c.app.repo.fs_path = repo_dir
c.app.repo.status = 'ready'
c.app.repo.name = 'testrepo.hg'
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_index(self):
resp = self.app.get('/src_hg/')
assert 'hg clone http://' in resp, resp
assert 'ready' in resp
def test_commit(self):
resp = self.app.get('/src_hg/tip/')
assert '<ins>' in resp
|
<commit_before>import os
import pkg_resources
from pylons import c
from ming.orm import ThreadLocalORMSession
from pyforge.lib import helpers as h
from forgehg.tests import TestController
class TestRootController(TestController):
def setUp(self):
TestController.setUp(self)
h.set_context('test', 'src_hg')
repo_dir = pkg_resources.resource_filename(
'forgehg', 'tests/data')
c.app.repo.fs_path = repo_dir
c.app.repo.status = 'ready'
c.app.repo.name = 'testrepo.hg'
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_index(self):
resp = self.app.get('/src_hg/')
assert 'hg://' in resp
assert 'ready' in resp
def test_commit(self):
resp = self.app.get('/src_hg/tip/')
assert '<ins>' in resp
<commit_msg>Update test to reflect changing hg codebase<commit_after>
|
import os
import pkg_resources
from pylons import c
from ming.orm import ThreadLocalORMSession
from pyforge.lib import helpers as h
from forgehg.tests import TestController
class TestRootController(TestController):
def setUp(self):
TestController.setUp(self)
h.set_context('test', 'src_hg')
repo_dir = pkg_resources.resource_filename(
'forgehg', 'tests/data')
c.app.repo.fs_path = repo_dir
c.app.repo.status = 'ready'
c.app.repo.name = 'testrepo.hg'
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_index(self):
resp = self.app.get('/src_hg/')
assert 'hg clone http://' in resp, resp
assert 'ready' in resp
def test_commit(self):
resp = self.app.get('/src_hg/tip/')
assert '<ins>' in resp
|
import os
import pkg_resources
from pylons import c
from ming.orm import ThreadLocalORMSession
from pyforge.lib import helpers as h
from forgehg.tests import TestController
class TestRootController(TestController):
def setUp(self):
TestController.setUp(self)
h.set_context('test', 'src_hg')
repo_dir = pkg_resources.resource_filename(
'forgehg', 'tests/data')
c.app.repo.fs_path = repo_dir
c.app.repo.status = 'ready'
c.app.repo.name = 'testrepo.hg'
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_index(self):
resp = self.app.get('/src_hg/')
assert 'hg://' in resp
assert 'ready' in resp
def test_commit(self):
resp = self.app.get('/src_hg/tip/')
assert '<ins>' in resp
Update test to reflect changing hg codebaseimport os
import pkg_resources
from pylons import c
from ming.orm import ThreadLocalORMSession
from pyforge.lib import helpers as h
from forgehg.tests import TestController
class TestRootController(TestController):
def setUp(self):
TestController.setUp(self)
h.set_context('test', 'src_hg')
repo_dir = pkg_resources.resource_filename(
'forgehg', 'tests/data')
c.app.repo.fs_path = repo_dir
c.app.repo.status = 'ready'
c.app.repo.name = 'testrepo.hg'
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_index(self):
resp = self.app.get('/src_hg/')
assert 'hg clone http://' in resp, resp
assert 'ready' in resp
def test_commit(self):
resp = self.app.get('/src_hg/tip/')
assert '<ins>' in resp
|
<commit_before>import os
import pkg_resources
from pylons import c
from ming.orm import ThreadLocalORMSession
from pyforge.lib import helpers as h
from forgehg.tests import TestController
class TestRootController(TestController):
def setUp(self):
TestController.setUp(self)
h.set_context('test', 'src_hg')
repo_dir = pkg_resources.resource_filename(
'forgehg', 'tests/data')
c.app.repo.fs_path = repo_dir
c.app.repo.status = 'ready'
c.app.repo.name = 'testrepo.hg'
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_index(self):
resp = self.app.get('/src_hg/')
assert 'hg://' in resp
assert 'ready' in resp
def test_commit(self):
resp = self.app.get('/src_hg/tip/')
assert '<ins>' in resp
<commit_msg>Update test to reflect changing hg codebase<commit_after>import os
import pkg_resources
from pylons import c
from ming.orm import ThreadLocalORMSession
from pyforge.lib import helpers as h
from forgehg.tests import TestController
class TestRootController(TestController):
def setUp(self):
TestController.setUp(self)
h.set_context('test', 'src_hg')
repo_dir = pkg_resources.resource_filename(
'forgehg', 'tests/data')
c.app.repo.fs_path = repo_dir
c.app.repo.status = 'ready'
c.app.repo.name = 'testrepo.hg'
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_index(self):
resp = self.app.get('/src_hg/')
assert 'hg clone http://' in resp, resp
assert 'ready' in resp
def test_commit(self):
resp = self.app.get('/src_hg/tip/')
assert '<ins>' in resp
|
aaaaa995a77110b779d9613d95800af609324edc
|
falcom/tree/test/test_tree.py
|
falcom/tree/test/test_tree.py
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from ..read_only_tree import Tree
from ..mutable_tree import MutableTree
class GivenNothing (unittest.TestCase):
def test_cannot_init_tree_with_value (self):
assert_that(calling(Tree).with_args(value="hi"),
raises(TypeError))
def test_can_init_from_mutable_tree (self):
mtree = MutableTree(value=1)
mtree.append_value(2)
mtree.append_value(3)
mtree[0].append_value(4)
mtree[0].append_value(5)
mtree[0][0].append_value(6)
t = Tree(mtree)
class GivenEmptyTree (unittest.TestCase):
def test_empty_tree_has_null_value (self):
t = Tree()
assert_that(t.value, is_(none()))
def test_cannot_modify_value_for_empty_tree (self):
t = Tree()
assert_that(calling(setattr).with_args(t, "value", "hi"),
raises(AttributeError))
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from ..read_only_tree import Tree
from ..mutable_tree import MutableTree
class GivenNothing (unittest.TestCase):
def test_cannot_init_tree_with_value (self):
assert_that(calling(Tree).with_args(value="hi"),
raises(TypeError))
def test_can_init_from_mutable_tree (self):
mtree = MutableTree(value=1)
mtree.append_value(2)
mtree.append_value(3)
mtree[0].append_value(4)
mtree[0].append_value(5)
mtree[0][0].append_value(6)
t = Tree(mtree)
class GivenEmptyTree (unittest.TestCase):
def setUp (self):
self.tree = Tree()
def test_empty_tree_has_null_value (self):
assert_that(self.tree.value, is_(none()))
def test_cannot_modify_value_for_empty_tree (self):
assert_that(calling(setattr).with_args(self.tree,
"value",
"hi"),
raises(AttributeError))
|
Replace duplicate code with setUp method
|
Replace duplicate code with setUp method
|
Python
|
bsd-3-clause
|
mlibrary/image-conversion-and-validation,mlibrary/image-conversion-and-validation
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from ..read_only_tree import Tree
from ..mutable_tree import MutableTree
class GivenNothing (unittest.TestCase):
def test_cannot_init_tree_with_value (self):
assert_that(calling(Tree).with_args(value="hi"),
raises(TypeError))
def test_can_init_from_mutable_tree (self):
mtree = MutableTree(value=1)
mtree.append_value(2)
mtree.append_value(3)
mtree[0].append_value(4)
mtree[0].append_value(5)
mtree[0][0].append_value(6)
t = Tree(mtree)
class GivenEmptyTree (unittest.TestCase):
def test_empty_tree_has_null_value (self):
t = Tree()
assert_that(t.value, is_(none()))
def test_cannot_modify_value_for_empty_tree (self):
t = Tree()
assert_that(calling(setattr).with_args(t, "value", "hi"),
raises(AttributeError))
Replace duplicate code with setUp method
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from ..read_only_tree import Tree
from ..mutable_tree import MutableTree
class GivenNothing (unittest.TestCase):
def test_cannot_init_tree_with_value (self):
assert_that(calling(Tree).with_args(value="hi"),
raises(TypeError))
def test_can_init_from_mutable_tree (self):
mtree = MutableTree(value=1)
mtree.append_value(2)
mtree.append_value(3)
mtree[0].append_value(4)
mtree[0].append_value(5)
mtree[0][0].append_value(6)
t = Tree(mtree)
class GivenEmptyTree (unittest.TestCase):
def setUp (self):
self.tree = Tree()
def test_empty_tree_has_null_value (self):
assert_that(self.tree.value, is_(none()))
def test_cannot_modify_value_for_empty_tree (self):
assert_that(calling(setattr).with_args(self.tree,
"value",
"hi"),
raises(AttributeError))
|
<commit_before># Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from ..read_only_tree import Tree
from ..mutable_tree import MutableTree
class GivenNothing (unittest.TestCase):
def test_cannot_init_tree_with_value (self):
assert_that(calling(Tree).with_args(value="hi"),
raises(TypeError))
def test_can_init_from_mutable_tree (self):
mtree = MutableTree(value=1)
mtree.append_value(2)
mtree.append_value(3)
mtree[0].append_value(4)
mtree[0].append_value(5)
mtree[0][0].append_value(6)
t = Tree(mtree)
class GivenEmptyTree (unittest.TestCase):
def test_empty_tree_has_null_value (self):
t = Tree()
assert_that(t.value, is_(none()))
def test_cannot_modify_value_for_empty_tree (self):
t = Tree()
assert_that(calling(setattr).with_args(t, "value", "hi"),
raises(AttributeError))
<commit_msg>Replace duplicate code with setUp method<commit_after>
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from ..read_only_tree import Tree
from ..mutable_tree import MutableTree
class GivenNothing (unittest.TestCase):
def test_cannot_init_tree_with_value (self):
assert_that(calling(Tree).with_args(value="hi"),
raises(TypeError))
def test_can_init_from_mutable_tree (self):
mtree = MutableTree(value=1)
mtree.append_value(2)
mtree.append_value(3)
mtree[0].append_value(4)
mtree[0].append_value(5)
mtree[0][0].append_value(6)
t = Tree(mtree)
class GivenEmptyTree (unittest.TestCase):
def setUp (self):
self.tree = Tree()
def test_empty_tree_has_null_value (self):
assert_that(self.tree.value, is_(none()))
def test_cannot_modify_value_for_empty_tree (self):
assert_that(calling(setattr).with_args(self.tree,
"value",
"hi"),
raises(AttributeError))
|
# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from ..read_only_tree import Tree
from ..mutable_tree import MutableTree
class GivenNothing (unittest.TestCase):
def test_cannot_init_tree_with_value (self):
assert_that(calling(Tree).with_args(value="hi"),
raises(TypeError))
def test_can_init_from_mutable_tree (self):
mtree = MutableTree(value=1)
mtree.append_value(2)
mtree.append_value(3)
mtree[0].append_value(4)
mtree[0].append_value(5)
mtree[0][0].append_value(6)
t = Tree(mtree)
class GivenEmptyTree (unittest.TestCase):
def test_empty_tree_has_null_value (self):
t = Tree()
assert_that(t.value, is_(none()))
def test_cannot_modify_value_for_empty_tree (self):
t = Tree()
assert_that(calling(setattr).with_args(t, "value", "hi"),
raises(AttributeError))
Replace duplicate code with setUp method# Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from ..read_only_tree import Tree
from ..mutable_tree import MutableTree
class GivenNothing (unittest.TestCase):
def test_cannot_init_tree_with_value (self):
assert_that(calling(Tree).with_args(value="hi"),
raises(TypeError))
def test_can_init_from_mutable_tree (self):
mtree = MutableTree(value=1)
mtree.append_value(2)
mtree.append_value(3)
mtree[0].append_value(4)
mtree[0].append_value(5)
mtree[0][0].append_value(6)
t = Tree(mtree)
class GivenEmptyTree (unittest.TestCase):
def setUp (self):
self.tree = Tree()
def test_empty_tree_has_null_value (self):
assert_that(self.tree.value, is_(none()))
def test_cannot_modify_value_for_empty_tree (self):
assert_that(calling(setattr).with_args(self.tree,
"value",
"hi"),
raises(AttributeError))
|
<commit_before># Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from ..read_only_tree import Tree
from ..mutable_tree import MutableTree
class GivenNothing (unittest.TestCase):
def test_cannot_init_tree_with_value (self):
assert_that(calling(Tree).with_args(value="hi"),
raises(TypeError))
def test_can_init_from_mutable_tree (self):
mtree = MutableTree(value=1)
mtree.append_value(2)
mtree.append_value(3)
mtree[0].append_value(4)
mtree[0].append_value(5)
mtree[0][0].append_value(6)
t = Tree(mtree)
class GivenEmptyTree (unittest.TestCase):
def test_empty_tree_has_null_value (self):
t = Tree()
assert_that(t.value, is_(none()))
def test_cannot_modify_value_for_empty_tree (self):
t = Tree()
assert_that(calling(setattr).with_args(t, "value", "hi"),
raises(AttributeError))
<commit_msg>Replace duplicate code with setUp method<commit_after># Copyright (c) 2017 The Regents of the University of Michigan.
# All Rights Reserved. Licensed according to the terms of the Revised
# BSD License. See LICENSE.txt for details.
from hamcrest import *
import unittest
from ..read_only_tree import Tree
from ..mutable_tree import MutableTree
class GivenNothing (unittest.TestCase):
def test_cannot_init_tree_with_value (self):
assert_that(calling(Tree).with_args(value="hi"),
raises(TypeError))
def test_can_init_from_mutable_tree (self):
mtree = MutableTree(value=1)
mtree.append_value(2)
mtree.append_value(3)
mtree[0].append_value(4)
mtree[0].append_value(5)
mtree[0][0].append_value(6)
t = Tree(mtree)
class GivenEmptyTree (unittest.TestCase):
def setUp (self):
self.tree = Tree()
def test_empty_tree_has_null_value (self):
assert_that(self.tree.value, is_(none()))
def test_cannot_modify_value_for_empty_tree (self):
assert_that(calling(setattr).with_args(self.tree,
"value",
"hi"),
raises(AttributeError))
|
de1a07992ad4f4be25c57d992720f7b3c3f0bc04
|
first_databank/__openerp__.py
|
first_databank/__openerp__.py
|
# -*- coding: utf-8 -*-
# © 2015 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'First Databank',
'description': 'Provides base models for storage of First Databank data',
'version': '9.0.1.0.0',
'category': 'Connector',
'author': "LasLabs",
'license': 'AGPL-3',
'website': 'https://laslabs.com',
'depends': [
'medical_prescription_sale_stock',
'medical_insurance_us',
'medical_medicament_us',
# 'medical_patient_us',
'medical_physician_us',
'medical_pharmacy_us',
'medical_prescription_us',
'medical_prescription_sale_stock_us',
'medical_manufacturer',
],
'installable': True,
'application': False,
}
|
# -*- coding: utf-8 -*-
# © 2015 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'First Databank',
'description': 'Provides base models for storage of First Databank data',
'version': '9.0.1.0.0',
'category': 'Connector',
'author': "LasLabs",
'license': 'AGPL-3',
'website': 'https://laslabs.com',
'depends': [
'medical_prescription_sale_stock',
'medical_insurance_us',
'medical_medicament_us',
# 'medical_patient_us',
# 'medical_physician_us',
'medical_pharmacy_us',
'medical_prescription_us',
'medical_prescription_sale_stock_us',
'medical_manufacturer',
],
'installable': True,
'application': False,
}
|
Remove physician_us depends in first_databank
|
Remove physician_us depends in first_databank
|
Python
|
agpl-3.0
|
laslabs/odoo-connector-carepoint
|
# -*- coding: utf-8 -*-
# © 2015 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'First Databank',
'description': 'Provides base models for storage of First Databank data',
'version': '9.0.1.0.0',
'category': 'Connector',
'author': "LasLabs",
'license': 'AGPL-3',
'website': 'https://laslabs.com',
'depends': [
'medical_prescription_sale_stock',
'medical_insurance_us',
'medical_medicament_us',
# 'medical_patient_us',
'medical_physician_us',
'medical_pharmacy_us',
'medical_prescription_us',
'medical_prescription_sale_stock_us',
'medical_manufacturer',
],
'installable': True,
'application': False,
}
Remove physician_us depends in first_databank
|
# -*- coding: utf-8 -*-
# © 2015 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'First Databank',
'description': 'Provides base models for storage of First Databank data',
'version': '9.0.1.0.0',
'category': 'Connector',
'author': "LasLabs",
'license': 'AGPL-3',
'website': 'https://laslabs.com',
'depends': [
'medical_prescription_sale_stock',
'medical_insurance_us',
'medical_medicament_us',
# 'medical_patient_us',
# 'medical_physician_us',
'medical_pharmacy_us',
'medical_prescription_us',
'medical_prescription_sale_stock_us',
'medical_manufacturer',
],
'installable': True,
'application': False,
}
|
<commit_before># -*- coding: utf-8 -*-
# © 2015 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'First Databank',
'description': 'Provides base models for storage of First Databank data',
'version': '9.0.1.0.0',
'category': 'Connector',
'author': "LasLabs",
'license': 'AGPL-3',
'website': 'https://laslabs.com',
'depends': [
'medical_prescription_sale_stock',
'medical_insurance_us',
'medical_medicament_us',
# 'medical_patient_us',
'medical_physician_us',
'medical_pharmacy_us',
'medical_prescription_us',
'medical_prescription_sale_stock_us',
'medical_manufacturer',
],
'installable': True,
'application': False,
}
<commit_msg>Remove physician_us depends in first_databank<commit_after>
|
# -*- coding: utf-8 -*-
# © 2015 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'First Databank',
'description': 'Provides base models for storage of First Databank data',
'version': '9.0.1.0.0',
'category': 'Connector',
'author': "LasLabs",
'license': 'AGPL-3',
'website': 'https://laslabs.com',
'depends': [
'medical_prescription_sale_stock',
'medical_insurance_us',
'medical_medicament_us',
# 'medical_patient_us',
# 'medical_physician_us',
'medical_pharmacy_us',
'medical_prescription_us',
'medical_prescription_sale_stock_us',
'medical_manufacturer',
],
'installable': True,
'application': False,
}
|
# -*- coding: utf-8 -*-
# © 2015 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'First Databank',
'description': 'Provides base models for storage of First Databank data',
'version': '9.0.1.0.0',
'category': 'Connector',
'author': "LasLabs",
'license': 'AGPL-3',
'website': 'https://laslabs.com',
'depends': [
'medical_prescription_sale_stock',
'medical_insurance_us',
'medical_medicament_us',
# 'medical_patient_us',
'medical_physician_us',
'medical_pharmacy_us',
'medical_prescription_us',
'medical_prescription_sale_stock_us',
'medical_manufacturer',
],
'installable': True,
'application': False,
}
Remove physician_us depends in first_databank# -*- coding: utf-8 -*-
# © 2015 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'First Databank',
'description': 'Provides base models for storage of First Databank data',
'version': '9.0.1.0.0',
'category': 'Connector',
'author': "LasLabs",
'license': 'AGPL-3',
'website': 'https://laslabs.com',
'depends': [
'medical_prescription_sale_stock',
'medical_insurance_us',
'medical_medicament_us',
# 'medical_patient_us',
# 'medical_physician_us',
'medical_pharmacy_us',
'medical_prescription_us',
'medical_prescription_sale_stock_us',
'medical_manufacturer',
],
'installable': True,
'application': False,
}
|
<commit_before># -*- coding: utf-8 -*-
# © 2015 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'First Databank',
'description': 'Provides base models for storage of First Databank data',
'version': '9.0.1.0.0',
'category': 'Connector',
'author': "LasLabs",
'license': 'AGPL-3',
'website': 'https://laslabs.com',
'depends': [
'medical_prescription_sale_stock',
'medical_insurance_us',
'medical_medicament_us',
# 'medical_patient_us',
'medical_physician_us',
'medical_pharmacy_us',
'medical_prescription_us',
'medical_prescription_sale_stock_us',
'medical_manufacturer',
],
'installable': True,
'application': False,
}
<commit_msg>Remove physician_us depends in first_databank<commit_after># -*- coding: utf-8 -*-
# © 2015 LasLabs Inc.
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': 'First Databank',
'description': 'Provides base models for storage of First Databank data',
'version': '9.0.1.0.0',
'category': 'Connector',
'author': "LasLabs",
'license': 'AGPL-3',
'website': 'https://laslabs.com',
'depends': [
'medical_prescription_sale_stock',
'medical_insurance_us',
'medical_medicament_us',
# 'medical_patient_us',
# 'medical_physician_us',
'medical_pharmacy_us',
'medical_prescription_us',
'medical_prescription_sale_stock_us',
'medical_manufacturer',
],
'installable': True,
'application': False,
}
|
07b6f05d913337619205d6fd82b472e112e2a2c7
|
src/demo.py
|
src/demo.py
|
#!/usr/bin/env python
"""Brief demo of nonogram solving."""
import sys
from rules.nonogram import NonogramPuzzle
easy_puzzle = NonogramPuzzle([[1], [1, 1]],
[[1], [1], [1]])
ambiguous_puzzle = NonogramPuzzle([[1], [1]],
[[1], [1]])
def main(args):
from solver.solver_coroutine import test_solver
from solver.brute_force import BruteForceNonogramSolver
test_solver(BruteForceNonogramSolver, easy_puzzle)
test_solver(BruteForceNonogramSolver, ambiguous_puzzle)
from solver.backward_chain_solver import BackwardChainSolver
test_solver(BackwardChainSolver, easy_puzzle)
test_solver(BackwardChainSolver, ambiguous_puzzle)
if __name__ == "__main__":
main(sys.argv)
|
#!/usr/bin/env python
"""Brief demo of nonogram solving."""
import sys
from rules.nonogram import NonogramPuzzle
easy_puzzle = NonogramPuzzle([[1], [1, 1]],
[[1], [1], [1]])
ambiguous_puzzle = NonogramPuzzle([[1], [1]],
[[1], [1]])
hard_puzzle = NonogramPuzzle(
# https://commons.wikimedia.org/wiki/File:Paint_by_numbers_Animation.gif
[[3], [5], [3,1], [2,1], [3,3,4], [2,2,7], [6,1,1], [4,2,2], [1,1], [3,1],
[6], [2,7], [6,3,1], [1,2,2,1,1], [4,1,1,3], [4,2,2], [3,3,1], [3,3],
[3], [2,1]],
[[2], [1,2], [2,3], [2,3], [3,1,1], [2,1,1], [1,1,1,2,2], [1,1,3,1,3],
[2,6,4], [3,3,9,1], [5,3,2], [3,1,2,2], [2,1,7], [3,3,2], [2,4], [2,1,2],
[2,2,1], [2,2], [1], [1]])
def main(args):
from solver.solver_coroutine import test_solver
from solver.brute_force import BruteForceNonogramSolver
test_solver(BruteForceNonogramSolver, easy_puzzle)
test_solver(BruteForceNonogramSolver, ambiguous_puzzle)
from solver.backward_chain_solver import BackwardChainSolver
test_solver(BackwardChainSolver, easy_puzzle)
test_solver(BackwardChainSolver, ambiguous_puzzle)
test_solver(BackwardChainSolver, hard_puzzle)
if __name__ == "__main__":
main(sys.argv)
|
Add a hard puzzle not yet correctly solved
|
Add a hard puzzle not yet correctly solved
|
Python
|
apache-2.0
|
ggould256/nonogram
|
#!/usr/bin/env python
"""Brief demo of nonogram solving."""
import sys
from rules.nonogram import NonogramPuzzle
easy_puzzle = NonogramPuzzle([[1], [1, 1]],
[[1], [1], [1]])
ambiguous_puzzle = NonogramPuzzle([[1], [1]],
[[1], [1]])
def main(args):
from solver.solver_coroutine import test_solver
from solver.brute_force import BruteForceNonogramSolver
test_solver(BruteForceNonogramSolver, easy_puzzle)
test_solver(BruteForceNonogramSolver, ambiguous_puzzle)
from solver.backward_chain_solver import BackwardChainSolver
test_solver(BackwardChainSolver, easy_puzzle)
test_solver(BackwardChainSolver, ambiguous_puzzle)
if __name__ == "__main__":
main(sys.argv)
Add a hard puzzle not yet correctly solved
|
#!/usr/bin/env python
"""Brief demo of nonogram solving."""
import sys
from rules.nonogram import NonogramPuzzle
easy_puzzle = NonogramPuzzle([[1], [1, 1]],
[[1], [1], [1]])
ambiguous_puzzle = NonogramPuzzle([[1], [1]],
[[1], [1]])
hard_puzzle = NonogramPuzzle(
# https://commons.wikimedia.org/wiki/File:Paint_by_numbers_Animation.gif
[[3], [5], [3,1], [2,1], [3,3,4], [2,2,7], [6,1,1], [4,2,2], [1,1], [3,1],
[6], [2,7], [6,3,1], [1,2,2,1,1], [4,1,1,3], [4,2,2], [3,3,1], [3,3],
[3], [2,1]],
[[2], [1,2], [2,3], [2,3], [3,1,1], [2,1,1], [1,1,1,2,2], [1,1,3,1,3],
[2,6,4], [3,3,9,1], [5,3,2], [3,1,2,2], [2,1,7], [3,3,2], [2,4], [2,1,2],
[2,2,1], [2,2], [1], [1]])
def main(args):
from solver.solver_coroutine import test_solver
from solver.brute_force import BruteForceNonogramSolver
test_solver(BruteForceNonogramSolver, easy_puzzle)
test_solver(BruteForceNonogramSolver, ambiguous_puzzle)
from solver.backward_chain_solver import BackwardChainSolver
test_solver(BackwardChainSolver, easy_puzzle)
test_solver(BackwardChainSolver, ambiguous_puzzle)
test_solver(BackwardChainSolver, hard_puzzle)
if __name__ == "__main__":
main(sys.argv)
|
<commit_before>#!/usr/bin/env python
"""Brief demo of nonogram solving."""
import sys
from rules.nonogram import NonogramPuzzle
easy_puzzle = NonogramPuzzle([[1], [1, 1]],
[[1], [1], [1]])
ambiguous_puzzle = NonogramPuzzle([[1], [1]],
[[1], [1]])
def main(args):
from solver.solver_coroutine import test_solver
from solver.brute_force import BruteForceNonogramSolver
test_solver(BruteForceNonogramSolver, easy_puzzle)
test_solver(BruteForceNonogramSolver, ambiguous_puzzle)
from solver.backward_chain_solver import BackwardChainSolver
test_solver(BackwardChainSolver, easy_puzzle)
test_solver(BackwardChainSolver, ambiguous_puzzle)
if __name__ == "__main__":
main(sys.argv)
<commit_msg>Add a hard puzzle not yet correctly solved<commit_after>
|
#!/usr/bin/env python
"""Brief demo of nonogram solving."""
import sys
from rules.nonogram import NonogramPuzzle
easy_puzzle = NonogramPuzzle([[1], [1, 1]],
[[1], [1], [1]])
ambiguous_puzzle = NonogramPuzzle([[1], [1]],
[[1], [1]])
hard_puzzle = NonogramPuzzle(
# https://commons.wikimedia.org/wiki/File:Paint_by_numbers_Animation.gif
[[3], [5], [3,1], [2,1], [3,3,4], [2,2,7], [6,1,1], [4,2,2], [1,1], [3,1],
[6], [2,7], [6,3,1], [1,2,2,1,1], [4,1,1,3], [4,2,2], [3,3,1], [3,3],
[3], [2,1]],
[[2], [1,2], [2,3], [2,3], [3,1,1], [2,1,1], [1,1,1,2,2], [1,1,3,1,3],
[2,6,4], [3,3,9,1], [5,3,2], [3,1,2,2], [2,1,7], [3,3,2], [2,4], [2,1,2],
[2,2,1], [2,2], [1], [1]])
def main(args):
from solver.solver_coroutine import test_solver
from solver.brute_force import BruteForceNonogramSolver
test_solver(BruteForceNonogramSolver, easy_puzzle)
test_solver(BruteForceNonogramSolver, ambiguous_puzzle)
from solver.backward_chain_solver import BackwardChainSolver
test_solver(BackwardChainSolver, easy_puzzle)
test_solver(BackwardChainSolver, ambiguous_puzzle)
test_solver(BackwardChainSolver, hard_puzzle)
if __name__ == "__main__":
main(sys.argv)
|
#!/usr/bin/env python
"""Brief demo of nonogram solving."""
import sys
from rules.nonogram import NonogramPuzzle
easy_puzzle = NonogramPuzzle([[1], [1, 1]],
[[1], [1], [1]])
ambiguous_puzzle = NonogramPuzzle([[1], [1]],
[[1], [1]])
def main(args):
from solver.solver_coroutine import test_solver
from solver.brute_force import BruteForceNonogramSolver
test_solver(BruteForceNonogramSolver, easy_puzzle)
test_solver(BruteForceNonogramSolver, ambiguous_puzzle)
from solver.backward_chain_solver import BackwardChainSolver
test_solver(BackwardChainSolver, easy_puzzle)
test_solver(BackwardChainSolver, ambiguous_puzzle)
if __name__ == "__main__":
main(sys.argv)
Add a hard puzzle not yet correctly solved#!/usr/bin/env python
"""Brief demo of nonogram solving."""
import sys
from rules.nonogram import NonogramPuzzle
easy_puzzle = NonogramPuzzle([[1], [1, 1]],
[[1], [1], [1]])
ambiguous_puzzle = NonogramPuzzle([[1], [1]],
[[1], [1]])
hard_puzzle = NonogramPuzzle(
# https://commons.wikimedia.org/wiki/File:Paint_by_numbers_Animation.gif
[[3], [5], [3,1], [2,1], [3,3,4], [2,2,7], [6,1,1], [4,2,2], [1,1], [3,1],
[6], [2,7], [6,3,1], [1,2,2,1,1], [4,1,1,3], [4,2,2], [3,3,1], [3,3],
[3], [2,1]],
[[2], [1,2], [2,3], [2,3], [3,1,1], [2,1,1], [1,1,1,2,2], [1,1,3,1,3],
[2,6,4], [3,3,9,1], [5,3,2], [3,1,2,2], [2,1,7], [3,3,2], [2,4], [2,1,2],
[2,2,1], [2,2], [1], [1]])
def main(args):
from solver.solver_coroutine import test_solver
from solver.brute_force import BruteForceNonogramSolver
test_solver(BruteForceNonogramSolver, easy_puzzle)
test_solver(BruteForceNonogramSolver, ambiguous_puzzle)
from solver.backward_chain_solver import BackwardChainSolver
test_solver(BackwardChainSolver, easy_puzzle)
test_solver(BackwardChainSolver, ambiguous_puzzle)
test_solver(BackwardChainSolver, hard_puzzle)
if __name__ == "__main__":
main(sys.argv)
|
<commit_before>#!/usr/bin/env python
"""Brief demo of nonogram solving."""
import sys
from rules.nonogram import NonogramPuzzle
easy_puzzle = NonogramPuzzle([[1], [1, 1]],
[[1], [1], [1]])
ambiguous_puzzle = NonogramPuzzle([[1], [1]],
[[1], [1]])
def main(args):
from solver.solver_coroutine import test_solver
from solver.brute_force import BruteForceNonogramSolver
test_solver(BruteForceNonogramSolver, easy_puzzle)
test_solver(BruteForceNonogramSolver, ambiguous_puzzle)
from solver.backward_chain_solver import BackwardChainSolver
test_solver(BackwardChainSolver, easy_puzzle)
test_solver(BackwardChainSolver, ambiguous_puzzle)
if __name__ == "__main__":
main(sys.argv)
<commit_msg>Add a hard puzzle not yet correctly solved<commit_after>#!/usr/bin/env python
"""Brief demo of nonogram solving."""
import sys
from rules.nonogram import NonogramPuzzle
easy_puzzle = NonogramPuzzle([[1], [1, 1]],
[[1], [1], [1]])
ambiguous_puzzle = NonogramPuzzle([[1], [1]],
[[1], [1]])
hard_puzzle = NonogramPuzzle(
# https://commons.wikimedia.org/wiki/File:Paint_by_numbers_Animation.gif
[[3], [5], [3,1], [2,1], [3,3,4], [2,2,7], [6,1,1], [4,2,2], [1,1], [3,1],
[6], [2,7], [6,3,1], [1,2,2,1,1], [4,1,1,3], [4,2,2], [3,3,1], [3,3],
[3], [2,1]],
[[2], [1,2], [2,3], [2,3], [3,1,1], [2,1,1], [1,1,1,2,2], [1,1,3,1,3],
[2,6,4], [3,3,9,1], [5,3,2], [3,1,2,2], [2,1,7], [3,3,2], [2,4], [2,1,2],
[2,2,1], [2,2], [1], [1]])
def main(args):
from solver.solver_coroutine import test_solver
from solver.brute_force import BruteForceNonogramSolver
test_solver(BruteForceNonogramSolver, easy_puzzle)
test_solver(BruteForceNonogramSolver, ambiguous_puzzle)
from solver.backward_chain_solver import BackwardChainSolver
test_solver(BackwardChainSolver, easy_puzzle)
test_solver(BackwardChainSolver, ambiguous_puzzle)
test_solver(BackwardChainSolver, hard_puzzle)
if __name__ == "__main__":
main(sys.argv)
|
6e6fed456ff9c641292933f87c99af8be6823e3f
|
src/main.py
|
src/main.py
|
from rules import ascii_code_rule
import utils
import pprint
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import homogeneize_latex_encoding
DEFAULT = 'default'
pp = pprint.PrettyPrinter()
def main(file_name, output='default'):
with open(file_name) as bibtex_file:
parser = BibTexParser()
parser.customization = homogeneize_latex_encoding
bib_database = bibtexparser.load(bibtex_file, parser=parser)
for entry in bib_database.entries:
result = ascii_code_rule.test(entry)
if not result["passed"]:
print 'The following item seems suspicious according to the rule:'
print result['error']
print 'Do you want to keep it? (y/n)'
print '-' * 32
utils.print_entry(entry)
print entry
print '\n'
|
from rules import ascii_code_rule, no_short_title_rule, enforce_year_rule, no_super_long_title_rule
import utils
import pprint
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import homogeneize_latex_encoding
DEFAULT = 'default'
SCHEMAS = {
'ASCII_CODE_RULE': ascii_code_rule.test,
'NO_SHORT_TITLE_RULE': no_short_title_rule.test,
'ENFORCE_YEAR_RULE': enforce_year_rule.test,
'NO_SUPER_LONG_TITLE_RULE': no_super_long_title_rule.test
}
def get_yes_no():
while True:
answer = raw_input()
if answer.lower() in ['y', 'yes', 'n', 'no']:
return answer.lower() in ['y', 'yes']
else:
print "Please enter either y or n"
def retain_after_matching_schemas(entry):
results = {}
for rule, tester in SCHEMAS.iteritems():
results[rule] = tester(entry)
if not all(results[rule]['passed'] for rule in results):
print 'The following item seems suspicious according to the following rule(s):'
for rule, result in results.iteritems():
if not result['passed']:
print rule + ': ' + result['error']
print 'Do you want to keep it? (y/n)'
print '-' * 32
utils.print_entry(entry)
print '\n'
return get_yes_no()
else:
return True
def main(file_name, output='default'):
with open(file_name) as bibtex_file:
parser = BibTexParser()
parser.customization = homogeneize_latex_encoding
bib_database = bibtexparser.load(bibtex_file, parser=parser)
toRetain = []
toDump = []
# Filter out entries violating schemas
for entry in bib_database.entries:
if retain_after_matching_schemas(entry):
toRetain.append(entry)
else:
toDump.append(entry)
print toDump
|
Implement filter based on user input
|
Implement filter based on user input
|
Python
|
mit
|
DanielCMS/bibtex-cleaner
|
from rules import ascii_code_rule
import utils
import pprint
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import homogeneize_latex_encoding
DEFAULT = 'default'
pp = pprint.PrettyPrinter()
def main(file_name, output='default'):
with open(file_name) as bibtex_file:
parser = BibTexParser()
parser.customization = homogeneize_latex_encoding
bib_database = bibtexparser.load(bibtex_file, parser=parser)
for entry in bib_database.entries:
result = ascii_code_rule.test(entry)
if not result["passed"]:
print 'The following item seems suspicious according to the rule:'
print result['error']
print 'Do you want to keep it? (y/n)'
print '-' * 32
utils.print_entry(entry)
print entry
print '\n'
Implement filter based on user input
|
from rules import ascii_code_rule, no_short_title_rule, enforce_year_rule, no_super_long_title_rule
import utils
import pprint
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import homogeneize_latex_encoding
DEFAULT = 'default'
SCHEMAS = {
'ASCII_CODE_RULE': ascii_code_rule.test,
'NO_SHORT_TITLE_RULE': no_short_title_rule.test,
'ENFORCE_YEAR_RULE': enforce_year_rule.test,
'NO_SUPER_LONG_TITLE_RULE': no_super_long_title_rule.test
}
def get_yes_no():
while True:
answer = raw_input()
if answer.lower() in ['y', 'yes', 'n', 'no']:
return answer.lower() in ['y', 'yes']
else:
print "Please enter either y or n"
def retain_after_matching_schemas(entry):
results = {}
for rule, tester in SCHEMAS.iteritems():
results[rule] = tester(entry)
if not all(results[rule]['passed'] for rule in results):
print 'The following item seems suspicious according to the following rule(s):'
for rule, result in results.iteritems():
if not result['passed']:
print rule + ': ' + result['error']
print 'Do you want to keep it? (y/n)'
print '-' * 32
utils.print_entry(entry)
print '\n'
return get_yes_no()
else:
return True
def main(file_name, output='default'):
with open(file_name) as bibtex_file:
parser = BibTexParser()
parser.customization = homogeneize_latex_encoding
bib_database = bibtexparser.load(bibtex_file, parser=parser)
toRetain = []
toDump = []
# Filter out entries violating schemas
for entry in bib_database.entries:
if retain_after_matching_schemas(entry):
toRetain.append(entry)
else:
toDump.append(entry)
print toDump
|
<commit_before>from rules import ascii_code_rule
import utils
import pprint
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import homogeneize_latex_encoding
DEFAULT = 'default'
pp = pprint.PrettyPrinter()
def main(file_name, output='default'):
with open(file_name) as bibtex_file:
parser = BibTexParser()
parser.customization = homogeneize_latex_encoding
bib_database = bibtexparser.load(bibtex_file, parser=parser)
for entry in bib_database.entries:
result = ascii_code_rule.test(entry)
if not result["passed"]:
print 'The following item seems suspicious according to the rule:'
print result['error']
print 'Do you want to keep it? (y/n)'
print '-' * 32
utils.print_entry(entry)
print entry
print '\n'
<commit_msg>Implement filter based on user input<commit_after>
|
from rules import ascii_code_rule, no_short_title_rule, enforce_year_rule, no_super_long_title_rule
import utils
import pprint
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import homogeneize_latex_encoding
DEFAULT = 'default'
SCHEMAS = {
'ASCII_CODE_RULE': ascii_code_rule.test,
'NO_SHORT_TITLE_RULE': no_short_title_rule.test,
'ENFORCE_YEAR_RULE': enforce_year_rule.test,
'NO_SUPER_LONG_TITLE_RULE': no_super_long_title_rule.test
}
def get_yes_no():
while True:
answer = raw_input()
if answer.lower() in ['y', 'yes', 'n', 'no']:
return answer.lower() in ['y', 'yes']
else:
print "Please enter either y or n"
def retain_after_matching_schemas(entry):
results = {}
for rule, tester in SCHEMAS.iteritems():
results[rule] = tester(entry)
if not all(results[rule]['passed'] for rule in results):
print 'The following item seems suspicious according to the following rule(s):'
for rule, result in results.iteritems():
if not result['passed']:
print rule + ': ' + result['error']
print 'Do you want to keep it? (y/n)'
print '-' * 32
utils.print_entry(entry)
print '\n'
return get_yes_no()
else:
return True
def main(file_name, output='default'):
with open(file_name) as bibtex_file:
parser = BibTexParser()
parser.customization = homogeneize_latex_encoding
bib_database = bibtexparser.load(bibtex_file, parser=parser)
toRetain = []
toDump = []
# Filter out entries violating schemas
for entry in bib_database.entries:
if retain_after_matching_schemas(entry):
toRetain.append(entry)
else:
toDump.append(entry)
print toDump
|
from rules import ascii_code_rule
import utils
import pprint
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import homogeneize_latex_encoding
DEFAULT = 'default'
pp = pprint.PrettyPrinter()
def main(file_name, output='default'):
with open(file_name) as bibtex_file:
parser = BibTexParser()
parser.customization = homogeneize_latex_encoding
bib_database = bibtexparser.load(bibtex_file, parser=parser)
for entry in bib_database.entries:
result = ascii_code_rule.test(entry)
if not result["passed"]:
print 'The following item seems suspicious according to the rule:'
print result['error']
print 'Do you want to keep it? (y/n)'
print '-' * 32
utils.print_entry(entry)
print entry
print '\n'
Implement filter based on user inputfrom rules import ascii_code_rule, no_short_title_rule, enforce_year_rule, no_super_long_title_rule
import utils
import pprint
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import homogeneize_latex_encoding
DEFAULT = 'default'
SCHEMAS = {
'ASCII_CODE_RULE': ascii_code_rule.test,
'NO_SHORT_TITLE_RULE': no_short_title_rule.test,
'ENFORCE_YEAR_RULE': enforce_year_rule.test,
'NO_SUPER_LONG_TITLE_RULE': no_super_long_title_rule.test
}
def get_yes_no():
while True:
answer = raw_input()
if answer.lower() in ['y', 'yes', 'n', 'no']:
return answer.lower() in ['y', 'yes']
else:
print "Please enter either y or n"
def retain_after_matching_schemas(entry):
results = {}
for rule, tester in SCHEMAS.iteritems():
results[rule] = tester(entry)
if not all(results[rule]['passed'] for rule in results):
print 'The following item seems suspicious according to the following rule(s):'
for rule, result in results.iteritems():
if not result['passed']:
print rule + ': ' + result['error']
print 'Do you want to keep it? (y/n)'
print '-' * 32
utils.print_entry(entry)
print '\n'
return get_yes_no()
else:
return True
def main(file_name, output='default'):
with open(file_name) as bibtex_file:
parser = BibTexParser()
parser.customization = homogeneize_latex_encoding
bib_database = bibtexparser.load(bibtex_file, parser=parser)
toRetain = []
toDump = []
# Filter out entries violating schemas
for entry in bib_database.entries:
if retain_after_matching_schemas(entry):
toRetain.append(entry)
else:
toDump.append(entry)
print toDump
|
<commit_before>from rules import ascii_code_rule
import utils
import pprint
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import homogeneize_latex_encoding
DEFAULT = 'default'
pp = pprint.PrettyPrinter()
def main(file_name, output='default'):
with open(file_name) as bibtex_file:
parser = BibTexParser()
parser.customization = homogeneize_latex_encoding
bib_database = bibtexparser.load(bibtex_file, parser=parser)
for entry in bib_database.entries:
result = ascii_code_rule.test(entry)
if not result["passed"]:
print 'The following item seems suspicious according to the rule:'
print result['error']
print 'Do you want to keep it? (y/n)'
print '-' * 32
utils.print_entry(entry)
print entry
print '\n'
<commit_msg>Implement filter based on user input<commit_after>from rules import ascii_code_rule, no_short_title_rule, enforce_year_rule, no_super_long_title_rule
import utils
import pprint
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import homogeneize_latex_encoding
DEFAULT = 'default'
SCHEMAS = {
'ASCII_CODE_RULE': ascii_code_rule.test,
'NO_SHORT_TITLE_RULE': no_short_title_rule.test,
'ENFORCE_YEAR_RULE': enforce_year_rule.test,
'NO_SUPER_LONG_TITLE_RULE': no_super_long_title_rule.test
}
def get_yes_no():
while True:
answer = raw_input()
if answer.lower() in ['y', 'yes', 'n', 'no']:
return answer.lower() in ['y', 'yes']
else:
print "Please enter either y or n"
def retain_after_matching_schemas(entry):
results = {}
for rule, tester in SCHEMAS.iteritems():
results[rule] = tester(entry)
if not all(results[rule]['passed'] for rule in results):
print 'The following item seems suspicious according to the following rule(s):'
for rule, result in results.iteritems():
if not result['passed']:
print rule + ': ' + result['error']
print 'Do you want to keep it? (y/n)'
print '-' * 32
utils.print_entry(entry)
print '\n'
return get_yes_no()
else:
return True
def main(file_name, output='default'):
with open(file_name) as bibtex_file:
parser = BibTexParser()
parser.customization = homogeneize_latex_encoding
bib_database = bibtexparser.load(bibtex_file, parser=parser)
toRetain = []
toDump = []
# Filter out entries violating schemas
for entry in bib_database.entries:
if retain_after_matching_schemas(entry):
toRetain.append(entry)
else:
toDump.append(entry)
print toDump
|
79d8916aecc95919fa77ddd845fdd3e0a7e0c4d9
|
src/ngin.py
|
src/ngin.py
|
import argparse
from string import Template
"""
Subclassing template class
"""
class NginTemplate(Template):
delimiter = '#'
"""
Reverse Proxy Template
"""
reverse_proxy_template = """
server {
listen 80;
server_name #{server_name};
access_log /var/log/nginx/#{server_name}.access.log;
error_log /var/log/nginx/#{server_name}.error.log;
location / {
proxy_pass #{proxy_pass};
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
}"""
"""
Initiate argparse
"""
parser = argparse.ArgumentParser()
"""
Add arguments
"""
parser.add_argument("-r", "--revproxy", help="reverse proxy", action="store_true")
parser.add_argument("-n", "--name", help="server name or domain name", action="store")
parser.add_argument("-p", "--proxypass", help="proxy pass server", action="store")
"""
Parsing arguments
"""
args = parser.parse_args()
"""
Reverse proxy config generator
Usage Example: ngin.py -r -n example.com -p http://localhost:9000
"""
if args.revproxy:
if args.name is None or args.proxypass is None:
raise SystemExit('Name and Pass is required!')
params = {'server_name': args.name, 'proxy_pass': args.proxypass}
conf = NginTemplate(reverse_proxy_template).safe_substitute(params)
print conf
|
import argparse
from nginx_conf import server, reverse_proxy
from nginx_blocks import make_location_block, make_server_block
from utils import to_nginx_template, make_indent
"""
Initiate argparse
"""
parser = argparse.ArgumentParser()
"""
Add arguments
"""
parser.add_argument("-r", "--revproxy", help="reverse proxy", action="store_true")
parser.add_argument("-n", "--name", help="server name or domain name", action="store")
parser.add_argument("-p", "--proxypass", help="proxy pass server", action="store")
"""
Parsing arguments
"""
args = parser.parse_args()
"""
Reverse proxy config generator
Usage Example: ngin.py -r -n example.com -p http://localhost:9000
"""
if args.revproxy:
if args.name is None or args.proxypass is None:
raise SystemExit('Name and Pass is required!')
server['server_name'] = args.name
reverse_proxy['proxy_pass'] = args.proxypass
location = make_location_block(to_nginx_template(reverse_proxy), '/')
server = to_nginx_template(server)
conf = make_server_block('{} {}'.format(server, location))
print make_indent(conf)
|
Remove old stuff with dynamic config generator
|
Remove old stuff with dynamic config generator
|
Python
|
mit
|
thesabbir/nginpro
|
import argparse
from string import Template
"""
Subclassing template class
"""
class NginTemplate(Template):
delimiter = '#'
"""
Reverse Proxy Template
"""
reverse_proxy_template = """
server {
listen 80;
server_name #{server_name};
access_log /var/log/nginx/#{server_name}.access.log;
error_log /var/log/nginx/#{server_name}.error.log;
location / {
proxy_pass #{proxy_pass};
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
}"""
"""
Initiate argparse
"""
parser = argparse.ArgumentParser()
"""
Add arguments
"""
parser.add_argument("-r", "--revproxy", help="reverse proxy", action="store_true")
parser.add_argument("-n", "--name", help="server name or domain name", action="store")
parser.add_argument("-p", "--proxypass", help="proxy pass server", action="store")
"""
Parsing arguments
"""
args = parser.parse_args()
"""
Reverse proxy config generator
Usage Example: ngin.py -r -n example.com -p http://localhost:9000
"""
if args.revproxy:
if args.name is None or args.proxypass is None:
raise SystemExit('Name and Pass is required!')
params = {'server_name': args.name, 'proxy_pass': args.proxypass}
conf = NginTemplate(reverse_proxy_template).safe_substitute(params)
print conf
Remove old stuff with dynamic config generator
|
import argparse
from nginx_conf import server, reverse_proxy
from nginx_blocks import make_location_block, make_server_block
from utils import to_nginx_template, make_indent
"""
Initiate argparse
"""
parser = argparse.ArgumentParser()
"""
Add arguments
"""
parser.add_argument("-r", "--revproxy", help="reverse proxy", action="store_true")
parser.add_argument("-n", "--name", help="server name or domain name", action="store")
parser.add_argument("-p", "--proxypass", help="proxy pass server", action="store")
"""
Parsing arguments
"""
args = parser.parse_args()
"""
Reverse proxy config generator
Usage Example: ngin.py -r -n example.com -p http://localhost:9000
"""
if args.revproxy:
if args.name is None or args.proxypass is None:
raise SystemExit('Name and Pass is required!')
server['server_name'] = args.name
reverse_proxy['proxy_pass'] = args.proxypass
location = make_location_block(to_nginx_template(reverse_proxy), '/')
server = to_nginx_template(server)
conf = make_server_block('{} {}'.format(server, location))
print make_indent(conf)
|
<commit_before>import argparse
from string import Template
"""
Subclassing template class
"""
class NginTemplate(Template):
delimiter = '#'
"""
Reverse Proxy Template
"""
reverse_proxy_template = """
server {
listen 80;
server_name #{server_name};
access_log /var/log/nginx/#{server_name}.access.log;
error_log /var/log/nginx/#{server_name}.error.log;
location / {
proxy_pass #{proxy_pass};
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
}"""
"""
Initiate argparse
"""
parser = argparse.ArgumentParser()
"""
Add arguments
"""
parser.add_argument("-r", "--revproxy", help="reverse proxy", action="store_true")
parser.add_argument("-n", "--name", help="server name or domain name", action="store")
parser.add_argument("-p", "--proxypass", help="proxy pass server", action="store")
"""
Parsing arguments
"""
args = parser.parse_args()
"""
Reverse proxy config generator
Usage Example: ngin.py -r -n example.com -p http://localhost:9000
"""
if args.revproxy:
if args.name is None or args.proxypass is None:
raise SystemExit('Name and Pass is required!')
params = {'server_name': args.name, 'proxy_pass': args.proxypass}
conf = NginTemplate(reverse_proxy_template).safe_substitute(params)
print conf
<commit_msg>Remove old stuff with dynamic config generator<commit_after>
|
import argparse

from nginx_conf import server, reverse_proxy
from nginx_blocks import make_location_block, make_server_block
from utils import to_nginx_template, make_indent


def main():
    """Parse CLI arguments and print a reverse-proxy nginx server block.

    Usage example: ngin.py -r -n example.com -p http://localhost:9000
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-r", "--revproxy", help="reverse proxy",
                        action="store_true")
    parser.add_argument("-n", "--name", help="server name or domain name",
                        action="store")
    parser.add_argument("-p", "--proxypass", help="proxy pass server",
                        action="store")
    args = parser.parse_args()

    if args.revproxy:
        # Both the domain and the upstream address are mandatory.
        if args.name is None or args.proxypass is None:
            raise SystemExit('Name and Pass is required!')
        # Fill in the shared config dicts, then render: the '/' location
        # carries the proxy directives and is wrapped together with the
        # server-level directives in a server {} block.
        server['server_name'] = args.name
        reverse_proxy['proxy_pass'] = args.proxypass
        location = make_location_block(to_nginx_template(reverse_proxy), '/')
        # Distinct name: the original rebinding of `server` shadowed the
        # imported config dict.
        rendered_server = to_nginx_template(server)
        conf = make_server_block('{} {}'.format(rendered_server, location))
        # Parenthesized single argument: valid under Python 2 and 3.
        print(make_indent(conf))


# Guarded entry point: importing this module no longer parses sys.argv.
if __name__ == "__main__":
    main()
|
import argparse
from string import Template


class NginTemplate(Template):
    """string.Template subclass using '#' as the placeholder delimiter.

    Nginx configs use '$' heavily (e.g. $remote_addr), so the default
    Template delimiter would collide; '#{name}' placeholders avoid that.
    """
    delimiter = '#'


# Template for a simple reverse-proxy server block.  Placeholders use the
# '#{name}' form understood by NginTemplate above; '$nginx_vars' pass through
# untouched thanks to safe_substitute and the non-'$' delimiter.
reverse_proxy_template = """
server {
    listen 80;
    server_name #{server_name};

    access_log /var/log/nginx/#{server_name}.access.log;
    error_log /var/log/nginx/#{server_name}.error.log;

    location / {
        proxy_pass #{proxy_pass};
        proxy_set_header Host $http_host;
        proxy_set_header X-Real-IP $remote_addr;
        proxy_set_header X-Forwarded-Proto $scheme;
        proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
    }
}"""


def build_reverse_proxy_conf(server_name, proxy_pass):
    """Return the rendered reverse-proxy config for *server_name*/*proxy_pass*."""
    params = {'server_name': server_name, 'proxy_pass': proxy_pass}
    return NginTemplate(reverse_proxy_template).safe_substitute(params)


def main():
    """Parse CLI arguments and print the requested nginx config.

    Usage example: ngin.py -r -n example.com -p http://localhost:9000
    """
    parser = argparse.ArgumentParser()
    parser.add_argument("-r", "--revproxy", help="reverse proxy",
                        action="store_true")
    parser.add_argument("-n", "--name", help="server name or domain name",
                        action="store")
    parser.add_argument("-p", "--proxypass", help="proxy pass server",
                        action="store")
    args = parser.parse_args()

    if args.revproxy:
        # Both the domain and the upstream address are mandatory.
        if args.name is None or args.proxypass is None:
            raise SystemExit('Name and Pass is required!')
        # Parenthesized single argument: valid under Python 2 and 3.
        print(build_reverse_proxy_conf(args.name, args.proxypass))


# Guarded entry point: importing this module no longer parses sys.argv.
if __name__ == "__main__":
    main()
Remove old stuff with dynamic config generatorimport argparse
from nginx_conf import server, reverse_proxy
from nginx_blocks import make_location_block, make_server_block
from utils import to_nginx_template, make_indent
"""
Initiate argparse
"""
parser = argparse.ArgumentParser()
"""
Add arguments
"""
parser.add_argument("-r", "--revproxy", help="reverse proxy", action="store_true")
parser.add_argument("-n", "--name", help="server name or domain name", action="store")
parser.add_argument("-p", "--proxypass", help="proxy pass server", action="store")
"""
Parsing arguments
"""
args = parser.parse_args()
"""
Reverse proxy config generator
Usage Example: ngin.py -r -n example.com -p http://localhost:9000
"""
if args.revproxy:
if args.name is None or args.proxypass is None:
raise SystemExit('Name and Pass is required!')
server['server_name'] = args.name
reverse_proxy['proxy_pass'] = args.proxypass
location = make_location_block(to_nginx_template(reverse_proxy), '/')
server = to_nginx_template(server)
conf = make_server_block('{} {}'.format(server, location))
print make_indent(conf)
|
<commit_before>import argparse
from string import Template
"""
Subclassing template class
"""
class NginTemplate(Template):
delimiter = '#'
"""
Reverse Proxy Template
"""
reverse_proxy_template = """
server {
listen 80;
server_name #{server_name};
access_log /var/log/nginx/#{server_name}.access.log;
error_log /var/log/nginx/#{server_name}.error.log;
location / {
proxy_pass #{proxy_pass};
proxy_set_header Host $http_host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
}
}"""
"""
Initiate argparse
"""
parser = argparse.ArgumentParser()
"""
Add arguments
"""
parser.add_argument("-r", "--revproxy", help="reverse proxy", action="store_true")
parser.add_argument("-n", "--name", help="server name or domain name", action="store")
parser.add_argument("-p", "--proxypass", help="proxy pass server", action="store")
"""
Parsing arguments
"""
args = parser.parse_args()
"""
Reverse proxy config generator
Usage Example: ngin.py -r -n example.com -p http://localhost:9000
"""
if args.revproxy:
if args.name is None or args.proxypass is None:
raise SystemExit('Name and Pass is required!')
params = {'server_name': args.name, 'proxy_pass': args.proxypass}
conf = NginTemplate(reverse_proxy_template).safe_substitute(params)
print conf
<commit_msg>Remove old stuff with dynamic config generator<commit_after>import argparse
from nginx_conf import server, reverse_proxy
from nginx_blocks import make_location_block, make_server_block
from utils import to_nginx_template, make_indent
"""
Initiate argparse
"""
parser = argparse.ArgumentParser()
"""
Add arguments
"""
parser.add_argument("-r", "--revproxy", help="reverse proxy", action="store_true")
parser.add_argument("-n", "--name", help="server name or domain name", action="store")
parser.add_argument("-p", "--proxypass", help="proxy pass server", action="store")
"""
Parsing arguments
"""
args = parser.parse_args()
"""
Reverse proxy config generator
Usage Example: ngin.py -r -n example.com -p http://localhost:9000
"""
if args.revproxy:
if args.name is None or args.proxypass is None:
raise SystemExit('Name and Pass is required!')
server['server_name'] = args.name
reverse_proxy['proxy_pass'] = args.proxypass
location = make_location_block(to_nginx_template(reverse_proxy), '/')
server = to_nginx_template(server)
conf = make_server_block('{} {}'.format(server, location))
print make_indent(conf)
|
e86bf68cb3454e203b5f077bc302151b30294a9d
|
opps/articles/templatetags/article_tags.py
|
opps/articles/templatetags/article_tags.py
|
# -*- coding: utf-8 -*-
from django import template
from django.conf import settings
from django.utils import timezone

from .models import ArticleBox

register = template.Library()


@register.simple_tag
def get_articlebox(slug, channel_slug=None, template_name=None):
    """Render a single published ArticleBox identified by *slug*.

    When *channel_slug* is given, the lookup slug becomes
    '<slug>-<channel_slug>'.  Renders 'articles/articlebox_detail.html'
    unless *template_name* overrides it.
    """
    if channel_slug:
        # u'' prefix: formatting a unicode channel slug into a byte string
        # raises UnicodeEncodeError under Python 2 for non-ASCII slugs.
        slug = u"{0}-{1}".format(slug, channel_slug)

    try:
        box = ArticleBox.objects.get(site=settings.SITE_ID, slug=slug,
                                     date_available__lte=timezone.now(),
                                     published=True)
    except ArticleBox.DoesNotExist:
        # Missing box is not an error: the template renders with
        # articlebox=None in the context.
        box = None

    t = template.loader.get_template('articles/articlebox_detail.html')
    if template_name:
        t = template.loader.get_template(template_name)
    return t.render(template.Context({'articlebox': box, 'slug': slug}))


@register.simple_tag
def get_all_articlebox(channel_slug, template_name=None):
    """Render every published ArticleBox belonging to *channel_slug*."""
    boxes = ArticleBox.objects.filter(site=settings.SITE_ID,
                                      date_available__lte=timezone.now(),
                                      published=True,
                                      channel__slug=channel_slug)

    t = template.loader.get_template('articles/articlebox_list.html')
    if template_name:
        t = template.loader.get_template(template_name)
    return t.render(template.Context({'articleboxes': boxes}))
|
# -*- coding: utf-8 -*-
from django import template
from django.conf import settings
from django.utils import timezone

from opps.articles.models import ArticleBox

register = template.Library()


@register.simple_tag
def get_articlebox(slug, channel_slug=None, template_name=None):
    """Render the published ArticleBox whose slug matches.

    A *channel_slug* is appended to the slug ('<slug>-<channel_slug>')
    before the lookup; *template_name* swaps the default detail template.
    """
    if channel_slug:
        slug = u"{0}-{1}".format(slug, channel_slug)

    try:
        articlebox = ArticleBox.objects.get(
            site=settings.SITE_ID, slug=slug,
            date_available__lte=timezone.now(),
            published=True)
    except ArticleBox.DoesNotExist:
        articlebox = None

    tmpl = template.loader.get_template('articles/articlebox_detail.html')
    if template_name:
        tmpl = template.loader.get_template(template_name)

    context = template.Context({'articlebox': articlebox, 'slug': slug})
    return tmpl.render(context)


@register.simple_tag
def get_all_articlebox(channel_slug, template_name=None):
    """Render the list of published ArticleBoxes for one channel."""
    articleboxes = ArticleBox.objects.filter(
        site=settings.SITE_ID,
        date_available__lte=timezone.now(),
        published=True,
        channel__slug=channel_slug)

    tmpl = template.loader.get_template('articles/articlebox_list.html')
    if template_name:
        tmpl = template.loader.get_template(template_name)

    return tmpl.render(template.Context({'articleboxes': articleboxes}))
|
Fix slug unicode on template tag article get ArticleBox
|
Fix slug unicode on template tag article get ArticleBox
|
Python
|
mit
|
YACOWS/opps,jeanmask/opps,YACOWS/opps,YACOWS/opps,opps/opps,williamroot/opps,williamroot/opps,opps/opps,williamroot/opps,jeanmask/opps,jeanmask/opps,YACOWS/opps,williamroot/opps,jeanmask/opps,opps/opps,opps/opps
|
# -*- coding: utf-8 -*-
from django import template
from django.conf import settings
from django.utils import timezone
from .models import ArticleBox
register = template.Library()
@register.simple_tag
def get_articlebox(slug, channel_slug=None, template_name=None):
if channel_slug:
slug = "{0}-{1}".format(slug, channel_slug)
try:
box = ArticleBox.objects.get(site=settings.SITE_ID, slug=slug,
date_available__lte=timezone.now(),
published=True)
except ArticleBox.DoesNotExist:
box = None
t = template.loader.get_template('articles/articlebox_detail.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articlebox': box, 'slug': slug}))
@register.simple_tag
def get_all_articlebox(channel_slug, template_name=None):
boxes = ArticleBox.objects.filter(site=settings.SITE_ID,
date_available__lte=timezone.now(),
published=True,
channel__slug=channel_slug)
t = template.loader.get_template('articles/articlebox_list.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articleboxes': boxes}))
Fix slug unicode on template tag article get ArticleBox
|
# -*- coding: utf-8 -*-
from django import template
from django.conf import settings
from django.utils import timezone
from opps.articles.models import ArticleBox
register = template.Library()
@register.simple_tag
def get_articlebox(slug, channel_slug=None, template_name=None):
if channel_slug:
slug = u"{0}-{1}".format(slug, channel_slug)
try:
box = ArticleBox.objects.get(site=settings.SITE_ID, slug=slug,
date_available__lte=timezone.now(),
published=True)
except ArticleBox.DoesNotExist:
box = None
t = template.loader.get_template('articles/articlebox_detail.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articlebox': box, 'slug': slug}))
@register.simple_tag
def get_all_articlebox(channel_slug, template_name=None):
boxes = ArticleBox.objects.filter(site=settings.SITE_ID,
date_available__lte=timezone.now(),
published=True,
channel__slug=channel_slug)
t = template.loader.get_template('articles/articlebox_list.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articleboxes': boxes}))
|
<commit_before># -*- coding: utf-8 -*-
from django import template
from django.conf import settings
from django.utils import timezone
from .models import ArticleBox
register = template.Library()
@register.simple_tag
def get_articlebox(slug, channel_slug=None, template_name=None):
if channel_slug:
slug = "{0}-{1}".format(slug, channel_slug)
try:
box = ArticleBox.objects.get(site=settings.SITE_ID, slug=slug,
date_available__lte=timezone.now(),
published=True)
except ArticleBox.DoesNotExist:
box = None
t = template.loader.get_template('articles/articlebox_detail.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articlebox': box, 'slug': slug}))
@register.simple_tag
def get_all_articlebox(channel_slug, template_name=None):
boxes = ArticleBox.objects.filter(site=settings.SITE_ID,
date_available__lte=timezone.now(),
published=True,
channel__slug=channel_slug)
t = template.loader.get_template('articles/articlebox_list.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articleboxes': boxes}))
<commit_msg>Fix slug unicode on template tag article get ArticleBox<commit_after>
|
# -*- coding: utf-8 -*-
from django import template
from django.conf import settings
from django.utils import timezone
from opps.articles.models import ArticleBox
register = template.Library()
@register.simple_tag
def get_articlebox(slug, channel_slug=None, template_name=None):
if channel_slug:
slug = u"{0}-{1}".format(slug, channel_slug)
try:
box = ArticleBox.objects.get(site=settings.SITE_ID, slug=slug,
date_available__lte=timezone.now(),
published=True)
except ArticleBox.DoesNotExist:
box = None
t = template.loader.get_template('articles/articlebox_detail.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articlebox': box, 'slug': slug}))
@register.simple_tag
def get_all_articlebox(channel_slug, template_name=None):
boxes = ArticleBox.objects.filter(site=settings.SITE_ID,
date_available__lte=timezone.now(),
published=True,
channel__slug=channel_slug)
t = template.loader.get_template('articles/articlebox_list.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articleboxes': boxes}))
|
# -*- coding: utf-8 -*-
from django import template
from django.conf import settings
from django.utils import timezone
from .models import ArticleBox
register = template.Library()
@register.simple_tag
def get_articlebox(slug, channel_slug=None, template_name=None):
if channel_slug:
slug = "{0}-{1}".format(slug, channel_slug)
try:
box = ArticleBox.objects.get(site=settings.SITE_ID, slug=slug,
date_available__lte=timezone.now(),
published=True)
except ArticleBox.DoesNotExist:
box = None
t = template.loader.get_template('articles/articlebox_detail.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articlebox': box, 'slug': slug}))
@register.simple_tag
def get_all_articlebox(channel_slug, template_name=None):
boxes = ArticleBox.objects.filter(site=settings.SITE_ID,
date_available__lte=timezone.now(),
published=True,
channel__slug=channel_slug)
t = template.loader.get_template('articles/articlebox_list.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articleboxes': boxes}))
Fix slug unicode on template tag article get ArticleBox# -*- coding: utf-8 -*-
from django import template
from django.conf import settings
from django.utils import timezone
from opps.articles.models import ArticleBox
register = template.Library()
@register.simple_tag
def get_articlebox(slug, channel_slug=None, template_name=None):
if channel_slug:
slug = u"{0}-{1}".format(slug, channel_slug)
try:
box = ArticleBox.objects.get(site=settings.SITE_ID, slug=slug,
date_available__lte=timezone.now(),
published=True)
except ArticleBox.DoesNotExist:
box = None
t = template.loader.get_template('articles/articlebox_detail.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articlebox': box, 'slug': slug}))
@register.simple_tag
def get_all_articlebox(channel_slug, template_name=None):
boxes = ArticleBox.objects.filter(site=settings.SITE_ID,
date_available__lte=timezone.now(),
published=True,
channel__slug=channel_slug)
t = template.loader.get_template('articles/articlebox_list.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articleboxes': boxes}))
|
<commit_before># -*- coding: utf-8 -*-
from django import template
from django.conf import settings
from django.utils import timezone
from .models import ArticleBox
register = template.Library()
@register.simple_tag
def get_articlebox(slug, channel_slug=None, template_name=None):
if channel_slug:
slug = "{0}-{1}".format(slug, channel_slug)
try:
box = ArticleBox.objects.get(site=settings.SITE_ID, slug=slug,
date_available__lte=timezone.now(),
published=True)
except ArticleBox.DoesNotExist:
box = None
t = template.loader.get_template('articles/articlebox_detail.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articlebox': box, 'slug': slug}))
@register.simple_tag
def get_all_articlebox(channel_slug, template_name=None):
boxes = ArticleBox.objects.filter(site=settings.SITE_ID,
date_available__lte=timezone.now(),
published=True,
channel__slug=channel_slug)
t = template.loader.get_template('articles/articlebox_list.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articleboxes': boxes}))
<commit_msg>Fix slug unicode on template tag article get ArticleBox<commit_after># -*- coding: utf-8 -*-
from django import template
from django.conf import settings
from django.utils import timezone
from opps.articles.models import ArticleBox
register = template.Library()
@register.simple_tag
def get_articlebox(slug, channel_slug=None, template_name=None):
if channel_slug:
slug = u"{0}-{1}".format(slug, channel_slug)
try:
box = ArticleBox.objects.get(site=settings.SITE_ID, slug=slug,
date_available__lte=timezone.now(),
published=True)
except ArticleBox.DoesNotExist:
box = None
t = template.loader.get_template('articles/articlebox_detail.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articlebox': box, 'slug': slug}))
@register.simple_tag
def get_all_articlebox(channel_slug, template_name=None):
boxes = ArticleBox.objects.filter(site=settings.SITE_ID,
date_available__lte=timezone.now(),
published=True,
channel__slug=channel_slug)
t = template.loader.get_template('articles/articlebox_list.html')
if template_name:
t = template.loader.get_template(template_name)
return t.render(template.Context({'articleboxes': boxes}))
|
195de4097075c7dd53671242822521e07690391e
|
databroker/__init__.py
|
databroker/__init__.py
|
import warnings
import logging

logger = logging.getLogger(__name__)

try:
    # Probe the configuration-dependent module with a single import so a
    # missing configuration is detected precisely; the original wrapped all
    # four imports (and the registration call) in one try, which swallowed
    # any ImportError and left names half-defined after only a warning.
    from .databroker import DataBroker
except ImportError:
    warnings.warn("The top-level functions (get_table, get_events, etc.) "
                  "cannot be created because "
                  "the necessary configuration was not found.")
else:
    from .databroker import (DataBroker, DataBroker as db,
                             get_events, get_table, stream, get_fields,
                             restream, process)
    from .pims_readers import get_images
    from .handler_registration import register_builtin_handlers

    # register all built-in filestore handlers
    register_builtin_handlers()
    del register_builtin_handlers

from .broker import Broker, ArchiverPlugin

# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
import warnings
import logging

logger = logging.getLogger(__name__)

try:
    from .databroker import DataBroker
except ImportError:
    # Configuration is missing: the convenience layer cannot be built.
    warnings.warn("The top-level functions (get_table, get_events, etc.)"
                  "cannot be created because "
                  "the necessary configuration was not found.")
else:
    # Configuration found: expose the full convenience API.
    from .databroker import (DataBroker, DataBroker as db,
                             get_events, get_table, stream, get_fields,
                             restream, process)
    from .pims_readers import get_images
    from .handler_registration import register_builtin_handlers

    # Register all built-in filestore handlers, then drop the helper so it
    # does not leak into the public namespace.
    register_builtin_handlers()
    del register_builtin_handlers

from .broker import Broker, ArchiverPlugin

# Set the version string using versioneer.
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
Isolate import error to DataBroker.
|
REF: Isolate import error to DataBroker.
|
Python
|
bsd-3-clause
|
ericdill/databroker,ericdill/databroker
|
import warnings
import logging

logger = logging.getLogger(__name__)

try:
    # Probe the configuration-dependent module with a single import so a
    # missing configuration is detected precisely; the original wrapped all
    # four imports (and the registration call) in one try, which swallowed
    # any ImportError and left names half-defined after only a warning.
    from .databroker import DataBroker
except ImportError:
    warnings.warn("The top-level functions (get_table, get_events, etc.) "
                  "cannot be created because "
                  "the necessary configuration was not found.")
else:
    from .databroker import (DataBroker, DataBroker as db,
                             get_events, get_table, stream, get_fields,
                             restream, process)
    from .pims_readers import get_images
    from .handler_registration import register_builtin_handlers

    # register all built-in filestore handlers
    register_builtin_handlers()
    del register_builtin_handlers

from .broker import Broker, ArchiverPlugin

# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
REF: Isolate import error to DataBroker.
|
import warnings
import logging
logger = logging.getLogger(__name__)
try:
from .databroker import DataBroker
except ImportError:
warnings.warn("The top-level functions (get_table, get_events, etc.)"
"cannot be created because "
"the necessary configuration was not found.")
else:
from .databroker import (DataBroker, DataBroker as db,
get_events, get_table, stream, get_fields,
restream, process)
from .pims_readers import get_images
from .handler_registration import register_builtin_handlers
# register all built-in filestore handlers
register_builtin_handlers()
del register_builtin_handlers
from .broker import Broker, ArchiverPlugin
# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
<commit_before>import warnings
import logging
logger = logging.getLogger(__name__)
try:
from .databroker import (DataBroker, DataBroker as db,
get_events, get_table, stream, get_fields,
restream, process)
from .pims_readers import get_images
from .handler_registration import register_builtin_handlers
# register all built-in filestore handlers
register_builtin_handlers()
del register_builtin_handlers
except ImportError:
warnings.warn("The top-level functions (get_table, get_events, etc.)"
"cannot be created because "
"the necessary configuration was not found.")
from .broker import Broker, ArchiverPlugin
# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
<commit_msg>REF: Isolate import error to DataBroker.<commit_after>
|
import warnings
import logging
logger = logging.getLogger(__name__)
try:
from .databroker import DataBroker
except ImportError:
warnings.warn("The top-level functions (get_table, get_events, etc.)"
"cannot be created because "
"the necessary configuration was not found.")
else:
from .databroker import (DataBroker, DataBroker as db,
get_events, get_table, stream, get_fields,
restream, process)
from .pims_readers import get_images
from .handler_registration import register_builtin_handlers
# register all built-in filestore handlers
register_builtin_handlers()
del register_builtin_handlers
from .broker import Broker, ArchiverPlugin
# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
import warnings
import logging
logger = logging.getLogger(__name__)
try:
from .databroker import (DataBroker, DataBroker as db,
get_events, get_table, stream, get_fields,
restream, process)
from .pims_readers import get_images
from .handler_registration import register_builtin_handlers
# register all built-in filestore handlers
register_builtin_handlers()
del register_builtin_handlers
except ImportError:
warnings.warn("The top-level functions (get_table, get_events, etc.)"
"cannot be created because "
"the necessary configuration was not found.")
from .broker import Broker, ArchiverPlugin
# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
REF: Isolate import error to DataBroker.import warnings
import logging
logger = logging.getLogger(__name__)
try:
from .databroker import DataBroker
except ImportError:
warnings.warn("The top-level functions (get_table, get_events, etc.)"
"cannot be created because "
"the necessary configuration was not found.")
else:
from .databroker import (DataBroker, DataBroker as db,
get_events, get_table, stream, get_fields,
restream, process)
from .pims_readers import get_images
from .handler_registration import register_builtin_handlers
# register all built-in filestore handlers
register_builtin_handlers()
del register_builtin_handlers
from .broker import Broker, ArchiverPlugin
# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
<commit_before>import warnings
import logging
logger = logging.getLogger(__name__)
try:
from .databroker import (DataBroker, DataBroker as db,
get_events, get_table, stream, get_fields,
restream, process)
from .pims_readers import get_images
from .handler_registration import register_builtin_handlers
# register all built-in filestore handlers
register_builtin_handlers()
del register_builtin_handlers
except ImportError:
warnings.warn("The top-level functions (get_table, get_events, etc.)"
"cannot be created because "
"the necessary configuration was not found.")
from .broker import Broker, ArchiverPlugin
# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
<commit_msg>REF: Isolate import error to DataBroker.<commit_after>import warnings
import logging
logger = logging.getLogger(__name__)
try:
from .databroker import DataBroker
except ImportError:
warnings.warn("The top-level functions (get_table, get_events, etc.)"
"cannot be created because "
"the necessary configuration was not found.")
else:
from .databroker import (DataBroker, DataBroker as db,
get_events, get_table, stream, get_fields,
restream, process)
from .pims_readers import get_images
from .handler_registration import register_builtin_handlers
# register all built-in filestore handlers
register_builtin_handlers()
del register_builtin_handlers
from .broker import Broker, ArchiverPlugin
# set version string using versioneer
from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
|
31d3202855fc4bb341eaa0bc212d302bd7d0f2f6
|
keystoneclient/auth/token_endpoint.py
|
keystoneclient/auth/token_endpoint.py
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from keystoneclient.auth import base


class Token(base.BaseAuthPlugin):
    """A provider that will always use the given token and endpoint.

    This is really only useful for testing and in certain CLI cases where you
    have a known endpoint and admin token that you want to use.
    """

    def __init__(self, endpoint, token):
        # NOTE(jamielennox): endpoint is reserved for when plugins
        # can be used to provide that information
        self.endpoint = endpoint
        self.token = token

    def get_token(self, session):
        """Return the fixed token; the session is ignored."""
        return self.token

    def get_endpoint(self, session, **kwargs):
        """Return the supplied endpoint.

        Without this method the plugin cannot be used with relative URLs:
        the session has no base URL to resolve a request path against.
        The same endpoint is returned regardless of the parameters passed.
        """
        return self.endpoint
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from keystoneclient.auth import base


class Token(base.BaseAuthPlugin):
    """Auth plugin wrapping a pre-existing token and endpoint.

    Intended for testing, and for CLI workflows where an admin token and a
    known endpoint are already at hand.
    """

    def __init__(self, endpoint, token):
        # NOTE(jamielennox): endpoint is reserved for when plugins
        # can be used to provide that information
        self.endpoint = endpoint
        self.token = token

    def get_token(self, session):
        # The session argument is irrelevant; the token is fixed at
        # construction time.
        return self.token

    def get_endpoint(self, session, **kwargs):
        """Return the endpoint given at construction time.

        Every call yields the same endpoint, whatever arguments are
        supplied.
        """
        return self.endpoint
|
Add endpoint handling to Token/Endpoint auth
|
Add endpoint handling to Token/Endpoint auth
This auth plugin was initially created before get_endpoint was
available. Implement the get_endpoint method so that we can use the
plugin with relative URLs.
Closes-Bug: #1323926
Change-Id: Ic868f509e708ad29faf86ec5ceeab2a9c98a24fc
|
Python
|
apache-2.0
|
jamielennox/keystoneauth,citrix-openstack-build/keystoneauth,sileht/keystoneauth
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient.auth import base
class Token(base.BaseAuthPlugin):
"""A provider that will always use the given token and endpoint.
This is really only useful for testing and in certain CLI cases where you
have a known endpoint and admin token that you want to use.
"""
def __init__(self, endpoint, token):
# NOTE(jamielennox): endpoint is reserved for when plugins
# can be used to provide that information
self.endpoint = endpoint
self.token = token
def get_token(self, session):
return self.token
Add endpoint handling to Token/Endpoint auth
This auth plugin was initially created before get_endpoint was
available. Implement the get_endpoint method so that we can use the
plugin with relative URLs.
Closes-Bug: #1323926
Change-Id: Ic868f509e708ad29faf86ec5ceeab2a9c98a24fc
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient.auth import base
class Token(base.BaseAuthPlugin):
"""A provider that will always use the given token and endpoint.
This is really only useful for testing and in certain CLI cases where you
have a known endpoint and admin token that you want to use.
"""
def __init__(self, endpoint, token):
# NOTE(jamielennox): endpoint is reserved for when plugins
# can be used to provide that information
self.endpoint = endpoint
self.token = token
def get_token(self, session):
return self.token
def get_endpoint(self, session, **kwargs):
"""Return the supplied endpoint.
Using this plugin the same endpoint is returned regardless of the
parameters passed to the plugin.
"""
return self.endpoint
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient.auth import base
class Token(base.BaseAuthPlugin):
"""A provider that will always use the given token and endpoint.
This is really only useful for testing and in certain CLI cases where you
have a known endpoint and admin token that you want to use.
"""
def __init__(self, endpoint, token):
# NOTE(jamielennox): endpoint is reserved for when plugins
# can be used to provide that information
self.endpoint = endpoint
self.token = token
def get_token(self, session):
return self.token
<commit_msg>Add endpoint handling to Token/Endpoint auth
This auth plugin was initially created before get_endpoint was
available. Implement the get_endpoint method so that we can use the
plugin with relative URLs.
Closes-Bug: #1323926
Change-Id: Ic868f509e708ad29faf86ec5ceeab2a9c98a24fc<commit_after>
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient.auth import base
class Token(base.BaseAuthPlugin):
"""A provider that will always use the given token and endpoint.
This is really only useful for testing and in certain CLI cases where you
have a known endpoint and admin token that you want to use.
"""
def __init__(self, endpoint, token):
# NOTE(jamielennox): endpoint is reserved for when plugins
# can be used to provide that information
self.endpoint = endpoint
self.token = token
def get_token(self, session):
return self.token
def get_endpoint(self, session, **kwargs):
"""Return the supplied endpoint.
Using this plugin the same endpoint is returned regardless of the
parameters passed to the plugin.
"""
return self.endpoint
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient.auth import base
class Token(base.BaseAuthPlugin):
"""A provider that will always use the given token and endpoint.
This is really only useful for testing and in certain CLI cases where you
have a known endpoint and admin token that you want to use.
"""
def __init__(self, endpoint, token):
# NOTE(jamielennox): endpoint is reserved for when plugins
# can be used to provide that information
self.endpoint = endpoint
self.token = token
def get_token(self, session):
return self.token
Add endpoint handling to Token/Endpoint auth
This auth plugin was initially created before get_endpoint was
available. Implement the get_endpoint method so that we can use the
plugin with relative URLs.
Closes-Bug: #1323926
Change-Id: Ic868f509e708ad29faf86ec5ceeab2a9c98a24fc# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient.auth import base
class Token(base.BaseAuthPlugin):
"""A provider that will always use the given token and endpoint.
This is really only useful for testing and in certain CLI cases where you
have a known endpoint and admin token that you want to use.
"""
def __init__(self, endpoint, token):
# NOTE(jamielennox): endpoint is reserved for when plugins
# can be used to provide that information
self.endpoint = endpoint
self.token = token
def get_token(self, session):
return self.token
def get_endpoint(self, session, **kwargs):
"""Return the supplied endpoint.
Using this plugin the same endpoint is returned regardless of the
parameters passed to the plugin.
"""
return self.endpoint
|
<commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient.auth import base
class Token(base.BaseAuthPlugin):
"""A provider that will always use the given token and endpoint.
This is really only useful for testing and in certain CLI cases where you
have a known endpoint and admin token that you want to use.
"""
def __init__(self, endpoint, token):
# NOTE(jamielennox): endpoint is reserved for when plugins
# can be used to provide that information
self.endpoint = endpoint
self.token = token
def get_token(self, session):
return self.token
<commit_msg>Add endpoint handling to Token/Endpoint auth
This auth plugin was initially created before get_endpoint was
available. Implement the get_endpoint method so that we can use the
plugin with relative URLs.
Closes-Bug: #1323926
Change-Id: Ic868f509e708ad29faf86ec5ceeab2a9c98a24fc<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from keystoneclient.auth import base
class Token(base.BaseAuthPlugin):
"""A provider that will always use the given token and endpoint.
This is really only useful for testing and in certain CLI cases where you
have a known endpoint and admin token that you want to use.
"""
def __init__(self, endpoint, token):
# NOTE(jamielennox): endpoint is reserved for when plugins
# can be used to provide that information
self.endpoint = endpoint
self.token = token
def get_token(self, session):
return self.token
def get_endpoint(self, session, **kwargs):
"""Return the supplied endpoint.
Using this plugin the same endpoint is returned regardless of the
parameters passed to the plugin.
"""
return self.endpoint
|
1329e2e76fbd144594243c12655b58e424d6edcd
|
stix2/bundle.py
|
stix2/bundle.py
|
"""STIX 2 Bundle object"""
from .base import _STIXBase
from .properties import IDProperty, Property, TypeProperty
class Bundle(_STIXBase):
_type = 'bundle'
_properties = {
'type': TypeProperty(_type),
'id': IDProperty(_type),
'spec_version': Property(fixed="2.0"),
'objects': Property(),
}
def __init__(self, *args, **kwargs):
# Add any positional arguments to the 'objects' kwarg.
if args:
if isinstance(args[0], list):
kwargs['objects'] = args[0] + list(args[1:]) + kwargs.get('objects', [])
else:
kwargs['objects'] = list(args) + kwargs.get('objects', [])
super(Bundle, self).__init__(**kwargs)
|
"""STIX 2 Bundle object"""
from collections import OrderedDict
from .base import _STIXBase
from .properties import IDProperty, Property, TypeProperty
class Bundle(_STIXBase):
_type = 'bundle'
_properties = OrderedDict()
_properties = _properties.update([
('type', TypeProperty(_type)),
('id', IDProperty(_type)),
('spec_version', Property(fixed="2.0")),
('objects', Property()),
])
def __init__(self, *args, **kwargs):
# Add any positional arguments to the 'objects' kwarg.
if args:
if isinstance(args[0], list):
kwargs['objects'] = args[0] + list(args[1:]) + kwargs.get('objects', [])
else:
kwargs['objects'] = list(args) + kwargs.get('objects', [])
super(Bundle, self).__init__(**kwargs)
|
Apply OrderedDict changes to Bundle.
|
Apply OrderedDict changes to Bundle.
|
Python
|
bsd-3-clause
|
oasis-open/cti-python-stix2
|
"""STIX 2 Bundle object"""
from .base import _STIXBase
from .properties import IDProperty, Property, TypeProperty
class Bundle(_STIXBase):
_type = 'bundle'
_properties = {
'type': TypeProperty(_type),
'id': IDProperty(_type),
'spec_version': Property(fixed="2.0"),
'objects': Property(),
}
def __init__(self, *args, **kwargs):
# Add any positional arguments to the 'objects' kwarg.
if args:
if isinstance(args[0], list):
kwargs['objects'] = args[0] + list(args[1:]) + kwargs.get('objects', [])
else:
kwargs['objects'] = list(args) + kwargs.get('objects', [])
super(Bundle, self).__init__(**kwargs)
Apply OrderedDict changes to Bundle.
|
"""STIX 2 Bundle object"""
from collections import OrderedDict
from .base import _STIXBase
from .properties import IDProperty, Property, TypeProperty
class Bundle(_STIXBase):
_type = 'bundle'
_properties = OrderedDict()
_properties = _properties.update([
('type', TypeProperty(_type)),
('id', IDProperty(_type)),
('spec_version', Property(fixed="2.0")),
('objects', Property()),
])
def __init__(self, *args, **kwargs):
# Add any positional arguments to the 'objects' kwarg.
if args:
if isinstance(args[0], list):
kwargs['objects'] = args[0] + list(args[1:]) + kwargs.get('objects', [])
else:
kwargs['objects'] = list(args) + kwargs.get('objects', [])
super(Bundle, self).__init__(**kwargs)
|
<commit_before>"""STIX 2 Bundle object"""
from .base import _STIXBase
from .properties import IDProperty, Property, TypeProperty
class Bundle(_STIXBase):
_type = 'bundle'
_properties = {
'type': TypeProperty(_type),
'id': IDProperty(_type),
'spec_version': Property(fixed="2.0"),
'objects': Property(),
}
def __init__(self, *args, **kwargs):
# Add any positional arguments to the 'objects' kwarg.
if args:
if isinstance(args[0], list):
kwargs['objects'] = args[0] + list(args[1:]) + kwargs.get('objects', [])
else:
kwargs['objects'] = list(args) + kwargs.get('objects', [])
super(Bundle, self).__init__(**kwargs)
<commit_msg>Apply OrderedDict changes to Bundle.<commit_after>
|
"""STIX 2 Bundle object"""
from collections import OrderedDict
from .base import _STIXBase
from .properties import IDProperty, Property, TypeProperty
class Bundle(_STIXBase):
_type = 'bundle'
_properties = OrderedDict()
_properties = _properties.update([
('type', TypeProperty(_type)),
('id', IDProperty(_type)),
('spec_version', Property(fixed="2.0")),
('objects', Property()),
])
def __init__(self, *args, **kwargs):
# Add any positional arguments to the 'objects' kwarg.
if args:
if isinstance(args[0], list):
kwargs['objects'] = args[0] + list(args[1:]) + kwargs.get('objects', [])
else:
kwargs['objects'] = list(args) + kwargs.get('objects', [])
super(Bundle, self).__init__(**kwargs)
|
"""STIX 2 Bundle object"""
from .base import _STIXBase
from .properties import IDProperty, Property, TypeProperty
class Bundle(_STIXBase):
_type = 'bundle'
_properties = {
'type': TypeProperty(_type),
'id': IDProperty(_type),
'spec_version': Property(fixed="2.0"),
'objects': Property(),
}
def __init__(self, *args, **kwargs):
# Add any positional arguments to the 'objects' kwarg.
if args:
if isinstance(args[0], list):
kwargs['objects'] = args[0] + list(args[1:]) + kwargs.get('objects', [])
else:
kwargs['objects'] = list(args) + kwargs.get('objects', [])
super(Bundle, self).__init__(**kwargs)
Apply OrderedDict changes to Bundle."""STIX 2 Bundle object"""
from collections import OrderedDict
from .base import _STIXBase
from .properties import IDProperty, Property, TypeProperty
class Bundle(_STIXBase):
_type = 'bundle'
_properties = OrderedDict()
_properties = _properties.update([
('type', TypeProperty(_type)),
('id', IDProperty(_type)),
('spec_version', Property(fixed="2.0")),
('objects', Property()),
])
def __init__(self, *args, **kwargs):
# Add any positional arguments to the 'objects' kwarg.
if args:
if isinstance(args[0], list):
kwargs['objects'] = args[0] + list(args[1:]) + kwargs.get('objects', [])
else:
kwargs['objects'] = list(args) + kwargs.get('objects', [])
super(Bundle, self).__init__(**kwargs)
|
<commit_before>"""STIX 2 Bundle object"""
from .base import _STIXBase
from .properties import IDProperty, Property, TypeProperty
class Bundle(_STIXBase):
_type = 'bundle'
_properties = {
'type': TypeProperty(_type),
'id': IDProperty(_type),
'spec_version': Property(fixed="2.0"),
'objects': Property(),
}
def __init__(self, *args, **kwargs):
# Add any positional arguments to the 'objects' kwarg.
if args:
if isinstance(args[0], list):
kwargs['objects'] = args[0] + list(args[1:]) + kwargs.get('objects', [])
else:
kwargs['objects'] = list(args) + kwargs.get('objects', [])
super(Bundle, self).__init__(**kwargs)
<commit_msg>Apply OrderedDict changes to Bundle.<commit_after>"""STIX 2 Bundle object"""
from collections import OrderedDict
from .base import _STIXBase
from .properties import IDProperty, Property, TypeProperty
class Bundle(_STIXBase):
_type = 'bundle'
_properties = OrderedDict()
_properties = _properties.update([
('type', TypeProperty(_type)),
('id', IDProperty(_type)),
('spec_version', Property(fixed="2.0")),
('objects', Property()),
])
def __init__(self, *args, **kwargs):
# Add any positional arguments to the 'objects' kwarg.
if args:
if isinstance(args[0], list):
kwargs['objects'] = args[0] + list(args[1:]) + kwargs.get('objects', [])
else:
kwargs['objects'] = list(args) + kwargs.get('objects', [])
super(Bundle, self).__init__(**kwargs)
|
5deeeb3993925850d3deeaa87aad8b6f524c18fd
|
tobolist.py
|
tobolist.py
|
# -*- coding: utf-8 -*-
import csv
from difflib import get_close_matches
with open('./Zip32_10301.csv') as files:
csv_files = csv.reader(files)
csv_files = [' '.join(i) for i in csv_files]
#print csv_files[5]
address = u'高雄市三民區大昌二路'
print address
for i in get_close_matches(address.encode('utf-8'), csv_files):
print i
|
# -*- coding: utf-8 -*-
import csv
from difflib import get_close_matches
from random import choice
with open('./Zip32_10301.csv') as files:
csv_files = csv.reader(files)
csv_files = [' '.join(i) for i in csv_files]
print u'測試:', choice(csv_files)
address = u'高雄市三民區大昌二路307-1號'
print u'查詢:', address
print u'結果:'
for i in get_close_matches(address.encode('utf-8'), csv_files):
print i
|
Add full address for test.
|
Add full address for test.
|
Python
|
mit
|
moskytw/zipcodetw,simonfork/zipcodetw,linpan/zipcodetw,moskytw/zipcodetw,moskytw/zipcodetw,simonfork/zipcodetw,linpan/zipcodetw,linpan/zipcodetw,simonfork/zipcodetw
|
# -*- coding: utf-8 -*-
import csv
from difflib import get_close_matches
with open('./Zip32_10301.csv') as files:
csv_files = csv.reader(files)
csv_files = [' '.join(i) for i in csv_files]
#print csv_files[5]
address = u'高雄市三民區大昌二路'
print address
for i in get_close_matches(address.encode('utf-8'), csv_files):
print i
Add full address for test.
|
# -*- coding: utf-8 -*-
import csv
from difflib import get_close_matches
from random import choice
with open('./Zip32_10301.csv') as files:
csv_files = csv.reader(files)
csv_files = [' '.join(i) for i in csv_files]
print u'測試:', choice(csv_files)
address = u'高雄市三民區大昌二路307-1號'
print u'查詢:', address
print u'結果:'
for i in get_close_matches(address.encode('utf-8'), csv_files):
print i
|
<commit_before># -*- coding: utf-8 -*-
import csv
from difflib import get_close_matches
with open('./Zip32_10301.csv') as files:
csv_files = csv.reader(files)
csv_files = [' '.join(i) for i in csv_files]
#print csv_files[5]
address = u'高雄市三民區大昌二路'
print address
for i in get_close_matches(address.encode('utf-8'), csv_files):
print i
<commit_msg>Add full address for test.<commit_after>
|
# -*- coding: utf-8 -*-
import csv
from difflib import get_close_matches
from random import choice
with open('./Zip32_10301.csv') as files:
csv_files = csv.reader(files)
csv_files = [' '.join(i) for i in csv_files]
print u'測試:', choice(csv_files)
address = u'高雄市三民區大昌二路307-1號'
print u'查詢:', address
print u'結果:'
for i in get_close_matches(address.encode('utf-8'), csv_files):
print i
|
# -*- coding: utf-8 -*-
import csv
from difflib import get_close_matches
with open('./Zip32_10301.csv') as files:
csv_files = csv.reader(files)
csv_files = [' '.join(i) for i in csv_files]
#print csv_files[5]
address = u'高雄市三民區大昌二路'
print address
for i in get_close_matches(address.encode('utf-8'), csv_files):
print i
Add full address for test.# -*- coding: utf-8 -*-
import csv
from difflib import get_close_matches
from random import choice
with open('./Zip32_10301.csv') as files:
csv_files = csv.reader(files)
csv_files = [' '.join(i) for i in csv_files]
print u'測試:', choice(csv_files)
address = u'高雄市三民區大昌二路307-1號'
print u'查詢:', address
print u'結果:'
for i in get_close_matches(address.encode('utf-8'), csv_files):
print i
|
<commit_before># -*- coding: utf-8 -*-
import csv
from difflib import get_close_matches
with open('./Zip32_10301.csv') as files:
csv_files = csv.reader(files)
csv_files = [' '.join(i) for i in csv_files]
#print csv_files[5]
address = u'高雄市三民區大昌二路'
print address
for i in get_close_matches(address.encode('utf-8'), csv_files):
print i
<commit_msg>Add full address for test.<commit_after># -*- coding: utf-8 -*-
import csv
from difflib import get_close_matches
from random import choice
with open('./Zip32_10301.csv') as files:
csv_files = csv.reader(files)
csv_files = [' '.join(i) for i in csv_files]
print u'測試:', choice(csv_files)
address = u'高雄市三民區大昌二路307-1號'
print u'查詢:', address
print u'結果:'
for i in get_close_matches(address.encode('utf-8'), csv_files):
print i
|
5f7fcd4a22171315db56dd2c8ed5689b5a07cceb
|
apps/urls.py
|
apps/urls.py
|
# -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from apps.rss_creator.feeds import AnnouncementsTeilarFeed
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^announcementsteilar/$', AnnouncementsTeilarFeed()),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
|
# -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
|
Remove reference to deprecated module
|
Remove reference to deprecated module
|
Python
|
agpl-3.0
|
LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr,LinuxTeam-teilar/cronos.teilar.gr
|
# -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from apps.rss_creator.feeds import AnnouncementsTeilarFeed
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^announcementsteilar/$', AnnouncementsTeilarFeed()),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
Remove reference to deprecated module
|
# -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
|
<commit_before># -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from apps.rss_creator.feeds import AnnouncementsTeilarFeed
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^announcementsteilar/$', AnnouncementsTeilarFeed()),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Remove reference to deprecated module<commit_after>
|
# -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
|
# -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from apps.rss_creator.feeds import AnnouncementsTeilarFeed
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^announcementsteilar/$', AnnouncementsTeilarFeed()),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
Remove reference to deprecated module# -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
|
<commit_before># -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from apps.rss_creator.feeds import AnnouncementsTeilarFeed
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^announcementsteilar/$', AnnouncementsTeilarFeed()),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
<commit_msg>Remove reference to deprecated module<commit_after># -*- coding: utf-8 -*-
#from apps.announcements.feeds import AnnouncementsFeed
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
#feeds = {
# 'announcements': AnnouncementsFeed,
#}
handler500 = 'apps.login.views.server_error'
urlpatterns = patterns('',
(r'^$', 'apps.accounts.views.index'),
(r'^about/', 'apps.accounts.views.about'),
# (r'^announcements/', 'apps.announcements.views.announcements'),
(r'^dionysos/', 'apps.dionysos.views.dionysos'),
(r'^eclass/', 'apps.eclass.views.eclass'),
# (r'^feeds/(?P<url>.*)/$', 'django.contrib.syndication.views.feed', {'feed_dict': feeds}),
(r'^library/', 'apps.library.views.library'),
(r'^login/', 'apps.login.views.cronos_login'),
(r'^logout/$', 'django.contrib.auth.views.logout', {'next_page': '/login'}),
(r'^preferences/', 'apps.accounts.views.accounts_settings'),
(r'^refrigerators/', 'apps.refrigerators.views.refrigerators'),
(r'^teachers/', 'apps.teachers.views.teachers'),
)
urlpatterns += staticfiles_urlpatterns()
|
8ec1ebb189d9386e4302f86a41d3092eb558d3d4
|
app/helpers/slugify.py
|
app/helpers/slugify.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import re
from unidecode import unidecode
def slugify(text, delim=u'-'):
"""Slugifier that handles Asian UTF-8 characters and generates an ASCII-only slug.
From: http://flask.pocoo.org/snippets/5/
This snippet by Armin Ronacher can be used freely for anything you like. Consider it public domain.
If you expect a lot of Asian characters or want to support them as well you can use the Unidecode package that handles them as well:
https://pypi.python.org/pypi/Unidecode/
"""
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
result = []
for word in _punct_re.split(text.lower()):
result.extend(unidecode(word).split())
return unicode(delim.join(result))
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import re
from unidecode import unidecode
def slugify(text, delim=u'-'):
"""Slugifier that handles Asian UTF-8 characters and generates an ASCII-only slug.
From: http://flask.pocoo.org/snippets/5/
This snippet by Armin Ronacher can be used freely for anything you like. Consider it public domain.
If you expect a lot of Asian characters or want to support them as well you can use the Unidecode package that handles them as well:
https://pypi.python.org/pypi/Unidecode/
"""
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
result = []
for word in _punct_re.split(text.lower()):
result.extend(unidecode(word).split())
return delim.join(result)
|
Remove call to unicode which does not exist on Python 3
|
Remove call to unicode which does not exist on Python 3
|
Python
|
mit
|
peterhil/ninhursag,peterhil/ninhursag,peterhil/ninhursag,peterhil/ninhursag
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import re
from unidecode import unidecode
def slugify(text, delim=u'-'):
"""Slugifier that handles Asian UTF-8 characters and generates an ASCII-only slug.
From: http://flask.pocoo.org/snippets/5/
This snippet by Armin Ronacher can be used freely for anything you like. Consider it public domain.
If you expect a lot of Asian characters or want to support them as well you can use the Unidecode package that handles them as well:
https://pypi.python.org/pypi/Unidecode/
"""
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
result = []
for word in _punct_re.split(text.lower()):
result.extend(unidecode(word).split())
return unicode(delim.join(result))
Remove call to unicode which does not exist on Python 3
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import re
from unidecode import unidecode
def slugify(text, delim=u'-'):
"""Slugifier that handles Asian UTF-8 characters and generates an ASCII-only slug.
From: http://flask.pocoo.org/snippets/5/
This snippet by Armin Ronacher can be used freely for anything you like. Consider it public domain.
If you expect a lot of Asian characters or want to support them as well you can use the Unidecode package that handles them as well:
https://pypi.python.org/pypi/Unidecode/
"""
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
result = []
for word in _punct_re.split(text.lower()):
result.extend(unidecode(word).split())
return delim.join(result)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import re
from unidecode import unidecode
def slugify(text, delim=u'-'):
"""Slugifier that handles Asian UTF-8 characters and generates an ASCII-only slug.
From: http://flask.pocoo.org/snippets/5/
This snippet by Armin Ronacher can be used freely for anything you like. Consider it public domain.
If you expect a lot of Asian characters or want to support them as well you can use the Unidecode package that handles them as well:
https://pypi.python.org/pypi/Unidecode/
"""
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
result = []
for word in _punct_re.split(text.lower()):
result.extend(unidecode(word).split())
return unicode(delim.join(result))
<commit_msg>Remove call to unicode which does not exist on Python 3<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import re
from unidecode import unidecode
def slugify(text, delim=u'-'):
"""Slugifier that handles Asian UTF-8 characters and generates an ASCII-only slug.
From: http://flask.pocoo.org/snippets/5/
This snippet by Armin Ronacher can be used freely for anything you like. Consider it public domain.
If you expect a lot of Asian characters or want to support them as well you can use the Unidecode package that handles them as well:
https://pypi.python.org/pypi/Unidecode/
"""
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
result = []
for word in _punct_re.split(text.lower()):
result.extend(unidecode(word).split())
return delim.join(result)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import re
from unidecode import unidecode
def slugify(text, delim=u'-'):
"""Slugifier that handles Asian UTF-8 characters and generates an ASCII-only slug.
From: http://flask.pocoo.org/snippets/5/
This snippet by Armin Ronacher can be used freely for anything you like. Consider it public domain.
If you expect a lot of Asian characters or want to support them as well you can use the Unidecode package that handles them as well:
https://pypi.python.org/pypi/Unidecode/
"""
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
result = []
for word in _punct_re.split(text.lower()):
result.extend(unidecode(word).split())
return unicode(delim.join(result))
Remove call to unicode which does not exist on Python 3#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import re
from unidecode import unidecode
def slugify(text, delim=u'-'):
"""Slugifier that handles Asian UTF-8 characters and generates an ASCII-only slug.
From: http://flask.pocoo.org/snippets/5/
This snippet by Armin Ronacher can be used freely for anything you like. Consider it public domain.
If you expect a lot of Asian characters or want to support them as well you can use the Unidecode package that handles them as well:
https://pypi.python.org/pypi/Unidecode/
"""
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
result = []
for word in _punct_re.split(text.lower()):
result.extend(unidecode(word).split())
return delim.join(result)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import re
from unidecode import unidecode
def slugify(text, delim=u'-'):
"""Slugifier that handles Asian UTF-8 characters and generates an ASCII-only slug.
From: http://flask.pocoo.org/snippets/5/
This snippet by Armin Ronacher can be used freely for anything you like. Consider it public domain.
If you expect a lot of Asian characters or want to support them as well you can use the Unidecode package that handles them as well:
https://pypi.python.org/pypi/Unidecode/
"""
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
result = []
for word in _punct_re.split(text.lower()):
result.extend(unidecode(word).split())
return unicode(delim.join(result))
<commit_msg>Remove call to unicode which does not exist on Python 3<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import re
from unidecode import unidecode
def slugify(text, delim=u'-'):
"""Slugifier that handles Asian UTF-8 characters and generates an ASCII-only slug.
From: http://flask.pocoo.org/snippets/5/
This snippet by Armin Ronacher can be used freely for anything you like. Consider it public domain.
If you expect a lot of Asian characters or want to support them as well you can use the Unidecode package that handles them as well:
https://pypi.python.org/pypi/Unidecode/
"""
_punct_re = re.compile(r'[\t !"#$%&\'()*\-/<=>?@\[\\\]^_`{|},.]+')
result = []
for word in _punct_re.split(text.lower()):
result.extend(unidecode(word).split())
return delim.join(result)
|
2f5e0e330de33376236ba7eef3a9ae20a0a38986
|
breakpad.py
|
breakpad.py
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Do you want to send a crash report [y/N]? ',
if sys.stdin.read(1).lower() == 'y':
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
#@atexit.register
def CheckForException():
if 'test' in sys.modules['__main__'].__file__:
# Probably a unit test.
return
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(''.join(traceback.format_tb(last_tb)))
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Do you want to send a crash report [y/N]? ',
if sys.stdin.read(1).lower() != 'y':
return
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
|
Add a check so non-google employee don't send crash dumps.
|
Add a check so non-google employee don't send crash dumps.
Add a warning message in case the check ever fail.
Review URL: http://codereview.chromium.org/460044
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@33700 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
|
Python
|
bsd-3-clause
|
svn2github/chromium-depot-tools,svn2github/chromium-depot-tools,svn2github/chromium-depot-tools
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Do you want to send a crash report [y/N]? ',
if sys.stdin.read(1).lower() == 'y':
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
#@atexit.register
def CheckForException():
if 'test' in sys.modules['__main__'].__file__:
# Probably a unit test.
return
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(''.join(traceback.format_tb(last_tb)))
Add a check so non-google employee don't send crash dumps.
Add a warning message in case the check ever fail.
Review URL: http://codereview.chromium.org/460044
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@33700 4ff67af0-8c30-449e-8e8b-ad334ec8d88c
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Do you want to send a crash report [y/N]? ',
if sys.stdin.read(1).lower() != 'y':
return
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
|
<commit_before># Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Do you want to send a crash report [y/N]? ',
if sys.stdin.read(1).lower() == 'y':
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
#@atexit.register
def CheckForException():
if 'test' in sys.modules['__main__'].__file__:
# Probably a unit test.
return
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(''.join(traceback.format_tb(last_tb)))
<commit_msg>Add a check so non-google employee don't send crash dumps.
Add a warning message in case the check ever fail.
Review URL: http://codereview.chromium.org/460044
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@33700 4ff67af0-8c30-449e-8e8b-ad334ec8d88c<commit_after>
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Do you want to send a crash report [y/N]? ',
if sys.stdin.read(1).lower() != 'y':
return
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
|
# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Do you want to send a crash report [y/N]? ',
if sys.stdin.read(1).lower() == 'y':
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
#@atexit.register
def CheckForException():
if 'test' in sys.modules['__main__'].__file__:
# Probably a unit test.
return
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(''.join(traceback.format_tb(last_tb)))
Add a check so non-google employee don't send crash dumps.
Add a warning message in case the check ever fail.
Review URL: http://codereview.chromium.org/460044
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@33700 4ff67af0-8c30-449e-8e8b-ad334ec8d88c# Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Do you want to send a crash report [y/N]? ',
if sys.stdin.read(1).lower() != 'y':
return
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
|
<commit_before># Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Do you want to send a crash report [y/N]? ',
if sys.stdin.read(1).lower() == 'y':
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
#@atexit.register
def CheckForException():
if 'test' in sys.modules['__main__'].__file__:
# Probably a unit test.
return
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(''.join(traceback.format_tb(last_tb)))
<commit_msg>Add a check so non-google employee don't send crash dumps.
Add a warning message in case the check ever fail.
Review URL: http://codereview.chromium.org/460044
git-svn-id: bd64dd6fa6f3f0ed0c0666d1018379882b742947@33700 4ff67af0-8c30-449e-8e8b-ad334ec8d88c<commit_after># Copyright (c) 2009 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Breakpad for Python.
Sends a notification when a process stops on an exception."""
import atexit
import getpass
import urllib
import traceback
import socket
import sys
def SendStack(stack, url='http://chromium-status.appspot.com/breakpad'):
print 'Do you want to send a crash report [y/N]? ',
if sys.stdin.read(1).lower() != 'y':
return
print 'Sending crash report ...'
try:
params = {
'args': sys.argv,
'stack': stack,
'user': getpass.getuser(),
}
request = urllib.urlopen(url, urllib.urlencode(params))
print request.read()
request.close()
except IOError:
print('There was a failure while trying to send the stack trace. Too bad.')
def CheckForException():
last_tb = getattr(sys, 'last_traceback', None)
if last_tb:
SendStack(''.join(traceback.format_tb(last_tb)))
if (not 'test' in sys.modules['__main__'].__file__ and
socket.gethostname().endswith('.google.com')):
# Skip unit tests and we don't want anything from non-googler.
atexit.register(CheckForException)
|
fb69c9b65b5c0f16eb58d530c130e1e9a1201de6
|
tests/multi_memory_mgmt_test.py
|
tests/multi_memory_mgmt_test.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
import unittest
import gc
debug = False
class MultiMemoryMgmtTest(unittest.TestCase):
def test_opensocketfunction_collection(self):
self.check_callback(pycurl.M_SOCKETFUNCTION)
def test_seekfunction_collection(self):
self.check_callback(pycurl.M_TIMERFUNCTION)
def check_callback(self, callback):
# Note: extracting a context manager seems to result in
# everything being garbage collected even if the C code
# does not clear the callback
object_count = 0
gc.collect()
# gc.collect() can create new objects... running it again here
# settles tracked object count for the actual test below
gc.collect()
object_count = len(gc.get_objects())
c = pycurl.CurlMulti()
c.setopt(callback, lambda x: True)
del c
gc.collect()
new_object_count = len(gc.get_objects())
self.assertEqual(new_object_count, object_count)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
import unittest
import gc
debug = False
class MultiMemoryMgmtTest(unittest.TestCase):
def test_opensocketfunction_collection(self):
self.check_callback(pycurl.M_SOCKETFUNCTION)
def test_seekfunction_collection(self):
self.check_callback(pycurl.M_TIMERFUNCTION)
def check_callback(self, callback):
# Note: extracting a context manager seems to result in
# everything being garbage collected even if the C code
# does not clear the callback
object_count = 0
gc.collect()
# gc.collect() can create new objects... running it again here
# settles tracked object count for the actual test below
gc.collect()
object_count = len(gc.get_objects())
c = pycurl.CurlMulti()
c.setopt(callback, lambda x: True)
del c
gc.collect()
new_object_count = len(gc.get_objects())
# it seems that GC sometimes collects something that existed
# before this test ran, GH issues #273/#274
self.assertTrue(new_object_count in (object_count, object_count-1))
|
Apply object_count-1 allowance to multi memory management test
|
Apply object_count-1 allowance to multi memory management test
|
Python
|
lgpl-2.1
|
pycurl/pycurl,pycurl/pycurl,pycurl/pycurl
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
import unittest
import gc
debug = False
class MultiMemoryMgmtTest(unittest.TestCase):
def test_opensocketfunction_collection(self):
self.check_callback(pycurl.M_SOCKETFUNCTION)
def test_seekfunction_collection(self):
self.check_callback(pycurl.M_TIMERFUNCTION)
def check_callback(self, callback):
# Note: extracting a context manager seems to result in
# everything being garbage collected even if the C code
# does not clear the callback
object_count = 0
gc.collect()
# gc.collect() can create new objects... running it again here
# settles tracked object count for the actual test below
gc.collect()
object_count = len(gc.get_objects())
c = pycurl.CurlMulti()
c.setopt(callback, lambda x: True)
del c
gc.collect()
new_object_count = len(gc.get_objects())
self.assertEqual(new_object_count, object_count)
Apply object_count-1 allowance to multi memory management test
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
import unittest
import gc
debug = False
class MultiMemoryMgmtTest(unittest.TestCase):
def test_opensocketfunction_collection(self):
self.check_callback(pycurl.M_SOCKETFUNCTION)
def test_seekfunction_collection(self):
self.check_callback(pycurl.M_TIMERFUNCTION)
def check_callback(self, callback):
# Note: extracting a context manager seems to result in
# everything being garbage collected even if the C code
# does not clear the callback
object_count = 0
gc.collect()
# gc.collect() can create new objects... running it again here
# settles tracked object count for the actual test below
gc.collect()
object_count = len(gc.get_objects())
c = pycurl.CurlMulti()
c.setopt(callback, lambda x: True)
del c
gc.collect()
new_object_count = len(gc.get_objects())
# it seems that GC sometimes collects something that existed
# before this test ran, GH issues #273/#274
self.assertTrue(new_object_count in (object_count, object_count-1))
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
import unittest
import gc
debug = False
class MultiMemoryMgmtTest(unittest.TestCase):
def test_opensocketfunction_collection(self):
self.check_callback(pycurl.M_SOCKETFUNCTION)
def test_seekfunction_collection(self):
self.check_callback(pycurl.M_TIMERFUNCTION)
def check_callback(self, callback):
# Note: extracting a context manager seems to result in
# everything being garbage collected even if the C code
# does not clear the callback
object_count = 0
gc.collect()
# gc.collect() can create new objects... running it again here
# settles tracked object count for the actual test below
gc.collect()
object_count = len(gc.get_objects())
c = pycurl.CurlMulti()
c.setopt(callback, lambda x: True)
del c
gc.collect()
new_object_count = len(gc.get_objects())
self.assertEqual(new_object_count, object_count)
<commit_msg>Apply object_count-1 allowance to multi memory management test<commit_after>
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
import unittest
import gc
debug = False
class MultiMemoryMgmtTest(unittest.TestCase):
def test_opensocketfunction_collection(self):
self.check_callback(pycurl.M_SOCKETFUNCTION)
def test_seekfunction_collection(self):
self.check_callback(pycurl.M_TIMERFUNCTION)
def check_callback(self, callback):
# Note: extracting a context manager seems to result in
# everything being garbage collected even if the C code
# does not clear the callback
object_count = 0
gc.collect()
# gc.collect() can create new objects... running it again here
# settles tracked object count for the actual test below
gc.collect()
object_count = len(gc.get_objects())
c = pycurl.CurlMulti()
c.setopt(callback, lambda x: True)
del c
gc.collect()
new_object_count = len(gc.get_objects())
# it seems that GC sometimes collects something that existed
# before this test ran, GH issues #273/#274
self.assertTrue(new_object_count in (object_count, object_count-1))
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
import unittest
import gc
debug = False
class MultiMemoryMgmtTest(unittest.TestCase):
def test_opensocketfunction_collection(self):
self.check_callback(pycurl.M_SOCKETFUNCTION)
def test_seekfunction_collection(self):
self.check_callback(pycurl.M_TIMERFUNCTION)
def check_callback(self, callback):
# Note: extracting a context manager seems to result in
# everything being garbage collected even if the C code
# does not clear the callback
object_count = 0
gc.collect()
# gc.collect() can create new objects... running it again here
# settles tracked object count for the actual test below
gc.collect()
object_count = len(gc.get_objects())
c = pycurl.CurlMulti()
c.setopt(callback, lambda x: True)
del c
gc.collect()
new_object_count = len(gc.get_objects())
self.assertEqual(new_object_count, object_count)
Apply object_count-1 allowance to multi memory management test#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
import unittest
import gc
debug = False
class MultiMemoryMgmtTest(unittest.TestCase):
def test_opensocketfunction_collection(self):
self.check_callback(pycurl.M_SOCKETFUNCTION)
def test_seekfunction_collection(self):
self.check_callback(pycurl.M_TIMERFUNCTION)
def check_callback(self, callback):
# Note: extracting a context manager seems to result in
# everything being garbage collected even if the C code
# does not clear the callback
object_count = 0
gc.collect()
# gc.collect() can create new objects... running it again here
# settles tracked object count for the actual test below
gc.collect()
object_count = len(gc.get_objects())
c = pycurl.CurlMulti()
c.setopt(callback, lambda x: True)
del c
gc.collect()
new_object_count = len(gc.get_objects())
# it seems that GC sometimes collects something that existed
# before this test ran, GH issues #273/#274
self.assertTrue(new_object_count in (object_count, object_count-1))
|
<commit_before>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
import unittest
import gc
debug = False
class MultiMemoryMgmtTest(unittest.TestCase):
def test_opensocketfunction_collection(self):
self.check_callback(pycurl.M_SOCKETFUNCTION)
def test_seekfunction_collection(self):
self.check_callback(pycurl.M_TIMERFUNCTION)
def check_callback(self, callback):
# Note: extracting a context manager seems to result in
# everything being garbage collected even if the C code
# does not clear the callback
object_count = 0
gc.collect()
# gc.collect() can create new objects... running it again here
# settles tracked object count for the actual test below
gc.collect()
object_count = len(gc.get_objects())
c = pycurl.CurlMulti()
c.setopt(callback, lambda x: True)
del c
gc.collect()
new_object_count = len(gc.get_objects())
self.assertEqual(new_object_count, object_count)
<commit_msg>Apply object_count-1 allowance to multi memory management test<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vi:ts=4:et
import pycurl
import unittest
import gc
debug = False
class MultiMemoryMgmtTest(unittest.TestCase):
def test_opensocketfunction_collection(self):
self.check_callback(pycurl.M_SOCKETFUNCTION)
def test_seekfunction_collection(self):
self.check_callback(pycurl.M_TIMERFUNCTION)
def check_callback(self, callback):
# Note: extracting a context manager seems to result in
# everything being garbage collected even if the C code
# does not clear the callback
object_count = 0
gc.collect()
# gc.collect() can create new objects... running it again here
# settles tracked object count for the actual test below
gc.collect()
object_count = len(gc.get_objects())
c = pycurl.CurlMulti()
c.setopt(callback, lambda x: True)
del c
gc.collect()
new_object_count = len(gc.get_objects())
# it seems that GC sometimes collects something that existed
# before this test ran, GH issues #273/#274
self.assertTrue(new_object_count in (object_count, object_count-1))
|
1d6fb58d0a6c1d1162e10a22ad68b841fdec834d
|
books/models.py
|
books/models.py
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models import fields
from django.utils import timezone
class Transaction(models.Model):
title = fields.CharField(max_length=255)
price = fields.DecimalField(max_digits=10, decimal_places=2)
created = fields.DateTimeField(auto_now=True)
modified = fields.DateTimeField(default=timezone.now)
user = models.ForeignKey(User)
def __str__(self):
return "{}_{}".format(self.title, self.price)
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models import fields
from django.utils import timezone
class Transaction(models.Model):
CATEGORY_CHOICES = (
(0, 'expense'),
(1, 'income'),
)
title = fields.CharField(max_length=255)
amount = fields.DecimalField(max_digits=10, decimal_places=2)
category = fields.CharField(max_length=1, choices=CATEGORY_CHOICES)
created = fields.DateTimeField(auto_now=True)
modified = fields.DateTimeField(default=timezone.now)
user = models.ForeignKey(User)
def __str__(self):
return "{}".format(self.title)
|
Use amount and category fields instead of price
|
Use amount and category fields instead of price
This is more convenient and more user friendly.. I guess.
|
Python
|
mit
|
trimailov/finance,trimailov/finance,trimailov/finance
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models import fields
from django.utils import timezone
class Transaction(models.Model):
title = fields.CharField(max_length=255)
price = fields.DecimalField(max_digits=10, decimal_places=2)
created = fields.DateTimeField(auto_now=True)
modified = fields.DateTimeField(default=timezone.now)
user = models.ForeignKey(User)
def __str__(self):
return "{}_{}".format(self.title, self.price)
Use amount and category fields instead of price
This is more convenient and more user friendly.. I guess.
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models import fields
from django.utils import timezone
class Transaction(models.Model):
CATEGORY_CHOICES = (
(0, 'expense'),
(1, 'income'),
)
title = fields.CharField(max_length=255)
amount = fields.DecimalField(max_digits=10, decimal_places=2)
category = fields.CharField(max_length=1, choices=CATEGORY_CHOICES)
created = fields.DateTimeField(auto_now=True)
modified = fields.DateTimeField(default=timezone.now)
user = models.ForeignKey(User)
def __str__(self):
return "{}".format(self.title)
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
from django.db.models import fields
from django.utils import timezone
class Transaction(models.Model):
title = fields.CharField(max_length=255)
price = fields.DecimalField(max_digits=10, decimal_places=2)
created = fields.DateTimeField(auto_now=True)
modified = fields.DateTimeField(default=timezone.now)
user = models.ForeignKey(User)
def __str__(self):
return "{}_{}".format(self.title, self.price)
<commit_msg>Use amount and category fields instead of price
This is more convenient and more user friendly.. I guess.<commit_after>
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models import fields
from django.utils import timezone
class Transaction(models.Model):
CATEGORY_CHOICES = (
(0, 'expense'),
(1, 'income'),
)
title = fields.CharField(max_length=255)
amount = fields.DecimalField(max_digits=10, decimal_places=2)
category = fields.CharField(max_length=1, choices=CATEGORY_CHOICES)
created = fields.DateTimeField(auto_now=True)
modified = fields.DateTimeField(default=timezone.now)
user = models.ForeignKey(User)
def __str__(self):
return "{}".format(self.title)
|
from django.contrib.auth.models import User
from django.db import models
from django.db.models import fields
from django.utils import timezone
class Transaction(models.Model):
title = fields.CharField(max_length=255)
price = fields.DecimalField(max_digits=10, decimal_places=2)
created = fields.DateTimeField(auto_now=True)
modified = fields.DateTimeField(default=timezone.now)
user = models.ForeignKey(User)
def __str__(self):
return "{}_{}".format(self.title, self.price)
Use amount and category fields instead of price
This is more convenient and more user friendly.. I guess.from django.contrib.auth.models import User
from django.db import models
from django.db.models import fields
from django.utils import timezone
class Transaction(models.Model):
CATEGORY_CHOICES = (
(0, 'expense'),
(1, 'income'),
)
title = fields.CharField(max_length=255)
amount = fields.DecimalField(max_digits=10, decimal_places=2)
category = fields.CharField(max_length=1, choices=CATEGORY_CHOICES)
created = fields.DateTimeField(auto_now=True)
modified = fields.DateTimeField(default=timezone.now)
user = models.ForeignKey(User)
def __str__(self):
return "{}".format(self.title)
|
<commit_before>from django.contrib.auth.models import User
from django.db import models
from django.db.models import fields
from django.utils import timezone
class Transaction(models.Model):
title = fields.CharField(max_length=255)
price = fields.DecimalField(max_digits=10, decimal_places=2)
created = fields.DateTimeField(auto_now=True)
modified = fields.DateTimeField(default=timezone.now)
user = models.ForeignKey(User)
def __str__(self):
return "{}_{}".format(self.title, self.price)
<commit_msg>Use amount and category fields instead of price
This is more convenient and more user friendly.. I guess.<commit_after>from django.contrib.auth.models import User
from django.db import models
from django.db.models import fields
from django.utils import timezone
class Transaction(models.Model):
CATEGORY_CHOICES = (
(0, 'expense'),
(1, 'income'),
)
title = fields.CharField(max_length=255)
amount = fields.DecimalField(max_digits=10, decimal_places=2)
category = fields.CharField(max_length=1, choices=CATEGORY_CHOICES)
created = fields.DateTimeField(auto_now=True)
modified = fields.DateTimeField(default=timezone.now)
user = models.ForeignKey(User)
def __str__(self):
return "{}".format(self.title)
|
c34ebf0da60e1c4542fd69870202f49fe085dd67
|
BigStash/decorators.py
|
BigStash/decorators.py
|
from requests.exceptions import RequestException
from .error import BigStashError
from wrapt import decorator
@decorator
def json_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
return r.json()
except RequestException:
raise BigStashError
except ValueError:
raise BigStashError
|
from requests.exceptions import RequestException
from .error import BigStashError
from wrapt import decorator
@decorator
def json_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
return r.json()
except RequestException as e:
raise BigStashError(e)
except ValueError as e:
raise BigStashError(e)
@decorator
def no_content_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
except RequestException as e:
raise BigStashError(e)
|
Add decorator for empty responses and show error messages
|
Add decorator for empty responses and show error messages
- Add decorator for responses with no content, like delete requests.
- Show original error messages for now
|
Python
|
apache-2.0
|
longaccess/bigstash-python,longaccess/bigstash-python
|
from requests.exceptions import RequestException
from .error import BigStashError
from wrapt import decorator
@decorator
def json_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
return r.json()
except RequestException:
raise BigStashError
except ValueError:
raise BigStashError
Add decorator for empty responses and show error messages
- Add decorator for responses with no content, like delete requests.
- Show original error messages for now
|
from requests.exceptions import RequestException
from .error import BigStashError
from wrapt import decorator
@decorator
def json_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
return r.json()
except RequestException as e:
raise BigStashError(e)
except ValueError as e:
raise BigStashError(e)
@decorator
def no_content_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
except RequestException as e:
raise BigStashError(e)
|
<commit_before>from requests.exceptions import RequestException
from .error import BigStashError
from wrapt import decorator
@decorator
def json_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
return r.json()
except RequestException:
raise BigStashError
except ValueError:
raise BigStashError
<commit_msg>Add decorator for empty responses and show error messages
- Add decorator for responses with no content, like delete requests.
- Show original error messages for now<commit_after>
|
from requests.exceptions import RequestException
from .error import BigStashError
from wrapt import decorator
@decorator
def json_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
return r.json()
except RequestException as e:
raise BigStashError(e)
except ValueError as e:
raise BigStashError(e)
@decorator
def no_content_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
except RequestException as e:
raise BigStashError(e)
|
from requests.exceptions import RequestException
from .error import BigStashError
from wrapt import decorator
@decorator
def json_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
return r.json()
except RequestException:
raise BigStashError
except ValueError:
raise BigStashError
Add decorator for empty responses and show error messages
- Add decorator for responses with no content, like delete requests.
- Show original error messages for nowfrom requests.exceptions import RequestException
from .error import BigStashError
from wrapt import decorator
@decorator
def json_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
return r.json()
except RequestException as e:
raise BigStashError(e)
except ValueError as e:
raise BigStashError(e)
@decorator
def no_content_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
except RequestException as e:
raise BigStashError(e)
|
<commit_before>from requests.exceptions import RequestException
from .error import BigStashError
from wrapt import decorator
@decorator
def json_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
return r.json()
except RequestException:
raise BigStashError
except ValueError:
raise BigStashError
<commit_msg>Add decorator for empty responses and show error messages
- Add decorator for responses with no content, like delete requests.
- Show original error messages for now<commit_after>from requests.exceptions import RequestException
from .error import BigStashError
from wrapt import decorator
@decorator
def json_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
return r.json()
except RequestException as e:
raise BigStashError(e)
except ValueError as e:
raise BigStashError(e)
@decorator
def no_content_response(wrapped, instance, args, kwargs):
try:
r = wrapped(*args, **kwargs)
r.raise_for_status()
except RequestException as e:
raise BigStashError(e)
|
0f753f67c48b02b4ee7fdb67a416a5cc86f66e0b
|
LennardJones.py
|
LennardJones.py
|
from fluid import LJContainer
NUM_PARTICLES = 108
TIME_STEP = 0.001
class LennardJones:
def __init__(self, density, temperature):
#Initialize the container
container = LJContainer()
#Equilibriate the system
#Start measuring
while self.t < run_length:
#Calculate the forces
#Integrate equations of motion
t += TIME_STEP
#Sample averages
#Generate a plot of the energies (kinetic, potential, total)
if __name__ == "__main__":
LennardJones()
|
#!/usr/bin/env python
from fluid import LJContainer
PARTICLES = 108.0
TEMPERATURE = 2.0
DENSITY = 1.0
TIME_STEP = 0.001
STEPS = 2000
class LennardJones:
_t = 0
def __init__(self):
#Initialize the container
container = LJContainer(PARTICLES, DENSITY, TEMPERATURE)
#Equilibriate the system
#Start measuring
while self._t < STEPS:
#Calculate the forces
#Integrate equations of motion
self._t += TIME_STEP
#Sample averages
#Generate a plot of the energies (kinetic, potential, total)
if __name__ == "__main__":
LennardJones()
|
Modify simulation to use global parameters
|
Modify simulation to use global parameters
|
Python
|
mit
|
hkaju/LennardJones,hkaju/LennardJones,hkaju/LennardJones
|
from fluid import LJContainer
NUM_PARTICLES = 108
TIME_STEP = 0.001
class LennardJones:
def __init__(self, density, temperature):
#Initialize the container
container = LJContainer()
#Equilibriate the system
#Start measuring
while self.t < run_length:
#Calculate the forces
#Integrate equations of motion
t += TIME_STEP
#Sample averages
#Generate a plot of the energies (kinetic, potential, total)
if __name__ == "__main__":
LennardJones()
Modify simulation to use global parameters
|
#!/usr/bin/env python
from fluid import LJContainer
PARTICLES = 108.0
TEMPERATURE = 2.0
DENSITY = 1.0
TIME_STEP = 0.001
STEPS = 2000
class LennardJones:
_t = 0
def __init__(self):
#Initialize the container
container = LJContainer(PARTICLES, DENSITY, TEMPERATURE)
#Equilibriate the system
#Start measuring
while self._t < STEPS:
#Calculate the forces
#Integrate equations of motion
self._t += TIME_STEP
#Sample averages
#Generate a plot of the energies (kinetic, potential, total)
if __name__ == "__main__":
LennardJones()
|
<commit_before>from fluid import LJContainer
NUM_PARTICLES = 108
TIME_STEP = 0.001
class LennardJones:
def __init__(self, density, temperature):
#Initialize the container
container = LJContainer()
#Equilibriate the system
#Start measuring
while self.t < run_length:
#Calculate the forces
#Integrate equations of motion
t += TIME_STEP
#Sample averages
#Generate a plot of the energies (kinetic, potential, total)
if __name__ == "__main__":
LennardJones()
<commit_msg>Modify simulation to use global parameters<commit_after>
|
#!/usr/bin/env python
from fluid import LJContainer
PARTICLES = 108.0
TEMPERATURE = 2.0
DENSITY = 1.0
TIME_STEP = 0.001
STEPS = 2000
class LennardJones:
_t = 0
def __init__(self):
#Initialize the container
container = LJContainer(PARTICLES, DENSITY, TEMPERATURE)
#Equilibriate the system
#Start measuring
while self._t < STEPS:
#Calculate the forces
#Integrate equations of motion
self._t += TIME_STEP
#Sample averages
#Generate a plot of the energies (kinetic, potential, total)
if __name__ == "__main__":
LennardJones()
|
from fluid import LJContainer
NUM_PARTICLES = 108
TIME_STEP = 0.001
class LennardJones:
def __init__(self, density, temperature):
#Initialize the container
container = LJContainer()
#Equilibriate the system
#Start measuring
while self.t < run_length:
#Calculate the forces
#Integrate equations of motion
t += TIME_STEP
#Sample averages
#Generate a plot of the energies (kinetic, potential, total)
if __name__ == "__main__":
LennardJones()
Modify simulation to use global parameters#!/usr/bin/env python
from fluid import LJContainer
PARTICLES = 108.0
TEMPERATURE = 2.0
DENSITY = 1.0
TIME_STEP = 0.001
STEPS = 2000
class LennardJones:
_t = 0
def __init__(self):
#Initialize the container
container = LJContainer(PARTICLES, DENSITY, TEMPERATURE)
#Equilibriate the system
#Start measuring
while self._t < STEPS:
#Calculate the forces
#Integrate equations of motion
self._t += TIME_STEP
#Sample averages
#Generate a plot of the energies (kinetic, potential, total)
if __name__ == "__main__":
LennardJones()
|
<commit_before>from fluid import LJContainer
NUM_PARTICLES = 108
TIME_STEP = 0.001
class LennardJones:
def __init__(self, density, temperature):
#Initialize the container
container = LJContainer()
#Equilibriate the system
#Start measuring
while self.t < run_length:
#Calculate the forces
#Integrate equations of motion
t += TIME_STEP
#Sample averages
#Generate a plot of the energies (kinetic, potential, total)
if __name__ == "__main__":
LennardJones()
<commit_msg>Modify simulation to use global parameters<commit_after>#!/usr/bin/env python
from fluid import LJContainer
PARTICLES = 108.0
TEMPERATURE = 2.0
DENSITY = 1.0
TIME_STEP = 0.001
STEPS = 2000
class LennardJones:
_t = 0
def __init__(self):
#Initialize the container
container = LJContainer(PARTICLES, DENSITY, TEMPERATURE)
#Equilibriate the system
#Start measuring
while self._t < STEPS:
#Calculate the forces
#Integrate equations of motion
self._t += TIME_STEP
#Sample averages
#Generate a plot of the energies (kinetic, potential, total)
if __name__ == "__main__":
LennardJones()
|
3211e90bf13abb423d23b33a4a9802907e992f4e
|
eduid_signup/sna_callbacks.py
|
eduid_signup/sna_callbacks.py
|
import datetime
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
def google_callback(request, user_id, attributes):
"""pyramid_sna calls this function aftera successfull authentication flow"""
# Create or update the user
user = request.db.users.find_one({'google_id': user_id})
if user is None: # first time
user_id = request.db.registered.insert({
"email": attributes["email"],
"date": datetime.datetime.utcnow(),
"verified": True,
"screen_name": attributes["screen_name"],
"first_name": attributes["first_name"],
"last_name": attributes["last_name"],
}, safe=True)
else:
user_id = user['_id']
# Create an authenticated session and send the user to the
# success screeen
remember_headers = remember(request, str(user_id))
return HTTPFound(request.route_url('success'), headers=remember_headers)
def facebook_callback(request, user_id, attributes):
pass
|
import datetime
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
def create_or_update(request, provider, provider_user_id, attributes):
provider_key = '%s_id' % provider
# Create or update the user
user = request.db.users.find_one({provider_key: provider_user_id})
if user is None: # first time
user_id = request.db.registered.insert({
provider_key: provider_user_id,
"email": attributes["email"],
"date": datetime.datetime.utcnow(),
"verified": True,
"screen_name": attributes["screen_name"],
"first_name": attributes["first_name"],
"last_name": attributes["last_name"],
}, safe=True)
else:
user_id = user['_id']
# Create an authenticated session and send the user to the
# success screeen
remember_headers = remember(request, str(user_id))
return HTTPFound(request.route_url('success'), headers=remember_headers)
def google_callback(request, user_id, attributes):
return create_or_update(request, 'google', user_id, attributes)
def facebook_callback(request, user_id, attributes):
return create_or_update(request, 'facebook', user_id, attributes)
|
Refactor the Google sna callback since the Facebook one is almost the same
|
Refactor the Google sna callback since the Facebook one is almost the same
|
Python
|
bsd-3-clause
|
SUNET/eduid-signup,SUNET/eduid-signup,SUNET/eduid-signup
|
import datetime
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
def google_callback(request, user_id, attributes):
"""pyramid_sna calls this function aftera successfull authentication flow"""
# Create or update the user
user = request.db.users.find_one({'google_id': user_id})
if user is None: # first time
user_id = request.db.registered.insert({
"email": attributes["email"],
"date": datetime.datetime.utcnow(),
"verified": True,
"screen_name": attributes["screen_name"],
"first_name": attributes["first_name"],
"last_name": attributes["last_name"],
}, safe=True)
else:
user_id = user['_id']
# Create an authenticated session and send the user to the
# success screeen
remember_headers = remember(request, str(user_id))
return HTTPFound(request.route_url('success'), headers=remember_headers)
def facebook_callback(request, user_id, attributes):
pass
Refactor the Google sna callback since the Facebook one is almost the same
|
import datetime
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
def create_or_update(request, provider, provider_user_id, attributes):
provider_key = '%s_id' % provider
# Create or update the user
user = request.db.users.find_one({provider_key: provider_user_id})
if user is None: # first time
user_id = request.db.registered.insert({
provider_key: provider_user_id,
"email": attributes["email"],
"date": datetime.datetime.utcnow(),
"verified": True,
"screen_name": attributes["screen_name"],
"first_name": attributes["first_name"],
"last_name": attributes["last_name"],
}, safe=True)
else:
user_id = user['_id']
# Create an authenticated session and send the user to the
# success screeen
remember_headers = remember(request, str(user_id))
return HTTPFound(request.route_url('success'), headers=remember_headers)
def google_callback(request, user_id, attributes):
return create_or_update(request, 'google', user_id, attributes)
def facebook_callback(request, user_id, attributes):
return create_or_update(request, 'facebook', user_id, attributes)
|
<commit_before>import datetime
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
def google_callback(request, user_id, attributes):
"""pyramid_sna calls this function aftera successfull authentication flow"""
# Create or update the user
user = request.db.users.find_one({'google_id': user_id})
if user is None: # first time
user_id = request.db.registered.insert({
"email": attributes["email"],
"date": datetime.datetime.utcnow(),
"verified": True,
"screen_name": attributes["screen_name"],
"first_name": attributes["first_name"],
"last_name": attributes["last_name"],
}, safe=True)
else:
user_id = user['_id']
# Create an authenticated session and send the user to the
# success screeen
remember_headers = remember(request, str(user_id))
return HTTPFound(request.route_url('success'), headers=remember_headers)
def facebook_callback(request, user_id, attributes):
pass
<commit_msg>Refactor the Google sna callback since the Facebook one is almost the same<commit_after>
|
import datetime
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
def create_or_update(request, provider, provider_user_id, attributes):
provider_key = '%s_id' % provider
# Create or update the user
user = request.db.users.find_one({provider_key: provider_user_id})
if user is None: # first time
user_id = request.db.registered.insert({
provider_key: provider_user_id,
"email": attributes["email"],
"date": datetime.datetime.utcnow(),
"verified": True,
"screen_name": attributes["screen_name"],
"first_name": attributes["first_name"],
"last_name": attributes["last_name"],
}, safe=True)
else:
user_id = user['_id']
# Create an authenticated session and send the user to the
# success screeen
remember_headers = remember(request, str(user_id))
return HTTPFound(request.route_url('success'), headers=remember_headers)
def google_callback(request, user_id, attributes):
return create_or_update(request, 'google', user_id, attributes)
def facebook_callback(request, user_id, attributes):
return create_or_update(request, 'facebook', user_id, attributes)
|
import datetime
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
def google_callback(request, user_id, attributes):
"""pyramid_sna calls this function aftera successfull authentication flow"""
# Create or update the user
user = request.db.users.find_one({'google_id': user_id})
if user is None: # first time
user_id = request.db.registered.insert({
"email": attributes["email"],
"date": datetime.datetime.utcnow(),
"verified": True,
"screen_name": attributes["screen_name"],
"first_name": attributes["first_name"],
"last_name": attributes["last_name"],
}, safe=True)
else:
user_id = user['_id']
# Create an authenticated session and send the user to the
# success screeen
remember_headers = remember(request, str(user_id))
return HTTPFound(request.route_url('success'), headers=remember_headers)
def facebook_callback(request, user_id, attributes):
pass
Refactor the Google sna callback since the Facebook one is almost the sameimport datetime
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
def create_or_update(request, provider, provider_user_id, attributes):
provider_key = '%s_id' % provider
# Create or update the user
user = request.db.users.find_one({provider_key: provider_user_id})
if user is None: # first time
user_id = request.db.registered.insert({
provider_key: provider_user_id,
"email": attributes["email"],
"date": datetime.datetime.utcnow(),
"verified": True,
"screen_name": attributes["screen_name"],
"first_name": attributes["first_name"],
"last_name": attributes["last_name"],
}, safe=True)
else:
user_id = user['_id']
# Create an authenticated session and send the user to the
# success screeen
remember_headers = remember(request, str(user_id))
return HTTPFound(request.route_url('success'), headers=remember_headers)
def google_callback(request, user_id, attributes):
return create_or_update(request, 'google', user_id, attributes)
def facebook_callback(request, user_id, attributes):
return create_or_update(request, 'facebook', user_id, attributes)
|
<commit_before>import datetime
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
def google_callback(request, user_id, attributes):
"""pyramid_sna calls this function aftera successfull authentication flow"""
# Create or update the user
user = request.db.users.find_one({'google_id': user_id})
if user is None: # first time
user_id = request.db.registered.insert({
"email": attributes["email"],
"date": datetime.datetime.utcnow(),
"verified": True,
"screen_name": attributes["screen_name"],
"first_name": attributes["first_name"],
"last_name": attributes["last_name"],
}, safe=True)
else:
user_id = user['_id']
# Create an authenticated session and send the user to the
# success screeen
remember_headers = remember(request, str(user_id))
return HTTPFound(request.route_url('success'), headers=remember_headers)
def facebook_callback(request, user_id, attributes):
pass
<commit_msg>Refactor the Google sna callback since the Facebook one is almost the same<commit_after>import datetime
from pyramid.httpexceptions import HTTPFound
from pyramid.security import remember
def create_or_update(request, provider, provider_user_id, attributes):
provider_key = '%s_id' % provider
# Create or update the user
user = request.db.users.find_one({provider_key: provider_user_id})
if user is None: # first time
user_id = request.db.registered.insert({
provider_key: provider_user_id,
"email": attributes["email"],
"date": datetime.datetime.utcnow(),
"verified": True,
"screen_name": attributes["screen_name"],
"first_name": attributes["first_name"],
"last_name": attributes["last_name"],
}, safe=True)
else:
user_id = user['_id']
# Create an authenticated session and send the user to the
# success screeen
remember_headers = remember(request, str(user_id))
return HTTPFound(request.route_url('success'), headers=remember_headers)
def google_callback(request, user_id, attributes):
return create_or_update(request, 'google', user_id, attributes)
def facebook_callback(request, user_id, attributes):
return create_or_update(request, 'facebook', user_id, attributes)
|
96ac90788adac986531aa854357a6c77b0f171d4
|
tmlib/errors.py
|
tmlib/errors.py
|
class NotSupportedError(Exception):
'''
Error class that is raised when a feature is not supported by the program.
'''
class MetadataError(Exception):
'''
Error class that is raised when a metadata element cannot be retrieved.
'''
class SubmissionError(Exception):
'''
Error class that is raised when submitted jobs failed.
'''
class CliArgError(Exception):
'''
Error class that is raised when the value of an command line argument is
invalid.
'''
class RegexError(Exception):
'''
Error class that is raised when a regular expression pattern didn't match.
'''
class StitchError(Exception):
'''
Error class that is raised when an error occurs upon stitching of
images for the generation of a mosaic.
'''
class PipelineError(Exception):
'''
Base class for jterator pipeline errors.
'''
class PipelineRunError(PipelineError):
'''
Error class that is raised when an error occurs upon running a jterator
pipeline.
'''
class PipelineDescriptionError(PipelineError):
'''
Error class that is raised when information in pipeline description is
missing or incorrect.
'''
class PipelineOSError(PipelineError):
'''
Error class that is raised when pipeline related files do not exist
on disk.
'''
class WorkflowError(Exception):
'''
Base class for workflow errors.
'''
class WorkflowNextStepError(WorkflowError):
'''
Error class that is raised when requirements for progressing to the next
step are not fulfilled.
'''
|
class NotSupportedError(Exception):
'''
Error class that is raised when a feature is not supported by the program.
'''
class MetadataError(Exception):
'''
Error class that is raised when a metadata element cannot be retrieved.
'''
class SubmissionError(Exception):
'''
Error class that is raised when submitted jobs failed.
'''
class CliArgError(Exception):
'''
Error class that is raised when the value of an command line argument is
invalid.
'''
class RegexError(Exception):
'''
Error class that is raised when a regular expression pattern didn't match.
'''
class StitchError(Exception):
'''
Error class that is raised when an error occurs upon stitching of
images for the generation of a mosaic.
'''
class PipelineError(Exception):
'''
Base class for jterator pipeline errors.
'''
class PipelineRunError(PipelineError):
'''
Error class that is raised when an error occurs upon running a jterator
pipeline.
'''
class PipelineDescriptionError(PipelineError):
'''
Error class that is raised when information in pipeline description is
missing or incorrect.
'''
class PipelineOSError(PipelineError):
'''
Error class that is raised when pipeline related files do not exist
on disk.
'''
class WorkflowError(Exception):
'''
Base class for workflow errors.
'''
class WorkflowArgsError(WorkflowError):
'''
Error class that is raised when arguments of a workflow step are
not correctly specified.
'''
class WorkflowNextStepError(WorkflowError):
'''
Error class that is raised when requirements for progressing to the next
step are not fulfilled.
'''
|
Add workflow specific error classes
|
Add workflow specific error classes
|
Python
|
agpl-3.0
|
TissueMAPS/TmLibrary,TissueMAPS/TmLibrary,TissueMAPS/TmLibrary,TissueMAPS/TmLibrary,TissueMAPS/TmLibrary
|
class NotSupportedError(Exception):
'''
Error class that is raised when a feature is not supported by the program.
'''
class MetadataError(Exception):
'''
Error class that is raised when a metadata element cannot be retrieved.
'''
class SubmissionError(Exception):
'''
Error class that is raised when submitted jobs failed.
'''
class CliArgError(Exception):
'''
Error class that is raised when the value of an command line argument is
invalid.
'''
class RegexError(Exception):
'''
Error class that is raised when a regular expression pattern didn't match.
'''
class StitchError(Exception):
'''
Error class that is raised when an error occurs upon stitching of
images for the generation of a mosaic.
'''
class PipelineError(Exception):
'''
Base class for jterator pipeline errors.
'''
class PipelineRunError(PipelineError):
'''
Error class that is raised when an error occurs upon running a jterator
pipeline.
'''
class PipelineDescriptionError(PipelineError):
'''
Error class that is raised when information in pipeline description is
missing or incorrect.
'''
class PipelineOSError(PipelineError):
'''
Error class that is raised when pipeline related files do not exist
on disk.
'''
class WorkflowError(Exception):
'''
Base class for workflow errors.
'''
class WorkflowNextStepError(WorkflowError):
'''
Error class that is raised when requirements for progressing to the next
step are not fulfilled.
'''
Add workflow specific error classes
|
class NotSupportedError(Exception):
'''
Error class that is raised when a feature is not supported by the program.
'''
class MetadataError(Exception):
'''
Error class that is raised when a metadata element cannot be retrieved.
'''
class SubmissionError(Exception):
'''
Error class that is raised when submitted jobs failed.
'''
class CliArgError(Exception):
'''
Error class that is raised when the value of an command line argument is
invalid.
'''
class RegexError(Exception):
'''
Error class that is raised when a regular expression pattern didn't match.
'''
class StitchError(Exception):
'''
Error class that is raised when an error occurs upon stitching of
images for the generation of a mosaic.
'''
class PipelineError(Exception):
'''
Base class for jterator pipeline errors.
'''
class PipelineRunError(PipelineError):
'''
Error class that is raised when an error occurs upon running a jterator
pipeline.
'''
class PipelineDescriptionError(PipelineError):
'''
Error class that is raised when information in pipeline description is
missing or incorrect.
'''
class PipelineOSError(PipelineError):
'''
Error class that is raised when pipeline related files do not exist
on disk.
'''
class WorkflowError(Exception):
'''
Base class for workflow errors.
'''
class WorkflowArgsError(WorkflowError):
'''
Error class that is raised when arguments of a workflow step are
not correctly specified.
'''
class WorkflowNextStepError(WorkflowError):
'''
Error class that is raised when requirements for progressing to the next
step are not fulfilled.
'''
|
<commit_before>class NotSupportedError(Exception):
'''
Error class that is raised when a feature is not supported by the program.
'''
class MetadataError(Exception):
'''
Error class that is raised when a metadata element cannot be retrieved.
'''
class SubmissionError(Exception):
'''
Error class that is raised when submitted jobs failed.
'''
class CliArgError(Exception):
'''
Error class that is raised when the value of an command line argument is
invalid.
'''
class RegexError(Exception):
'''
Error class that is raised when a regular expression pattern didn't match.
'''
class StitchError(Exception):
'''
Error class that is raised when an error occurs upon stitching of
images for the generation of a mosaic.
'''
class PipelineError(Exception):
'''
Base class for jterator pipeline errors.
'''
class PipelineRunError(PipelineError):
'''
Error class that is raised when an error occurs upon running a jterator
pipeline.
'''
class PipelineDescriptionError(PipelineError):
'''
Error class that is raised when information in pipeline description is
missing or incorrect.
'''
class PipelineOSError(PipelineError):
'''
Error class that is raised when pipeline related files do not exist
on disk.
'''
class WorkflowError(Exception):
'''
Base class for workflow errors.
'''
class WorkflowNextStepError(WorkflowError):
'''
Error class that is raised when requirements for progressing to the next
step are not fulfilled.
'''
<commit_msg>Add workflow specific error classes<commit_after>
|
class NotSupportedError(Exception):
'''
Error class that is raised when a feature is not supported by the program.
'''
class MetadataError(Exception):
'''
Error class that is raised when a metadata element cannot be retrieved.
'''
class SubmissionError(Exception):
'''
Error class that is raised when submitted jobs failed.
'''
class CliArgError(Exception):
'''
Error class that is raised when the value of an command line argument is
invalid.
'''
class RegexError(Exception):
'''
Error class that is raised when a regular expression pattern didn't match.
'''
class StitchError(Exception):
'''
Error class that is raised when an error occurs upon stitching of
images for the generation of a mosaic.
'''
class PipelineError(Exception):
'''
Base class for jterator pipeline errors.
'''
class PipelineRunError(PipelineError):
'''
Error class that is raised when an error occurs upon running a jterator
pipeline.
'''
class PipelineDescriptionError(PipelineError):
'''
Error class that is raised when information in pipeline description is
missing or incorrect.
'''
class PipelineOSError(PipelineError):
'''
Error class that is raised when pipeline related files do not exist
on disk.
'''
class WorkflowError(Exception):
'''
Base class for workflow errors.
'''
class WorkflowArgsError(WorkflowError):
'''
Error class that is raised when arguments of a workflow step are
not correctly specified.
'''
class WorkflowNextStepError(WorkflowError):
'''
Error class that is raised when requirements for progressing to the next
step are not fulfilled.
'''
|
class NotSupportedError(Exception):
'''
Error class that is raised when a feature is not supported by the program.
'''
class MetadataError(Exception):
'''
Error class that is raised when a metadata element cannot be retrieved.
'''
class SubmissionError(Exception):
'''
Error class that is raised when submitted jobs failed.
'''
class CliArgError(Exception):
'''
Error class that is raised when the value of an command line argument is
invalid.
'''
class RegexError(Exception):
'''
Error class that is raised when a regular expression pattern didn't match.
'''
class StitchError(Exception):
'''
Error class that is raised when an error occurs upon stitching of
images for the generation of a mosaic.
'''
class PipelineError(Exception):
'''
Base class for jterator pipeline errors.
'''
class PipelineRunError(PipelineError):
'''
Error class that is raised when an error occurs upon running a jterator
pipeline.
'''
class PipelineDescriptionError(PipelineError):
'''
Error class that is raised when information in pipeline description is
missing or incorrect.
'''
class PipelineOSError(PipelineError):
'''
Error class that is raised when pipeline related files do not exist
on disk.
'''
class WorkflowError(Exception):
'''
Base class for workflow errors.
'''
class WorkflowNextStepError(WorkflowError):
'''
Error class that is raised when requirements for progressing to the next
step are not fulfilled.
'''
Add workflow specific error classesclass NotSupportedError(Exception):
'''
Error class that is raised when a feature is not supported by the program.
'''
class MetadataError(Exception):
'''
Error class that is raised when a metadata element cannot be retrieved.
'''
class SubmissionError(Exception):
'''
Error class that is raised when submitted jobs failed.
'''
class CliArgError(Exception):
'''
Error class that is raised when the value of an command line argument is
invalid.
'''
class RegexError(Exception):
'''
Error class that is raised when a regular expression pattern didn't match.
'''
class StitchError(Exception):
'''
Error class that is raised when an error occurs upon stitching of
images for the generation of a mosaic.
'''
class PipelineError(Exception):
'''
Base class for jterator pipeline errors.
'''
class PipelineRunError(PipelineError):
'''
Error class that is raised when an error occurs upon running a jterator
pipeline.
'''
class PipelineDescriptionError(PipelineError):
'''
Error class that is raised when information in pipeline description is
missing or incorrect.
'''
class PipelineOSError(PipelineError):
'''
Error class that is raised when pipeline related files do not exist
on disk.
'''
class WorkflowError(Exception):
'''
Base class for workflow errors.
'''
class WorkflowArgsError(WorkflowError):
'''
Error class that is raised when arguments of a workflow step are
not correctly specified.
'''
class WorkflowNextStepError(WorkflowError):
'''
Error class that is raised when requirements for progressing to the next
step are not fulfilled.
'''
|
<commit_before>class NotSupportedError(Exception):
'''
Error class that is raised when a feature is not supported by the program.
'''
class MetadataError(Exception):
'''
Error class that is raised when a metadata element cannot be retrieved.
'''
class SubmissionError(Exception):
'''
Error class that is raised when submitted jobs failed.
'''
class CliArgError(Exception):
'''
Error class that is raised when the value of an command line argument is
invalid.
'''
class RegexError(Exception):
'''
Error class that is raised when a regular expression pattern didn't match.
'''
class StitchError(Exception):
'''
Error class that is raised when an error occurs upon stitching of
images for the generation of a mosaic.
'''
class PipelineError(Exception):
'''
Base class for jterator pipeline errors.
'''
class PipelineRunError(PipelineError):
'''
Error class that is raised when an error occurs upon running a jterator
pipeline.
'''
class PipelineDescriptionError(PipelineError):
'''
Error class that is raised when information in pipeline description is
missing or incorrect.
'''
class PipelineOSError(PipelineError):
'''
Error class that is raised when pipeline related files do not exist
on disk.
'''
class WorkflowError(Exception):
'''
Base class for workflow errors.
'''
class WorkflowNextStepError(WorkflowError):
'''
Error class that is raised when requirements for progressing to the next
step are not fulfilled.
'''
<commit_msg>Add workflow specific error classes<commit_after>class NotSupportedError(Exception):
'''
Error class that is raised when a feature is not supported by the program.
'''
class MetadataError(Exception):
'''
Error class that is raised when a metadata element cannot be retrieved.
'''
class SubmissionError(Exception):
'''
Error class that is raised when submitted jobs failed.
'''
class CliArgError(Exception):
'''
Error class that is raised when the value of an command line argument is
invalid.
'''
class RegexError(Exception):
'''
Error class that is raised when a regular expression pattern didn't match.
'''
class StitchError(Exception):
'''
Error class that is raised when an error occurs upon stitching of
images for the generation of a mosaic.
'''
class PipelineError(Exception):
'''
Base class for jterator pipeline errors.
'''
class PipelineRunError(PipelineError):
'''
Error class that is raised when an error occurs upon running a jterator
pipeline.
'''
class PipelineDescriptionError(PipelineError):
'''
Error class that is raised when information in pipeline description is
missing or incorrect.
'''
class PipelineOSError(PipelineError):
'''
Error class that is raised when pipeline related files do not exist
on disk.
'''
class WorkflowError(Exception):
'''
Base class for workflow errors.
'''
class WorkflowArgsError(WorkflowError):
'''
Error class that is raised when arguments of a workflow step are
not correctly specified.
'''
class WorkflowNextStepError(WorkflowError):
'''
Error class that is raised when requirements for progressing to the next
step are not fulfilled.
'''
|
9a896de52b353e17a4216fdaf1342275e1ecc30a
|
autoconf/raw.py
|
autoconf/raw.py
|
from _external import *
from m import *
from gomp import *
from lcms import *
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = [
m,
gomp,
lcms,
]
)
|
from _external import *
from m import *
if not windows:
from gomp import *
from lcms import *
if windows:
tmpDep = [
m,
lcms,
]
else:
tmpDep = [
m,
gomp,
lcms,
]
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = tmpDep
)
|
Remove gomp dependency for Raw win build
|
Remove gomp dependency for Raw win build
|
Python
|
mit
|
tuttleofx/sconsProject
|
from _external import *
from m import *
from gomp import *
from lcms import *
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = [
m,
gomp,
lcms,
]
)
Remove gomp dependency for Raw win build
|
from _external import *
from m import *
if not windows:
from gomp import *
from lcms import *
if windows:
tmpDep = [
m,
lcms,
]
else:
tmpDep = [
m,
gomp,
lcms,
]
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = tmpDep
)
|
<commit_before>from _external import *
from m import *
from gomp import *
from lcms import *
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = [
m,
gomp,
lcms,
]
)
<commit_msg>Remove gomp dependency for Raw win build<commit_after>
|
from _external import *
from m import *
if not windows:
from gomp import *
from lcms import *
if windows:
tmpDep = [
m,
lcms,
]
else:
tmpDep = [
m,
gomp,
lcms,
]
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = tmpDep
)
|
from _external import *
from m import *
from gomp import *
from lcms import *
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = [
m,
gomp,
lcms,
]
)
Remove gomp dependency for Raw win buildfrom _external import *
from m import *
if not windows:
from gomp import *
from lcms import *
if windows:
tmpDep = [
m,
lcms,
]
else:
tmpDep = [
m,
gomp,
lcms,
]
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = tmpDep
)
|
<commit_before>from _external import *
from m import *
from gomp import *
from lcms import *
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = [
m,
gomp,
lcms,
]
)
<commit_msg>Remove gomp dependency for Raw win build<commit_after>from _external import *
from m import *
if not windows:
from gomp import *
from lcms import *
if windows:
tmpDep = [
m,
lcms,
]
else:
tmpDep = [
m,
gomp,
lcms,
]
raw = LibWithHeaderChecker(
'raw',
'libraw/libraw.h',
'c',
dependencies = tmpDep
)
|
a6a81790d43442f88738e5ae141f6b9c6d0efc74
|
authentication/urls.py
|
authentication/urls.py
|
from django.conf.urls import patterns, url
from authentication.views import user_login, user_logout
from authentication.views import approve, UnapprovedUsers, CustomAdminIndex
from authentication.views import BeneficiaryRegistrationView, DonorRegistrationView
urlpatterns = patterns('',
url(r'^register/donor$', DonorRegistrationView.as_view(), name='donor'),
url(r'^register/beneficiary$',
BeneficiaryRegistrationView.as_view(), name='beneficiary'),
url(r'^login/$', user_login, name='login'),
url(r'^logout/$', user_logout, name='logout'),
url(r'^$', CustomAdminIndex.as_view(), name='customadmin_index'),
url(r'^unapproved-users$', UnapprovedUsers.as_view(), name='unapproved'),
url(r'^approve/(?P<user_id>\d+)$', approve, name='approve'),
)
|
from django.conf.urls import patterns, url
from .views import user_login, user_logout
from .views import approve, UnapprovedUsers, CustomAdminIndex
from .views import BeneficiaryRegistrationView, DonorRegistrationView
urlpatterns = patterns('',
url(r'^register/donor$', DonorRegistrationView.as_view(), name='donor'),
url(r'^register/beneficiary$',
BeneficiaryRegistrationView.as_view(), name='beneficiary'),
url(r'^login/$', user_login, name='login'),
url(r'^logout/$', user_logout, name='logout'),
url(r'^$', CustomAdminIndex.as_view(), name='customadmin_index'),
url(r'^unapproved-users$', UnapprovedUsers.as_view(), name='unapproved'),
url(r'^approve/(?P<user_id>\d+)$', approve, name='approve'),
)
|
Use relative import for files inside the same package
|
Use relative import for files inside the same package
|
Python
|
bsd-3-clause
|
agiliq/fundraiser,febinstephen/django-fundrasiser-app,agiliq/fundraiser,febinstephen/django-fundrasiser-app,febinstephen/django-fundrasiser-app,agiliq/fundraiser
|
from django.conf.urls import patterns, url
from authentication.views import user_login, user_logout
from authentication.views import approve, UnapprovedUsers, CustomAdminIndex
from authentication.views import BeneficiaryRegistrationView, DonorRegistrationView
urlpatterns = patterns('',
url(r'^register/donor$', DonorRegistrationView.as_view(), name='donor'),
url(r'^register/beneficiary$',
BeneficiaryRegistrationView.as_view(), name='beneficiary'),
url(r'^login/$', user_login, name='login'),
url(r'^logout/$', user_logout, name='logout'),
url(r'^$', CustomAdminIndex.as_view(), name='customadmin_index'),
url(r'^unapproved-users$', UnapprovedUsers.as_view(), name='unapproved'),
url(r'^approve/(?P<user_id>\d+)$', approve, name='approve'),
)
Use relative import for files inside the same package
|
from django.conf.urls import patterns, url
from .views import user_login, user_logout
from .views import approve, UnapprovedUsers, CustomAdminIndex
from .views import BeneficiaryRegistrationView, DonorRegistrationView
urlpatterns = patterns('',
url(r'^register/donor$', DonorRegistrationView.as_view(), name='donor'),
url(r'^register/beneficiary$',
BeneficiaryRegistrationView.as_view(), name='beneficiary'),
url(r'^login/$', user_login, name='login'),
url(r'^logout/$', user_logout, name='logout'),
url(r'^$', CustomAdminIndex.as_view(), name='customadmin_index'),
url(r'^unapproved-users$', UnapprovedUsers.as_view(), name='unapproved'),
url(r'^approve/(?P<user_id>\d+)$', approve, name='approve'),
)
|
<commit_before>from django.conf.urls import patterns, url
from authentication.views import user_login, user_logout
from authentication.views import approve, UnapprovedUsers, CustomAdminIndex
from authentication.views import BeneficiaryRegistrationView, DonorRegistrationView
urlpatterns = patterns('',
url(r'^register/donor$', DonorRegistrationView.as_view(), name='donor'),
url(r'^register/beneficiary$',
BeneficiaryRegistrationView.as_view(), name='beneficiary'),
url(r'^login/$', user_login, name='login'),
url(r'^logout/$', user_logout, name='logout'),
url(r'^$', CustomAdminIndex.as_view(), name='customadmin_index'),
url(r'^unapproved-users$', UnapprovedUsers.as_view(), name='unapproved'),
url(r'^approve/(?P<user_id>\d+)$', approve, name='approve'),
)
<commit_msg>Use relative import for files inside the same package<commit_after>
|
from django.conf.urls import patterns, url
from .views import user_login, user_logout
from .views import approve, UnapprovedUsers, CustomAdminIndex
from .views import BeneficiaryRegistrationView, DonorRegistrationView
urlpatterns = patterns('',
url(r'^register/donor$', DonorRegistrationView.as_view(), name='donor'),
url(r'^register/beneficiary$',
BeneficiaryRegistrationView.as_view(), name='beneficiary'),
url(r'^login/$', user_login, name='login'),
url(r'^logout/$', user_logout, name='logout'),
url(r'^$', CustomAdminIndex.as_view(), name='customadmin_index'),
url(r'^unapproved-users$', UnapprovedUsers.as_view(), name='unapproved'),
url(r'^approve/(?P<user_id>\d+)$', approve, name='approve'),
)
|
from django.conf.urls import patterns, url
from authentication.views import user_login, user_logout
from authentication.views import approve, UnapprovedUsers, CustomAdminIndex
from authentication.views import BeneficiaryRegistrationView, DonorRegistrationView
urlpatterns = patterns('',
url(r'^register/donor$', DonorRegistrationView.as_view(), name='donor'),
url(r'^register/beneficiary$',
BeneficiaryRegistrationView.as_view(), name='beneficiary'),
url(r'^login/$', user_login, name='login'),
url(r'^logout/$', user_logout, name='logout'),
url(r'^$', CustomAdminIndex.as_view(), name='customadmin_index'),
url(r'^unapproved-users$', UnapprovedUsers.as_view(), name='unapproved'),
url(r'^approve/(?P<user_id>\d+)$', approve, name='approve'),
)
Use relative import for files inside the same packagefrom django.conf.urls import patterns, url
from .views import user_login, user_logout
from .views import approve, UnapprovedUsers, CustomAdminIndex
from .views import BeneficiaryRegistrationView, DonorRegistrationView
urlpatterns = patterns('',
url(r'^register/donor$', DonorRegistrationView.as_view(), name='donor'),
url(r'^register/beneficiary$',
BeneficiaryRegistrationView.as_view(), name='beneficiary'),
url(r'^login/$', user_login, name='login'),
url(r'^logout/$', user_logout, name='logout'),
url(r'^$', CustomAdminIndex.as_view(), name='customadmin_index'),
url(r'^unapproved-users$', UnapprovedUsers.as_view(), name='unapproved'),
url(r'^approve/(?P<user_id>\d+)$', approve, name='approve'),
)
|
<commit_before>from django.conf.urls import patterns, url
from authentication.views import user_login, user_logout
from authentication.views import approve, UnapprovedUsers, CustomAdminIndex
from authentication.views import BeneficiaryRegistrationView, DonorRegistrationView
urlpatterns = patterns('',
url(r'^register/donor$', DonorRegistrationView.as_view(), name='donor'),
url(r'^register/beneficiary$',
BeneficiaryRegistrationView.as_view(), name='beneficiary'),
url(r'^login/$', user_login, name='login'),
url(r'^logout/$', user_logout, name='logout'),
url(r'^$', CustomAdminIndex.as_view(), name='customadmin_index'),
url(r'^unapproved-users$', UnapprovedUsers.as_view(), name='unapproved'),
url(r'^approve/(?P<user_id>\d+)$', approve, name='approve'),
)
<commit_msg>Use relative import for files inside the same package<commit_after>from django.conf.urls import patterns, url
from .views import user_login, user_logout
from .views import approve, UnapprovedUsers, CustomAdminIndex
from .views import BeneficiaryRegistrationView, DonorRegistrationView
urlpatterns = patterns('',
url(r'^register/donor$', DonorRegistrationView.as_view(), name='donor'),
url(r'^register/beneficiary$',
BeneficiaryRegistrationView.as_view(), name='beneficiary'),
url(r'^login/$', user_login, name='login'),
url(r'^logout/$', user_logout, name='logout'),
url(r'^$', CustomAdminIndex.as_view(), name='customadmin_index'),
url(r'^unapproved-users$', UnapprovedUsers.as_view(), name='unapproved'),
url(r'^approve/(?P<user_id>\d+)$', approve, name='approve'),
)
|
453497b0755d8bc2d6bd6ccc3830394e50ed9a07
|
pywikibot/families/outreach_family.py
|
pywikibot/families/outreach_family.py
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
def version(self, code):
return "1.24wmf6"
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
|
Update mw version 1.24wmf11 derived from super class
|
Update mw version 1.24wmf11 derived from super class
Change-Id: If142c57a88179f80e2e652e844c7aadbc2468f7c
|
Python
|
mit
|
trishnaguha/pywikibot-core,Darkdadaah/pywikibot-core,VcamX/pywikibot-core,magul/pywikibot-core,PersianWikipedia/pywikibot-core,magul/pywikibot-core,icyflame/batman,Darkdadaah/pywikibot-core,wikimedia/pywikibot-core,happy5214/pywikibot-core,TridevGuha/pywikibot-core,wikimedia/pywikibot-core,hasteur/g13bot_tools_new,valhallasw/pywikibot-core,jayvdb/pywikibot-core,jayvdb/pywikibot-core,smalyshev/pywikibot-core,npdoty/pywikibot,hasteur/g13bot_tools_new,xZise/pywikibot-core,h4ck3rm1k3/pywikibot-core,darthbhyrava/pywikibot-local,h4ck3rm1k3/pywikibot-core,hasteur/g13bot_tools_new,happy5214/pywikibot-core,npdoty/pywikibot,emijrp/pywikibot-core
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
def version(self, code):
return "1.24wmf6"
Update mw version 1.24wmf11 derived from super class
Change-Id: If142c57a88179f80e2e652e844c7aadbc2468f7c
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
def version(self, code):
return "1.24wmf6"
<commit_msg>Update mw version 1.24wmf11 derived from super class
Change-Id: If142c57a88179f80e2e652e844c7aadbc2468f7c<commit_after>
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
|
# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
def version(self, code):
return "1.24wmf6"
Update mw version 1.24wmf11 derived from super class
Change-Id: If142c57a88179f80e2e652e844c7aadbc2468f7c# -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
|
<commit_before># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
def version(self, code):
return "1.24wmf6"
<commit_msg>Update mw version 1.24wmf11 derived from super class
Change-Id: If142c57a88179f80e2e652e844c7aadbc2468f7c<commit_after># -*- coding: utf-8 -*-
__version__ = '$Id$'
from pywikibot import family
# Outreach wiki custom family
class Family(family.WikimediaFamily):
def __init__(self):
super(Family, self).__init__()
self.name = u'outreach'
self.langs = {
'outreach': 'outreach.wikimedia.org',
}
self.interwiki_forward = 'wikipedia'
|
56184affcbc2b5185a011e689fbf4b9564d380f5
|
trakt/client.py
|
trakt/client.py
|
from trakt.core.configuration import ConfigurationManager
from trakt.core.http import HttpClient
from trakt.interfaces import construct_map
from trakt.interfaces.base import InterfaceProxy
import logging
__version__ = '2.1.0'
log = logging.getLogger(__name__)
class TraktClient(object):
base_url = 'https://api.trakt.tv'
version = __version__
__interfaces = None
def __init__(self, adapter_kwargs=None):
# Set parameter defaults
if adapter_kwargs is None:
adapter_kwargs = {}
adapter_kwargs.setdefault('max_retries', 3)
# Construct
self.http = HttpClient(self, adapter_kwargs)
self.configuration = ConfigurationManager()
self.__interfaces = construct_map(self)
def __getitem__(self, path):
parts = path.strip('/').split('/')
cur = self.__interfaces
while parts and type(cur) is dict:
key = parts.pop(0)
if key not in cur:
return None
cur = cur[key]
if type(cur) is dict:
cur = cur.get(None)
if parts:
return InterfaceProxy(cur, parts)
return cur
|
from trakt.core.configuration import ConfigurationManager
from trakt.core.http import HttpClient
from trakt.interfaces import construct_map
from trakt.interfaces.base import InterfaceProxy
import logging
__version__ = '2.1.0'
log = logging.getLogger(__name__)
class TraktClient(object):
base_url = 'https://api-v2launch.trakt.tv'
version = __version__
__interfaces = None
def __init__(self, adapter_kwargs=None):
# Set parameter defaults
if adapter_kwargs is None:
adapter_kwargs = {}
adapter_kwargs.setdefault('max_retries', 3)
# Construct
self.http = HttpClient(self, adapter_kwargs)
self.configuration = ConfigurationManager()
self.__interfaces = construct_map(self)
def __getitem__(self, path):
parts = path.strip('/').split('/')
cur = self.__interfaces
while parts and type(cur) is dict:
key = parts.pop(0)
if key not in cur:
return None
cur = cur[key]
if type(cur) is dict:
cur = cur.get(None)
if parts:
return InterfaceProxy(cur, parts)
return cur
|
Use new v2 api endpoint
|
Use new v2 api endpoint
|
Python
|
mit
|
fuzeman/trakt.py,shad7/trakt.py
|
from trakt.core.configuration import ConfigurationManager
from trakt.core.http import HttpClient
from trakt.interfaces import construct_map
from trakt.interfaces.base import InterfaceProxy
import logging
__version__ = '2.1.0'
log = logging.getLogger(__name__)
class TraktClient(object):
base_url = 'https://api.trakt.tv'
version = __version__
__interfaces = None
def __init__(self, adapter_kwargs=None):
# Set parameter defaults
if adapter_kwargs is None:
adapter_kwargs = {}
adapter_kwargs.setdefault('max_retries', 3)
# Construct
self.http = HttpClient(self, adapter_kwargs)
self.configuration = ConfigurationManager()
self.__interfaces = construct_map(self)
def __getitem__(self, path):
parts = path.strip('/').split('/')
cur = self.__interfaces
while parts and type(cur) is dict:
key = parts.pop(0)
if key not in cur:
return None
cur = cur[key]
if type(cur) is dict:
cur = cur.get(None)
if parts:
return InterfaceProxy(cur, parts)
return cur
Use new v2 api endpoint
|
from trakt.core.configuration import ConfigurationManager
from trakt.core.http import HttpClient
from trakt.interfaces import construct_map
from trakt.interfaces.base import InterfaceProxy
import logging
__version__ = '2.1.0'
log = logging.getLogger(__name__)
class TraktClient(object):
base_url = 'https://api-v2launch.trakt.tv'
version = __version__
__interfaces = None
def __init__(self, adapter_kwargs=None):
# Set parameter defaults
if adapter_kwargs is None:
adapter_kwargs = {}
adapter_kwargs.setdefault('max_retries', 3)
# Construct
self.http = HttpClient(self, adapter_kwargs)
self.configuration = ConfigurationManager()
self.__interfaces = construct_map(self)
def __getitem__(self, path):
parts = path.strip('/').split('/')
cur = self.__interfaces
while parts and type(cur) is dict:
key = parts.pop(0)
if key not in cur:
return None
cur = cur[key]
if type(cur) is dict:
cur = cur.get(None)
if parts:
return InterfaceProxy(cur, parts)
return cur
|
<commit_before>from trakt.core.configuration import ConfigurationManager
from trakt.core.http import HttpClient
from trakt.interfaces import construct_map
from trakt.interfaces.base import InterfaceProxy
import logging
__version__ = '2.1.0'
log = logging.getLogger(__name__)
class TraktClient(object):
base_url = 'https://api.trakt.tv'
version = __version__
__interfaces = None
def __init__(self, adapter_kwargs=None):
# Set parameter defaults
if adapter_kwargs is None:
adapter_kwargs = {}
adapter_kwargs.setdefault('max_retries', 3)
# Construct
self.http = HttpClient(self, adapter_kwargs)
self.configuration = ConfigurationManager()
self.__interfaces = construct_map(self)
def __getitem__(self, path):
parts = path.strip('/').split('/')
cur = self.__interfaces
while parts and type(cur) is dict:
key = parts.pop(0)
if key not in cur:
return None
cur = cur[key]
if type(cur) is dict:
cur = cur.get(None)
if parts:
return InterfaceProxy(cur, parts)
return cur
<commit_msg>Use new v2 api endpoint<commit_after>
|
from trakt.core.configuration import ConfigurationManager
from trakt.core.http import HttpClient
from trakt.interfaces import construct_map
from trakt.interfaces.base import InterfaceProxy
import logging
__version__ = '2.1.0'
log = logging.getLogger(__name__)
class TraktClient(object):
base_url = 'https://api-v2launch.trakt.tv'
version = __version__
__interfaces = None
def __init__(self, adapter_kwargs=None):
# Set parameter defaults
if adapter_kwargs is None:
adapter_kwargs = {}
adapter_kwargs.setdefault('max_retries', 3)
# Construct
self.http = HttpClient(self, adapter_kwargs)
self.configuration = ConfigurationManager()
self.__interfaces = construct_map(self)
def __getitem__(self, path):
parts = path.strip('/').split('/')
cur = self.__interfaces
while parts and type(cur) is dict:
key = parts.pop(0)
if key not in cur:
return None
cur = cur[key]
if type(cur) is dict:
cur = cur.get(None)
if parts:
return InterfaceProxy(cur, parts)
return cur
|
from trakt.core.configuration import ConfigurationManager
from trakt.core.http import HttpClient
from trakt.interfaces import construct_map
from trakt.interfaces.base import InterfaceProxy
import logging
__version__ = '2.1.0'
log = logging.getLogger(__name__)
class TraktClient(object):
base_url = 'https://api.trakt.tv'
version = __version__
__interfaces = None
def __init__(self, adapter_kwargs=None):
# Set parameter defaults
if adapter_kwargs is None:
adapter_kwargs = {}
adapter_kwargs.setdefault('max_retries', 3)
# Construct
self.http = HttpClient(self, adapter_kwargs)
self.configuration = ConfigurationManager()
self.__interfaces = construct_map(self)
def __getitem__(self, path):
parts = path.strip('/').split('/')
cur = self.__interfaces
while parts and type(cur) is dict:
key = parts.pop(0)
if key not in cur:
return None
cur = cur[key]
if type(cur) is dict:
cur = cur.get(None)
if parts:
return InterfaceProxy(cur, parts)
return cur
Use new v2 api endpointfrom trakt.core.configuration import ConfigurationManager
from trakt.core.http import HttpClient
from trakt.interfaces import construct_map
from trakt.interfaces.base import InterfaceProxy
import logging
__version__ = '2.1.0'
log = logging.getLogger(__name__)
class TraktClient(object):
base_url = 'https://api-v2launch.trakt.tv'
version = __version__
__interfaces = None
def __init__(self, adapter_kwargs=None):
# Set parameter defaults
if adapter_kwargs is None:
adapter_kwargs = {}
adapter_kwargs.setdefault('max_retries', 3)
# Construct
self.http = HttpClient(self, adapter_kwargs)
self.configuration = ConfigurationManager()
self.__interfaces = construct_map(self)
def __getitem__(self, path):
parts = path.strip('/').split('/')
cur = self.__interfaces
while parts and type(cur) is dict:
key = parts.pop(0)
if key not in cur:
return None
cur = cur[key]
if type(cur) is dict:
cur = cur.get(None)
if parts:
return InterfaceProxy(cur, parts)
return cur
|
<commit_before>from trakt.core.configuration import ConfigurationManager
from trakt.core.http import HttpClient
from trakt.interfaces import construct_map
from trakt.interfaces.base import InterfaceProxy
import logging
__version__ = '2.1.0'
log = logging.getLogger(__name__)
class TraktClient(object):
base_url = 'https://api.trakt.tv'
version = __version__
__interfaces = None
def __init__(self, adapter_kwargs=None):
# Set parameter defaults
if adapter_kwargs is None:
adapter_kwargs = {}
adapter_kwargs.setdefault('max_retries', 3)
# Construct
self.http = HttpClient(self, adapter_kwargs)
self.configuration = ConfigurationManager()
self.__interfaces = construct_map(self)
def __getitem__(self, path):
parts = path.strip('/').split('/')
cur = self.__interfaces
while parts and type(cur) is dict:
key = parts.pop(0)
if key not in cur:
return None
cur = cur[key]
if type(cur) is dict:
cur = cur.get(None)
if parts:
return InterfaceProxy(cur, parts)
return cur
<commit_msg>Use new v2 api endpoint<commit_after>from trakt.core.configuration import ConfigurationManager
from trakt.core.http import HttpClient
from trakt.interfaces import construct_map
from trakt.interfaces.base import InterfaceProxy
import logging
__version__ = '2.1.0'
log = logging.getLogger(__name__)
class TraktClient(object):
base_url = 'https://api-v2launch.trakt.tv'
version = __version__
__interfaces = None
def __init__(self, adapter_kwargs=None):
# Set parameter defaults
if adapter_kwargs is None:
adapter_kwargs = {}
adapter_kwargs.setdefault('max_retries', 3)
# Construct
self.http = HttpClient(self, adapter_kwargs)
self.configuration = ConfigurationManager()
self.__interfaces = construct_map(self)
def __getitem__(self, path):
parts = path.strip('/').split('/')
cur = self.__interfaces
while parts and type(cur) is dict:
key = parts.pop(0)
if key not in cur:
return None
cur = cur[key]
if type(cur) is dict:
cur = cur.get(None)
if parts:
return InterfaceProxy(cur, parts)
return cur
|
81e14c5e21eeea0776ba26d4a65dd3c364d41855
|
netmiko/hp/hp_procurve_ssh.py
|
netmiko/hp/hp_procurve_ssh.py
|
from netmiko.ssh_connection import SSHConnection
from netmiko.netmiko_globals import MAX_BUFFER
import time
class HPProcurveSSH(SSHConnection):
def session_preparation(self):
'''
Prepare the session after the connection has been established
'''
# HP uses - 'Press any key to continue'
time.sleep(1)
self.remote_conn.send("\n")
time.sleep(1)
# HP output contains VT100 escape codes
self.ansi_escape_codes = True
self.disable_paging(command="\nno page\n")
self.find_prompt()
def enable(self, default_username='manager'):
'''
Enter enable mode
'''
DEBUG = False
output = self.send_command('enable')
if 'sername' in output:
output += self.send_command(default_username)
if 'assword' in output:
output += self.send_command(self.secret)
if DEBUG:
print output
self.find_prompt()
self.clear_buffer()
|
from netmiko.ssh_connection import SSHConnection
from netmiko.netmiko_globals import MAX_BUFFER
import time
class HPProcurveSSH(SSHConnection):
def session_preparation(self):
'''
Prepare the session after the connection has been established
'''
# HP uses - 'Press any key to continue'
time.sleep(1)
self.remote_conn.send("\n")
time.sleep(1)
# HP output contains VT100 escape codes
self.ansi_escape_codes = True
self.disable_paging(command="\nno page\n")
self.find_prompt()
def enable_mode(self, default_username='manager'):
'''
Enter enable mode
'''
DEBUG = False
output = self.send_command('enable')
if 'sername' in output:
output += self.send_command(default_username)
if 'assword' in output:
output += self.send_command(self.secret)
if DEBUG:
print output
self.find_prompt()
self.clear_buffer()
|
Change function name enable to enable_mode
|
Change function name enable to enable_mode
|
Python
|
mit
|
shsingh/netmiko,rdezavalia/netmiko,enzzzy/netmiko,ktbyers/netmiko,jumpojoy/netmiko,fooelisa/netmiko,ktbyers/netmiko,shamanu4/netmiko,ivandgreat/netmiko,nitzmahone/netmiko,rdezavalia/netmiko,isponline/netmiko,brutus333/netmiko,mileswdavis/netmiko,shamanu4/netmiko,mzbenami/netmiko,rumo/netmiko,MikeOfNoTrades/netmiko,fooelisa/netmiko,shsingh/netmiko,nvoron23/netmiko,rumo/netmiko,enzzzy/netmiko,isidroamv/netmiko,isponline/netmiko,MikeOfNoTrades/netmiko,jumpojoy/netmiko,nitzmahone/netmiko,mzbenami/netmiko,mileswdavis/netmiko,brutus333/netmiko,jinesh-patel/netmiko,jinesh-patel/netmiko,isidroamv/netmiko,ivandgreat/netmiko
|
from netmiko.ssh_connection import SSHConnection
from netmiko.netmiko_globals import MAX_BUFFER
import time
class HPProcurveSSH(SSHConnection):
def session_preparation(self):
'''
Prepare the session after the connection has been established
'''
# HP uses - 'Press any key to continue'
time.sleep(1)
self.remote_conn.send("\n")
time.sleep(1)
# HP output contains VT100 escape codes
self.ansi_escape_codes = True
self.disable_paging(command="\nno page\n")
self.find_prompt()
def enable(self, default_username='manager'):
'''
Enter enable mode
'''
DEBUG = False
output = self.send_command('enable')
if 'sername' in output:
output += self.send_command(default_username)
if 'assword' in output:
output += self.send_command(self.secret)
if DEBUG:
print output
self.find_prompt()
self.clear_buffer()
Change function name enable to enable_mode
|
from netmiko.ssh_connection import SSHConnection
from netmiko.netmiko_globals import MAX_BUFFER
import time
class HPProcurveSSH(SSHConnection):
def session_preparation(self):
'''
Prepare the session after the connection has been established
'''
# HP uses - 'Press any key to continue'
time.sleep(1)
self.remote_conn.send("\n")
time.sleep(1)
# HP output contains VT100 escape codes
self.ansi_escape_codes = True
self.disable_paging(command="\nno page\n")
self.find_prompt()
def enable_mode(self, default_username='manager'):
'''
Enter enable mode
'''
DEBUG = False
output = self.send_command('enable')
if 'sername' in output:
output += self.send_command(default_username)
if 'assword' in output:
output += self.send_command(self.secret)
if DEBUG:
print output
self.find_prompt()
self.clear_buffer()
|
<commit_before>from netmiko.ssh_connection import SSHConnection
from netmiko.netmiko_globals import MAX_BUFFER
import time
class HPProcurveSSH(SSHConnection):
def session_preparation(self):
'''
Prepare the session after the connection has been established
'''
# HP uses - 'Press any key to continue'
time.sleep(1)
self.remote_conn.send("\n")
time.sleep(1)
# HP output contains VT100 escape codes
self.ansi_escape_codes = True
self.disable_paging(command="\nno page\n")
self.find_prompt()
def enable(self, default_username='manager'):
'''
Enter enable mode
'''
DEBUG = False
output = self.send_command('enable')
if 'sername' in output:
output += self.send_command(default_username)
if 'assword' in output:
output += self.send_command(self.secret)
if DEBUG:
print output
self.find_prompt()
self.clear_buffer()
<commit_msg>Change function name enable to enable_mode<commit_after>
|
from netmiko.ssh_connection import SSHConnection
from netmiko.netmiko_globals import MAX_BUFFER
import time
class HPProcurveSSH(SSHConnection):
def session_preparation(self):
'''
Prepare the session after the connection has been established
'''
# HP uses - 'Press any key to continue'
time.sleep(1)
self.remote_conn.send("\n")
time.sleep(1)
# HP output contains VT100 escape codes
self.ansi_escape_codes = True
self.disable_paging(command="\nno page\n")
self.find_prompt()
def enable_mode(self, default_username='manager'):
'''
Enter enable mode
'''
DEBUG = False
output = self.send_command('enable')
if 'sername' in output:
output += self.send_command(default_username)
if 'assword' in output:
output += self.send_command(self.secret)
if DEBUG:
print output
self.find_prompt()
self.clear_buffer()
|
from netmiko.ssh_connection import SSHConnection
from netmiko.netmiko_globals import MAX_BUFFER
import time
class HPProcurveSSH(SSHConnection):
def session_preparation(self):
'''
Prepare the session after the connection has been established
'''
# HP uses - 'Press any key to continue'
time.sleep(1)
self.remote_conn.send("\n")
time.sleep(1)
# HP output contains VT100 escape codes
self.ansi_escape_codes = True
self.disable_paging(command="\nno page\n")
self.find_prompt()
def enable(self, default_username='manager'):
'''
Enter enable mode
'''
DEBUG = False
output = self.send_command('enable')
if 'sername' in output:
output += self.send_command(default_username)
if 'assword' in output:
output += self.send_command(self.secret)
if DEBUG:
print output
self.find_prompt()
self.clear_buffer()
Change function name enable to enable_modefrom netmiko.ssh_connection import SSHConnection
from netmiko.netmiko_globals import MAX_BUFFER
import time
class HPProcurveSSH(SSHConnection):
def session_preparation(self):
'''
Prepare the session after the connection has been established
'''
# HP uses - 'Press any key to continue'
time.sleep(1)
self.remote_conn.send("\n")
time.sleep(1)
# HP output contains VT100 escape codes
self.ansi_escape_codes = True
self.disable_paging(command="\nno page\n")
self.find_prompt()
def enable_mode(self, default_username='manager'):
'''
Enter enable mode
'''
DEBUG = False
output = self.send_command('enable')
if 'sername' in output:
output += self.send_command(default_username)
if 'assword' in output:
output += self.send_command(self.secret)
if DEBUG:
print output
self.find_prompt()
self.clear_buffer()
|
<commit_before>from netmiko.ssh_connection import SSHConnection
from netmiko.netmiko_globals import MAX_BUFFER
import time
class HPProcurveSSH(SSHConnection):
def session_preparation(self):
'''
Prepare the session after the connection has been established
'''
# HP uses - 'Press any key to continue'
time.sleep(1)
self.remote_conn.send("\n")
time.sleep(1)
# HP output contains VT100 escape codes
self.ansi_escape_codes = True
self.disable_paging(command="\nno page\n")
self.find_prompt()
def enable(self, default_username='manager'):
'''
Enter enable mode
'''
DEBUG = False
output = self.send_command('enable')
if 'sername' in output:
output += self.send_command(default_username)
if 'assword' in output:
output += self.send_command(self.secret)
if DEBUG:
print output
self.find_prompt()
self.clear_buffer()
<commit_msg>Change function name enable to enable_mode<commit_after>from netmiko.ssh_connection import SSHConnection
from netmiko.netmiko_globals import MAX_BUFFER
import time
class HPProcurveSSH(SSHConnection):
def session_preparation(self):
'''
Prepare the session after the connection has been established
'''
# HP uses - 'Press any key to continue'
time.sleep(1)
self.remote_conn.send("\n")
time.sleep(1)
# HP output contains VT100 escape codes
self.ansi_escape_codes = True
self.disable_paging(command="\nno page\n")
self.find_prompt()
def enable_mode(self, default_username='manager'):
'''
Enter enable mode
'''
DEBUG = False
output = self.send_command('enable')
if 'sername' in output:
output += self.send_command(default_username)
if 'assword' in output:
output += self.send_command(self.secret)
if DEBUG:
print output
self.find_prompt()
self.clear_buffer()
|
185f88089cc0bfaa025d147f870245523307d03f
|
tests/utils.py
|
tests/utils.py
|
import unittest
from knights import compiler
class Mock(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class TemplateTestCase(unittest.TestCase):
def assertRendered(self, source, expected, context=None):
try:
tmpl = compiler.kompile(source)
rendered = tmpl()({} if context is None else context)
self.assertEqual(rendered, expected)
except Exception as e:
if hasattr(e, 'message'):
standardMsg = e.message
elif hasattr(e, 'args') and len(e.args) > 0:
standardMsg = e.args[0]
else:
standardMsg = ''
msg = 'Failed rendering template %s:\n%s: %s' % (
source, e.__class__.__name__, standardMsg)
self.fail(msg)
|
import unittest
from knights import compiler
class Mock(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class TemplateTestCase(unittest.TestCase):
def assertRendered(self, source, expected, context=None, debug=False):
try:
tmpl = compiler.kompile(source, debug)
rendered = tmpl()({} if context is None else context)
self.assertEqual(rendered, expected)
except Exception as e:
if hasattr(e, 'message'):
standardMsg = e.message
elif hasattr(e, 'args') and len(e.args) > 0:
standardMsg = e.args[0]
else:
standardMsg = ''
msg = 'Failed rendering template %s:\n%s: %s' % (
source, e.__class__.__name__, standardMsg)
self.fail(msg)
|
Add option for passing debug flag to template compiler
|
Add option for passing debug flag to template compiler
|
Python
|
mit
|
funkybob/knights-templater,funkybob/knights-templater
|
import unittest
from knights import compiler
class Mock(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class TemplateTestCase(unittest.TestCase):
def assertRendered(self, source, expected, context=None):
try:
tmpl = compiler.kompile(source)
rendered = tmpl()({} if context is None else context)
self.assertEqual(rendered, expected)
except Exception as e:
if hasattr(e, 'message'):
standardMsg = e.message
elif hasattr(e, 'args') and len(e.args) > 0:
standardMsg = e.args[0]
else:
standardMsg = ''
msg = 'Failed rendering template %s:\n%s: %s' % (
source, e.__class__.__name__, standardMsg)
self.fail(msg)
Add option for passing debug flag to template compiler
|
import unittest
from knights import compiler
class Mock(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class TemplateTestCase(unittest.TestCase):
def assertRendered(self, source, expected, context=None, debug=False):
try:
tmpl = compiler.kompile(source, debug)
rendered = tmpl()({} if context is None else context)
self.assertEqual(rendered, expected)
except Exception as e:
if hasattr(e, 'message'):
standardMsg = e.message
elif hasattr(e, 'args') and len(e.args) > 0:
standardMsg = e.args[0]
else:
standardMsg = ''
msg = 'Failed rendering template %s:\n%s: %s' % (
source, e.__class__.__name__, standardMsg)
self.fail(msg)
|
<commit_before>import unittest
from knights import compiler
class Mock(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class TemplateTestCase(unittest.TestCase):
def assertRendered(self, source, expected, context=None):
try:
tmpl = compiler.kompile(source)
rendered = tmpl()({} if context is None else context)
self.assertEqual(rendered, expected)
except Exception as e:
if hasattr(e, 'message'):
standardMsg = e.message
elif hasattr(e, 'args') and len(e.args) > 0:
standardMsg = e.args[0]
else:
standardMsg = ''
msg = 'Failed rendering template %s:\n%s: %s' % (
source, e.__class__.__name__, standardMsg)
self.fail(msg)
<commit_msg>Add option for passing debug flag to template compiler<commit_after>
|
import unittest
from knights import compiler
class Mock(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class TemplateTestCase(unittest.TestCase):
def assertRendered(self, source, expected, context=None, debug=False):
try:
tmpl = compiler.kompile(source, debug)
rendered = tmpl()({} if context is None else context)
self.assertEqual(rendered, expected)
except Exception as e:
if hasattr(e, 'message'):
standardMsg = e.message
elif hasattr(e, 'args') and len(e.args) > 0:
standardMsg = e.args[0]
else:
standardMsg = ''
msg = 'Failed rendering template %s:\n%s: %s' % (
source, e.__class__.__name__, standardMsg)
self.fail(msg)
|
import unittest
from knights import compiler
class Mock(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class TemplateTestCase(unittest.TestCase):
def assertRendered(self, source, expected, context=None):
try:
tmpl = compiler.kompile(source)
rendered = tmpl()({} if context is None else context)
self.assertEqual(rendered, expected)
except Exception as e:
if hasattr(e, 'message'):
standardMsg = e.message
elif hasattr(e, 'args') and len(e.args) > 0:
standardMsg = e.args[0]
else:
standardMsg = ''
msg = 'Failed rendering template %s:\n%s: %s' % (
source, e.__class__.__name__, standardMsg)
self.fail(msg)
Add option for passing debug flag to template compilerimport unittest
from knights import compiler
class Mock(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class TemplateTestCase(unittest.TestCase):
def assertRendered(self, source, expected, context=None, debug=False):
try:
tmpl = compiler.kompile(source, debug)
rendered = tmpl()({} if context is None else context)
self.assertEqual(rendered, expected)
except Exception as e:
if hasattr(e, 'message'):
standardMsg = e.message
elif hasattr(e, 'args') and len(e.args) > 0:
standardMsg = e.args[0]
else:
standardMsg = ''
msg = 'Failed rendering template %s:\n%s: %s' % (
source, e.__class__.__name__, standardMsg)
self.fail(msg)
|
<commit_before>import unittest
from knights import compiler
class Mock(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class TemplateTestCase(unittest.TestCase):
def assertRendered(self, source, expected, context=None):
try:
tmpl = compiler.kompile(source)
rendered = tmpl()({} if context is None else context)
self.assertEqual(rendered, expected)
except Exception as e:
if hasattr(e, 'message'):
standardMsg = e.message
elif hasattr(e, 'args') and len(e.args) > 0:
standardMsg = e.args[0]
else:
standardMsg = ''
msg = 'Failed rendering template %s:\n%s: %s' % (
source, e.__class__.__name__, standardMsg)
self.fail(msg)
<commit_msg>Add option for passing debug flag to template compiler<commit_after>import unittest
from knights import compiler
class Mock(object):
def __init__(self, **kwargs):
for k, v in kwargs.items():
setattr(self, k, v)
class TemplateTestCase(unittest.TestCase):
def assertRendered(self, source, expected, context=None, debug=False):
try:
tmpl = compiler.kompile(source, debug)
rendered = tmpl()({} if context is None else context)
self.assertEqual(rendered, expected)
except Exception as e:
if hasattr(e, 'message'):
standardMsg = e.message
elif hasattr(e, 'args') and len(e.args) > 0:
standardMsg = e.args[0]
else:
standardMsg = ''
msg = 'Failed rendering template %s:\n%s: %s' % (
source, e.__class__.__name__, standardMsg)
self.fail(msg)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.