| Column | Type |
|---|---|
| commit | string (length 40) |
| old_file | string (length 4-118) |
| new_file | string (length 4-118) |
| old_contents | string (length 0-2.94k) |
| new_contents | string (length 1-4.43k) |
| subject | string (length 15-444) |
| message | string (length 16-3.45k) |
| lang | string (1 class) |
| license | string (13 classes) |
| repos | string (length 5-43.2k) |
| prompt | string (length 17-4.58k) |
| response | string (length 1-4.43k) |
| prompt_tagged | string (length 58-4.62k) |
| response_tagged | string (length 1-4.43k) |
| text | string (length 132-7.29k) |
| text_tagged | string (length 173-7.33k) |
12ac8998531922ce0ee863a49e2dfb52fa13b8fd
|
metpy/plots/tests/test_util.py
|
metpy/plots/tests/test_util.py
|
# Copyright (c) 2008-2016 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""Tests for the `_util` module."""
from datetime import datetime
import matplotlib.pyplot as plt
import pytest
from metpy.plots import add_logo, add_timestamp
# Fixture to make sure we have the right backend
from metpy.testing import patch_round, set_agg_backend # noqa: F401
@pytest.mark.mpl_image_compare(tolerance=0.021, remove_text=True)
def test_add_timestamp():
"""Test adding a timestamp to an axes object."""
fig = plt.figure(figsize=(9, 9))
ax = plt.subplot(1, 1, 1)
add_timestamp(ax, time=datetime(2017, 1, 1))
return fig
@pytest.mark.mpl_image_compare(tolerance=0.021, remove_text=True)
def test_add_logo_small():
"""Test adding a logo to a figure."""
fig = plt.figure(figsize=(9, 9))
add_logo(fig)
return fig
@pytest.mark.mpl_image_compare(tolerance=0.021, remove_text=True)
def test_add_logo_large():
"""Test adding a logo to a figure."""
fig = plt.figure(figsize=(9, 9))
add_logo(fig, size='large')
return fig
def test_add_logo_invalid_size():
"""Test adding a logo to a figure with an invalid size specification."""
fig = plt.figure(figsize=(9, 9))
with pytest.raises(ValueError):
add_logo(fig, size='jumbo')
|
Add tests for _util module
|
Add tests for _util module
|
Python
|
bsd-3-clause
|
dopplershift/MetPy,ahaberlie/MetPy,dopplershift/MetPy,ShawnMurd/MetPy,jrleeman/MetPy,Unidata/MetPy,ahaberlie/MetPy,jrleeman/MetPy,Unidata/MetPy
|
d59b9389cf295a444ad457ccb952234aa877bb29
|
src/ggrc/converters/snapshot_block.py
|
src/ggrc/converters/snapshot_block.py
|
# Copyright (C) 2017 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""Module for snapshot block converter."""
class SnapshotBlockConverter(object):
"""Block converter for snapshots of a single object type."""
def __init__(self, converter, ids):
self.converter = converter
self.ids = ids
@staticmethod
def handle_row_data():
pass
@property
def name(self):
return "Snapshot"
@staticmethod
def to_array():
return [[]], [[]] # header and body
|
Add initial snapshot block converter
|
Add initial snapshot block converter
|
Python
|
apache-2.0
|
AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core
|
f14913b76a4f6909130d5bf8eed9577740ff5b15
|
artists/migrations/0005_auto_20170120_1802.py
|
artists/migrations/0005_auto_20170120_1802.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-01-20 20:02
from __future__ import unicode_literals
import artists.models
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('artists', '0004_auto_20170120_1647'),
]
operations = [
migrations.AlterField(
model_name='artist',
name='photo',
field=models.ImageField(upload_to=artists.models.photo_path_and_name, verbose_name='Foto'),
),
]
|
Add artist profile picture path
|
Add artist profile picture path
|
Python
|
mit
|
perna/bandhunter,perna/bandhunter
|
94ea3abfa29b78bee82150706e9c6d25f2dbfb54
|
client/can_bridge_loopback_test.py
|
client/can_bridge_loopback_test.py
|
#!/usr/bin/env python3
import utils
import can
import can_bridge
import serial_datagrams
def read_frame(fdesc):
"""
Reads a full CAN datagram from the CAN <-> serial bridge.
"""
buf = bytes()
datagram = None
frame = serial_datagrams.read_datagram(fdesc)
frame = can_bridge.decode_frame(frame)
return frame
def send_frame(fdesc, frame):
bridge_frame = can_bridge.encode_frame_command(frame)
datagram = serial_datagrams.datagram_encode(bridge_frame)
fdesc.write(datagram)
fdesc.flush()
def main():
DATA = "CVRA"
parser = utils.ConnectionArgumentParser(description='Tests the CAN loopback over the Brige')
args = parser.parse_args()
connection = utils.open_connection(args)
frame = can.Frame(data=DATA.encode())
send_frame(connection, frame)
print("Sent {}".format(DATA))
answer = read_frame(connection)
print("Got {}".format(answer.data.decode()))
if __name__ == "__main__":
main()
|
Add a small program to test CAN loobpack
|
Add a small program to test CAN loobpack
|
Python
|
bsd-2-clause
|
cvra/can-bootloader,cvra/can-bootloader,cvra/can-bootloader,cvra/can-bootloader
|
0142da418f01dc5540aaf1952852780225baaa17
|
testapp/testapp/wsgi.py
|
testapp/testapp/wsgi.py
|
"""
WSGI config for idf project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "testapp.settings")
application = get_wsgi_application()
|
Make test app runnable again
|
Make test app runnable again
Make the test app work with `python manager.py runserver` again, since
we use this for basic testing/debugging.
|
Python
|
isc
|
hobarrera/django-afip,hobarrera/django-afip
|
8e8f3502944295b019f919e703c2f1396153b45e
|
nettests/example_http.py
|
nettests/example_http.py
|
# -*- encoding: utf-8 -*-
#
# :authors: Arturo Filastò
# :licence: see LICENSE
from ooni.templates import http
class Example(http.HTTPTest):
inputs = ['http://google.com/', 'http://wikileaks.org/',
'http://torproject.org/']
def processResponseBody(self, body):
# XXX here shall go your logic
# for processing the body
if 'blocked' in body:
self.report['censored'] = True
else:
self.report['censored'] = False
def processResponseHeaders(self, headers):
# XXX place in here all the logic for handling the processing of HTTP
# Headers.
if headers.hasHeader('location'):
self.report['redirect'] = True
server = headers.getRawHeaders("Server")
if server:
self.report['http_server'] = str(server.pop())
|
Add an example of using the HTTP test template.
|
Add an example of using the HTTP test template.
|
Python
|
bsd-2-clause
|
kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,Karthikeyan-kkk/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,hackerberry/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,hackerberry/ooni-probe
|
bb41a992fbdbac158244869584d5380dddc7f970
|
test_examples.py
|
test_examples.py
|
import os
import glob
import argparse
import FAUSTPy
parser = argparse.ArgumentParser()
parser.add_argument('-p', '--path',
dest="examples_path",
default="/usr/share/faust-*/examples",
help="The path to the FAUST examples."
)
args = parser.parse_args()
fs = 48e3
for f in glob.glob(os.sep.join([args.examples_path, "*.dsp"])):
print(f)
dsp = FAUSTPy.FAUST(f, int(fs), "double")
|
Add a test script that compiles the FAUST examples.
|
Add a test script that compiles the FAUST examples.
|
Python
|
mit
|
marcecj/faust_python
|
56788cc70d3b6f9375f70bd874cf4d31c3f8e3ec
|
photutils/utils/_parameters.py
|
photutils/utils/_parameters.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module provides parameter validation tools.
"""
import numpy as np
def as_pair(name, value, lower_bound=None, upper_bound=None):
"""
Define a pair of integer values as a 1D array.
Parameters
----------
name : str
The name of the parameter, which is used in error messages.
value : int or int array_like
The input value.
lower_bound : int or int array_like
A tuple defining the allowed lower bound of the value. The first
element is the bound and the second element indicates whether
the bound is exclusive (0) or inclusive (1).
upper_bound : int or int array_like
A tuple defining the allowed upper bound of the value. The first
element is the bound and the second element indicates whether
the bound is exclusive (0) or inclusive (1).
Returns
-------
result : (2,) `~numpy.ndarray`
The pair as a 1D array of two integers.
Examples
--------
>>> from photutils.utils._parameters import as_pair
>>> as_pair('myparam', 4)
array([4, 4])
>>> as_pair('myparam', (3, 4))
array([3, 4])
>>> as_pair('myparam', 0, lower_bound=(0, 0))
array([0, 0])
"""
value = np.atleast_1d(value)
if np.any(~np.isfinite(value)):
raise ValueError(f'{name} must be a finite value')
if lower_bound is not None:
if len(lower_bound) != 2:
raise ValueError('lower_bound must contain only 2 elements')
bound, inclusive = lower_bound
if inclusive == 1:
oper = '>'
mask = value <= bound
else:
oper = '>='
mask = value < bound
if np.any(mask):
raise ValueError(f'{name} must be {oper} {bound}')
if upper_bound is not None:
if len(upper_bound) != 2:
raise ValueError('upper_bound must contain only 2 elements')
bound, inclusive = upper_bound
if inclusive == 1:
oper = '<'
mask = value >= bound
else:
oper = '<='
mask = value > bound
if np.any(mask):
raise ValueError(f'{name} must be {oper} {bound}')
if len(value) == 1:
value = np.array((value[0], value[0]))
if len(value) != 2:
raise ValueError(f'{name} must have 1 or 2 elements')
if value.ndim != 1:
raise ValueError(f'{name} must be 1D')
if value.dtype.kind != 'i':
raise ValueError(f'{name} must have integer values')
return value
|
Add as_pair helper function for parameter validation
|
Add as_pair helper function for parameter validation
|
Python
|
bsd-3-clause
|
astropy/photutils,larrybradley/photutils
|
a540a68561db4067b66b4d4d0920b217fea4fda4
|
var/spack/packages/openssl/package.py
|
var/spack/packages/openssl/package.py
|
from spack import *
class Openssl(Package):
"""The OpenSSL Project is a collaborative effort to develop a
robust, commercial-grade, full-featured, and Open Source
toolkit implementing the Secure Sockets Layer (SSL v2/v3) and
Transport Layer Security (TLS v1) protocols as well as a
full-strength general purpose cryptography library."""
homepage = "http://www.openssl.org"
url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf')
version('1.0.2d', '38dd619b2e77cbac69b99f52a053d25a')
version('1.0.2e', '5262bfa25b60ed9de9f28d5d52d77fc5')
depends_on("zlib")
parallel = False
def install(self, spec, prefix):
config = Executable("./config")
config("--prefix=%s" % prefix,
"--openssldir=%s/etc/openssl" % prefix,
"zlib",
"no-krb5",
"shared")
make()
make("install")
|
from spack import *
class Openssl(Package):
"""The OpenSSL Project is a collaborative effort to develop a
robust, commercial-grade, full-featured, and Open Source
toolkit implementing the Secure Sockets Layer (SSL v2/v3) and
Transport Layer Security (TLS v1) protocols as well as a
full-strength general purpose cryptography library."""
homepage = "http://www.openssl.org"
url = "http://www.openssl.org/source/openssl-1.0.1h.tar.gz"
version('1.0.1h', '8d6d684a9430d5cc98a62a5d8fbda8cf')
version('1.0.2d', '38dd619b2e77cbac69b99f52a053d25a')
version('1.0.2e', '5262bfa25b60ed9de9f28d5d52d77fc5')
depends_on("zlib")
parallel = False
def install(self, spec, prefix):
if spec.satisfies("=darwin-x86_64"):
perl = which('perl')
perl("./Configure",
"--prefix=%s" % prefix,
"--openssldir=%s/etc/openssl" % prefix,
"zlib",
"no-krb5",
"shared",
"darwin64-x86_64-cc")
perl('-pi', '-e', 's/-arch x86_64//g', 'Makefile')
else:
exit(1)
config = Executable("./config")
config("--prefix=%s" % prefix,
"--openssldir=%s/etc/openssl" % prefix,
"zlib",
"no-krb5",
"shared")
make()
make("install")
|
Make OpenSSL build on Darwin
|
Make OpenSSL build on Darwin
|
Python
|
lgpl-2.1
|
matthiasdiener/spack,EmreAtes/spack,krafczyk/spack,iulian787/spack,matthiasdiener/spack,tmerrick1/spack,krafczyk/spack,lgarren/spack,TheTimmy/spack,iulian787/spack,EmreAtes/spack,EmreAtes/spack,skosukhin/spack,krafczyk/spack,mfherbst/spack,LLNL/spack,mfherbst/spack,skosukhin/spack,krafczyk/spack,mfherbst/spack,tmerrick1/spack,lgarren/spack,matthiasdiener/spack,lgarren/spack,EmreAtes/spack,mfherbst/spack,LLNL/spack,tmerrick1/spack,TheTimmy/spack,skosukhin/spack,iulian787/spack,mfherbst/spack,skosukhin/spack,tmerrick1/spack,EmreAtes/spack,iulian787/spack,LLNL/spack,iulian787/spack,krafczyk/spack,lgarren/spack,skosukhin/spack,TheTimmy/spack,TheTimmy/spack,matthiasdiener/spack,lgarren/spack,TheTimmy/spack,tmerrick1/spack,matthiasdiener/spack,LLNL/spack,LLNL/spack
|
302927c95c5c75a1a8c10d2aa939f1a33dcdbdaa
|
algo_prep/dfs.py
|
algo_prep/dfs.py
|
class Node(object):
def __init__(self, value, children=None):
self.value = value
if children is None:
self.children = []
else:
self.children = children
def dfs(tree, value): # Check that value is in tree
if tree.value == value:
return True
else:
if tree.children != []:
return True in map(lambda subtree: dfs(subtree, value), tree.children)
else:
return False
if __name__ == '__main__':
test_tree = Node(5, [
Node(3, [
Node(15),
Node(12)]),
Node(7, [
Node(11, [
Node(1),
Node(22),
Node(13),
Node(8)]),
Node(14)])])
assert dfs(test_tree, 5)
assert dfs(test_tree, 15)
assert dfs(test_tree, 12)
assert dfs(test_tree, 14)
assert dfs(test_tree, 22)
assert not dfs(test_tree, 999)
assert not dfs(test_tree, 0)
assert not dfs(test_tree, 33)
|
Add simple DFS in python
|
Add simple DFS in python
|
Python
|
mit
|
WesleyAC/toybox,WesleyAC/toybox,WesleyAC/toybox,WesleyAC/toybox,WesleyAC/toybox
|
5394e71f93fbd3602455217974d3a1777f7967af
|
cabby/geo/directions.py
|
cabby/geo/directions.py
|
# coding=utf-8
# Copyright 2020 Google LLC
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Basic functions for working with relative and absolute geo directions.'''
import enum
class Direction(enum.IntEnum):
AHEAD = 0
SLIGHT_LEFT = 1
LEFT = 2
SLIGHT_RIGHT = 3
RIGHT = 4
BEHIND = 5
def angle_in_360(angle: float) -> float:
if angle < 0:
return angle + 360
return angle
def get_egocentric_direction(angle: float) -> int:
angle = angle_in_360(angle)
if angle < 30 or angle > 330:
return Direction.AHEAD
elif angle < 60:
return Direction.SLIGHT_RIGHT
elif angle < 120:
return Direction.RIGHT
elif angle < 240:
return Direction.BEHIND
elif angle < 300:
return Direction.LEFT
else:
return Direction.SLIGHT_LEFT
|
Add module for enumerating direction types and converting angles into them.
|
Add module for enumerating direction types and converting angles into them.
|
Python
|
apache-2.0
|
googleinterns/cabby,googleinterns/cabby,googleinterns/cabby,googleinterns/cabby
elif angle < 240:
return Direction.BEHIND
elif angle < 300:
return Direction.LEFT
else:
return Direction.SLIGHT_LEFT
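A minimal usage sketch of the direction helpers above (the sample bearings are illustrative only; assumes the functions are in scope as defined):
# Classify a few relative bearings; negative angles are first normalized into [0, 360).
for bearing in (-45.0, 10.0, 90.0, 180.0, 275.0):
    normalized = angle_in_360(bearing)
    direction = get_egocentric_direction(bearing)
    print(bearing, normalized, direction.name)
# Expected names: SLIGHT_LEFT, AHEAD, RIGHT, BEHIND, LEFT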
|
|
e1c091a4e31b346bbe9b2bffc33ccf4e82ef1beb
|
pre_commit_hooks/check_yaml.py
|
pre_commit_hooks/check_yaml.py
|
from __future__ import print_function
import argparse
import sys
import yaml
def check_yaml(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Yaml filenames to check.')
args = parser.parse_args(argv)
retval = 0
for filename in args.filenames:
try:
yaml.load(open(filename))
except yaml.YAMLError as exc:
print(exc)
retval = 1
return retval
if __name__ == '__main__':
sys.exit(check_yaml())
|
from __future__ import print_function
import argparse
import sys
import yaml
try:
from yaml.cyaml import CLoader as Loader
except ImportError: # pragma: no cover (no libyaml-dev / pypy)
Loader = yaml.Loader
def check_yaml(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Yaml filenames to check.')
args = parser.parse_args(argv)
retval = 0
for filename in args.filenames:
try:
yaml.load(open(filename), Loader=Loader)
except yaml.YAMLError as exc:
print(exc)
retval = 1
return retval
if __name__ == '__main__':
sys.exit(check_yaml())
|
Use the CLoader when present to validate yaml
|
Use the CLoader when present to validate yaml
|
Python
|
mit
|
jordant/pre-commit-hooks,Harwood/pre-commit-hooks,chriskuehl/pre-commit-hooks,pre-commit/pre-commit-hooks,bgschiller/pre-commit-hooks,arahayrabedian/pre-commit-hooks,Coverfox/pre-commit-hooks,dupuy/pre-commit-hooks,jordant/pre-commit-hooks
|
from __future__ import print_function
import argparse
import sys
import yaml
def check_yaml(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Yaml filenames to check.')
args = parser.parse_args(argv)
retval = 0
for filename in args.filenames:
try:
yaml.load(open(filename))
except yaml.YAMLError as exc:
print(exc)
retval = 1
return retval
if __name__ == '__main__':
sys.exit(check_yaml())
Use the CLoader when present to validate yaml
|
from __future__ import print_function
import argparse
import sys
import yaml
try:
from yaml.cyaml import CLoader as Loader
except ImportError: # pragma: no cover (no libyaml-dev / pypy)
Loader = yaml.Loader
def check_yaml(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Yaml filenames to check.')
args = parser.parse_args(argv)
retval = 0
for filename in args.filenames:
try:
yaml.load(open(filename), Loader=Loader)
except yaml.YAMLError as exc:
print(exc)
retval = 1
return retval
if __name__ == '__main__':
sys.exit(check_yaml())
|
<commit_before>from __future__ import print_function
import argparse
import sys
import yaml
def check_yaml(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Yaml filenames to check.')
args = parser.parse_args(argv)
retval = 0
for filename in args.filenames:
try:
yaml.load(open(filename))
except yaml.YAMLError as exc:
print(exc)
retval = 1
return retval
if __name__ == '__main__':
sys.exit(check_yaml())
<commit_msg>Use the CLoader when present to validate yaml<commit_after>
|
from __future__ import print_function
import argparse
import sys
import yaml
try:
from yaml.cyaml import CLoader as Loader
except ImportError: # pragma: no cover (no libyaml-dev / pypy)
Loader = yaml.Loader
def check_yaml(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Yaml filenames to check.')
args = parser.parse_args(argv)
retval = 0
for filename in args.filenames:
try:
yaml.load(open(filename), Loader=Loader)
except yaml.YAMLError as exc:
print(exc)
retval = 1
return retval
if __name__ == '__main__':
sys.exit(check_yaml())
|
from __future__ import print_function
import argparse
import sys
import yaml
def check_yaml(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Yaml filenames to check.')
args = parser.parse_args(argv)
retval = 0
for filename in args.filenames:
try:
yaml.load(open(filename))
except yaml.YAMLError as exc:
print(exc)
retval = 1
return retval
if __name__ == '__main__':
sys.exit(check_yaml())
Use the CLoader when present to validate yamlfrom __future__ import print_function
import argparse
import sys
import yaml
try:
from yaml.cyaml import CLoader as Loader
except ImportError: # pragma: no cover (no libyaml-dev / pypy)
Loader = yaml.Loader
def check_yaml(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Yaml filenames to check.')
args = parser.parse_args(argv)
retval = 0
for filename in args.filenames:
try:
yaml.load(open(filename), Loader=Loader)
except yaml.YAMLError as exc:
print(exc)
retval = 1
return retval
if __name__ == '__main__':
sys.exit(check_yaml())
|
<commit_before>from __future__ import print_function
import argparse
import sys
import yaml
def check_yaml(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Yaml filenames to check.')
args = parser.parse_args(argv)
retval = 0
for filename in args.filenames:
try:
yaml.load(open(filename))
except yaml.YAMLError as exc:
print(exc)
retval = 1
return retval
if __name__ == '__main__':
sys.exit(check_yaml())
<commit_msg>Use the CLoader when present to validate yaml<commit_after>from __future__ import print_function
import argparse
import sys
import yaml
try:
from yaml.cyaml import CLoader as Loader
except ImportError: # pragma: no cover (no libyaml-dev / pypy)
Loader = yaml.Loader
def check_yaml(argv=None):
parser = argparse.ArgumentParser()
parser.add_argument('filenames', nargs='*', help='Yaml filenames to check.')
args = parser.parse_args(argv)
retval = 0
for filename in args.filenames:
try:
yaml.load(open(filename), Loader=Loader)
except yaml.YAMLError as exc:
print(exc)
retval = 1
return retval
if __name__ == '__main__':
sys.exit(check_yaml())
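A small usage sketch of the checker above (the file names are hypothetical and must exist on disk for open() to succeed):
# Returns 0 when every file parses, 1 if any file raised a YAMLError.
ret = check_yaml(['config.yaml', 'data.yaml'])
if ret != 0:
    print('at least one YAML file failed to parse')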
|
7b629cb8ad631ad10a818638af78e14efaa657a1
|
lintcode/Medium/107_Word_Break.py
|
lintcode/Medium/107_Word_Break.py
|
class Solution:
# @param s: A string s
# @param dict: A dictionary of words dict
def wordBreak(self, s, dict):
# write your code here
if (not s):
return True
dp = []
for i in range(1, len(s) + 1):
tmp = s[:i]
if (tmp in dict):
dp.append(True)
else:
tmpRes = False
for j in range(len(dp)):
if (dp[j] and tmp[j + 1:] in dict):
tmpRes = True
break
dp.append(tmpRes)
return dp[-1]
# JiuZhang
# if len(dict) == 0:
# return len(s) == 0
# n = len(s)
# f = [False] * (n + 1)
# f[0] = True
# maxLength = max([len(w) for w in dict])
# for i in xrange(1, n + 1):
# for j in range(1, min(i, maxLength) + 1):
# if not f[i - j]:
# continue
# if s[i - j:i] in dict:
# f[i] = True
# break
# return f[n]
|
Add solution to lintcode question 107
|
Add solution to lintcode question 107
|
Python
|
mit
|
Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode
|
Add solution to lintcode question 107
|
class Solution:
# @param s: A string s
# @param dict: A dictionary of words dict
def wordBreak(self, s, dict):
# write your code here
if (not s):
return True
dp = []
for i in range(1, len(s) + 1):
tmp = s[:i]
if (tmp in dict):
dp.append(True)
else:
tmpRes = False
for j in range(len(dp)):
if (dp[j] and tmp[j + 1:] in dict):
tmpRes = True
break
dp.append(tmpRes)
return dp[-1]
# JiuZhang
# if len(dict) == 0:
# return len(s) == 0
# n = len(s)
# f = [False] * (n + 1)
# f[0] = True
# maxLength = max([len(w) for w in dict])
# for i in xrange(1, n + 1):
# for j in range(1, min(i, maxLength) + 1):
# if not f[i - j]:
# continue
# if s[i - j:i] in dict:
# f[i] = True
# break
# return f[n]
|
<commit_before><commit_msg>Add solution to lintcode question 107<commit_after>
|
class Solution:
# @param s: A string s
# @param dict: A dictionary of words dict
def wordBreak(self, s, dict):
# write your code here
if (not s):
return True
dp = []
for i in range(1, len(s) + 1):
tmp = s[:i]
if (tmp in dict):
dp.append(True)
else:
tmpRes = False
for j in range(len(dp)):
if (dp[j] and tmp[j + 1:] in dict):
tmpRes = True
break
dp.append(tmpRes)
return dp[-1]
# JiuZhang
# if len(dict) == 0:
# return len(s) == 0
# n = len(s)
# f = [False] * (n + 1)
# f[0] = True
# maxLength = max([len(w) for w in dict])
# for i in xrange(1, n + 1):
# for j in range(1, min(i, maxLength) + 1):
# if not f[i - j]:
# continue
# if s[i - j:i] in dict:
# f[i] = True
# break
# return f[n]
|
Add solution to lintcode question 107class Solution:
# @param s: A string s
# @param dict: A dictionary of words dict
def wordBreak(self, s, dict):
# write your code here
if (not s):
return True
dp = []
for i in range(1, len(s) + 1):
tmp = s[:i]
if (tmp in dict):
dp.append(True)
else:
tmpRes = False
for j in range(len(dp)):
if (dp[j] and tmp[j + 1:] in dict):
tmpRes = True
break
dp.append(tmpRes)
return dp[-1]
# JiuZhang
# if len(dict) == 0:
# return len(s) == 0
# n = len(s)
# f = [False] * (n + 1)
# f[0] = True
# maxLength = max([len(w) for w in dict])
# for i in xrange(1, n + 1):
# for j in range(1, min(i, maxLength) + 1):
# if not f[i - j]:
# continue
# if s[i - j:i] in dict:
# f[i] = True
# break
# return f[n]
|
<commit_before><commit_msg>Add solution to lintcode question 107<commit_after>class Solution:
# @param s: A string s
# @param dict: A dictionary of words dict
def wordBreak(self, s, dict):
# write your code here
if (not s):
return True
dp = []
for i in range(1, len(s) + 1):
tmp = s[:i]
if (tmp in dict):
dp.append(True)
else:
tmpRes = False
for j in range(len(dp)):
if (dp[j] and tmp[j + 1:] in dict):
tmpRes = True
break
dp.append(tmpRes)
return dp[-1]
# JiuZhang
# if len(dict) == 0:
# return len(s) == 0
# n = len(s)
# f = [False] * (n + 1)
# f[0] = True
# maxLength = max([len(w) for w in dict])
# for i in xrange(1, n + 1):
# for j in range(1, min(i, maxLength) + 1):
# if not f[i - j]:
# continue
# if s[i - j:i] in dict:
# f[i] = True
# break
# return f[n]
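A quick illustrative check of the dynamic-programming solution above (the string and dictionary are example inputs only):
solver = Solution()
assert solver.wordBreak('lintcode', {'lint', 'code'})       # 'lint' + 'code'
assert solver.wordBreak('', {'anything'})                   # empty string is trivially breakable
assert not solver.wordBreak('lintcodes', {'lint', 'code'})  # trailing 's' cannot be matched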
|
|
719a175c2acc7d5bd4fedae1bd8d9bcd579ea0f4
|
angr/procedures/java_jni/GetArrayElements.py
|
angr/procedures/java_jni/GetArrayElements.py
|
from . import JNISimProcedure
from ...engines.soot.values.arrayref import SimSootValue_ArrayRef
class GetArrayElements(JNISimProcedure):
return_ty = 'reference'
def run(self, ptr_env, array, ptr_isCopy):
array_ref = self.lookup_local_reference(array)
elements = self.load_java_array(array_ref)
memory_addr = self.dump_in_native_memory(elements, array_ref.type)
self.dump_in_native_memory(data=0, data_type='boolean', addr=ptr_isCopy)
return memory_addr
def load_java_array(self, array_ref, start_idx=None, end_idx=None):
if start_idx is None:
start_idx = 0
if end_idx is None:
end_idx = self.state.solver.max(array_ref.size)
javavm_memory = self.state.get_javavm_view_of_plugin("memory")
values = []
for idx in range(start_idx, end_idx):
idx_array_ref = SimSootValue_ArrayRef.get_reference_with_idx(base=array_ref, idx=idx)
value = javavm_memory.load(idx_array_ref)
values.append(value)
return values
|
Add base for JNI function Get*ArrayElements
|
Add base for JNI function Get*ArrayElements
|
Python
|
bsd-2-clause
|
iamahuman/angr,angr/angr,schieb/angr,schieb/angr,iamahuman/angr,angr/angr,angr/angr,iamahuman/angr,schieb/angr
|
Add base for JNI function Get*ArrayElements
|
from . import JNISimProcedure
from ...engines.soot.values.arrayref import SimSootValue_ArrayRef
class GetArrayElements(JNISimProcedure):
return_ty = 'reference'
def run(self, ptr_env, array, ptr_isCopy):
array_ref = self.lookup_local_reference(array)
elements = self.load_java_array(array_ref)
memory_addr = self.dump_in_native_memory(elements, array_ref.type)
self.dump_in_native_memory(data=0, data_type='boolean', addr=ptr_isCopy)
return memory_addr
def load_java_array(self, array_ref, start_idx=None, end_idx=None):
if start_idx is None:
start_idx = 0
if end_idx is None:
end_idx = self.state.solver.max(array_ref.size)
javavm_memory = self.state.get_javavm_view_of_plugin("memory")
values = []
for idx in range(start_idx, end_idx):
idx_array_ref = SimSootValue_ArrayRef.get_reference_with_idx(base=array_ref, idx=idx)
value = javavm_memory.load(idx_array_ref)
values.append(value)
return values
|
<commit_before><commit_msg>Add base for JNI function Get*ArrayElements<commit_after>
|
from . import JNISimProcedure
from ...engines.soot.values.arrayref import SimSootValue_ArrayRef
class GetArrayElements(JNISimProcedure):
return_ty = 'reference'
def run(self, ptr_env, array, ptr_isCopy):
array_ref = self.lookup_local_reference(array)
elements = self.load_java_array(array_ref)
memory_addr = self.dump_in_native_memory(elements, array_ref.type)
self.dump_in_native_memory(data=0, data_type='boolean', addr=ptr_isCopy)
return memory_addr
def load_java_array(self, array_ref, start_idx=None, end_idx=None):
if start_idx is None:
start_idx = 0
if end_idx is None:
end_idx = self.state.solver.max(array_ref.size)
javavm_memory = self.state.get_javavm_view_of_plugin("memory")
values = []
for idx in range(start_idx, end_idx):
idx_array_ref = SimSootValue_ArrayRef.get_reference_with_idx(base=array_ref, idx=idx)
value = javavm_memory.load(idx_array_ref)
values.append(value)
return values
|
Add base for JNI function Get*ArrayElementsfrom . import JNISimProcedure
from ...engines.soot.values.arrayref import SimSootValue_ArrayRef
class GetArrayElements(JNISimProcedure):
return_ty = 'reference'
def run(self, ptr_env, array, ptr_isCopy):
array_ref = self.lookup_local_reference(array)
elements = self.load_java_array(array_ref)
memory_addr = self.dump_in_native_memory(elements, array_ref.type)
self.dump_in_native_memory(data=0, data_type='boolean', addr=ptr_isCopy)
return memory_addr
def load_java_array(self, array_ref, start_idx=None, end_idx=None):
if start_idx is None:
start_idx = 0
if end_idx is None:
end_idx = self.state.solver.max(array_ref.size)
javavm_memory = self.state.get_javavm_view_of_plugin("memory")
values = []
for idx in range(start_idx, end_idx):
idx_array_ref = SimSootValue_ArrayRef.get_reference_with_idx(base=array_ref, idx=idx)
value = javavm_memory.load(idx_array_ref)
values.append(value)
return values
|
<commit_before><commit_msg>Add base for JNI function Get*ArrayElements<commit_after>from . import JNISimProcedure
from ...engines.soot.values.arrayref import SimSootValue_ArrayRef
class GetArrayElements(JNISimProcedure):
return_ty = 'reference'
def run(self, ptr_env, array, ptr_isCopy):
array_ref = self.lookup_local_reference(array)
elements = self.load_java_array(array_ref)
memory_addr = self.dump_in_native_memory(elements, array_ref.type)
self.dump_in_native_memory(data=0, data_type='boolean', addr=ptr_isCopy)
return memory_addr
def load_java_array(self, array_ref, start_idx=None, end_idx=None):
if start_idx is None:
start_idx = 0
if end_idx is None:
end_idx = self.state.solver.max(array_ref.size)
javavm_memory = self.state.get_javavm_view_of_plugin("memory")
values = []
for idx in range(start_idx, end_idx):
idx_array_ref = SimSootValue_ArrayRef.get_reference_with_idx(base=array_ref, idx=idx)
value = javavm_memory.load(idx_array_ref)
values.append(value)
return values
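A rough stand-in sketch of the per-index load pattern used by load_java_array, with a plain dict in place of the JavaVM memory plugin (purely illustrative, not the angr API):
# Each array element is read one index at a time from a keyed "memory".
memory = {('arr', 0): 7, ('arr', 1): 13, ('arr', 2): 42}
def load_plain_array(base, size):
    return [memory[(base, idx)] for idx in range(size)]
print(load_plain_array('arr', 3))  # [7, 13, 42]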
|
|
f06b8964188e534f148c6fd19cddea4bad410e25
|
herders/migrations/0003_auto_20150705_0103.py
|
herders/migrations/0003_auto_20150705_0103.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('herders', '0002_team_leader'),
]
operations = [
migrations.AlterModelOptions(
name='team',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='teamgroup',
options={'ordering': ['name']},
),
migrations.AlterField(
model_name='team',
name='roster',
field=models.ManyToManyField(to='herders.MonsterInstance', blank=True),
),
]
|
Add migrations from previous model changes
|
Add migrations from previous model changes
|
Python
|
apache-2.0
|
porksmash/swarfarm,porksmash/swarfarm,PeteAndersen/swarfarm,PeteAndersen/swarfarm,PeteAndersen/swarfarm,PeteAndersen/swarfarm,porksmash/swarfarm,porksmash/swarfarm
|
Add migrations from previous model changes
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('herders', '0002_team_leader'),
]
operations = [
migrations.AlterModelOptions(
name='team',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='teamgroup',
options={'ordering': ['name']},
),
migrations.AlterField(
model_name='team',
name='roster',
field=models.ManyToManyField(to='herders.MonsterInstance', blank=True),
),
]
|
<commit_before><commit_msg>Add migrations from previous model changes<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('herders', '0002_team_leader'),
]
operations = [
migrations.AlterModelOptions(
name='team',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='teamgroup',
options={'ordering': ['name']},
),
migrations.AlterField(
model_name='team',
name='roster',
field=models.ManyToManyField(to='herders.MonsterInstance', blank=True),
),
]
|
Add migrations from previous model changes# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('herders', '0002_team_leader'),
]
operations = [
migrations.AlterModelOptions(
name='team',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='teamgroup',
options={'ordering': ['name']},
),
migrations.AlterField(
model_name='team',
name='roster',
field=models.ManyToManyField(to='herders.MonsterInstance', blank=True),
),
]
|
<commit_before><commit_msg>Add migrations from previous model changes<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('herders', '0002_team_leader'),
]
operations = [
migrations.AlterModelOptions(
name='team',
options={'ordering': ['name']},
),
migrations.AlterModelOptions(
name='teamgroup',
options={'ordering': ['name']},
),
migrations.AlterField(
model_name='team',
name='roster',
field=models.ManyToManyField(to='herders.MonsterInstance', blank=True),
),
]
|
|
586fc2f07dfb75a1f844eac96750c558e93a30f7
|
hs_core/migrations/0054_auto_20201028_1432.py
|
hs_core/migrations/0054_auto_20201028_1432.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-10-28 14:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hs_core', '0053_auto_20200826_1629'),
]
operations = [
migrations.AlterField(
model_name='tasknotification',
name='status',
field=models.CharField(choices=[('progress', 'Progress'), ('failed', 'Failed'), ('aborted', 'Aborted'), ('completed', 'Completed'), ('delivered', 'Delivered')], default='progress', max_length=20),
),
]
|
Add migration for task notifications
|
Add migration for task notifications
|
Python
|
bsd-3-clause
|
hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare
|
Add migration for task notifications
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-10-28 14:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hs_core', '0053_auto_20200826_1629'),
]
operations = [
migrations.AlterField(
model_name='tasknotification',
name='status',
field=models.CharField(choices=[('progress', 'Progress'), ('failed', 'Failed'), ('aborted', 'Aborted'), ('completed', 'Completed'), ('delivered', 'Delivered')], default='progress', max_length=20),
),
]
|
<commit_before><commit_msg>Add migration for task notifications<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-10-28 14:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hs_core', '0053_auto_20200826_1629'),
]
operations = [
migrations.AlterField(
model_name='tasknotification',
name='status',
field=models.CharField(choices=[('progress', 'Progress'), ('failed', 'Failed'), ('aborted', 'Aborted'), ('completed', 'Completed'), ('delivered', 'Delivered')], default='progress', max_length=20),
),
]
|
Add migration for task notifications# -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-10-28 14:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hs_core', '0053_auto_20200826_1629'),
]
operations = [
migrations.AlterField(
model_name='tasknotification',
name='status',
field=models.CharField(choices=[('progress', 'Progress'), ('failed', 'Failed'), ('aborted', 'Aborted'), ('completed', 'Completed'), ('delivered', 'Delivered')], default='progress', max_length=20),
),
]
|
<commit_before><commit_msg>Add migration for task notifications<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.29 on 2020-10-28 14:32
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('hs_core', '0053_auto_20200826_1629'),
]
operations = [
migrations.AlterField(
model_name='tasknotification',
name='status',
field=models.CharField(choices=[('progress', 'Progress'), ('failed', 'Failed'), ('aborted', 'Aborted'), ('completed', 'Completed'), ('delivered', 'Delivered')], default='progress', max_length=20),
),
]
|
|
ab09236b02ac3623d0f5f9cdc680a9bb5f641bdf
|
lib/windspharm/tests/test_coding_standards.py
|
lib/windspharm/tests/test_coding_standards.py
|
"""Tests coding standards compliance."""
# Copyright (c) 2016 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import os
from nose.tools import assert_equal
import pep8
import windspharm
from windspharm.tests import VectorWindTest
class TestCodingStandards(VectorWindTest):
def test_pep8(self):
pep8style = pep8.StyleGuide(quiet=False)
base_paths = [os.path.dirname(windspharm.__file__)]
result = pep8style.check_files(base_paths)
assert_equal(result.total_errors, 0, "Found PEP8 style issues.")
|
Add a test for coding standards.
|
Add a test for coding standards.
|
Python
|
mit
|
ajdawson/windspharm
|
Add a test for coding standards.
|
"""Tests coding standards compliance."""
# Copyright (c) 2016 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import os
from nose.tools import assert_equal
import pep8
import windspharm
from windspharm.tests import VectorWindTest
class TestCodingStandards(VectorWindTest):
def test_pep8(self):
pep8style = pep8.StyleGuide(quiet=False)
base_paths = [os.path.dirname(windspharm.__file__)]
result = pep8style.check_files(base_paths)
assert_equal(result.total_errors, 0, "Found PEP8 style issues.")
|
<commit_before><commit_msg>Add a test for coding standards.<commit_after>
|
"""Tests coding standards compliance."""
# Copyright (c) 2016 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import os
from nose.tools import assert_equal
import pep8
import windspharm
from windspharm.tests import VectorWindTest
class TestCodingStandards(VectorWindTest):
def test_pep8(self):
pep8style = pep8.StyleGuide(quiet=False)
base_paths = [os.path.dirname(windspharm.__file__)]
result = pep8style.check_files(base_paths)
assert_equal(result.total_errors, 0, "Found PEP8 style issues.")
|
Add a test for coding standards."""Tests coding standards compliance."""
# Copyright (c) 2016 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import os
from nose.tools import assert_equal
import pep8
import windspharm
from windspharm.tests import VectorWindTest
class TestCodingStandards(VectorWindTest):
def test_pep8(self):
pep8style = pep8.StyleGuide(quiet=False)
base_paths = [os.path.dirname(windspharm.__file__)]
result = pep8style.check_files(base_paths)
assert_equal(result.total_errors, 0, "Found PEP8 style issues.")
|
<commit_before><commit_msg>Add a test for coding standards.<commit_after>"""Tests coding standards compliance."""
# Copyright (c) 2016 Andrew Dawson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from __future__ import absolute_import
import os
from nose.tools import assert_equal
import pep8
import windspharm
from windspharm.tests import VectorWindTest
class TestCodingStandards(VectorWindTest):
def test_pep8(self):
pep8style = pep8.StyleGuide(quiet=False)
base_paths = [os.path.dirname(windspharm.__file__)]
result = pep8style.check_files(base_paths)
assert_equal(result.total_errors, 0, "Found PEP8 style issues.")
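A brief usage sketch of the same legacy pep8 API outside the test harness (the path is hypothetical; assumes the pep8 package is installed):
import pep8
style = pep8.StyleGuide(quiet=True)
report = style.check_files(['some_module.py'])
print(report.total_errors)  # 0 when the file is clean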
|
|
d70eb3aa6aeebd35e3753151281d558618eb76ee
|
i18n/tests/test_compiled_messages.py
|
i18n/tests/test_compiled_messages.py
|
"""
Test that the compiled .mo files match the translations in the
uncompiled .po files.
This is required because we are checking in the .mo files into
the repo, but compiling them is a manual process. We want to make
sure that we find out if someone forgets the compilation step.
"""
import ddt
import polib
from unittest import TestCase
from i18n.config import CONFIGURATION, LOCALE_DIR
@ddt.ddt
class TestCompiledMessages(TestCase):
"""
Test that mo files match their source po files
"""
PO_FILES = ['django.po', 'djangojs.po']
@ddt.data(*CONFIGURATION.locales)
def test_translated_messages(self, locale):
message_dir = LOCALE_DIR / locale / 'LC_MESSAGES'
for pofile_name in self.PO_FILES:
pofile_path = message_dir / pofile_name
pofile = polib.pofile(pofile_path)
mofile = polib.mofile(pofile_path.stripext() + '.mo')
po_entries = {entry.msgid: entry for entry in pofile.translated_entries()}
mo_entries = {entry.msgid: entry for entry in mofile.translated_entries()}
# Check that there are no entries in po that aren't in mo, and vice-versa
self.assertEquals(po_entries.viewkeys(), mo_entries.viewkeys())
for entry_id, po_entry in po_entries.iteritems():
mo_entry = mo_entries[entry_id]
for attr in ('msgstr', 'msgid_plural', 'msgstr_plural', 'msgctxt', 'obsolete', 'encoding'):
po_attr = getattr(po_entry, attr)
mo_attr = getattr(mo_entry, attr)
# The msgstr_plural in the mo_file is keyed on ints, but in the po_file it's
# keyed on strings. This normalizes them.
if attr == 'msgstr_plural':
po_attr = {int(key): val for (key, val) in po_attr.items()}
self.assertEquals(
po_attr,
mo_attr,
"When comparing {} for entry {!r}, {!r} from the .po file doesn't match {!r} from the .mo file".format(
attr,
entry_id,
po_attr,
mo_attr,
)
)
|
Add a test that validates that the .po and .mo files match for all active languages
|
Add a test that validates that the .po and .mo files match for all active languages
|
Python
|
agpl-3.0
|
jzoldak/edx-platform,jazkarta/edx-platform,Shrhawk/edx-platform,don-github/edx-platform,vismartltd/edx-platform,RPI-OPENEDX/edx-platform,dkarakats/edx-platform,jswope00/griffinx,ovnicraft/edx-platform,zerobatu/edx-platform,chrisndodge/edx-platform,rhndg/openedx,pepeportela/edx-platform,wwj718/ANALYSE,ahmadiga/min_edx,dcosentino/edx-platform,martynovp/edx-platform,rue89-tech/edx-platform,vasyarv/edx-platform,mcgachey/edx-platform,IndonesiaX/edx-platform,chand3040/cloud_that,LICEF/edx-platform,etzhou/edx-platform,solashirai/edx-platform,SravanthiSinha/edx-platform,jswope00/GAI,don-github/edx-platform,kmoocdev2/edx-platform,pabloborrego93/edx-platform,ahmadio/edx-platform,rue89-tech/edx-platform,10clouds/edx-platform,romain-li/edx-platform,doganov/edx-platform,Unow/edx-platform,ahmadio/edx-platform,ak2703/edx-platform,nanolearningllc/edx-platform-cypress,bitifirefly/edx-platform,fly19890211/edx-platform,cognitiveclass/edx-platform,cyanna/edx-platform,Softmotions/edx-platform,jswope00/griffinx,motion2015/a3,tiagochiavericosta/edx-platform,mbareta/edx-platform-ft,simbs/edx-platform,jswope00/griffinx,synergeticsedx/deployment-wipro,abdoosh00/edraak,shubhdev/openedx,philanthropy-u/edx-platform,hkawasaki/kawasaki-aio8-1,antonve/s4-project-mooc,chauhanhardik/populo,ampax/edx-platform-backup,edry/edx-platform,lduarte1991/edx-platform,hkawasaki/kawasaki-aio8-2,nagyistoce/edx-platform,SivilTaram/edx-platform,xuxiao19910803/edx,WatanabeYasumasa/edx-platform,ak2703/edx-platform,kmoocdev2/edx-platform,Edraak/edraak-platform,tanmaykm/edx-platform,inares/edx-platform,Lektorium-LLC/edx-platform,utecuy/edx-platform,RPI-OPENEDX/edx-platform,beacloudgenius/edx-platform,jswope00/GAI,jbassen/edx-platform,jazkarta/edx-platform-for-isc,polimediaupv/edx-platform,auferack08/edx-platform,xinjiguaike/edx-platform,ferabra/edx-platform,vasyarv/edx-platform,Edraak/edx-platform,Endika/edx-platform,mushtaqak/edx-platform,mushtaqak/edx-platform,cselis86/edx-platform,synergeticsedx/deployment-wipro,rue89-tech/edx-platform,eestay/edx-platform,wwj718/ANALYSE,hkawasaki/kawasaki-aio8-1,jazkarta/edx-platform,cecep-edu/edx-platform,philanthropy-u/edx-platform,appliedx/edx-platform,MakeHer/edx-platform,knehez/edx-platform,Edraak/edx-platform,etzhou/edx-platform,sameetb-cuelogic/edx-platform-test,Edraak/edraak-platform,xinjiguaike/edx-platform,atsolakid/edx-platform,unicri/edx-platform,defance/edx-platform,yokose-ks/edx-platform,mtlchun/edx,defance/edx-platform,MakeHer/edx-platform,IndonesiaX/edx-platform,vikas1885/test1,martynovp/edx-platform,shubhdev/edx-platform,Kalyzee/edx-platform,RPI-OPENEDX/edx-platform,peterm-itr/edx-platform,BehavioralInsightsTeam/edx-platform,LICEF/edx-platform,defance/edx-platform,leansoft/edx-platform,doismellburning/edx-platform,WatanabeYasumasa/edx-platform,jswope00/griffinx,jelugbo/tundex,ampax/edx-platform-backup,nttks/edx-platform,nagyistoce/edx-platform,dsajkl/reqiop,edx/edx-platform,Livit/Livit.Learn.EdX,Edraak/edx-platform,procangroup/edx-platform,analyseuc3m/ANALYSE-v1,sudheerchintala/LearnEraPlatForm,kmoocdev/edx-platform,eemirtekin/edx-platform,B-MOOC/edx-platform,kursitet/edx-platform,jamiefolsom/edx-platform,pepeportela/edx-platform,nttks/jenkins-test,adoosii/edx-platform,JCBarahona/edX,jbzdak/edx-platform,shubhdev/edx-platform,rhndg/openedx,pomegranited/edx-platform,BehavioralInsightsTeam/edx-platform,martynovp/edx-platform,zofuthan/edx-platform,doganov/edx-platform,pabloborrego93/edx-platform,devs1991/test_edx_docmode,chauhanhardik/populo,pku9104038/edx-platform,shashank971/edx-platform,has
texo/edx-platform,ovnicraft/edx-platform,mjirayu/sit_academy,shubhdev/edx-platform,arifsetiawan/edx-platform,shabab12/edx-platform,pelikanchik/edx-platform,wwj718/edx-platform,jamiefolsom/edx-platform,playm2mboy/edx-platform,eestay/edx-platform,edx-solutions/edx-platform,ESOedX/edx-platform,ahmadiga/min_edx,shashank971/edx-platform,mjirayu/sit_academy,eduNEXT/edx-platform,jbassen/edx-platform,motion2015/a3,mtlchun/edx,fly19890211/edx-platform,msegado/edx-platform,xuxiao19910803/edx,JCBarahona/edX,doismellburning/edx-platform,beni55/edx-platform,chrisndodge/edx-platform,jruiperezv/ANALYSE,chauhanhardik/populo_2,louyihua/edx-platform,IndonesiaX/edx-platform,Edraak/edraak-platform,jruiperezv/ANALYSE,stvstnfrd/edx-platform,valtech-mooc/edx-platform,dcosentino/edx-platform,hamzehd/edx-platform,chand3040/cloud_that,mtlchun/edx,cecep-edu/edx-platform,Ayub-Khan/edx-platform,zofuthan/edx-platform,jzoldak/edx-platform,miptliot/edx-platform,xuxiao19910803/edx-platform,chauhanhardik/populo,atsolakid/edx-platform,nagyistoce/edx-platform,4eek/edx-platform,mushtaqak/edx-platform,alexthered/kienhoc-platform,ahmedaljazzar/edx-platform,jbzdak/edx-platform,J861449197/edx-platform,ak2703/edx-platform,ferabra/edx-platform,alu042/edx-platform,mitocw/edx-platform,hkawasaki/kawasaki-aio8-1,tiagochiavericosta/edx-platform,ubc/edx-platform,wwj718/ANALYSE,cyanna/edx-platform,philanthropy-u/edx-platform,nikolas/edx-platform,BehavioralInsightsTeam/edx-platform,utecuy/edx-platform,fintech-circle/edx-platform,jamesblunt/edx-platform,chudaol/edx-platform,unicri/edx-platform,UOMx/edx-platform,ovnicraft/edx-platform,caesar2164/edx-platform,nanolearningllc/edx-platform-cypress-2,appsembler/edx-platform,mcgachey/edx-platform,kursitet/edx-platform,shurihell/testasia,zhenzhai/edx-platform,mitocw/edx-platform,fintech-circle/edx-platform,doganov/edx-platform,deepsrijit1105/edx-platform,pelikanchik/edx-platform,yokose-ks/edx-platform,hkawasaki/kawasaki-aio8-0,chauhanhardik/populo_2,chauhanhardik/populo,devs1991/test_edx_docmode,kursitet/edx-platform,a-parhom/edx-platform,hastexo/edx-platform,openfun/edx-platform,nanolearning/edx-platform,ubc/edx-platform,tanmaykm/edx-platform,louyihua/edx-platform,don-github/edx-platform,cecep-edu/edx-platform,amir-qayyum-khan/edx-platform,DNFcode/edx-platform,IndonesiaX/edx-platform,jazkarta/edx-platform,LICEF/edx-platform,eduNEXT/edunext-platform,openfun/edx-platform,arbrandes/edx-platform,kursitet/edx-platform,caesar2164/edx-platform,CourseTalk/edx-platform,ahmadio/edx-platform,cyanna/edx-platform,vikas1885/test1,pku9104038/edx-platform,atsolakid/edx-platform,Lektorium-LLC/edx-platform,jazztpt/edx-platform,motion2015/a3,jazztpt/edx-platform,xingyepei/edx-platform,openfun/edx-platform,shubhdev/edxOnBaadal,prarthitm/edxplatform,shubhdev/openedx,pku9104038/edx-platform,cyanna/edx-platform,dkarakats/edx-platform,yokose-ks/edx-platform,Shrhawk/edx-platform,alu042/edx-platform,arbrandes/edx-platform,IONISx/edx-platform,olexiim/edx-platform,jruiperezv/ANALYSE,DNFcode/edx-platform,gsehub/edx-platform,vismartltd/edx-platform,motion2015/edx-platform,mushtaqak/edx-platform,miptliot/edx-platform,MakeHer/edx-platform,iivic/BoiseStateX,benpatterson/edx-platform,mbareta/edx-platform-ft,bigdatauniversity/edx-platform,jamiefolsom/edx-platform,antoviaque/edx-platform,yokose-ks/edx-platform,abdoosh00/edraak,atsolakid/edx-platform,itsjeyd/edx-platform,shashank971/edx-platform,chudaol/edx-platform,cpennington/edx-platform,dcosentino/edx-platform,Edraak/circleci-edx-platform,eemirtekin/edx-platform,msegado/edx-platf
orm,edry/edx-platform,hkawasaki/kawasaki-aio8-2,jswope00/GAI,Lektorium-LLC/edx-platform,kamalx/edx-platform,nanolearningllc/edx-platform-cypress-2,chauhanhardik/populo_2,kamalx/edx-platform,Endika/edx-platform,iivic/BoiseStateX,gymnasium/edx-platform,a-parhom/edx-platform,rismalrv/edx-platform,chudaol/edx-platform,tiagochiavericosta/edx-platform,abdoosh00/edraak,amir-qayyum-khan/edx-platform,mjirayu/sit_academy,benpatterson/edx-platform,nttks/edx-platform,eduNEXT/edx-platform,dsajkl/123,longmen21/edx-platform,wwj718/edx-platform,teltek/edx-platform,rue89-tech/edx-platform,shurihell/testasia,zubair-arbi/edx-platform,SivilTaram/edx-platform,jelugbo/tundex,wwj718/ANALYSE,dsajkl/123,shurihell/testasia,lduarte1991/edx-platform,kmoocdev/edx-platform,hamzehd/edx-platform,shabab12/edx-platform,valtech-mooc/edx-platform,DefyVentures/edx-platform,inares/edx-platform,SravanthiSinha/edx-platform,longmen21/edx-platform,nttks/jenkins-test,appliedx/edx-platform,proversity-org/edx-platform,zerobatu/edx-platform,utecuy/edx-platform,CourseTalk/edx-platform,dsajkl/123,louyihua/edx-platform,chudaol/edx-platform,nikolas/edx-platform,playm2mboy/edx-platform,marcore/edx-platform,bdero/edx-platform,angelapper/edx-platform,y12uc231/edx-platform,abdoosh00/edraak,jswope00/GAI,philanthropy-u/edx-platform,antoviaque/edx-platform,ZLLab-Mooc/edx-platform,MSOpenTech/edx-platform,ferabra/edx-platform,carsongee/edx-platform,y12uc231/edx-platform,mbareta/edx-platform-ft,rismalrv/edx-platform,itsjeyd/edx-platform,UXE/local-edx,jamesblunt/edx-platform,bigdatauniversity/edx-platform,longmen21/edx-platform,jolyonb/edx-platform,dkarakats/edx-platform,simbs/edx-platform,nanolearning/edx-platform,ampax/edx-platform,ovnicraft/edx-platform,EDUlib/edx-platform,Ayub-Khan/edx-platform,kxliugang/edx-platform,ahmadiga/min_edx,jjmiranda/edx-platform,beacloudgenius/edx-platform,ak2703/edx-platform,franosincic/edx-platform,analyseuc3m/ANALYSE-v1,beacloudgenius/edx-platform,sameetb-cuelogic/edx-platform-test,xinjiguaike/edx-platform,solashirai/edx-platform,devs1991/test_edx_docmode,martynovp/edx-platform,leansoft/edx-platform,LICEF/edx-platform,jonathan-beard/edx-platform,fintech-circle/edx-platform,gymnasium/edx-platform,Shrhawk/edx-platform,zofuthan/edx-platform,peterm-itr/edx-platform,shabab12/edx-platform,deepsrijit1105/edx-platform,nanolearningllc/edx-platform-cypress,appsembler/edx-platform,jamesblunt/edx-platform,SivilTaram/edx-platform,torchingloom/edx-platform,bitifirefly/edx-platform,Livit/Livit.Learn.EdX,MakeHer/edx-platform,sameetb-cuelogic/edx-platform-test,mahendra-r/edx-platform,SravanthiSinha/edx-platform,etzhou/edx-platform,xuxiao19910803/edx,AkA84/edx-platform,hastexo/edx-platform,alu042/edx-platform,kmoocdev2/edx-platform,jonathan-beard/edx-platform,alu042/edx-platform,jjmiranda/edx-platform,unicri/edx-platform,zadgroup/edx-platform,itsjeyd/edx-platform,polimediaupv/edx-platform,Livit/Livit.Learn.EdX,cecep-edu/edx-platform,pelikanchik/edx-platform,stvstnfrd/edx-platform,benpatterson/edx-platform,UOMx/edx-platform,analyseuc3m/ANALYSE-v1,jamesblunt/edx-platform,amir-qayyum-khan/edx-platform,msegado/edx-platform,eduNEXT/edunext-platform,bitifirefly/edx-platform,Kalyzee/edx-platform,torchingloom/edx-platform,torchingloom/edx-platform,nanolearningllc/edx-platform-cypress,Softmotions/edx-platform,jjmiranda/edx-platform,carsongee/edx-platform,synergeticsedx/deployment-wipro,naresh21/synergetics-edx-platform,auferack08/edx-platform,a-parhom/edx-platform,TeachAtTUM/edx-platform,procangroup/edx-platform,jolyonb/edx-platform,beacloudg
enius/edx-platform,morenopc/edx-platform,cognitiveclass/edx-platform,etzhou/edx-platform,jonathan-beard/edx-platform,vismartltd/edx-platform,valtech-mooc/edx-platform,Edraak/edraak-platform,kmoocdev2/edx-platform,olexiim/edx-platform,peterm-itr/edx-platform,cognitiveclass/edx-platform,4eek/edx-platform,EDUlib/edx-platform,prarthitm/edxplatform,mjirayu/sit_academy,beni55/edx-platform,DefyVentures/edx-platform,shubhdev/edxOnBaadal,tanmaykm/edx-platform,wwj718/ANALYSE,tiagochiavericosta/edx-platform,sudheerchintala/LearnEraPlatForm,Lektorium-LLC/edx-platform,appliedx/edx-platform,chauhanhardik/populo,carsongee/edx-platform,vismartltd/edx-platform,romain-li/edx-platform,jazkarta/edx-platform-for-isc,bdero/edx-platform,ubc/edx-platform,10clouds/edx-platform,xingyepei/edx-platform,waheedahmed/edx-platform,nanolearningllc/edx-platform-cypress-2,mahendra-r/edx-platform,Livit/Livit.Learn.EdX,kmoocdev/edx-platform,10clouds/edx-platform,arbrandes/edx-platform,Shrhawk/edx-platform,kxliugang/edx-platform,dsajkl/123,devs1991/test_edx_docmode,auferack08/edx-platform,franosincic/edx-platform,shashank971/edx-platform,longmen21/edx-platform,hastexo/edx-platform,gsehub/edx-platform,LearnEra/LearnEraPlaftform,martynovp/edx-platform,CredoReference/edx-platform,OmarIthawi/edx-platform,shashank971/edx-platform,teltek/edx-platform,ampax/edx-platform-backup,ubc/edx-platform,olexiim/edx-platform,polimediaupv/edx-platform,sameetb-cuelogic/edx-platform-test,jonathan-beard/edx-platform,appliedx/edx-platform,antonve/s4-project-mooc,Edraak/circleci-edx-platform,halvertoluke/edx-platform,iivic/BoiseStateX,DefyVentures/edx-platform,doganov/edx-platform,ferabra/edx-platform,raccoongang/edx-platform,B-MOOC/edx-platform,J861449197/edx-platform,jelugbo/tundex,chand3040/cloud_that,rismalrv/edx-platform,halvertoluke/edx-platform,SivilTaram/edx-platform,ZLLab-Mooc/edx-platform,CredoReference/edx-platform,shubhdev/edx-platform,jswope00/griffinx,doismellburning/edx-platform,WatanabeYasumasa/edx-platform,hmcmooc/muddx-platform,caesar2164/edx-platform,eemirtekin/edx-platform,motion2015/edx-platform,UXE/local-edx,unicri/edx-platform,mjirayu/sit_academy,naresh21/synergetics-edx-platform,vasyarv/edx-platform,CredoReference/edx-platform,kursitet/edx-platform,sudheerchintala/LearnEraPlatForm,shubhdev/openedx,ovnicraft/edx-platform,rhndg/openedx,Edraak/circleci-edx-platform,ahmadiga/min_edx,unicri/edx-platform,jamiefolsom/edx-platform,zofuthan/edx-platform,edry/edx-platform,DefyVentures/edx-platform,alexthered/kienhoc-platform,andyzsf/edx,andyzsf/edx,vikas1885/test1,ZLLab-Mooc/edx-platform,Semi-global/edx-platform,zofuthan/edx-platform,waheedahmed/edx-platform,eemirtekin/edx-platform,nikolas/edx-platform,dsajkl/reqiop,nttks/edx-platform,torchingloom/edx-platform,DNFcode/edx-platform,jazkarta/edx-platform-for-isc,hkawasaki/kawasaki-aio8-1,Edraak/circleci-edx-platform,JCBarahona/edX,zadgroup/edx-platform,LICEF/edx-platform,pomegranited/edx-platform,angelapper/edx-platform,Edraak/circleci-edx-platform,cognitiveclass/edx-platform,RPI-OPENEDX/edx-platform,zubair-arbi/edx-platform,4eek/edx-platform,jamiefolsom/edx-platform,iivic/BoiseStateX,valtech-mooc/edx-platform,zhenzhai/edx-platform,jolyonb/edx-platform,DNFcode/edx-platform,OmarIthawi/edx-platform,openfun/edx-platform,edx/edx-platform,vikas1885/test1,arifsetiawan/edx-platform,jazkarta/edx-platform,kmoocdev/edx-platform,eestay/edx-platform,UOMx/edx-platform,hkawasaki/kawasaki-aio8-2,gsehub/edx-platform,AkA84/edx-platform,EDUlib/edx-platform,ZLLab-Mooc/edx-platform,benpatterson/edx-platform,y
12uc231/edx-platform,motion2015/edx-platform,adoosii/edx-platform,hkawasaki/kawasaki-aio8-0,shubhdev/edxOnBaadal,devs1991/test_edx_docmode,motion2015/edx-platform,pabloborrego93/edx-platform,Unow/edx-platform,leansoft/edx-platform,ahmedaljazzar/edx-platform,auferack08/edx-platform,morenopc/edx-platform,hkawasaki/kawasaki-aio8-2,Unow/edx-platform,dcosentino/edx-platform,edx-solutions/edx-platform,doismellburning/edx-platform,Semi-global/edx-platform,olexiim/edx-platform,xuxiao19910803/edx,marcore/edx-platform,playm2mboy/edx-platform,dcosentino/edx-platform,beni55/edx-platform,mcgachey/edx-platform,arbrandes/edx-platform,kmoocdev2/edx-platform,JioEducation/edx-platform,rhndg/openedx,pepeportela/edx-platform,jruiperezv/ANALYSE,nanolearning/edx-platform,xinjiguaike/edx-platform,jzoldak/edx-platform,leansoft/edx-platform,eduNEXT/edunext-platform,arifsetiawan/edx-platform,waheedahmed/edx-platform,bigdatauniversity/edx-platform,nanolearning/edx-platform,don-github/edx-platform,OmarIthawi/edx-platform,vasyarv/edx-platform,marcore/edx-platform,wwj718/edx-platform,hamzehd/edx-platform,IONISx/edx-platform,chrisndodge/edx-platform,motion2015/a3,CredoReference/edx-platform,zubair-arbi/edx-platform,knehez/edx-platform,J861449197/edx-platform,beni55/edx-platform,knehez/edx-platform,cpennington/edx-platform,Ayub-Khan/edx-platform,chrisndodge/edx-platform,procangroup/edx-platform,nanolearningllc/edx-platform-cypress,mcgachey/edx-platform,vasyarv/edx-platform,mahendra-r/edx-platform,jazkarta/edx-platform-for-isc,ampax/edx-platform,jbassen/edx-platform,deepsrijit1105/edx-platform,gymnasium/edx-platform,mitocw/edx-platform,mahendra-r/edx-platform,vikas1885/test1,etzhou/edx-platform,sameetb-cuelogic/edx-platform-test,mbareta/edx-platform-ft,Stanford-Online/edx-platform,knehez/edx-platform,morenopc/edx-platform,ESOedX/edx-platform,waheedahmed/edx-platform,xinjiguaike/edx-platform,bigdatauniversity/edx-platform,rismalrv/edx-platform,CourseTalk/edx-platform,halvertoluke/edx-platform,louyihua/edx-platform,proversity-org/edx-platform,Stanford-Online/edx-platform,ahmadiga/min_edx,eestay/edx-platform,cpennington/edx-platform,jazztpt/edx-platform,mtlchun/edx,adoosii/edx-platform,JioEducation/edx-platform,UXE/local-edx,xuxiao19910803/edx-platform,halvertoluke/edx-platform,eduNEXT/edx-platform,kxliugang/edx-platform,ahmadio/edx-platform,dsajkl/reqiop,xingyepei/edx-platform,edx-solutions/edx-platform,simbs/edx-platform,y12uc231/edx-platform,bdero/edx-platform,wwj718/edx-platform,LearnEra/LearnEraPlaftform,Edraak/edx-platform,arifsetiawan/edx-platform,rue89-tech/edx-platform,B-MOOC/edx-platform,andyzsf/edx,xuxiao19910803/edx-platform,jbzdak/edx-platform,vismartltd/edx-platform,Endika/edx-platform,nttks/edx-platform,zhenzhai/edx-platform,cselis86/edx-platform,raccoongang/edx-platform,shubhdev/openedx,jruiperezv/ANALYSE,naresh21/synergetics-edx-platform,edry/edx-platform,zerobatu/edx-platform,longmen21/edx-platform,msegado/edx-platform,alexthered/kienhoc-platform,kxliugang/edx-platform,Semi-global/edx-platform,caesar2164/edx-platform,MSOpenTech/edx-platform,rhndg/openedx,fintech-circle/edx-platform,polimediaupv/edx-platform,4eek/edx-platform,IONISx/edx-platform,AkA84/edx-platform,kxliugang/edx-platform,jbzdak/edx-platform,antoviaque/edx-platform,DNFcode/edx-platform,doganov/edx-platform,fly19890211/edx-platform,devs1991/test_edx_docmode,arifsetiawan/edx-platform,OmarIthawi/edx-platform,nanolearningllc/edx-platform-cypress,romain-li/edx-platform,zadgroup/edx-platform,mitocw/edx-platform,kamalx/edx-platform,jamesblunt/edx-plat
form,MakeHer/edx-platform,deepsrijit1105/edx-platform,wwj718/edx-platform,fly19890211/edx-platform,bitifirefly/edx-platform,JCBarahona/edX,cognitiveclass/edx-platform,halvertoluke/edx-platform,solashirai/edx-platform,eestay/edx-platform,edx/edx-platform,alexthered/kienhoc-platform,knehez/edx-platform,chauhanhardik/populo_2,zhenzhai/edx-platform,Ayub-Khan/edx-platform,bigdatauniversity/edx-platform,Unow/edx-platform,bdero/edx-platform,ESOedX/edx-platform,jazkarta/edx-platform-for-isc,inares/edx-platform,chand3040/cloud_that,cecep-edu/edx-platform,pelikanchik/edx-platform,dkarakats/edx-platform,stvstnfrd/edx-platform,zubair-arbi/edx-platform,xuxiao19910803/edx-platform,hmcmooc/muddx-platform,shubhdev/edxOnBaadal,fly19890211/edx-platform,shubhdev/edxOnBaadal,Kalyzee/edx-platform,prarthitm/edxplatform,WatanabeYasumasa/edx-platform,IONISx/edx-platform,jelugbo/tundex,xingyepei/edx-platform,playm2mboy/edx-platform,don-github/edx-platform,benpatterson/edx-platform,mahendra-r/edx-platform,romain-li/edx-platform,Softmotions/edx-platform,Stanford-Online/edx-platform,angelapper/edx-platform,jbassen/edx-platform,amir-qayyum-khan/edx-platform,MSOpenTech/edx-platform,JioEducation/edx-platform,zadgroup/edx-platform,SravanthiSinha/edx-platform,jelugbo/tundex,bitifirefly/edx-platform,LearnEra/LearnEraPlaftform,cselis86/edx-platform,ESOedX/edx-platform,analyseuc3m/ANALYSE-v1,CourseTalk/edx-platform,openfun/edx-platform,tiagochiavericosta/edx-platform,10clouds/edx-platform,J861449197/edx-platform,devs1991/test_edx_docmode,TeachAtTUM/edx-platform,shabab12/edx-platform,nanolearningllc/edx-platform-cypress-2,gymnasium/edx-platform,inares/edx-platform,cyanna/edx-platform,Shrhawk/edx-platform,raccoongang/edx-platform,mtlchun/edx,BehavioralInsightsTeam/edx-platform,hamzehd/edx-platform,jolyonb/edx-platform,xingyepei/edx-platform,miptliot/edx-platform,cselis86/edx-platform,ampax/edx-platform,B-MOOC/edx-platform,ahmedaljazzar/edx-platform,UOMx/edx-platform,carsongee/edx-platform,EDUlib/edx-platform,romain-li/edx-platform,cselis86/edx-platform,ferabra/edx-platform,waheedahmed/edx-platform,chand3040/cloud_that,miptliot/edx-platform,TeachAtTUM/edx-platform,hmcmooc/muddx-platform,antonve/s4-project-mooc,msegado/edx-platform,solashirai/edx-platform,lduarte1991/edx-platform,nikolas/edx-platform,appliedx/edx-platform,nttks/jenkins-test,adoosii/edx-platform,LearnEra/LearnEraPlaftform,pomegranited/edx-platform,franosincic/edx-platform,edx/edx-platform,torchingloom/edx-platform,zubair-arbi/edx-platform,motion2015/edx-platform,Kalyzee/edx-platform,hkawasaki/kawasaki-aio8-0,jazkarta/edx-platform,mushtaqak/edx-platform,ZLLab-Mooc/edx-platform,ampax/edx-platform-backup,angelapper/edx-platform,andyzsf/edx,simbs/edx-platform,morenopc/edx-platform,playm2mboy/edx-platform,pomegranited/edx-platform,procangroup/edx-platform,4eek/edx-platform,hkawasaki/kawasaki-aio8-0,Semi-global/edx-platform,adoosii/edx-platform,SivilTaram/edx-platform,nagyistoce/edx-platform,teltek/edx-platform,teltek/edx-platform,chauhanhardik/populo_2,Softmotions/edx-platform,morenopc/edx-platform,shurihell/testasia,ampax/edx-platform-backup,pabloborrego93/edx-platform,jbzdak/edx-platform,jzoldak/edx-platform,inares/edx-platform,nttks/jenkins-test,olexiim/edx-platform,zerobatu/edx-platform,AkA84/edx-platform,sudheerchintala/LearnEraPlatForm,kmoocdev/edx-platform,appsembler/edx-platform,IONISx/edx-platform,kamalx/edx-platform,TeachAtTUM/edx-platform,peterm-itr/edx-platform,franosincic/edx-platform,nttks/edx-platform,simbs/edx-platform,antonve/s4-project-mooc,B-MOOC/edx
-platform,nttks/jenkins-test,prarthitm/edxplatform,dsajkl/123,polimediaupv/edx-platform,JioEducation/edx-platform,raccoongang/edx-platform,alexthered/kienhoc-platform,devs1991/test_edx_docmode,lduarte1991/edx-platform,rismalrv/edx-platform,kamalx/edx-platform,AkA84/edx-platform,eduNEXT/edunext-platform,atsolakid/edx-platform,pku9104038/edx-platform,utecuy/edx-platform,antonve/s4-project-mooc,MSOpenTech/edx-platform,ahmedaljazzar/edx-platform,nikolas/edx-platform,iivic/BoiseStateX,SravanthiSinha/edx-platform,pomegranited/edx-platform,solashirai/edx-platform,defance/edx-platform,valtech-mooc/edx-platform,xuxiao19910803/edx,chudaol/edx-platform,zerobatu/edx-platform,zadgroup/edx-platform,Edraak/edx-platform,antoviaque/edx-platform,marcore/edx-platform,leansoft/edx-platform,eemirtekin/edx-platform,shurihell/testasia,synergeticsedx/deployment-wipro,jjmiranda/edx-platform,IndonesiaX/edx-platform,mcgachey/edx-platform,J861449197/edx-platform,itsjeyd/edx-platform,shubhdev/edx-platform,tanmaykm/edx-platform,ampax/edx-platform,stvstnfrd/edx-platform,dsajkl/reqiop,shubhdev/openedx,Semi-global/edx-platform,edx-solutions/edx-platform,nagyistoce/edx-platform,y12uc231/edx-platform,nanolearningllc/edx-platform-cypress-2,Ayub-Khan/edx-platform,jbassen/edx-platform,hmcmooc/muddx-platform,edry/edx-platform,xuxiao19910803/edx-platform,franosincic/edx-platform,gsehub/edx-platform,beacloudgenius/edx-platform,motion2015/a3,beni55/edx-platform,appsembler/edx-platform,naresh21/synergetics-edx-platform,ubc/edx-platform,proversity-org/edx-platform,Stanford-Online/edx-platform,cpennington/edx-platform,jonathan-beard/edx-platform,a-parhom/edx-platform,RPI-OPENEDX/edx-platform,JCBarahona/edX,Endika/edx-platform,Softmotions/edx-platform,Kalyzee/edx-platform,nanolearning/edx-platform,jazztpt/edx-platform,ak2703/edx-platform,hamzehd/edx-platform,dkarakats/edx-platform,utecuy/edx-platform,eduNEXT/edx-platform,proversity-org/edx-platform,MSOpenTech/edx-platform,doismellburning/edx-platform,yokose-ks/edx-platform,jazztpt/edx-platform,UXE/local-edx,DefyVentures/edx-platform,ahmadio/edx-platform,pepeportela/edx-platform,zhenzhai/edx-platform
|
Add a test that validates that the .po and .mo files match for all active languages
|
"""
Test that the compiled .mo files match the translations in the
uncompiled .po files.
This is required because we are checking in the .mo files into
the repo, but compiling them is a manual process. We want to make
sure that we find out if someone forgets the compilation step.
"""
import ddt
import polib
from unittest import TestCase
from i18n.config import CONFIGURATION, LOCALE_DIR
@ddt.ddt
class TestCompiledMessages(TestCase):
"""
Test that mo files match their source po files
"""
PO_FILES = ['django.po', 'djangojs.po']
@ddt.data(*CONFIGURATION.locales)
def test_translated_messages(self, locale):
message_dir = LOCALE_DIR / locale / 'LC_MESSAGES'
for pofile_name in self.PO_FILES:
pofile_path = message_dir / pofile_name
pofile = polib.pofile(pofile_path)
mofile = polib.mofile(pofile_path.stripext() + '.mo')
po_entries = {entry.msgid: entry for entry in pofile.translated_entries()}
mo_entries = {entry.msgid: entry for entry in mofile.translated_entries()}
# Check that there are no entries in po that aren't in mo, and vice-versa
self.assertEquals(po_entries.viewkeys(), mo_entries.viewkeys())
for entry_id, po_entry in po_entries.iteritems():
mo_entry = mo_entries[entry_id]
for attr in ('msgstr', 'msgid_plural', 'msgstr_plural', 'msgctxt', 'obsolete', 'encoding'):
po_attr = getattr(po_entry, attr)
mo_attr = getattr(mo_entry, attr)
# The msgstr_plural in the mo_file is keyed on ints, but in the po_file it's
# keyed on strings. This normalizes them.
if attr == 'msgstr_plural':
po_attr = {int(key): val for (key, val) in po_attr.items()}
self.assertEquals(
po_attr,
mo_attr,
"When comparing {} for entry {!r}, {!r} from the .po file doesn't match {!r} from the .mo file".format(
attr,
entry_id,
po_attr,
mo_attr,
)
)
|
<commit_before><commit_msg>Add a test that validates that the .po and .mo files match for all active languages<commit_after>
|
"""
Test that the compiled .mo files match the translations in the
uncompiled .po files.
This is required because we are checking in the .mo files into
the repo, but compiling them is a manual process. We want to make
sure that we find out if someone forgets the compilation step.
"""
import ddt
import polib
from unittest import TestCase
from i18n.config import CONFIGURATION, LOCALE_DIR
@ddt.ddt
class TestCompiledMessages(TestCase):
"""
Test that mo files match their source po files
"""
PO_FILES = ['django.po', 'djangojs.po']
@ddt.data(*CONFIGURATION.locales)
def test_translated_messages(self, locale):
message_dir = LOCALE_DIR / locale / 'LC_MESSAGES'
for pofile_name in self.PO_FILES:
pofile_path = message_dir / pofile_name
pofile = polib.pofile(pofile_path)
mofile = polib.mofile(pofile_path.stripext() + '.mo')
po_entries = {entry.msgid: entry for entry in pofile.translated_entries()}
mo_entries = {entry.msgid: entry for entry in mofile.translated_entries()}
# Check that there are no entries in po that aren't in mo, and vice-versa
self.assertEquals(po_entries.viewkeys(), mo_entries.viewkeys())
for entry_id, po_entry in po_entries.iteritems():
mo_entry = mo_entries[entry_id]
for attr in ('msgstr', 'msgid_plural', 'msgstr_plural', 'msgctxt', 'obsolete', 'encoding'):
po_attr = getattr(po_entry, attr)
mo_attr = getattr(mo_entry, attr)
# The msgstr_plural in the mo_file is keyed on ints, but in the po_file it's
# keyed on strings. This normalizes them.
if attr == 'msgstr_plural':
po_attr = {int(key): val for (key, val) in po_attr.items()}
self.assertEquals(
po_attr,
mo_attr,
"When comparing {} for entry {!r}, {!r} from the .po file doesn't match {!r} from the .mo file".format(
attr,
entry_id,
po_attr,
mo_attr,
)
)
|
Add a test that validates that the .po and .mo files match for all active languages"""
Test that the compiled .mo files match the translations in the
uncompiled .po files.
This is required because we are checking in the .mo files into
the repo, but compiling them is a manual process. We want to make
sure that we find out if someone forgets the compilation step.
"""
import ddt
import polib
from unittest import TestCase
from i18n.config import CONFIGURATION, LOCALE_DIR
@ddt.ddt
class TestCompiledMessages(TestCase):
"""
Test that mo files match their source po files
"""
PO_FILES = ['django.po', 'djangojs.po']
@ddt.data(*CONFIGURATION.locales)
def test_translated_messages(self, locale):
message_dir = LOCALE_DIR / locale / 'LC_MESSAGES'
for pofile_name in self.PO_FILES:
pofile_path = message_dir / pofile_name
pofile = polib.pofile(pofile_path)
mofile = polib.mofile(pofile_path.stripext() + '.mo')
po_entries = {entry.msgid: entry for entry in pofile.translated_entries()}
mo_entries = {entry.msgid: entry for entry in mofile.translated_entries()}
# Check that there are no entries in po that aren't in mo, and vice-versa
self.assertEquals(po_entries.viewkeys(), mo_entries.viewkeys())
for entry_id, po_entry in po_entries.iteritems():
mo_entry = mo_entries[entry_id]
for attr in ('msgstr', 'msgid_plural', 'msgstr_plural', 'msgctxt', 'obsolete', 'encoding'):
po_attr = getattr(po_entry, attr)
mo_attr = getattr(mo_entry, attr)
# The msgstr_plural in the mo_file is keyed on ints, but in the po_file it's
# keyed on strings. This normalizes them.
if attr == 'msgstr_plural':
po_attr = {int(key): val for (key, val) in po_attr.items()}
self.assertEquals(
po_attr,
mo_attr,
"When comparing {} for entry {!r}, {!r} from the .po file doesn't match {!r} from the .mo file".format(
attr,
entry_id,
po_attr,
mo_attr,
)
)
|
<commit_before><commit_msg>Add a test that validates that the .po and .mo files match for all active languages<commit_after>"""
Test that the compiled .mo files match the translations in the
uncompiled .po files.
This is required because we are checking in the .mo files into
the repo, but compiling them is a manual process. We want to make
sure that we find out if someone forgets the compilation step.
"""
import ddt
import polib
from unittest import TestCase
from i18n.config import CONFIGURATION, LOCALE_DIR
@ddt.ddt
class TestCompiledMessages(TestCase):
"""
Test that mo files match their source po files
"""
PO_FILES = ['django.po', 'djangojs.po']
@ddt.data(*CONFIGURATION.locales)
def test_translated_messages(self, locale):
message_dir = LOCALE_DIR / locale / 'LC_MESSAGES'
for pofile_name in self.PO_FILES:
pofile_path = message_dir / pofile_name
pofile = polib.pofile(pofile_path)
mofile = polib.mofile(pofile_path.stripext() + '.mo')
po_entries = {entry.msgid: entry for entry in pofile.translated_entries()}
mo_entries = {entry.msgid: entry for entry in mofile.translated_entries()}
# Check that there are no entries in po that aren't in mo, and vice-versa
self.assertEquals(po_entries.viewkeys(), mo_entries.viewkeys())
for entry_id, po_entry in po_entries.iteritems():
mo_entry = mo_entries[entry_id]
for attr in ('msgstr', 'msgid_plural', 'msgstr_plural', 'msgctxt', 'obsolete', 'encoding'):
po_attr = getattr(po_entry, attr)
mo_attr = getattr(mo_entry, attr)
# The msgstr_plural in the mo_file is keyed on ints, but in the po_file it's
# keyed on strings. This normalizes them.
if attr == 'msgstr_plural':
po_attr = {int(key): val for (key, val) in po_attr.items()}
self.assertEquals(
po_attr,
mo_attr,
"When comparing {} for entry {!r}, {!r} from the .po file doesn't match {!r} from the .mo file".format(
attr,
entry_id,
po_attr,
mo_attr,
)
)
|
|
528107bad66cc2eb9e56a7964e16eb66b4beddf7
|
fabfile.py
|
fabfile.py
|
from fabric.api import (
local,
settings,
task
)
from fabric.state import env
SWARM101_NETWORK = 'swarm101'
@task
def localhost():
env.run = local
@task
def swarm_init(subnet='192.168.0.0/24'):
env.run('docker swarm init')
command = 'docker network create -d overlay ' + \
'--subnet=' + subnet + ' ' + SWARM101_NETWORK
env.run(command)
@task
def swarm_leave():
with settings(warn_only=True):
env.run('docker swarm leave --force')
env.run('docker network rm ' + SWARM101_NETWORK)
@task
def build_images():
services = [
(
'bangkok',
'bangkok/Dockerfile',
'bangkok'
),
(
'munich',
'munich/Dockerfile',
'munich'
),
(
'tokyo',
'tokyo/Dockerfile',
'tokyo'
),
(
'nyc',
'nyc/Dockerfile',
'nyc'
),
(
'gateway',
'gateway/Dockerfile',
'gateway'
),
]
for name, dockerfile, path in services:
command = 'docker build -t ' + name + ':unstable -f ' + \
dockerfile + ' ' + path
env.run(command)
@task
def create_services(tag='unstable'):
services = [
'bangkok',
'munich',
'tokyo',
'nyc',
]
for service in services:
command = 'docker service create --name ' + \
service + ' --network ' + SWARM101_NETWORK + \
' ' + service + ':' + tag
env.run(command)
service = 'gateway'
command = 'docker service create --name ' + \
service + ' --network ' + SWARM101_NETWORK + \
' -p 8000:8000 ' + service + ':' + tag
env.run(command)
@task
def setup():
swarm_init()
build_images()
create_services(tag='unstable')
|
Use fabric to start/end a swarm
|
Use fabric to start/end a swarm
|
Python
|
mit
|
zkan/microservices-with-swarm-101,zkan/microservices-with-swarm-101,zkan/microservices-with-swarm-101,zkan/microservices-with-swarm-101
|
Use fabric to start/end a swarm
|
from fabric.api import (
local,
settings,
task
)
from fabric.state import env
SWARM101_NETWORK = 'swarm101'
@task
def localhost():
env.run = local
@task
def swarm_init(subnet='192.168.0.0/24'):
env.run('docker swarm init')
command = 'docker network create -d overlay ' + \
'--subnet=' + subnet + ' ' + SWARM101_NETWORK
env.run(command)
@task
def swarm_leave():
with settings(warn_only=True):
env.run('docker swarm leave --force')
env.run('docker network rm ' + SWARM101_NETWORK)
@task
def build_images():
services = [
(
'bangkok',
'bangkok/Dockerfile',
'bangkok'
),
(
'munich',
'munich/Dockerfile',
'munich'
),
(
'tokyo',
'tokyo/Dockerfile',
'tokyo'
),
(
'nyc',
'nyc/Dockerfile',
'nyc'
),
(
'gateway',
'gateway/Dockerfile',
'gateway'
),
]
for name, dockerfile, path in services:
command = 'docker build -t ' + name + ':unstable -f ' + \
dockerfile + ' ' + path
env.run(command)
@task
def create_services(tag='unstable'):
services = [
'bangkok',
'munich',
'tokyo',
'nyc',
]
for service in services:
command = 'docker service create --name ' + \
service + ' --network ' + SWARM101_NETWORK + \
' ' + service + ':' + tag
env.run(command)
service = 'gateway'
command = 'docker service create --name ' + \
service + ' --network ' + SWARM101_NETWORK + \
' -p 8000:8000 ' + service + ':' + tag
env.run(command)
@task
def setup():
swarm_init()
build_images()
create_services(tag='unstable')
|
<commit_before><commit_msg>Use fabric to start/end a swarm<commit_after>
|
from fabric.api import (
local,
settings,
task
)
from fabric.state import env
SWARM101_NETWORK = 'swarm101'
@task
def localhost():
env.run = local
@task
def swarm_init(subnet='192.168.0.0/24'):
env.run('docker swarm init')
command = 'docker network create -d overlay ' + \
'--subnet=' + subnet + ' ' + SWARM101_NETWORK
env.run(command)
@task
def swarm_leave():
with settings(warn_only=True):
env.run('docker swarm leave --force')
env.run('docker network rm ' + SWARM101_NETWORK)
@task
def build_images():
services = [
(
'bangkok',
'bangkok/Dockerfile',
'bangkok'
),
(
'munich',
'munich/Dockerfile',
'munich'
),
(
'tokyo',
'tokyo/Dockerfile',
'tokyo'
),
(
'nyc',
'nyc/Dockerfile',
'nyc'
),
(
'gateway',
'gateway/Dockerfile',
'gateway'
),
]
for name, dockerfile, path in services:
command = 'docker build -t ' + name + ':unstable -f ' + \
dockerfile + ' ' + path
env.run(command)
@task
def create_services(tag='unstable'):
services = [
'bangkok',
'munich',
'tokyo',
'nyc',
]
for service in services:
command = 'docker service create --name ' + \
service + ' --network ' + SWARM101_NETWORK + \
' ' + service + ':' + tag
env.run(command)
service = 'gateway'
command = 'docker service create --name ' + \
service + ' --network ' + SWARM101_NETWORK + \
' -p 8000:8000 ' + service + ':' + tag
env.run(command)
@task
def setup():
swarm_init()
build_images()
create_services(tag='unstable')
|
Use fabric to start/end a swarmfrom fabric.api import (
local,
settings,
task
)
from fabric.state import env
SWARM101_NETWORK = 'swarm101'
@task
def localhost():
env.run = local
@task
def swarm_init(subnet='192.168.0.0/24'):
env.run('docker swarm init')
command = 'docker network create -d overlay ' + \
'--subnet=' + subnet + ' ' + SWARM101_NETWORK
env.run(command)
@task
def swarm_leave():
with settings(warn_only=True):
env.run('docker swarm leave --force')
env.run('docker network rm ' + SWARM101_NETWORK)
@task
def build_images():
services = [
(
'bangkok',
'bangkok/Dockerfile',
'bangkok'
),
(
'munich',
'munich/Dockerfile',
'munich'
),
(
'tokyo',
'tokyo/Dockerfile',
'tokyo'
),
(
'nyc',
'nyc/Dockerfile',
'nyc'
),
(
'gateway',
'gateway/Dockerfile',
'gateway'
),
]
for name, dockerfile, path in services:
command = 'docker build -t ' + name + ':unstable -f ' + \
dockerfile + ' ' + path
env.run(command)
@task
def create_services(tag='unstable'):
services = [
'bangkok',
'munich',
'tokyo',
'nyc',
]
for service in services:
command = 'docker service create --name ' + \
service + ' --network ' + SWARM101_NETWORK + \
' ' + service + ':' + tag
env.run(command)
service = 'gateway'
command = 'docker service create --name ' + \
service + ' --network ' + SWARM101_NETWORK + \
' -p 8000:8000 ' + service + ':' + tag
env.run(command)
@task
def setup():
swarm_init()
build_images()
create_services(tag='unstable')
|
<commit_before><commit_msg>Use fabric to start/end a swarm<commit_after>from fabric.api import (
local,
settings,
task
)
from fabric.state import env
SWARM101_NETWORK = 'swarm101'
@task
def localhost():
env.run = local
@task
def swarm_init(subnet='192.168.0.0/24'):
env.run('docker swarm init')
command = 'docker network create -d overlay ' + \
'--subnet=' + subnet + ' ' + SWARM101_NETWORK
env.run(command)
@task
def swarm_leave():
with settings(warn_only=True):
env.run('docker swarm leave --force')
env.run('docker network rm ' + SWARM101_NETWORK)
@task
def build_images():
services = [
(
'bangkok',
'bangkok/Dockerfile',
'bangkok'
),
(
'munich',
'munich/Dockerfile',
'munich'
),
(
'tokyo',
'tokyo/Dockerfile',
'tokyo'
),
(
'nyc',
'nyc/Dockerfile',
'nyc'
),
(
'gateway',
'gateway/Dockerfile',
'gateway'
),
]
for name, dockerfile, path in services:
command = 'docker build -t ' + name + ':unstable -f ' + \
dockerfile + ' ' + path
env.run(command)
@task
def create_services(tag='unstable'):
services = [
'bangkok',
'munich',
'tokyo',
'nyc',
]
for service in services:
command = 'docker service create --name ' + \
service + ' --network ' + SWARM101_NETWORK + \
' ' + service + ':' + tag
env.run(command)
service = 'gateway'
command = 'docker service create --name ' + \
service + ' --network ' + SWARM101_NETWORK + \
' -p 8000:8000 ' + service + ':' + tag
env.run(command)
@task
def setup():
swarm_init()
build_images()
create_services(tag='unstable')
|
|
635d7beaff06d76bcdeddb386875f29fe132fb91
|
Applications/SegmentVesselsCNNSeeds/SegmentVesselsCNNSeeds.py
|
Applications/SegmentVesselsCNNSeeds/SegmentVesselsCNNSeeds.py
|
#!/usr/bin/env python
import os.path
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
|
#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
|
Create the output directory as necessary
|
Create the output directory as necessary
|
Python
|
apache-2.0
|
aylward/ITKTubeTK,KitwareMedical/TubeTK,KitwareMedical/ITKTubeTK,KitwareMedical/TubeTK,aylward/ITKTubeTK,KitwareMedical/ITKTubeTK,thewtex/TubeTK,thewtex/TubeTK,aylward/ITKTubeTK,KitwareMedical/TubeTK,KitwareMedical/ITKTubeTK,thewtex/TubeTK,thewtex/TubeTK,KitwareMedical/ITKTubeTK,KitwareMedical/TubeTK,aylward/ITKTubeTK
|
#!/usr/bin/env python
import os.path
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
Create the output directory as necessary
|
#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
|
<commit_before>#!/usr/bin/env python
import os.path
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
<commit_msg>Create the output directory as necessary<commit_after>
|
#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
|
#!/usr/bin/env python
import os.path
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
Create the output directory as necessary#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
|
<commit_before>#!/usr/bin/env python
import os.path
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
<commit_msg>Create the output directory as necessary<commit_after>#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
|
87f14e78e649487776585c0bb6f8253a12985ac5
|
Applications/SegmentVesselsCNNSeeds/SegmentVesselsCNNSeeds.py
|
Applications/SegmentVesselsCNNSeeds/SegmentVesselsCNNSeeds.py
|
#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
|
#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
|
Create directory, if necessary, before preprocessing files as well
|
Create directory, if necessary, before preprocessing files as well
|
Python
|
apache-2.0
|
KitwareMedical/ITKTubeTK,KitwareMedical/TubeTK,thewtex/TubeTK,thewtex/TubeTK,KitwareMedical/TubeTK,thewtex/TubeTK,aylward/ITKTubeTK,aylward/ITKTubeTK,KitwareMedical/TubeTK,KitwareMedical/ITKTubeTK,KitwareMedical/ITKTubeTK,thewtex/TubeTK,KitwareMedical/TubeTK,KitwareMedical/ITKTubeTK,aylward/ITKTubeTK,aylward/ITKTubeTK
|
#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
Create directory, if necessary, before preprocessing files as well
|
#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
|
<commit_before>#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
<commit_msg>Create directory, if necessary, before preprocessing files as well<commit_after>
|
#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
|
#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
Create directory, if necessary, before preprocessing files as well#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
|
<commit_before>#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
<commit_msg>Create directory, if necessary, before preprocessing files as well<commit_after>#!/usr/bin/env python
import errno
import os
import ctk_cli
import keras.models as M
from tubetk.vseg.cnn import deploy, utils
script_params = utils.script_params
def main(args):
utils.set_params_path(args.params)
if (args.resampled is None) ^ (script_params['RESAMPLE_SPACING'] is None or args.preprocessed is None):
raise ValueError("A resampled image should be supplied iff resampling is"
" enabled in the parameters file and a preprocessed"
" image is given.")
try:
os.mkdir(args.outputDir)
except OSError as e:
if e.errno != errno.EEXIST:
raise
if args.preprocessed is None:
args.resampled, args.preprocessed = deploy.prep(args.inputImage, args.outputDir)
elif args.resampled is None:
args.resampled = args.inputImage
model = M.load_model(args.model)
prefix = os.path.join(args.outputDir, os.path.splitext(os.path.basename(args.inputImage))[0])
deploy.generate_seed_points(model, args.preprocessed, prefix)
deploy.segmentTubes(args.resampled, args.vascularModelFile, prefix,
script_params['VESSEL_SEED_PROBABILITY'],
script_params['VESSEL_SCALE'])
if __name__ == '__main__':
main(ctk_cli.CLIArgumentParser().parse_args())
|
fe98703f789976df76a3275c8449d53f89a58ec1
|
behave_django/testcase.py
|
behave_django/testcase.py
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
class BehaviorDrivenTestCase(StaticLiveServerTestCase):
"""
Test case attached to the context during behave execution
This test case prevents the regular tests from running.
"""
def runTest(*args, **kwargs):
pass
class ExistingDatabaseTestCase(BehaviorDrivenTestCase):
"""
Test case used for the --use-existing-database setup
This test case prevents fixtures from being loaded to the database in use.
"""
def _fixture_setup(self):
pass
def _fixture_teardown(self):
pass
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
class BehaviorDrivenTestCase(StaticLiveServerTestCase):
"""
Test case attached to the context during behave execution
This test case prevents the regular tests from running.
"""
def runTest(self):
pass
class ExistingDatabaseTestCase(BehaviorDrivenTestCase):
"""
Test case used for the --use-existing-database setup
This test case prevents fixtures from being loaded to the database in use.
"""
def _fixture_setup(self):
pass
def _fixture_teardown(self):
pass
|
Fix Landscape complaint "Method has no argument"
|
Fix Landscape complaint "Method has no argument"
|
Python
|
mit
|
behave/behave-django,bittner/behave-django,bittner/behave-django,behave/behave-django
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
class BehaviorDrivenTestCase(StaticLiveServerTestCase):
"""
Test case attached to the context during behave execution
This test case prevents the regular tests from running.
"""
def runTest(*args, **kwargs):
pass
class ExistingDatabaseTestCase(BehaviorDrivenTestCase):
"""
Test case used for the --use-existing-database setup
This test case prevents fixtures from being loaded to the database in use.
"""
def _fixture_setup(self):
pass
def _fixture_teardown(self):
pass
Fix Landscape complaint "Method has no argument"
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
class BehaviorDrivenTestCase(StaticLiveServerTestCase):
"""
Test case attached to the context during behave execution
This test case prevents the regular tests from running.
"""
def runTest(self):
pass
class ExistingDatabaseTestCase(BehaviorDrivenTestCase):
"""
Test case used for the --use-existing-database setup
This test case prevents fixtures from being loaded to the database in use.
"""
def _fixture_setup(self):
pass
def _fixture_teardown(self):
pass
|
<commit_before>from django.contrib.staticfiles.testing import StaticLiveServerTestCase
class BehaviorDrivenTestCase(StaticLiveServerTestCase):
"""
Test case attached to the context during behave execution
This test case prevents the regular tests from running.
"""
def runTest(*args, **kwargs):
pass
class ExistingDatabaseTestCase(BehaviorDrivenTestCase):
"""
Test case used for the --use-existing-database setup
This test case prevents fixtures from being loaded to the database in use.
"""
def _fixture_setup(self):
pass
def _fixture_teardown(self):
pass
<commit_msg>Fix Landscape complaint "Method has no argument"<commit_after>
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
class BehaviorDrivenTestCase(StaticLiveServerTestCase):
"""
Test case attached to the context during behave execution
This test case prevents the regular tests from running.
"""
def runTest(self):
pass
class ExistingDatabaseTestCase(BehaviorDrivenTestCase):
"""
Test case used for the --use-existing-database setup
This test case prevents fixtures from being loaded to the database in use.
"""
def _fixture_setup(self):
pass
def _fixture_teardown(self):
pass
|
from django.contrib.staticfiles.testing import StaticLiveServerTestCase
class BehaviorDrivenTestCase(StaticLiveServerTestCase):
"""
Test case attached to the context during behave execution
This test case prevents the regular tests from running.
"""
def runTest(*args, **kwargs):
pass
class ExistingDatabaseTestCase(BehaviorDrivenTestCase):
"""
Test case used for the --use-existing-database setup
This test case prevents fixtures from being loaded to the database in use.
"""
def _fixture_setup(self):
pass
def _fixture_teardown(self):
pass
Fix Landscape complaint "Method has no argument"from django.contrib.staticfiles.testing import StaticLiveServerTestCase
class BehaviorDrivenTestCase(StaticLiveServerTestCase):
"""
Test case attached to the context during behave execution
This test case prevents the regular tests from running.
"""
def runTest(self):
pass
class ExistingDatabaseTestCase(BehaviorDrivenTestCase):
"""
Test case used for the --use-existing-database setup
This test case prevents fixtures from being loaded to the database in use.
"""
def _fixture_setup(self):
pass
def _fixture_teardown(self):
pass
|
<commit_before>from django.contrib.staticfiles.testing import StaticLiveServerTestCase
class BehaviorDrivenTestCase(StaticLiveServerTestCase):
"""
Test case attached to the context during behave execution
This test case prevents the regular tests from running.
"""
def runTest(*args, **kwargs):
pass
class ExistingDatabaseTestCase(BehaviorDrivenTestCase):
"""
Test case used for the --use-existing-database setup
This test case prevents fixtures from being loaded to the database in use.
"""
def _fixture_setup(self):
pass
def _fixture_teardown(self):
pass
<commit_msg>Fix Landscape complaint "Method has no argument"<commit_after>from django.contrib.staticfiles.testing import StaticLiveServerTestCase
class BehaviorDrivenTestCase(StaticLiveServerTestCase):
"""
Test case attached to the context during behave execution
This test case prevents the regular tests from running.
"""
def runTest(self):
pass
class ExistingDatabaseTestCase(BehaviorDrivenTestCase):
"""
Test case used for the --use-existing-database setup
This test case prevents fixtures from being loaded to the database in use.
"""
def _fixture_setup(self):
pass
def _fixture_teardown(self):
pass
|
47ca83aae45ac4d71c10f4a93c644d5df7e8e0e3
|
models/a3c/test_data_generator.py
|
models/a3c/test_data_generator.py
|
# Generate test data in FDAX datafile format
# Hack as needed
from math import sin, pi
price_anchor = 9000.0
price_swing = 200.0
period = 1000
with open('FDAX_19700101.csv', 'w') as f:
for i in range(10000):
price = price_anchor + price_swing * sin(2*pi*i/period)
f.write('{} {} {} {} {} {} {} {}\n'.format(i, price, 1, 0, price-0.5, 1, price+0.5, 1))
|
Add test data generator (to be hacked as needed)
|
Add test data generator (to be hacked as needed)
|
Python
|
mit
|
lukovkin/ufcnn-keras,lukovkin/ufcnn-keras
|
Add test data generator (to be hacked as needed)
|
# Generate test data in FDAX datafile format
# Hack as needed
from math import sin, pi
price_anchor = 9000.0
price_swing = 200.0
period = 1000
with open('FDAX_19700101.csv', 'w') as f:
for i in range(10000):
price = price_anchor + price_swing * sin(2*pi*i/period)
f.write('{} {} {} {} {} {} {} {}\n'.format(i, price, 1, 0, price-0.5, 1, price+0.5, 1))
|
<commit_before><commit_msg>Add test data generator (to be hacked as needed)<commit_after>
|
# Generate test data in FDAX datafile format
# Hack as needed
from math import sin, pi
price_anchor = 9000.0
price_swing = 200.0
period = 1000
with open('FDAX_19700101.csv', 'w') as f:
for i in range(10000):
price = price_anchor + price_swing * sin(2*pi*i/period)
f.write('{} {} {} {} {} {} {} {}\n'.format(i, price, 1, 0, price-0.5, 1, price+0.5, 1))
|
Add test data generator (to be hacked as needed)# Generate test data in FDAX datafile format
# Hack as needed
from math import sin, pi
price_anchor = 9000.0
price_swing = 200.0
period = 1000
with open('FDAX_19700101.csv', 'w') as f:
for i in range(10000):
price = price_anchor + price_swing * sin(2*pi*i/period)
f.write('{} {} {} {} {} {} {} {}\n'.format(i, price, 1, 0, price-0.5, 1, price+0.5, 1))
|
<commit_before><commit_msg>Add test data generator (to be hacked as needed)<commit_after># Generate test data in FDAX datafile format
# Hack as needed
from math import sin, pi
price_anchor = 9000.0
price_swing = 200.0
period = 1000
with open('FDAX_19700101.csv', 'w') as f:
for i in range(10000):
price = price_anchor + price_swing * sin(2*pi*i/period)
f.write('{} {} {} {} {} {} {} {}\n'.format(i, price, 1, 0, price-0.5, 1, price+0.5, 1))
|
|
03634f90e95c1c218e9060aa531da2c4a77fe52d
|
oidc_provider/tests/test_userinfo_endpoint.py
|
oidc_provider/tests/test_userinfo_endpoint.py
|
from datetime import timedelta
from django.core.urlresolvers import reverse
from django.test import RequestFactory
from django.test import TestCase
from django.utils import timezone
from oidc_provider.lib.utils.token import *
from oidc_provider.models import *
from oidc_provider.tests.utils import *
from oidc_provider.views import userinfo
class UserInfoTestCase(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.user = create_fake_user()
self.client = create_fake_client(response_type='code')
def _create_token(self):
"""
Generate a valid token.
"""
id_token_dic = create_id_token(self.user, self.client.client_id)
token = create_token(
user=self.user,
client=self.client,
id_token_dic=id_token_dic,
scope=['openid', 'email'])
token.save()
return token
def _post_request(self, access_token):
"""
Makes a request to the userinfo endpoint by sending the
`post_data` parameters using the 'multipart/form-data'
format.
"""
url = reverse('oidc_provider:userinfo')
request = self.factory.post(url,
data={},
content_type='multipart/form-data')
request.META['HTTP_AUTHORIZATION'] = 'Bearer ' + access_token
response = userinfo(request)
return response
def test_response_with_valid_token(self):
token = self._create_token()
# Test a valid request to the userinfo endpoint.
response = self._post_request(token.access_token)
self.assertEqual(response.status_code, 200)
self.assertEqual(bool(response.content), True)
def test_response_with_expired_token(self):
token = self._create_token()
# Make token expired.
token.expires_at = timezone.now() - timedelta(hours=1)
token.save()
response = self._post_request(token.access_token)
self.assertEqual(response.status_code, 401)
try:
is_header_field_ok = 'invalid_token' in response['WWW-Authenticate']
except KeyError:
is_header_field_ok = False
self.assertEqual(is_header_field_ok, True)
|
Add tests for userinfo endpoint.
|
Add tests for userinfo endpoint.
|
Python
|
mit
|
wojtek-fliposports/django-oidc-provider,bunnyinc/django-oidc-provider,juanifioren/django-oidc-provider,django-py/django-openid-provider,wayward710/django-oidc-provider,Sjord/django-oidc-provider,Sjord/django-oidc-provider,ByteInternet/django-oidc-provider,torreco/django-oidc-provider,bunnyinc/django-oidc-provider,django-py/django-openid-provider,nmohoric/django-oidc-provider,wayward710/django-oidc-provider,wojtek-fliposports/django-oidc-provider,juanifioren/django-oidc-provider,nmohoric/django-oidc-provider,torreco/django-oidc-provider,ByteInternet/django-oidc-provider
|
Add tests for userinfo endpoint.
|
from datetime import timedelta
from django.core.urlresolvers import reverse
from django.test import RequestFactory
from django.test import TestCase
from django.utils import timezone
from oidc_provider.lib.utils.token import *
from oidc_provider.models import *
from oidc_provider.tests.utils import *
from oidc_provider.views import userinfo
class UserInfoTestCase(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.user = create_fake_user()
self.client = create_fake_client(response_type='code')
def _create_token(self):
"""
Generate a valid token.
"""
id_token_dic = create_id_token(self.user, self.client.client_id)
token = create_token(
user=self.user,
client=self.client,
id_token_dic=id_token_dic,
scope=['openid', 'email'])
token.save()
return token
def _post_request(self, access_token):
"""
Makes a request to the userinfo endpoint by sending the
`post_data` parameters using the 'multipart/form-data'
format.
"""
url = reverse('oidc_provider:userinfo')
request = self.factory.post(url,
data={},
content_type='multipart/form-data')
request.META['HTTP_AUTHORIZATION'] = 'Bearer ' + access_token
response = userinfo(request)
return response
def test_response_with_valid_token(self):
token = self._create_token()
# Test a valid request to the userinfo endpoint.
response = self._post_request(token.access_token)
self.assertEqual(response.status_code, 200)
self.assertEqual(bool(response.content), True)
def test_response_with_expired_token(self):
token = self._create_token()
# Make token expired.
token.expires_at = timezone.now() - timedelta(hours=1)
token.save()
response = self._post_request(token.access_token)
self.assertEqual(response.status_code, 401)
try:
is_header_field_ok = 'invalid_token' in response['WWW-Authenticate']
except KeyError:
is_header_field_ok = False
self.assertEqual(is_header_field_ok, True)
|
<commit_before><commit_msg>Add tests for userinfo endpoint.<commit_after>
|
from datetime import timedelta
from django.core.urlresolvers import reverse
from django.test import RequestFactory
from django.test import TestCase
from django.utils import timezone
from oidc_provider.lib.utils.token import *
from oidc_provider.models import *
from oidc_provider.tests.utils import *
from oidc_provider.views import userinfo
class UserInfoTestCase(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.user = create_fake_user()
self.client = create_fake_client(response_type='code')
def _create_token(self):
"""
Generate a valid token.
"""
id_token_dic = create_id_token(self.user, self.client.client_id)
token = create_token(
user=self.user,
client=self.client,
id_token_dic=id_token_dic,
scope=['openid', 'email'])
token.save()
return token
def _post_request(self, access_token):
"""
Makes a request to the userinfo endpoint by sending the
`post_data` parameters using the 'multipart/form-data'
format.
"""
url = reverse('oidc_provider:userinfo')
request = self.factory.post(url,
data={},
content_type='multipart/form-data')
request.META['HTTP_AUTHORIZATION'] = 'Bearer ' + access_token
response = userinfo(request)
return response
def test_response_with_valid_token(self):
token = self._create_token()
# Test a valid request to the userinfo endpoint.
response = self._post_request(token.access_token)
self.assertEqual(response.status_code, 200)
self.assertEqual(bool(response.content), True)
def test_response_with_expired_token(self):
token = self._create_token()
# Make token expired.
token.expires_at = timezone.now() - timedelta(hours=1)
token.save()
response = self._post_request(token.access_token)
self.assertEqual(response.status_code, 401)
try:
is_header_field_ok = 'invalid_token' in response['WWW-Authenticate']
except KeyError:
is_header_field_ok = False
self.assertEqual(is_header_field_ok, True)
|
Add tests for userinfo endpoint.from datetime import timedelta
from django.core.urlresolvers import reverse
from django.test import RequestFactory
from django.test import TestCase
from django.utils import timezone
from oidc_provider.lib.utils.token import *
from oidc_provider.models import *
from oidc_provider.tests.utils import *
from oidc_provider.views import userinfo
class UserInfoTestCase(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.user = create_fake_user()
self.client = create_fake_client(response_type='code')
def _create_token(self):
"""
Generate a valid token.
"""
id_token_dic = create_id_token(self.user, self.client.client_id)
token = create_token(
user=self.user,
client=self.client,
id_token_dic=id_token_dic,
scope=['openid', 'email'])
token.save()
return token
def _post_request(self, access_token):
"""
Makes a request to the userinfo endpoint by sending the
`post_data` parameters using the 'multipart/form-data'
format.
"""
url = reverse('oidc_provider:userinfo')
request = self.factory.post(url,
data={},
content_type='multipart/form-data')
request.META['HTTP_AUTHORIZATION'] = 'Bearer ' + access_token
response = userinfo(request)
return response
def test_response_with_valid_token(self):
token = self._create_token()
# Test a valid request to the userinfo endpoint.
response = self._post_request(token.access_token)
self.assertEqual(response.status_code, 200)
self.assertEqual(bool(response.content), True)
def test_response_with_expired_token(self):
token = self._create_token()
# Make token expired.
token.expires_at = timezone.now() - timedelta(hours=1)
token.save()
response = self._post_request(token.access_token)
self.assertEqual(response.status_code, 401)
try:
is_header_field_ok = 'invalid_token' in response['WWW-Authenticate']
except KeyError:
is_header_field_ok = False
self.assertEqual(is_header_field_ok, True)
|
<commit_before><commit_msg>Add tests for userinfo endpoint.<commit_after>from datetime import timedelta
from django.core.urlresolvers import reverse
from django.test import RequestFactory
from django.test import TestCase
from django.utils import timezone
from oidc_provider.lib.utils.token import *
from oidc_provider.models import *
from oidc_provider.tests.utils import *
from oidc_provider.views import userinfo
class UserInfoTestCase(TestCase):
def setUp(self):
self.factory = RequestFactory()
self.user = create_fake_user()
self.client = create_fake_client(response_type='code')
def _create_token(self):
"""
Generate a valid token.
"""
id_token_dic = create_id_token(self.user, self.client.client_id)
token = create_token(
user=self.user,
client=self.client,
id_token_dic=id_token_dic,
scope=['openid', 'email'])
token.save()
return token
def _post_request(self, access_token):
"""
Makes a request to the userinfo endpoint by sending the
`post_data` parameters using the 'multipart/form-data'
format.
"""
url = reverse('oidc_provider:userinfo')
request = self.factory.post(url,
data={},
content_type='multipart/form-data')
request.META['HTTP_AUTHORIZATION'] = 'Bearer ' + access_token
response = userinfo(request)
return response
def test_response_with_valid_token(self):
token = self._create_token()
# Test a valid request to the userinfo endpoint.
response = self._post_request(token.access_token)
self.assertEqual(response.status_code, 200)
self.assertEqual(bool(response.content), True)
def test_response_with_expired_token(self):
token = self._create_token()
# Make token expired.
token.expires_at = timezone.now() - timedelta(hours=1)
token.save()
response = self._post_request(token.access_token)
self.assertEqual(response.status_code, 401)
try:
is_header_field_ok = 'invalid_token' in response['WWW-Authenticate']
except KeyError:
is_header_field_ok = False
self.assertEqual(is_header_field_ok, True)
|
|
7ed56fcbe3c4a9c465bf3b67260fc5e326339ba1
|
statsmodels/graphics/tests/test_factorplots.py
|
statsmodels/graphics/tests/test_factorplots.py
|
import numpy as np
from pandas import Series
import matplotlib.pyplot as plt
from statsmodels.graphics.factorplots import interaction_plot
class TestInteractionPlot(object):
@classmethod
def setupClass(cls):
np.random.seed(12345)
cls.weight = np.random.randint(1,4,size=60)
cls.duration = np.random.randint(1,3,size=60)
cls.days = np.log(np.random.randint(1,30, size=60))
def test_plot_both(self):
fig = interaction_plot(self.weight, self.duration, self.days,
colors=['red','blue'], markers=['D','^'], ms=10)
plt.close(fig)
def test_plot_rainbow(self):
fig = interaction_plot(self.weight, self.duration, self.days,
markers=['D','^'], ms=10)
plt.close(fig)
def test_plot_pandas(self):
weight = Series(self.weight, name='Weight')
duration = Series(self.duration, name='Duration')
days = Series(self.days, name='Days')
fig = interaction_plot(weight, duration, days,
markers=['D','^'], ms=10)
ax = fig.axes[0]
trace = ax.get_legend().get_title().get_text()
assert trace == 'Duration'
assert ax.get_ylabel() == 'mean of Days'
assert ax.get_xlabel() == 'Weight'
plt.close(fig)
|
Add smoke tests for interaction_plot
|
ENH: Add smoke tests for interaction_plot
|
Python
|
bsd-3-clause
|
bsipocz/statsmodels,hlin117/statsmodels,wzbozon/statsmodels,bashtage/statsmodels,DonBeo/statsmodels,statsmodels/statsmodels,ChadFulton/statsmodels,gef756/statsmodels,astocko/statsmodels,josef-pkt/statsmodels,wwf5067/statsmodels,statsmodels/statsmodels,yl565/statsmodels,phobson/statsmodels,hlin117/statsmodels,bert9bert/statsmodels,wzbozon/statsmodels,YihaoLu/statsmodels,nvoron23/statsmodels,wdurhamh/statsmodels,jstoxrocky/statsmodels,jseabold/statsmodels,nguyentu1602/statsmodels,edhuckle/statsmodels,pprett/statsmodels,bashtage/statsmodels,astocko/statsmodels,hainm/statsmodels,hainm/statsmodels,jseabold/statsmodels,bzero/statsmodels,hlin117/statsmodels,bashtage/statsmodels,yarikoptic/pystatsmodels,bert9bert/statsmodels,alekz112/statsmodels,phobson/statsmodels,cbmoore/statsmodels,saketkc/statsmodels,musically-ut/statsmodels,bsipocz/statsmodels,yl565/statsmodels,detrout/debian-statsmodels,josef-pkt/statsmodels,yarikoptic/pystatsmodels,adammenges/statsmodels,alekz112/statsmodels,statsmodels/statsmodels,bavardage/statsmodels,statsmodels/statsmodels,jseabold/statsmodels,nvoron23/statsmodels,DonBeo/statsmodels,edhuckle/statsmodels,astocko/statsmodels,hainm/statsmodels,edhuckle/statsmodels,kiyoto/statsmodels,ChadFulton/statsmodels,rgommers/statsmodels,bert9bert/statsmodels,musically-ut/statsmodels,DonBeo/statsmodels,edhuckle/statsmodels,adammenges/statsmodels,jseabold/statsmodels,bashtage/statsmodels,wdurhamh/statsmodels,musically-ut/statsmodels,huongttlan/statsmodels,kiyoto/statsmodels,rgommers/statsmodels,bashtage/statsmodels,huongttlan/statsmodels,jseabold/statsmodels,bert9bert/statsmodels,cbmoore/statsmodels,phobson/statsmodels,YihaoLu/statsmodels,Averroes/statsmodels,adammenges/statsmodels,nvoron23/statsmodels,huongttlan/statsmodels,bashtage/statsmodels,Averroes/statsmodels,wdurhamh/statsmodels,josef-pkt/statsmodels,kiyoto/statsmodels,astocko/statsmodels,phobson/statsmodels,edhuckle/statsmodels,wwf5067/statsmodels,wkfwkf/statsmodels,Averroes/statsmodels,saketkc/statsmodels,nvoron23/statsmodels,rgommers/statsmodels,statsmodels/statsmodels,YihaoLu/statsmodels,huongttlan/statsmodels,YihaoLu/statsmodels,wzbozon/statsmodels,josef-pkt/statsmodels,alekz112/statsmodels,rgommers/statsmodels,gef756/statsmodels,gef756/statsmodels,cbmoore/statsmodels,statsmodels/statsmodels,wwf5067/statsmodels,josef-pkt/statsmodels,ChadFulton/statsmodels,bavardage/statsmodels,kiyoto/statsmodels,DonBeo/statsmodels,pprett/statsmodels,waynenilsen/statsmodels,nguyentu1602/statsmodels,DonBeo/statsmodels,musically-ut/statsmodels,wkfwkf/statsmodels,phobson/statsmodels,nguyentu1602/statsmodels,yl565/statsmodels,detrout/debian-statsmodels,saketkc/statsmodels,bavardage/statsmodels,saketkc/statsmodels,cbmoore/statsmodels,wzbozon/statsmodels,wzbozon/statsmodels,cbmoore/statsmodels,waynenilsen/statsmodels,josef-pkt/statsmodels,bavardage/statsmodels,bsipocz/statsmodels,ChadFulton/statsmodels,alekz112/statsmodels,wdurhamh/statsmodels,bzero/statsmodels,bzero/statsmodels,jstoxrocky/statsmodels,adammenges/statsmodels,wkfwkf/statsmodels,pprett/statsmodels,jstoxrocky/statsmodels,detrout/debian-statsmodels,pprett/statsmodels,yl565/statsmodels,Averroes/statsmodels,jstoxrocky/statsmodels,YihaoLu/statsmodels,nvoron23/statsmodels,kiyoto/statsmodels,hlin117/statsmodels,nguyentu1602/statsmodels,saketkc/statsmodels,wwf5067/statsmodels,bsipocz/statsmodels,bzero/statsmodels,rgommers/statsmodels,bzero/statsmodels,ChadFulton/statsmodels,hainm/statsmodels,bert9bert/statsmodels,ChadFulton/statsmodels,detrout/debian-statsmodels,bavardage/statsmodels,wkfwkf/
statsmodels,wkfwkf/statsmodels,gef756/statsmodels,wdurhamh/statsmodels,yarikoptic/pystatsmodels,waynenilsen/statsmodels,gef756/statsmodels,yl565/statsmodels,waynenilsen/statsmodels
|
ENH: Add smoke tests for interaction_plot
|
import numpy as np
from pandas import Series
import matplotlib.pyplot as plt
from statsmodels.graphics.factorplots import interaction_plot
class TestInteractionPlot(object):
@classmethod
def setupClass(cls):
np.random.seed(12345)
cls.weight = np.random.randint(1,4,size=60)
cls.duration = np.random.randint(1,3,size=60)
cls.days = np.log(np.random.randint(1,30, size=60))
def test_plot_both(self):
fig = interaction_plot(self.weight, self.duration, self.days,
colors=['red','blue'], markers=['D','^'], ms=10)
plt.close(fig)
def test_plot_rainbow(self):
fig = interaction_plot(self.weight, self.duration, self.days,
markers=['D','^'], ms=10)
plt.close(fig)
def test_plot_pandas(self):
weight = Series(self.weight, name='Weight')
duration = Series(self.duration, name='Duration')
days = Series(self.days, name='Days')
fig = interaction_plot(weight, duration, days,
markers=['D','^'], ms=10)
ax = fig.axes[0]
trace = ax.get_legend().get_title().get_text()
assert trace == 'Duration'
assert ax.get_ylabel() == 'mean of Days'
assert ax.get_xlabel() == 'Weight'
plt.close(fig)
|
<commit_before><commit_msg>ENH: Add smoke tests for interaction_plot<commit_after>
|
import numpy as np
from pandas import Series
import matplotlib.pyplot as plt
from statsmodels.graphics.factorplots import interaction_plot
class TestInteractionPlot(object):
@classmethod
def setupClass(cls):
np.random.seed(12345)
cls.weight = np.random.randint(1,4,size=60)
cls.duration = np.random.randint(1,3,size=60)
cls.days = np.log(np.random.randint(1,30, size=60))
def test_plot_both(self):
fig = interaction_plot(self.weight, self.duration, self.days,
colors=['red','blue'], markers=['D','^'], ms=10)
plt.close(fig)
def test_plot_rainbow(self):
fig = interaction_plot(self.weight, self.duration, self.days,
markers=['D','^'], ms=10)
plt.close(fig)
def test_plot_pandas(self):
weight = Series(self.weight, name='Weight')
duration = Series(self.duration, name='Duration')
days = Series(self.days, name='Days')
fig = interaction_plot(weight, duration, days,
markers=['D','^'], ms=10)
ax = fig.axes[0]
trace = ax.get_legend().get_title().get_text()
assert trace == 'Duration'
assert ax.get_ylabel() == 'mean of Days'
assert ax.get_xlabel() == 'Weight'
plt.close(fig)
|
ENH: Add smoke tests for interaction_plotimport numpy as np
from pandas import Series
import matplotlib.pyplot as plt
from statsmodels.graphics.factorplots import interaction_plot
class TestInteractionPlot(object):
@classmethod
def setupClass(cls):
np.random.seed(12345)
cls.weight = np.random.randint(1,4,size=60)
cls.duration = np.random.randint(1,3,size=60)
cls.days = np.log(np.random.randint(1,30, size=60))
def test_plot_both(self):
fig = interaction_plot(self.weight, self.duration, self.days,
colors=['red','blue'], markers=['D','^'], ms=10)
plt.close(fig)
def test_plot_rainbow(self):
fig = interaction_plot(self.weight, self.duration, self.days,
markers=['D','^'], ms=10)
plt.close(fig)
def test_plot_pandas(self):
weight = Series(self.weight, name='Weight')
duration = Series(self.duration, name='Duration')
days = Series(self.days, name='Days')
fig = interaction_plot(weight, duration, days,
markers=['D','^'], ms=10)
ax = fig.axes[0]
trace = ax.get_legend().get_title().get_text()
assert trace == 'Duration'
assert ax.get_ylabel() == 'mean of Days'
assert ax.get_xlabel() == 'Weight'
plt.close(fig)
|
<commit_before><commit_msg>ENH: Add smoke tests for interaction_plot<commit_after>import numpy as np
from pandas import Series
import matplotlib.pyplot as plt
from statsmodels.graphics.factorplots import interaction_plot
class TestInteractionPlot(object):
@classmethod
def setupClass(cls):
np.random.seed(12345)
cls.weight = np.random.randint(1,4,size=60)
cls.duration = np.random.randint(1,3,size=60)
cls.days = np.log(np.random.randint(1,30, size=60))
def test_plot_both(self):
fig = interaction_plot(self.weight, self.duration, self.days,
colors=['red','blue'], markers=['D','^'], ms=10)
plt.close(fig)
def test_plot_rainbow(self):
fig = interaction_plot(self.weight, self.duration, self.days,
markers=['D','^'], ms=10)
plt.close(fig)
def test_plot_pandas(self):
weight = Series(self.weight, name='Weight')
duration = Series(self.duration, name='Duration')
days = Series(self.days, name='Days')
fig = interaction_plot(weight, duration, days,
markers=['D','^'], ms=10)
ax = fig.axes[0]
trace = ax.get_legend().get_title().get_text()
assert trace == 'Duration'
assert ax.get_ylabel() == 'mean of Days'
assert ax.get_xlabel() == 'Weight'
plt.close(fig)
|
|
778713a632e10fa63e7ff653f1e1300d0f4fcaed
|
main.py
|
main.py
|
#OBD2 datalogger
import obd
connection = obd.OBD()
while True:
    r = connection.query(obd.commands.RPM)
    if not r.is_null():
        print(r.value)
|
Test script for obd library
|
Test script for obd library
|
Python
|
mit
|
ProtaconSolutions/iot-hackday-2015-obd2
|
Test script for obd library
|
#OBD2 datalogger
import obd
connection = obd.OBD()
while True:
    r = connection.query(obd.commands.RPM)
    if not r.is_null():
        print(r.value)
|
<commit_before><commit_msg>Test script for obd library<commit_after>
|
#OBD2 datalogger
import obd
connection = obd.OBD()
while True:
    r = connection.query(obd.commands.RPM)
    if not r.is_null():
        print(r.value)
|
Test script for obd library#OBD2 datalogger
import obd
connection = obd.OBD()
while True:
    r = connection.query(obd.commands.RPM)
    if not r.is_null():
        print(r.value)
|
<commit_before><commit_msg>Test script for obd library<commit_after>#OBD2 datalogger
import obd
connection = obd.OBD()
while True:
    r = connection.query(obd.commands.RPM)
    if not r.is_null():
        print(r.value)
|
|
00d957a2dbfe6a0e15ce609a3994d4065daf0f0d
|
tests/test_playplaylist.py
|
tests/test_playplaylist.py
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import os
import os.path
import nose.tools as nose
from tests.utils import run_filter
def test_query_ignore_case():
"""should ignore case when querying playlists"""
results = run_filter('playplaylist', 'GuARDians')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_query_trim_whitespace():
"""should trim whitespace when querying playlists"""
results = run_filter('playplaylist', ' guardians ')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_query_partial():
"""should match partial queries when querying playlists"""
results = run_filter('playplaylist', 'of the gal')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_result_title():
"""playlist result should display playlist name in title"""
results = run_filter('playplaylist', 'guardians')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_result_subtitle():
"""playlist result should display artist name in subtitle"""
results = run_filter('playplaylist', 'guardians')
nose.assert_regexp_matches(
results[0]['subtitle'], r'\d+ songs, \d+(:\d+)+ in length')
def test_result_valid():
"""playlist result should be actionable"""
results = run_filter('playplaylist', 'guardians')
nose.assert_equal(results[0]['valid'], 'yes')
def test_result_artwork():
"""playlist result should display correct artwork as icon"""
results = run_filter('playplaylist', 'guardians')
nose.assert_true(
os.path.isabs(results[0]['icon']['path']),
'artwork path is not an absolute path')
nose.assert_true(
os.path.exists(results[0]['icon']['path']),
'artwork path does not exist')
def test_no_results():
"""should return 'No Results Found' in the case of no playlist results"""
results = run_filter('playplaylist', 'zxy')
nose.assert_equal(results[0]['title'], 'No Results Found')
nose.assert_equal(results[0]['subtitle'], 'No playlists matching \'zxy\'')
nose.assert_equal(results[0]['valid'], 'no')
nose.assert_equal(results[0]['icon']['path'],
'resources/icon-noartwork.png')
nose.assert_equal(len(results), 1)
|
Add tests for playplaylist filter
|
Add tests for playplaylist filter
|
Python
|
mit
|
caleb531/play-song,caleb531/play-song
|
Add tests for playplaylist filter
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import os
import os.path
import nose.tools as nose
from tests.utils import run_filter
def test_query_ignore_case():
"""should ignore case when querying playlists"""
results = run_filter('playplaylist', 'GuARDians')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_query_trim_whitespace():
"""should trim whitespace when querying playlists"""
results = run_filter('playplaylist', ' guardians ')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_query_partial():
"""should match partial queries when querying playlists"""
results = run_filter('playplaylist', 'of the gal')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_result_title():
"""playlist result should display playlist name in title"""
results = run_filter('playplaylist', 'guardians')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_result_subtitle():
"""playlist result should display artist name in subtitle"""
results = run_filter('playplaylist', 'guardians')
nose.assert_regexp_matches(
results[0]['subtitle'], r'\d+ songs, \d+(:\d+)+ in length')
def test_result_valid():
"""playlist result should be actionable"""
results = run_filter('playplaylist', 'guardians')
nose.assert_equal(results[0]['valid'], 'yes')
def test_result_artwork():
"""playlist result should display correct artwork as icon"""
results = run_filter('playplaylist', 'guardians')
nose.assert_true(
os.path.isabs(results[0]['icon']['path']),
'artwork path is not an absolute path')
nose.assert_true(
os.path.exists(results[0]['icon']['path']),
'artwork path does not exist')
def test_no_results():
"""should return 'No Results Found' in the case of no playlist results"""
results = run_filter('playplaylist', 'zxy')
nose.assert_equal(results[0]['title'], 'No Results Found')
nose.assert_equal(results[0]['subtitle'], 'No playlists matching \'zxy\'')
nose.assert_equal(results[0]['valid'], 'no')
nose.assert_equal(results[0]['icon']['path'],
'resources/icon-noartwork.png')
nose.assert_equal(len(results), 1)
|
<commit_before><commit_msg>Add tests for playplaylist filter<commit_after>
|
#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import os
import os.path
import nose.tools as nose
from tests.utils import run_filter
def test_query_ignore_case():
"""should ignore case when querying playlists"""
results = run_filter('playplaylist', 'GuARDians')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_query_trim_whitespace():
"""should trim whitespace when querying playlists"""
results = run_filter('playplaylist', ' guardians ')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_query_partial():
"""should match partial queries when querying playlists"""
results = run_filter('playplaylist', 'of the gal')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_result_title():
"""playlist result should display playlist name in title"""
results = run_filter('playplaylist', 'guardians')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_result_subtitle():
"""playlist result should display artist name in subtitle"""
results = run_filter('playplaylist', 'guardians')
nose.assert_regexp_matches(
results[0]['subtitle'], r'\d+ songs, \d+(:\d+)+ in length')
def test_result_valid():
"""playlist result should be actionable"""
results = run_filter('playplaylist', 'guardians')
nose.assert_equal(results[0]['valid'], 'yes')
def test_result_artwork():
"""playlist result should display correct artwork as icon"""
results = run_filter('playplaylist', 'guardians')
nose.assert_true(
os.path.isabs(results[0]['icon']['path']),
'artwork path is not an absolute path')
nose.assert_true(
os.path.exists(results[0]['icon']['path']),
'artwork path does not exist')
def test_no_results():
"""should return 'No Results Found' in the case of no playlist results"""
results = run_filter('playplaylist', 'zxy')
nose.assert_equal(results[0]['title'], 'No Results Found')
nose.assert_equal(results[0]['subtitle'], 'No playlists matching \'zxy\'')
nose.assert_equal(results[0]['valid'], 'no')
nose.assert_equal(results[0]['icon']['path'],
'resources/icon-noartwork.png')
nose.assert_equal(len(results), 1)
|
Add tests for playplaylist filter#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import os
import os.path
import nose.tools as nose
from tests.utils import run_filter
def test_query_ignore_case():
"""should ignore case when querying playlists"""
results = run_filter('playplaylist', 'GuARDians')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_query_trim_whitespace():
"""should trim whitespace when querying playlists"""
results = run_filter('playplaylist', ' guardians ')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_query_partial():
"""should match partial queries when querying playlists"""
results = run_filter('playplaylist', 'of the gal')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_result_title():
"""playlist result should display playlist name in title"""
results = run_filter('playplaylist', 'guardians')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_result_subtitle():
"""playlist result should display artist name in subtitle"""
results = run_filter('playplaylist', 'guardians')
nose.assert_regexp_matches(
results[0]['subtitle'], r'\d+ songs, \d+(:\d+)+ in length')
def test_result_valid():
"""playlist result should be actionable"""
results = run_filter('playplaylist', 'guardians')
nose.assert_equal(results[0]['valid'], 'yes')
def test_result_artwork():
"""playlist result should display correct artwork as icon"""
results = run_filter('playplaylist', 'guardians')
nose.assert_true(
os.path.isabs(results[0]['icon']['path']),
'artwork path is not an absolute path')
nose.assert_true(
os.path.exists(results[0]['icon']['path']),
'artwork path does not exist')
def test_no_results():
"""should return 'No Results Found' in the case of no playlist results"""
results = run_filter('playplaylist', 'zxy')
nose.assert_equal(results[0]['title'], 'No Results Found')
nose.assert_equal(results[0]['subtitle'], 'No playlists matching \'zxy\'')
nose.assert_equal(results[0]['valid'], 'no')
nose.assert_equal(results[0]['icon']['path'],
'resources/icon-noartwork.png')
nose.assert_equal(len(results), 1)
|
<commit_before><commit_msg>Add tests for playplaylist filter<commit_after>#!/usr/bin/env python
# coding=utf-8
from __future__ import print_function, unicode_literals
import os
import os.path
import nose.tools as nose
from tests.utils import run_filter
def test_query_ignore_case():
"""should ignore case when querying playlists"""
results = run_filter('playplaylist', 'GuARDians')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_query_trim_whitespace():
"""should trim whitespace when querying playlists"""
results = run_filter('playplaylist', ' guardians ')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_query_partial():
"""should match partial queries when querying playlists"""
results = run_filter('playplaylist', 'of the gal')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_result_title():
"""playlist result should display playlist name in title"""
results = run_filter('playplaylist', 'guardians')
nose.assert_equal(results[0]['title'], 'Guardians of the Galaxy')
def test_result_subtitle():
"""playlist result should display artist name in subtitle"""
results = run_filter('playplaylist', 'guardians')
nose.assert_regexp_matches(
results[0]['subtitle'], r'\d+ songs, \d+(:\d+)+ in length')
def test_result_valid():
"""playlist result should be actionable"""
results = run_filter('playplaylist', 'guardians')
nose.assert_equal(results[0]['valid'], 'yes')
def test_result_artwork():
"""playlist result should display correct artwork as icon"""
results = run_filter('playplaylist', 'guardians')
nose.assert_true(
os.path.isabs(results[0]['icon']['path']),
'artwork path is not an absolute path')
nose.assert_true(
os.path.exists(results[0]['icon']['path']),
'artwork path does not exist')
def test_no_results():
"""should return 'No Results Found' in the case of no playlist results"""
results = run_filter('playplaylist', 'zxy')
nose.assert_equal(results[0]['title'], 'No Results Found')
nose.assert_equal(results[0]['subtitle'], 'No playlists matching \'zxy\'')
nose.assert_equal(results[0]['valid'], 'no')
nose.assert_equal(results[0]['icon']['path'],
'resources/icon-noartwork.png')
nose.assert_equal(len(results), 1)
|
|
be9426b353013c19a8b158032486ad08f9a33e5f
|
tests/tests_imagemagick.py
|
tests/tests_imagemagick.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import shutil
import unittest
class TestBinariesInPath(unittest.TestCase):
"""
TODO: Docstring
"""
def setUp(self):
pass
def tearDown(self):
pass
def test_identify_binary_in_path(self):
self.assertIsNotNone(shutil.which('identify'))
if __name__ == "__main__":
unittest.main()
|
Test if identify binary is in PATH
|
Test if identify binary is in PATH
|
Python
|
bsd-2-clause
|
sjktje/sjkscan,sjktje/sjkscan
|
Test if identify binary is in PATH
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import shutil
import unittest
class TestBinariesInPath(unittest.TestCase):
"""
TODO: Docstring
"""
def setUp(self):
pass
def tearDown(self):
pass
def test_identify_binary_in_path(self):
self.assertIsNotNone(shutil.which('identify'))
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Test if identify binary is in PATH<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import shutil
import unittest
class TestBinariesInPath(unittest.TestCase):
"""
TODO: Docstring
"""
def setUp(self):
pass
def tearDown(self):
pass
def test_identify_binary_in_path(self):
self.assertIsNotNone(shutil.which('identify'))
if __name__ == "__main__":
unittest.main()
|
Test if identify binary is in PATH#!/usr/bin/env python
# -*- coding: utf-8 -*-
import shutil
import unittest
class TestBinariesInPath(unittest.TestCase):
"""
TODO: Docstring
"""
def setUp(self):
pass
def tearDown(self):
pass
def test_identify_binary_in_path(self):
self.assertIsNotNone(shutil.which('identify'))
if __name__ == "__main__":
unittest.main()
|
<commit_before><commit_msg>Test if identify binary is in PATH<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import shutil
import unittest
class TestBinariesInPath(unittest.TestCase):
"""
TODO: Docstring
"""
def setUp(self):
pass
def tearDown(self):
pass
def test_identify_binary_in_path(self):
self.assertIsNotNone(shutil.which('identify'))
if __name__ == "__main__":
unittest.main()
|
|
f64bb3ffb8840986bc5fc6d0a85c3ec9fb375e22
|
vlog_to_pbtype.py
|
vlog_to_pbtype.py
|
#!/usr/bin/python3
import yosys_exec
import lxml.etree as ET
import argparse, re
import os, tempfile
from yosys_json import YosysJson
"""
Convert a Verilog simulation model to a VPR `pb_type.xml`
The following Verilog attributes are allowed on instances:
- (* PB_PATH="../path/to/pb_type.xml" *) : import the pb_type for a given instance
from an external XML file instead of including it inline.
- (* GENERATE_PB *) : generate (and overwrite if applicable) the above file, as well as including it
The following are allowed on both instances (except where PB_PATH is set but GENERATE_PB isn't), and
the top level module.
- (* BLIF_MODEL=".latch" *) : specify the corresponding blif_model for a given instance
- (* CLASS="flipflop" *) : specify the corresponding class for a given instance
- (* PB_NAME="name" *) : override the name of the pb_type (default: cell type)
The following are allowed on nets within modules:
- (* SETUP="clk 10e-12" *) : specify setup time for a given clock
- (* HOLD="clk 10e-12" *) : specify hold time for a given clock
- (* CLK_TO_Q="clk 10e-12" *) : specify clock-to-output time for a given clock
- (* PB_MUX=1 *) : if the signal is driven by a $mux cell, generate a pb_type <mux> element for it
"""
parser = argparse.ArgumentParser(description='Convert a Verilog simulation model into a VPR pb_type.xml file')
parser.add_argument('infiles', metavar='input.v', type=str, nargs='+',
help='one or more Verilog input files')
parser.add_argument('--top', help='top level module, not needed if only one module across all files')
parser.add_argument('-o', help='output filename, default model.xml')
args = parser.parse_args()
vjson = yosys_exec.vlog_to_json(args.infiles, False, False)
yj = YosysJson(vjson)
|
Add a specification for the pb_type.xml generator
|
Add a specification for the pb_type.xml generator
Signed-off-by: David Shah <4be9b043912c80de45ffb490ebd07e45bc0fcd34@gmail.com>
|
Python
|
isc
|
SymbiFlow/python-symbiflow-v2x,SymbiFlow/python-symbiflow-v2x
|
Add a specification for the pb_type.xml generator
Signed-off-by: David Shah <4be9b043912c80de45ffb490ebd07e45bc0fcd34@gmail.com>
|
#!/usr/bin/python3
import yosys_exec
import lxml.etree as ET
import argparse, re
import os, tempfile
from yosys_json import YosysJson
"""
Convert a Verilog simulation model to a VPR `pb_type.xml`
The following Verilog attributes are allowed on instances:
- (* PB_PATH="../path/to/pb_type.xml" *) : import the pb_type for a given instance
from an external XML file instead of including it inline.
- (* GENERATE_PB *) : generate (and overwrite if applicable) the above file, as well as including it
The following are allowed on both instances (except where PB_PATH is set but GENERATE_PB isn't), and
the top level module.
- (* BLIF_MODEL=".latch" *) : specify the corresponding blif_model for a given instance
- (* CLASS="flipflop" *) : specify the corresponding class for a given instance
- (* PB_NAME="name" *) : override the name of the pb_type (default: cell type)
The following are allowed on nets within modules:
- (* SETUP="clk 10e-12" *) : specify setup time for a given clock
- (* HOLD="clk 10e-12" *) : specify hold time for a given clock
- (* CLK_TO_Q="clk 10e-12" *) : specify clock-to-output time for a given clock
- (* PB_MUX=1 *) : if the signal is driven by a $mux cell, generate a pb_type <mux> element for it
"""
parser = argparse.ArgumentParser(description='Convert a Verilog simulation model into a VPR pb_type.xml file')
parser.add_argument('infiles', metavar='input.v', type=str, nargs='+',
help='one or more Verilog input files')
parser.add_argument('--top', help='top level module, not needed if only one module across all files')
parser.add_argument('-o', help='output filename, default model.xml')
args = parser.parse_args()
vjson = yosys_exec.vlog_to_json(args.infiles, False, False)
yj = YosysJson(vjson)
|
<commit_before><commit_msg>Add a specification for the pb_type.xml generator
Signed-off-by: David Shah <4be9b043912c80de45ffb490ebd07e45bc0fcd34@gmail.com><commit_after>
|
#!/usr/bin/python3
import yosys_exec
import lxml.etree as ET
import argparse, re
import os, tempfile
from yosys_json import YosysJson
"""
Convert a Verilog simulation model to a VPR `pb_type.xml`
The following Verilog attributes are allowed on instances:
- (* PB_PATH="../path/to/pb_type.xml" *) : import the pb_type for a given instance
from an external XML file instead of including it inline.
- (* GENERATE_PB *) : generate (and overwrite if applicable) the above file, as well as including it
The following are allowed on both instances (except where PB_PATH is set but GENERATE_PB isn't), and
the top level module.
- (* BLIF_MODEL=".latch" *) : specify the corresponding blif_model for a given instance
- (* CLASS="flipflop" *) : specify the corresponding class for a given instance
- (* PB_NAME="name" *) : override the name of the pb_type (default: cell type)
The following are allowed on nets within modules:
- (* SETUP="clk 10e-12" *) : specify setup time for a given clock
- (* HOLD="clk 10e-12" *) : specify hold time for a given clock
- (* CLK_TO_Q="clk 10e-12" *) : specify clock-to-output time for a given clock
- (* PB_MUX=1 *) : if the signal is driven by a $mux cell, generate a pb_type <mux> element for it
"""
parser = argparse.ArgumentParser(description='Convert a Verilog simulation model into a VPR pb_type.xml file')
parser.add_argument('infiles', metavar='input.v', type=str, nargs='+',
help='one or more Verilog input files')
parser.add_argument('--top', help='top level module, not needed if only one module across all files')
parser.add_argument('-o', help='output filename, default model.xml')
args = parser.parse_args()
vjson = yosys_exec.vlog_to_json(args.infiles, False, False)
yj = YosysJson(vjson)
|
Add a specification for the pb_type.xml generator
Signed-off-by: David Shah <4be9b043912c80de45ffb490ebd07e45bc0fcd34@gmail.com>#!/usr/bin/python3
import yosys_exec
import lxml.etree as ET
import argparse, re
import os, tempfile
from yosys_json import YosysJson
"""
Convert a Verilog simulation model to a VPR `pb_type.xml`
The following Verilog attributes are allowed on instances:
- (* PB_PATH="../path/to/pb_type.xml" *) : import the pb_type for a given instance
from an external XML file instead of including it inline.
- (* GENERATE_PB *) : generate (and overwrite if applicable) the above file, as well as including it
The following are allowed on both instances (except where PB_PATH is set but GENERATE_PB isn't), and
the top level module.
- (* BLIF_MODEL=".latch" *) : specify the corresponding blif_model for a given instance
- (* CLASS="flipflop" *) : specify the corresponding class for a given instance
- (* PB_NAME="name" *) : override the name of the pb_type (default: cell type)
The following are allowed on nets within modules:
- (* SETUP="clk 10e-12" *) : specify setup time for a given clock
- (* HOLD="clk 10e-12" *) : specify hold time for a given clock
- (* CLK_TO_Q="clk 10e-12" *) : specify clock-to-output time for a given clock
- (* PB_MUX=1 *) : if the signal is driven by a $mux cell, generate a pb_type <mux> element for it
"""
parser = argparse.ArgumentParser(description='Convert a Verilog simulation model into a VPR pb_type.xml file')
parser.add_argument('infiles', metavar='input.v', type=str, nargs='+',
help='one or more Verilog input files')
parser.add_argument('--top', help='top level module, not needed if only one module across all files')
parser.add_argument('-o', help='output filename, default model.xml')
args = parser.parse_args()
vjson = yosys_exec.vlog_to_json(args.infiles, False, False)
yj = YosysJson(vjson)
|
<commit_before><commit_msg>Add a specification for the pb_type.xml generator
Signed-off-by: David Shah <4be9b043912c80de45ffb490ebd07e45bc0fcd34@gmail.com><commit_after>#!/usr/bin/python3
import yosys_exec
import lxml.etree as ET
import argparse, re
import os, tempfile
from yosys_json import YosysJson
"""
Convert a Verilog simulation model to a VPR `pb_type.xml`
The following Verilog attributes are allowed on instances:
- (* PB_PATH="../path/to/pb_type.xml" *) : import the pb_type for a given instance
from an external XML file instead of including it inline.
- (* GENERATE_PB *) : generate (and overwrite if applicable) the above file, as well as including it
The following are allowed on both instances (except where PB_PATH is set but GENERATE_PB isn't), and
the top level module.
- (* BLIF_MODEL=".latch" *) : specify the corresponding blif_model for a given instance
- (* CLASS="flipflop" *) : specify the corresponding class for a given instance
- (* PB_NAME="name" *) : override the name of the pb_type (default: cell type)
The following are allowed on nets within modules:
- (* SETUP="clk 10e-12" *) : specify setup time for a given clock
- (* HOLD="clk 10e-12" *) : specify hold time for a given clock
- (* CLK_TO_Q="clk 10e-12" *) : specify clock-to-output time for a given clock
- (* PB_MUX=1 *) : if the signal is driven by a $mux cell, generate a pb_type <mux> element for it
"""
parser = argparse.ArgumentParser(description='Convert a Verilog simulation model into a VPR pb_type.xml file')
parser.add_argument('infiles', metavar='input.v', type=str, nargs='+',
help='one or more Verilog input files')
parser.add_argument('--top', help='top level module, not needed if only one module across all files')
parser.add_argument('-o', help='output filename, default model.xml')
args = parser.parse_args()
vjson = yosys_exec.vlog_to_json(args.infiles, False, False)
yj = YosysJson(vjson)
|
|
d8d2f54ef8d75003dd6da9e936647af81479f6e1
|
ulc.py
|
ulc.py
|
import wx
from wx.lib.agw import ultimatelistctrl as ULC
class TestUltimateListCtrl(ULC.UltimateListCtrl):
def __init__(self, parent, log):
ULC.UltimateListCtrl.__init__(self, parent, -1,
agwStyle=wx.LC_REPORT |
wx.LC_VIRTUAL |
wx.LC_HRULES |
wx.LC_VRULES)
self.InsertColumn(0, "First")
self.InsertColumn(1, "Second")
self.InsertColumn(2, "Third")
self.SetColumnWidth(0, 175)
self.SetColumnWidth(1, 175)
self.SetColumnWidth(2, 175)
# After setting the column width you can specify that
# this column expands to fill the window. Only one
# column may be specified.
self.SetColumnWidth(2, ULC.ULC_AUTOSIZE_FILL)
self.SetItemCount(1000)
#---------------------------------------------------
# These methods are callbacks for implementing the
# "virtualness" of the list... Normally you would
# determine the text, attributes and/or image based
# on values from some external data source, but for
# this demo we'll just calculate them
def OnGetItemText(self, item, col):
return "Item%d, column %d" % (item, col)
def OnGetItemToolTip(self, item, col):
return None
def OnGetItemTextColour(self, item, col):
return None
def OnGetItemColumnImage(self, item, column):
return []
#---------------------------------------------------------------------------
class TestFrame(wx.Frame):
def __init__(self, parent, log):
wx.Frame.__init__(self, parent, -1, "Ultimate", size=(700, 600))
panel = wx.Panel(self, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
listCtrl = TestUltimateListCtrl(panel, log)
sizer.Add(listCtrl, 1, wx.EXPAND)
panel.SetSizer(sizer)
sizer.Layout()
self.CenterOnScreen()
self.Show()
#---------------------------------------------------------------------------
if __name__ == '__main__':
import sys
app = wx.App(0)
frame = TestFrame(None, sys.stdout)
frame.Show(True)
app.MainLoop()
|
Add UltimateListCtrl sample for virtual
|
Add UltimateListCtrl sample for virtual
|
Python
|
mit
|
kosystem/Outliner
|
Add UltimateListCtrl sample for virtual
|
import wx
from wx.lib.agw import ultimatelistctrl as ULC
class TestUltimateListCtrl(ULC.UltimateListCtrl):
def __init__(self, parent, log):
ULC.UltimateListCtrl.__init__(self, parent, -1,
agwStyle=wx.LC_REPORT |
wx.LC_VIRTUAL |
wx.LC_HRULES |
wx.LC_VRULES)
self.InsertColumn(0, "First")
self.InsertColumn(1, "Second")
self.InsertColumn(2, "Third")
self.SetColumnWidth(0, 175)
self.SetColumnWidth(1, 175)
self.SetColumnWidth(2, 175)
# After setting the column width you can specify that
# this column expands to fill the window. Only one
# column may be specified.
self.SetColumnWidth(2, ULC.ULC_AUTOSIZE_FILL)
self.SetItemCount(1000)
#---------------------------------------------------
# These methods are callbacks for implementing the
# "virtualness" of the list... Normally you would
# determine the text, attributes and/or image based
# on values from some external data source, but for
# this demo we'll just calculate them
def OnGetItemText(self, item, col):
return "Item%d, column %d" % (item, col)
def OnGetItemToolTip(self, item, col):
return None
def OnGetItemTextColour(self, item, col):
return None
def OnGetItemColumnImage(self, item, column):
return []
#---------------------------------------------------------------------------
class TestFrame(wx.Frame):
def __init__(self, parent, log):
wx.Frame.__init__(self, parent, -1, "Ultimate", size=(700, 600))
panel = wx.Panel(self, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
listCtrl = TestUltimateListCtrl(panel, log)
sizer.Add(listCtrl, 1, wx.EXPAND)
panel.SetSizer(sizer)
sizer.Layout()
self.CenterOnScreen()
self.Show()
#---------------------------------------------------------------------------
if __name__ == '__main__':
import sys
app = wx.App(0)
frame = TestFrame(None, sys.stdout)
frame.Show(True)
app.MainLoop()
|
<commit_before><commit_msg>Add UltimateListCtrl sample for virtual<commit_after>
|
import wx
from wx.lib.agw import ultimatelistctrl as ULC
class TestUltimateListCtrl(ULC.UltimateListCtrl):
def __init__(self, parent, log):
ULC.UltimateListCtrl.__init__(self, parent, -1,
agwStyle=wx.LC_REPORT |
wx.LC_VIRTUAL |
wx.LC_HRULES |
wx.LC_VRULES)
self.InsertColumn(0, "First")
self.InsertColumn(1, "Second")
self.InsertColumn(2, "Third")
self.SetColumnWidth(0, 175)
self.SetColumnWidth(1, 175)
self.SetColumnWidth(2, 175)
# After setting the column width you can specify that
# this column expands to fill the window. Only one
# column may be specified.
self.SetColumnWidth(2, ULC.ULC_AUTOSIZE_FILL)
self.SetItemCount(1000)
#---------------------------------------------------
# These methods are callbacks for implementing the
# "virtualness" of the list... Normally you would
# determine the text, attributes and/or image based
# on values from some external data source, but for
# this demo we'll just calculate them
def OnGetItemText(self, item, col):
return "Item%d, column %d" % (item, col)
def OnGetItemToolTip(self, item, col):
return None
def OnGetItemTextColour(self, item, col):
return None
def OnGetItemColumnImage(self, item, column):
return []
#---------------------------------------------------------------------------
class TestFrame(wx.Frame):
def __init__(self, parent, log):
wx.Frame.__init__(self, parent, -1, "Ultimate", size=(700, 600))
panel = wx.Panel(self, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
listCtrl = TestUltimateListCtrl(panel, log)
sizer.Add(listCtrl, 1, wx.EXPAND)
panel.SetSizer(sizer)
sizer.Layout()
self.CenterOnScreen()
self.Show()
#---------------------------------------------------------------------------
if __name__ == '__main__':
import sys
app = wx.App(0)
frame = TestFrame(None, sys.stdout)
frame.Show(True)
app.MainLoop()
|
Add UltimateListCtrl sample for virtualimport wx
from wx.lib.agw import ultimatelistctrl as ULC
class TestUltimateListCtrl(ULC.UltimateListCtrl):
def __init__(self, parent, log):
ULC.UltimateListCtrl.__init__(self, parent, -1,
agwStyle=wx.LC_REPORT |
wx.LC_VIRTUAL |
wx.LC_HRULES |
wx.LC_VRULES)
self.InsertColumn(0, "First")
self.InsertColumn(1, "Second")
self.InsertColumn(2, "Third")
self.SetColumnWidth(0, 175)
self.SetColumnWidth(1, 175)
self.SetColumnWidth(2, 175)
# After setting the column width you can specify that
# this column expands to fill the window. Only one
# column may be specified.
self.SetColumnWidth(2, ULC.ULC_AUTOSIZE_FILL)
self.SetItemCount(1000)
#---------------------------------------------------
# These methods are callbacks for implementing the
# "virtualness" of the list... Normally you would
# determine the text, attributes and/or image based
# on values from some external data source, but for
# this demo we'll just calculate them
def OnGetItemText(self, item, col):
return "Item%d, column %d" % (item, col)
def OnGetItemToolTip(self, item, col):
return None
def OnGetItemTextColour(self, item, col):
return None
def OnGetItemColumnImage(self, item, column):
return []
#---------------------------------------------------------------------------
class TestFrame(wx.Frame):
def __init__(self, parent, log):
wx.Frame.__init__(self, parent, -1, "Ultimate", size=(700, 600))
panel = wx.Panel(self, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
listCtrl = TestUltimateListCtrl(panel, log)
sizer.Add(listCtrl, 1, wx.EXPAND)
panel.SetSizer(sizer)
sizer.Layout()
self.CenterOnScreen()
self.Show()
#---------------------------------------------------------------------------
if __name__ == '__main__':
import sys
app = wx.App(0)
frame = TestFrame(None, sys.stdout)
frame.Show(True)
app.MainLoop()
|
<commit_before><commit_msg>Add UltimateListCtrl sample for virtual<commit_after>import wx
from wx.lib.agw import ultimatelistctrl as ULC
class TestUltimateListCtrl(ULC.UltimateListCtrl):
def __init__(self, parent, log):
ULC.UltimateListCtrl.__init__(self, parent, -1,
agwStyle=wx.LC_REPORT |
wx.LC_VIRTUAL |
wx.LC_HRULES |
wx.LC_VRULES)
self.InsertColumn(0, "First")
self.InsertColumn(1, "Second")
self.InsertColumn(2, "Third")
self.SetColumnWidth(0, 175)
self.SetColumnWidth(1, 175)
self.SetColumnWidth(2, 175)
# After setting the column width you can specify that
# this column expands to fill the window. Only one
# column may be specified.
self.SetColumnWidth(2, ULC.ULC_AUTOSIZE_FILL)
self.SetItemCount(1000)
#---------------------------------------------------
# These methods are callbacks for implementing the
# "virtualness" of the list... Normally you would
# determine the text, attributes and/or image based
# on values from some external data source, but for
# this demo we'll just calculate them
def OnGetItemText(self, item, col):
return "Item%d, column %d" % (item, col)
def OnGetItemToolTip(self, item, col):
return None
def OnGetItemTextColour(self, item, col):
return None
def OnGetItemColumnImage(self, item, column):
return []
#---------------------------------------------------------------------------
class TestFrame(wx.Frame):
def __init__(self, parent, log):
wx.Frame.__init__(self, parent, -1, "Ultimate", size=(700, 600))
panel = wx.Panel(self, -1)
sizer = wx.BoxSizer(wx.VERTICAL)
listCtrl = TestUltimateListCtrl(panel, log)
sizer.Add(listCtrl, 1, wx.EXPAND)
panel.SetSizer(sizer)
sizer.Layout()
self.CenterOnScreen()
self.Show()
#---------------------------------------------------------------------------
if __name__ == '__main__':
import sys
app = wx.App(0)
frame = TestFrame(None, sys.stdout)
frame.Show(True)
app.MainLoop()
|
|
eca6c15957e1ae3718ca2f258c09d53db0308a54
|
bathy/extract_UTM.py
|
bathy/extract_UTM.py
|
#!/usr/bin/env python
import numpy
import matplotlib.pyplot as plt
import mpl_toolkits.basemap.pyproj as pyproj
import extract_bathy
import bathy
def convert_UTM2longlat(file_path, zone=14, out_path=None):
data = numpy.loadtxt(file_path)
    projector = pyproj.Proj(proj='utm', zone=zone)
longitude, latitude = projector(data[:,0], data[:,1], inverse=True)
z = -data[:,2]
if isinstance(out_path, basestring):
write_bathy(out_path, longitude, latitude, z)
return longitude, latitude, z
def write_bathy(file_path, longitude, latitude, z, topotype=1):
bathy_file = open(file_path, 'w')
for (i, depth) in enumerate(z):
bathy_file.write("%s %s %s\n" % (longitude[i], latitude[i], depth))
bathy_file.close()
if __name__ == "__main__":
# base_bathy_file = "mexican_coast_pacific.tt3"
base_bathy_file = "srtm_17_09.tt3"
region_bathy = "acapulco30m.xyz"
conv_region_bathy = "acapulco_converted_30m.xyz"
new_bathy = "acapulco_projected_30m.tt3"
# Convert and write out UTM data
acapulco = convert_UTM2longlat(region_bathy, out_path=conv_region_bathy)
# Find extent of acapulco data
rect = [numpy.min(acapulco[0]), numpy.max(acapulco[0]),
numpy.min(acapulco[1]), numpy.max(acapulco[1])]
# Extract and project data
Z, delta = extract_bathy.extract(conv_region_bathy, base_bathy_file,
extent=rect, no_data_value=-9999)
extract_bathy.write_bathy(new_bathy, Z, (rect[0], rect[2]), delta,
no_data_value=-9999)
# write_bathy(new_bathy, )
# Z, delta = extract_bathy.extract(new_bathy, "mexican_coast_pacific.tt3",
# extent=rect)
# Plot new data
bathy.plot(new_bathy, coastlines=True)
plt.show()
|
Add UTM bathy extraction function
|
Add UTM bathy extraction function
|
Python
|
mit
|
mandli/compsyn-geoclaw
|
Add UTM bathy extraction function
|
#!/usr/bin/env python
import numpy
import matplotlib.pyplot as plt
import mpl_toolkits.basemap.pyproj as pyproj
import extract_bathy
import bathy
def convert_UTM2longlat(file_path, zone=14, out_path=None):
data = numpy.loadtxt(file_path)
    projector = pyproj.Proj(proj='utm', zone=zone)
longitude, latitude = projector(data[:,0], data[:,1], inverse=True)
z = -data[:,2]
if isinstance(out_path, basestring):
write_bathy(out_path, longitude, latitude, z)
return longitude, latitude, z
def write_bathy(file_path, longitude, latitude, z, topotype=1):
bathy_file = open(file_path, 'w')
for (i, depth) in enumerate(z):
bathy_file.write("%s %s %s\n" % (longitude[i], latitude[i], depth))
bathy_file.close()
if __name__ == "__main__":
# base_bathy_file = "mexican_coast_pacific.tt3"
base_bathy_file = "srtm_17_09.tt3"
region_bathy = "acapulco30m.xyz"
conv_region_bathy = "acapulco_converted_30m.xyz"
new_bathy = "acapulco_projected_30m.tt3"
# Convert and write out UTM data
acapulco = convert_UTM2longlat(region_bathy, out_path=conv_region_bathy)
# Find extent of acapulco data
rect = [numpy.min(acapulco[0]), numpy.max(acapulco[0]),
numpy.min(acapulco[1]), numpy.max(acapulco[1])]
# Extract and project data
Z, delta = extract_bathy.extract(conv_region_bathy, base_bathy_file,
extent=rect, no_data_value=-9999)
extract_bathy.write_bathy(new_bathy, Z, (rect[0], rect[2]), delta,
no_data_value=-9999)
# write_bathy(new_bathy, )
# Z, delta = extract_bathy.extract(new_bathy, "mexican_coast_pacific.tt3",
# extent=rect)
# Plot new data
bathy.plot(new_bathy, coastlines=True)
plt.show()
|
<commit_before><commit_msg>Add UTM bathy extraction function<commit_after>
|
#!/usr/bin/env python
import numpy
import matplotlib.pyplot as plt
import mpl_toolkits.basemap.pyproj as pyproj
import extract_bathy
import bathy
def convert_UTM2longlat(file_path, zone=14, out_path=None):
data = numpy.loadtxt(file_path)
    projector = pyproj.Proj(proj='utm', zone=zone)
longitude, latitude = projector(data[:,0], data[:,1], inverse=True)
z = -data[:,2]
if isinstance(out_path, basestring):
write_bathy(out_path, longitude, latitude, z)
return longitude, latitude, z
def write_bathy(file_path, longitude, latitude, z, topotype=1):
bathy_file = open(file_path, 'w')
for (i, depth) in enumerate(z):
bathy_file.write("%s %s %s\n" % (longitude[i], latitude[i], depth))
bathy_file.close()
if __name__ == "__main__":
# base_bathy_file = "mexican_coast_pacific.tt3"
base_bathy_file = "srtm_17_09.tt3"
region_bathy = "acapulco30m.xyz"
conv_region_bathy = "acapulco_converted_30m.xyz"
new_bathy = "acapulco_projected_30m.tt3"
# Convert and write out UTM data
acapulco = convert_UTM2longlat(region_bathy, out_path=conv_region_bathy)
# Find extent of acapulco data
rect = [numpy.min(acapulco[0]), numpy.max(acapulco[0]),
numpy.min(acapulco[1]), numpy.max(acapulco[1])]
# Extract and project data
Z, delta = extract_bathy.extract(conv_region_bathy, base_bathy_file,
extent=rect, no_data_value=-9999)
extract_bathy.write_bathy(new_bathy, Z, (rect[0], rect[2]), delta,
no_data_value=-9999)
# write_bathy(new_bathy, )
# Z, delta = extract_bathy.extract(new_bathy, "mexican_coast_pacific.tt3",
# extent=rect)
# Plot new data
bathy.plot(new_bathy, coastlines=True)
plt.show()
|
Add UTM bathy extraction function#!/usr/bin/env python
import numpy
import matplotlib.pyplot as plt
import mpl_toolkits.basemap.pyproj as pyproj
import extract_bathy
import bathy
def convert_UTM2longlat(file_path, zone=14, out_path=None):
data = numpy.loadtxt(file_path)
    projector = pyproj.Proj(proj='utm', zone=zone)
longitude, latitude = projector(data[:,0], data[:,1], inverse=True)
z = -data[:,2]
if isinstance(out_path, basestring):
write_bathy(out_path, longitude, latitude, z)
return longitude, latitude, z
def write_bathy(file_path, longitude, latitude, z, topotype=1):
bathy_file = open(file_path, 'w')
for (i, depth) in enumerate(z):
bathy_file.write("%s %s %s\n" % (longitude[i], latitude[i], depth))
bathy_file.close()
if __name__ == "__main__":
# base_bathy_file = "mexican_coast_pacific.tt3"
base_bathy_file = "srtm_17_09.tt3"
region_bathy = "acapulco30m.xyz"
conv_region_bathy = "acapulco_converted_30m.xyz"
new_bathy = "acapulco_projected_30m.tt3"
# Convert and write out UTM data
acapulco = convert_UTM2longlat(region_bathy, out_path=conv_region_bathy)
# Find extent of acapulco data
rect = [numpy.min(acapulco[0]), numpy.max(acapulco[0]),
numpy.min(acapulco[1]), numpy.max(acapulco[1])]
# Extract and project data
Z, delta = extract_bathy.extract(conv_region_bathy, base_bathy_file,
extent=rect, no_data_value=-9999)
extract_bathy.write_bathy(new_bathy, Z, (rect[0], rect[2]), delta,
no_data_value=-9999)
# write_bathy(new_bathy, )
# Z, delta = extract_bathy.extract(new_bathy, "mexican_coast_pacific.tt3",
# extent=rect)
# Plot new data
bathy.plot(new_bathy, coastlines=True)
plt.show()
|
<commit_before><commit_msg>Add UTM bathy extraction function<commit_after>#!/usr/bin/env python
import numpy
import matplotlib.pyplot as plt
import mpl_toolkits.basemap.pyproj as pyproj
import extract_bathy
import bathy
def convert_UTM2longlat(file_path, zone=14, out_path=None):
data = numpy.loadtxt(file_path)
    projector = pyproj.Proj(proj='utm', zone=zone)
longitude, latitude = projector(data[:,0], data[:,1], inverse=True)
z = -data[:,2]
if isinstance(out_path, basestring):
write_bathy(out_path, longitude, latitude, z)
return longitude, latitude, z
def write_bathy(file_path, longitude, latitude, z, topotype=1):
bathy_file = open(file_path, 'w')
for (i, depth) in enumerate(z):
bathy_file.write("%s %s %s\n" % (longitude[i], latitude[i], depth))
bathy_file.close()
if __name__ == "__main__":
# base_bathy_file = "mexican_coast_pacific.tt3"
base_bathy_file = "srtm_17_09.tt3"
region_bathy = "acapulco30m.xyz"
conv_region_bathy = "acapulco_converted_30m.xyz"
new_bathy = "acapulco_projected_30m.tt3"
# Convert and write out UTM data
acapulco = convert_UTM2longlat(region_bathy, out_path=conv_region_bathy)
# Find extent of acapulco data
rect = [numpy.min(acapulco[0]), numpy.max(acapulco[0]),
numpy.min(acapulco[1]), numpy.max(acapulco[1])]
# Extract and project data
Z, delta = extract_bathy.extract(conv_region_bathy, base_bathy_file,
extent=rect, no_data_value=-9999)
extract_bathy.write_bathy(new_bathy, Z, (rect[0], rect[2]), delta,
no_data_value=-9999)
# write_bathy(new_bathy, )
# Z, delta = extract_bathy.extract(new_bathy, "mexican_coast_pacific.tt3",
# extent=rect)
# Plot new data
bathy.plot(new_bathy, coastlines=True)
plt.show()
|
|
1db62b97364d530efe6bc641029ed716bfd7a45f
|
saleor/userprofile/test_userprofile.py
|
saleor/userprofile/test_userprofile.py
|
import i18naddress
import pytest
from . import forms
@pytest.mark.parametrize('country', ['CN', 'PL', 'US'])
def test_address_form_for_country(country):
data = {
'first_name': 'John',
'last_name': 'Doe',
'country': country}
form = forms.AddressForm(data)
errors = form.errors
required = i18naddress.validate_areas(country)[0]
if 'street_address' in required:
assert 'street_address_1' in errors
if 'city' in required:
assert 'city' in errors
if 'city_area' in required:
assert 'city_area' in errors
if 'country_area' in required:
assert 'country_area' in errors
if 'postal_code' in required:
assert 'postal_code' in errors
|
Add basic tests for address validation
|
Add basic tests for address validation
|
Python
|
bsd-3-clause
|
maferelo/saleor,maferelo/saleor,itbabu/saleor,mociepka/saleor,tfroehlich82/saleor,UITools/saleor,UITools/saleor,rodrigozn/CW-Shop,car3oon/saleor,KenMutemi/saleor,HyperManTT/ECommerceSaleor,UITools/saleor,rodrigozn/CW-Shop,KenMutemi/saleor,HyperManTT/ECommerceSaleor,itbabu/saleor,laosunhust/saleor,car3oon/saleor,tfroehlich82/saleor,UITools/saleor,rodrigozn/CW-Shop,laosunhust/saleor,tfroehlich82/saleor,laosunhust/saleor,rchav/vinerack,jreigel/saleor,rchav/vinerack,UITools/saleor,mociepka/saleor,jreigel/saleor,rchav/vinerack,KenMutemi/saleor,itbabu/saleor,jreigel/saleor,car3oon/saleor,mociepka/saleor,laosunhust/saleor,maferelo/saleor,HyperManTT/ECommerceSaleor
|
Add basic tests for address validation
|
import i18naddress
import pytest
from . import forms
@pytest.mark.parametrize('country', ['CN', 'PL', 'US'])
def test_address_form_for_country(country):
data = {
'first_name': 'John',
'last_name': 'Doe',
'country': country}
form = forms.AddressForm(data)
errors = form.errors
required = i18naddress.validate_areas(country)[0]
if 'street_address' in required:
assert 'street_address_1' in errors
if 'city' in required:
assert 'city' in errors
if 'city_area' in required:
assert 'city_area' in errors
if 'country_area' in required:
assert 'country_area' in errors
if 'postal_code' in required:
assert 'postal_code' in errors
|
<commit_before><commit_msg>Add basic tests for address validation<commit_after>
|
import i18naddress
import pytest
from . import forms
@pytest.mark.parametrize('country', ['CN', 'PL', 'US'])
def test_address_form_for_country(country):
data = {
'first_name': 'John',
'last_name': 'Doe',
'country': country}
form = forms.AddressForm(data)
errors = form.errors
required = i18naddress.validate_areas(country)[0]
if 'street_address' in required:
assert 'street_address_1' in errors
if 'city' in required:
assert 'city' in errors
if 'city_area' in required:
assert 'city_area' in errors
if 'country_area' in required:
assert 'country_area' in errors
if 'postal_code' in required:
assert 'postal_code' in errors
|
Add basic tests for address validationimport i18naddress
import pytest
from . import forms
@pytest.mark.parametrize('country', ['CN', 'PL', 'US'])
def test_address_form_for_country(country):
data = {
'first_name': 'John',
'last_name': 'Doe',
'country': country}
form = forms.AddressForm(data)
errors = form.errors
required = i18naddress.validate_areas(country)[0]
if 'street_address' in required:
assert 'street_address_1' in errors
if 'city' in required:
assert 'city' in errors
if 'city_area' in required:
assert 'city_area' in errors
if 'country_area' in required:
assert 'country_area' in errors
if 'postal_code' in required:
assert 'postal_code' in errors
|
<commit_before><commit_msg>Add basic tests for address validation<commit_after>import i18naddress
import pytest
from . import forms
@pytest.mark.parametrize('country', ['CN', 'PL', 'US'])
def test_address_form_for_country(country):
data = {
'first_name': 'John',
'last_name': 'Doe',
'country': country}
form = forms.AddressForm(data)
errors = form.errors
required = i18naddress.validate_areas(country)[0]
if 'street_address' in required:
assert 'street_address_1' in errors
if 'city' in required:
assert 'city' in errors
if 'city_area' in required:
assert 'city_area' in errors
if 'country_area' in required:
assert 'country_area' in errors
if 'postal_code' in required:
assert 'postal_code' in errors
|
|
3965effea3a251d69141e4ef4df6de8a2d5a5089
|
shuup_tests/browser/admin/test_menu.py
|
shuup_tests/browser/admin/test_menu.py
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import os
import pytest
from shuup.testing.browser_utils import wait_until_condition
from shuup.testing.factories import get_default_shop
from shuup.testing.utils import initialize_admin_browser_test
pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1", reason="No browser tests run.")
@pytest.mark.browser
@pytest.mark.django_db
def test_menu(browser, admin_user, live_server, settings):
shop = get_default_shop()
initialize_admin_browser_test(browser, live_server, settings)
browser.find_by_css(".menu-list li").first.click()
wait_until_condition(browser, lambda x: x.is_text_present("Add a product to see it in your store"))
# Make sure that the menu is clickable in small devices
browser.driver.set_window_size(480, 960)
browser.find_by_css("#menu-button").first.click()
browser.find_by_css(".menu-list li").first.click()
wait_until_condition(browser, lambda x: x.is_text_present("Add a product to see it in your store"))
|
Add browser test for admin menu
|
Add browser test for admin menu
Refs SH-435
|
Python
|
agpl-3.0
|
suutari-ai/shoop,suutari/shoop,suutari/shoop,shoopio/shoop,shoopio/shoop,shoopio/shoop,suutari-ai/shoop,suutari-ai/shoop,suutari/shoop
|
Add browser test for admin menu
Refs SH-435
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import os
import pytest
from shuup.testing.browser_utils import wait_until_condition
from shuup.testing.factories import get_default_shop
from shuup.testing.utils import initialize_admin_browser_test
pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1", reason="No browser tests run.")
@pytest.mark.browser
@pytest.mark.django_db
def test_menu(browser, admin_user, live_server, settings):
shop = get_default_shop()
initialize_admin_browser_test(browser, live_server, settings)
browser.find_by_css(".menu-list li").first.click()
wait_until_condition(browser, lambda x: x.is_text_present("Add a product to see it in your store"))
# Make sure that the menu is clickable in small devices
browser.driver.set_window_size(480, 960)
browser.find_by_css("#menu-button").first.click()
browser.find_by_css(".menu-list li").first.click()
wait_until_condition(browser, lambda x: x.is_text_present("Add a product to see it in your store"))
|
<commit_before><commit_msg>Add browser test for admin menu
Refs SH-435<commit_after>
|
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import os
import pytest
from shuup.testing.browser_utils import wait_until_condition
from shuup.testing.factories import get_default_shop
from shuup.testing.utils import initialize_admin_browser_test
pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1", reason="No browser tests run.")
@pytest.mark.browser
@pytest.mark.django_db
def test_menu(browser, admin_user, live_server, settings):
shop = get_default_shop()
initialize_admin_browser_test(browser, live_server, settings)
browser.find_by_css(".menu-list li").first.click()
wait_until_condition(browser, lambda x: x.is_text_present("Add a product to see it in your store"))
# Make sure that the menu is clickable in small devices
browser.driver.set_window_size(480, 960)
browser.find_by_css("#menu-button").first.click()
browser.find_by_css(".menu-list li").first.click()
wait_until_condition(browser, lambda x: x.is_text_present("Add a product to see it in your store"))
|
Add browser test for admin menu
Refs SH-435
# -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import os
import pytest
from shuup.testing.browser_utils import wait_until_condition
from shuup.testing.factories import get_default_shop
from shuup.testing.utils import initialize_admin_browser_test
pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1", reason="No browser tests run.")
@pytest.mark.browser
@pytest.mark.django_db
def test_menu(browser, admin_user, live_server, settings):
shop = get_default_shop()
initialize_admin_browser_test(browser, live_server, settings)
browser.find_by_css(".menu-list li").first.click()
wait_until_condition(browser, lambda x: x.is_text_present("Add a product to see it in your store"))
# Make sure that the menu is clickable in small devices
browser.driver.set_window_size(480, 960)
browser.find_by_css("#menu-button").first.click()
browser.find_by_css(".menu-list li").first.click()
wait_until_condition(browser, lambda x: x.is_text_present("Add a product to see it in your store"))
|
<commit_before><commit_msg>Add browser test for admin menu
Refs SH-435<commit_after># -*- coding: utf-8 -*-
# This file is part of Shuup.
#
# Copyright (c) 2012-2016, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the AGPLv3 license found in the
# LICENSE file in the root directory of this source tree.
import os
import pytest
from shuup.testing.browser_utils import wait_until_condition
from shuup.testing.factories import get_default_shop
from shuup.testing.utils import initialize_admin_browser_test
pytestmark = pytest.mark.skipif(os.environ.get("SHUUP_BROWSER_TESTS", "0") != "1", reason="No browser tests run.")
@pytest.mark.browser
@pytest.mark.django_db
def test_menu(browser, admin_user, live_server, settings):
shop = get_default_shop()
initialize_admin_browser_test(browser, live_server, settings)
browser.find_by_css(".menu-list li").first.click()
wait_until_condition(browser, lambda x: x.is_text_present("Add a product to see it in your store"))
# Make sure that the menu is clickable in small devices
browser.driver.set_window_size(480, 960)
browser.find_by_css("#menu-button").first.click()
browser.find_by_css(".menu-list li").first.click()
wait_until_condition(browser, lambda x: x.is_text_present("Add a product to see it in your store"))
|
|
f92ab83f4f8d9df627492af6b6f52bb08822485a
|
server/ntb/macros/en_to_no_macro.py
|
server/ntb/macros/en_to_no_macro.py
|
"""
En to NO Metadata Macro will perform the following changes to current content item:
- change the byline to "(NPK-NTB)"
- change the signoff to "npk@npk.no"
- change the body footer to "(©NPK)" - NB: copyrightsign, not @
- change the service to "NPKSisteNytt"
"""
def en_to_no_metadata_macro(item, **kwargs):
item['byline'] = '(NPK-NTB)'
item['sign_off'] = 'npk@npk.no'
item['body_footer'] = '(©NPK)'
item['language'] = 'nn-NO'
item['anpa_category'] = [
{
'qcode': 's',
'single_value': True,
'name': 'NPKSisteNytt',
'language': 'nn-NO',
'scheme': None
}
]
return item
name = 'EN to NO Metadata Macro'
label = 'EN to NO Metadata Macro'
callback = en_to_no_metadata_macro
access_type = 'backend'
action_type = 'direct'
from_languages = ['en']
to_languages = ['nn-NO', 'nb-NO']
|
Add translate example macro for NTB
|
feat(macro): Add translate example macro for NTB
|
Python
|
agpl-3.0
|
petrjasek/superdesk-ntb,petrjasek/superdesk-ntb,superdesk/superdesk-ntb,ioanpocol/superdesk-ntb,ioanpocol/superdesk-ntb,petrjasek/superdesk-ntb,superdesk/superdesk-ntb,superdesk/superdesk-ntb,ioanpocol/superdesk-ntb,superdesk/superdesk-ntb,petrjasek/superdesk-ntb
|
feat(macro): Add translate example macro for NTB
|
"""
En to NO Metadata Macro will perform the following changes to current content item:
- change the byline to "(NPK-NTB)"
- change the signoff to "npk@npk.no"
- change the body footer to "(©NPK)" - NB: copyrightsign, not @
- change the service to "NPKSisteNytt"
"""
def en_to_no_metadata_macro(item, **kwargs):
item['byline'] = '(NPK-NTB)'
item['sign_off'] = 'npk@npk.no'
item['body_footer'] = '(©NPK)'
item['language'] = 'nn-NO'
item['anpa_category'] = [
{
'qcode': 's',
'single_value': True,
'name': 'NPKSisteNytt',
'language': 'nn-NO',
'scheme': None
}
]
return item
name = 'EN to NO Metadata Macro'
label = 'EN to NO Metadata Macro'
callback = en_to_no_metadata_macro
access_type = 'backend'
action_type = 'direct'
from_languages = ['en']
to_languages = ['nn-NO', 'nb-NO']
|
<commit_before><commit_msg>feat(macro): Add translate example macro for NTB<commit_after>
|
"""
En to NO Metadata Macro will perform the following changes to current content item:
- change the byline to "(NPK-NTB)"
- change the signoff to "npk@npk.no"
- change the body footer to "(©NPK)" - NB: copyrightsign, not @
- change the service to "NPKSisteNytt"
"""
def en_to_no_metadata_macro(item, **kwargs):
item['byline'] = '(NPK-NTB)'
item['sign_off'] = 'npk@npk.no'
item['body_footer'] = '(©NPK)'
item['language'] = 'nn-NO'
item['anpa_category'] = [
{
'qcode': 's',
'single_value': True,
'name': 'NPKSisteNytt',
'language': 'nn-NO',
'scheme': None
}
]
return item
name = 'EN to NO Metadata Macro'
label = 'EN to NO Metadata Macro'
callback = en_to_no_metadata_macro
access_type = 'backend'
action_type = 'direct'
from_languages = ['en']
to_languages = ['nn-NO', 'nb-NO']
|
feat(macro): Add translate example macro for NTB
"""
En to NO Metadata Macro will perform the following changes to current content item:
- change the byline to "(NPK-NTB)"
- change the signoff to "npk@npk.no"
- change the body footer to "(©NPK)" - NB: copyrightsign, not @
- change the service to "NPKSisteNytt"
"""
def en_to_no_metadata_macro(item, **kwargs):
item['byline'] = '(NPK-NTB)'
item['sign_off'] = 'npk@npk.no'
item['body_footer'] = '(©NPK)'
item['language'] = 'nn-NO'
item['anpa_category'] = [
{
'qcode': 's',
'single_value': True,
'name': 'NPKSisteNytt',
'language': 'nn-NO',
'scheme': None
}
]
return item
name = 'EN to NO Metadata Macro'
label = 'EN to NO Metadata Macro'
callback = en_to_no_metadata_macro
access_type = 'backend'
action_type = 'direct'
from_languages = ['en']
to_languages = ['nn-NO', 'nb-NO']
|
<commit_before><commit_msg>feat(macro): Add translate example macro for NTB<commit_after>"""
En to NO Metadata Macro will perform the following changes to current content item:
- change the byline to "(NPK-NTB)"
- change the signoff to "npk@npk.no"
- change the body footer to "(©NPK)" - NB: copyrightsign, not @
- change the service to "NPKSisteNytt"
"""
def en_to_no_metadata_macro(item, **kwargs):
item['byline'] = '(NPK-NTB)'
item['sign_off'] = 'npk@npk.no'
item['body_footer'] = '(©NPK)'
item['language'] = 'nn-NO'
item['anpa_category'] = [
{
'qcode': 's',
'single_value': True,
'name': 'NPKSisteNytt',
'language': 'nn-NO',
'scheme': None
}
]
return item
name = 'EN to NO Metadata Macro'
label = 'EN to NO Metadata Macro'
callback = en_to_no_metadata_macro
access_type = 'backend'
action_type = 'direct'
from_languages = ['en']
to_languages = ['nn-NO', 'nb-NO']
|
|
83b82b873224c601ca7efbec96e8fa26a9a284f4
|
skan/test/test_vendored_correlate.py
|
skan/test/test_vendored_correlate.py
|
from contextlib import contextmanager
from time import time
import numpy as np
from skan.vendored import thresholding as th
@contextmanager
def timer():
result = [0.]
t = time()
yield result
result[0] = time() - t
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1[0] < 2 * t0[0]
|
Add test for new fast correlate
|
Add test for new fast correlate
|
Python
|
bsd-3-clause
|
jni/skan
|
Add test for new fast correlate
|
from contextlib import contextmanager
from time import time
import numpy as np
from skan.vendored import thresholding as th
@contextmanager
def timer():
result = [0.]
t = time()
yield result
result[0] = time() - t
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1[0] < 2 * t0[0]
|
<commit_before><commit_msg>Add test for new fast correlate<commit_after>
|
from contextlib import contextmanager
from time import time
import numpy as np
from skan.vendored import thresholding as th
@contextmanager
def timer():
result = [0.]
t = time()
yield result
result[0] = time() - t
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1[0] < 2 * t0[0]
|
Add test for new fast correlate
from contextlib import contextmanager
from time import time
import numpy as np
from skan.vendored import thresholding as th
@contextmanager
def timer():
result = [0.]
t = time()
yield result
result[0] = time() - t
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1[0] < 2 * t0[0]
|
<commit_before><commit_msg>Add test for new fast correlate<commit_after>from contextlib import contextmanager
from time import time
import numpy as np
from skan.vendored import thresholding as th
@contextmanager
def timer():
result = [0.]
t = time()
yield result
result[0] = time() - t
def test_fast_sauvola():
image = np.random.rand(512, 512)
w0 = 25
w1 = 251
_ = th.threshold_sauvola(image, window_size=3)
with timer() as t0:
th.threshold_sauvola(image, window_size=w0)
with timer() as t1:
th.threshold_sauvola(image, window_size=w1)
assert t1[0] < 2 * t0[0]
|
|
17d1a9f41f91c03b640a112313c9afb068a4ead4
|
auth_mac/tools.py
|
auth_mac/tools.py
|
import datetime
import hmac, hashlib, base64
def _build_authheader(method, data):
datastr = ", ".join(['{0}="{1}"'.format(x, y) for x, y in data.iteritems()])
return "{0} {1}".format(method, datastr)
class Signature(object):
"A class to ease the creation of MAC signatures"
MAC = None
def __init__(self, credentials, host="example.com", port=80):
self.MAC = credentials
self.host = host
self.port = port
self.ext = ""
def get_timestamp(self):
timestamp = datetime.datetime.utcnow() - datetime.datetime(1970,1,1)
return timestamp.days * 24 * 3600 + timestamp.seconds
def get_nonce(self):
return User.objects.make_random_password(8)
def sign_request(self, uri, method="GET", timestamp=None, nonce=None):
"""Signs a request to a specified URI and returns the signature"""
if not timestamp:
self.timestamp = self.get_timestamp()
timestamp = self.timestamp
if not nonce:
self.nonce = self.get_nonce()
nonce = self.nonce
self.nonce = nonce
self.timestamp = timestamp
method = method.upper()
if not method in ("GET", "POST"):
raise RuntimeError("HTTP Method {0} not supported!".format(method))
data = [timestamp, nonce, method, uri, self.host, self.port, self.ext]
data = [str(x) for x in data]
self.base_string = "\n".join(data) + "\n"
# print repr(basestr)
# print "Signing with key '{0}'".format(self.MAC.key)
hm = hmac.new(self.MAC.key, self.base_string, hashlib.sha1)
self.signature = base64.b64encode(hm.digest())
# print self.signature
return self.signature
def get_header(self):
# {"id": "h480djs93hd8","ts": "1336363200","nonce":"dj83hs9s","mac":"bhCQXTVyfj5cmA9uKkPFx1zeOXM="}
data = {"id": self.MAC.identifier,
"ts": self.timestamp,
"nonce": self.nonce,
"mac": self.signature }
return _build_authheader("MAC", data)
|
Move the signature class into its own tool .py
|
Move the signature class into its own tool .py
|
Python
|
mit
|
ndevenish/auth_mac
|
Move the signature class into its own tool .py
|
import datetime
import hmac, hashlib, base64
def _build_authheader(method, data):
datastr = ", ".join(['{0}="{1}"'.format(x, y) for x, y in data.iteritems()])
return "{0} {1}".format(method, datastr)
class Signature(object):
"A class to ease the creation of MAC signatures"
MAC = None
def __init__(self, credentials, host="example.com", port=80):
self.MAC = credentials
self.host = host
self.port = port
self.ext = ""
def get_timestamp(self):
timestamp = datetime.datetime.utcnow() - datetime.datetime(1970,1,1)
return timestamp.days * 24 * 3600 + timestamp.seconds
def get_nonce(self):
return User.objects.make_random_password(8)
def sign_request(self, uri, method="GET", timestamp=None, nonce=None):
"""Signs a request to a specified URI and returns the signature"""
if not timestamp:
self.timestamp = self.get_timestamp()
timestamp = self.timestamp
if not nonce:
self.nonce = self.get_nonce()
nonce = self.nonce
self.nonce = nonce
self.timestamp = timestamp
method = method.upper()
if not method in ("GET", "POST"):
raise RuntimeError("HTTP Method {0} not supported!".format(method))
data = [timestamp, nonce, method, uri, self.host, self.port, self.ext]
data = [str(x) for x in data]
self.base_string = "\n".join(data) + "\n"
# print repr(basestr)
# print "Signing with key '{0}'".format(self.MAC.key)
hm = hmac.new(self.MAC.key, self.base_string, hashlib.sha1)
self.signature = base64.b64encode(hm.digest())
# print self.signature
return self.signature
def get_header(self):
# {"id": "h480djs93hd8","ts": "1336363200","nonce":"dj83hs9s","mac":"bhCQXTVyfj5cmA9uKkPFx1zeOXM="}
data = {"id": self.MAC.identifier,
"ts": self.timestamp,
"nonce": self.nonce,
"mac": self.signature }
return _build_authheader("MAC", data)
|
<commit_before><commit_msg>Move the signature class into its own tool .py<commit_after>
|
import datetime
import hmac, hashlib, base64
def _build_authheader(method, data):
datastr = ", ".join(['{0}="{1}"'.format(x, y) for x, y in data.iteritems()])
return "{0} {1}".format(method, datastr)
class Signature(object):
"A class to ease the creation of MAC signatures"
MAC = None
def __init__(self, credentials, host="example.com", port=80):
self.MAC = credentials
self.host = host
self.port = port
self.ext = ""
def get_timestamp(self):
timestamp = datetime.datetime.utcnow() - datetime.datetime(1970,1,1)
return timestamp.days * 24 * 3600 + timestamp.seconds
def get_nonce(self):
return User.objects.make_random_password(8)
def sign_request(self, uri, method="GET", timestamp=None, nonce=None):
"""Signs a request to a specified URI and returns the signature"""
if not timestamp:
self.timestamp = self.get_timestamp()
timestamp = self.timestamp
if not nonce:
self.nonce = self.get_nonce()
nonce = self.nonce
self.nonce = nonce
self.timestamp = timestamp
method = method.upper()
if not method in ("GET", "POST"):
raise RuntimeError("HTTP Method {0} not supported!".format(method))
data = [timestamp, nonce, method, uri, self.host, self.port, self.ext]
data = [str(x) for x in data]
self.base_string = "\n".join(data) + "\n"
# print repr(basestr)
# print "Signing with key '{0}'".format(self.MAC.key)
hm = hmac.new(self.MAC.key, self.base_string, hashlib.sha1)
self.signature = base64.b64encode(hm.digest())
# print self.signature
return self.signature
def get_header(self):
# {"id": "h480djs93hd8","ts": "1336363200","nonce":"dj83hs9s","mac":"bhCQXTVyfj5cmA9uKkPFx1zeOXM="}
data = {"id": self.MAC.identifier,
"ts": self.timestamp,
"nonce": self.nonce,
"mac": self.signature }
return _build_authheader("MAC", data)
|
Move the signature class into its own tool .py
import datetime
import hmac, hashlib, base64
def _build_authheader(method, data):
datastr = ", ".join(['{0}="{1}"'.format(x, y) for x, y in data.iteritems()])
return "{0} {1}".format(method, datastr)
class Signature(object):
"A class to ease the creation of MAC signatures"
MAC = None
def __init__(self, credentials, host="example.com", port=80):
self.MAC = credentials
self.host = host
self.port = port
self.ext = ""
def get_timestamp(self):
timestamp = datetime.datetime.utcnow() - datetime.datetime(1970,1,1)
return timestamp.days * 24 * 3600 + timestamp.seconds
def get_nonce(self):
return User.objects.make_random_password(8)
def sign_request(self, uri, method="GET", timestamp=None, nonce=None):
"""Signs a request to a specified URI and returns the signature"""
if not timestamp:
self.timestamp = self.get_timestamp()
timestamp = self.timestamp
if not nonce:
self.nonce = self.get_nonce()
nonce = self.nonce
self.nonce = nonce
self.timestamp = timestamp
method = method.upper()
if not method in ("GET", "POST"):
raise RuntimeError("HTTP Method {0} not supported!".format(method))
data = [timestamp, nonce, method, uri, self.host, self.port, self.ext]
data = [str(x) for x in data]
self.base_string = "\n".join(data) + "\n"
# print repr(basestr)
# print "Signing with key '{0}'".format(self.MAC.key)
hm = hmac.new(self.MAC.key, self.base_string, hashlib.sha1)
self.signature = base64.b64encode(hm.digest())
# print self.signature
return self.signature
def get_header(self):
# {"id": "h480djs93hd8","ts": "1336363200","nonce":"dj83hs9s","mac":"bhCQXTVyfj5cmA9uKkPFx1zeOXM="}
data = {"id": self.MAC.identifier,
"ts": self.timestamp,
"nonce": self.nonce,
"mac": self.signature }
return _build_authheader("MAC", data)
|
<commit_before><commit_msg>Move the signature class into its own tool .py<commit_after>
import datetime
import hmac, hashlib, base64
def _build_authheader(method, data):
datastr = ", ".join(['{0}="{1}"'.format(x, y) for x, y in data.iteritems()])
return "{0} {1}".format(method, datastr)
class Signature(object):
"A class to ease the creation of MAC signatures"
MAC = None
def __init__(self, credentials, host="example.com", port=80):
self.MAC = credentials
self.host = host
self.port = port
self.ext = ""
def get_timestamp(self):
timestamp = datetime.datetime.utcnow() - datetime.datetime(1970,1,1)
return timestamp.days * 24 * 3600 + timestamp.seconds
def get_nonce(self):
return User.objects.make_random_password(8)
def sign_request(self, uri, method="GET", timestamp=None, nonce=None):
"""Signs a request to a specified URI and returns the signature"""
if not timestamp:
self.timestamp = self.get_timestamp()
timestamp = self.timestamp
if not nonce:
self.nonce = self.get_nonce()
nonce = self.nonce
self.nonce = nonce
self.timestamp = timestamp
method = method.upper()
if not method in ("GET", "POST"):
raise RuntimeError("HTTP Method {0} not supported!".format(method))
data = [timestamp, nonce, method, uri, self.host, self.port, self.ext]
data = [str(x) for x in data]
self.base_string = "\n".join(data) + "\n"
# print repr(basestr)
# print "Signing with key '{0}'".format(self.MAC.key)
hm = hmac.new(self.MAC.key, self.base_string, hashlib.sha1)
self.signature = base64.b64encode(hm.digest())
# print self.signature
return self.signature
def get_header(self):
# {"id": "h480djs93hd8","ts": "1336363200","nonce":"dj83hs9s","mac":"bhCQXTVyfj5cmA9uKkPFx1zeOXM="}
data = {"id": self.MAC.identifier,
"ts": self.timestamp,
"nonce": self.nonce,
"mac": self.signature }
return _build_authheader("MAC", data)
|
|
6b7b83ce2d54b43ae3b5bd5fa67ee34e603ccacb
|
gene.py
|
gene.py
|
import random
HUMAN_DNA_ELEMENTS = set(["A","C","G","T"])
def clone_gene(g):
return Gene(g.dna, g.elements, g.name)
class Gene:
# Gene - a sequence of DNAs.
# dna - a list which contains items from elements.
# elements - a set of element which is the basic component of dna.
def __init__(self, dna, elements=HUMAN_DNA_ELEMENTS, name=""):
assert type(dna) == list
assert type(elements) == set
self.__elements = elements
self.__name = name
self.__set_dna(dna)
def __get_dna(self):
return self.__dna
def __set_dna(self, dna):
assert type(dna) == list
assert all(elem in self.__elements for elem in dna)
self.__dna = dna
def __get_dna_length(self):
return len(self.__dna)
def __get_gene_name(self):
return self.__name
def __get_gene_elements(self):
return self.__elements
dna = property(__get_dna, __set_dna)
length = property(__get_dna_length)
name = property(__get_gene_name)
elements = property(__get_gene_elements)
def mutate(self, prob=0.5):
new_dna = []
for elem in self.dna:
if random.random() < prob:
elem = random.choice(list(self.__elements.difference([elem])))
new_dna.append(elem)
self.dna = new_dna
|
Add a basic Gene class to contain a sequence of dna and support mutation.
|
Add a basic Gene class to contain a sequence of dna and support mutation.
|
Python
|
mit
|
PyOCL/TSP,PyOCL/OpenCLGA,PyOCL/OpenCLGA,PyOCL/oclGA,PyOCL/oclGA,PyOCL/TSP,PyOCL/oclGA,PyOCL/OpenCLGA,PyOCL/oclGA
|
Add a basic Gene class to contain a sequence of dna and support mutation.
|
import random
HUMAN_DNA_ELEMENTS = set(["A","C","G","T"])
def clone_gene(g):
return Gene(g.dna, g.elements, g.name)
class Gene:
# Gene - a sequence of DNAs.
# dna - a list which contains items from elements.
# elements - a set of element which is the basic component of dna.
def __init__(self, dna, elements=HUMAN_DNA_ELEMENTS, name=""):
assert type(dna) == list
assert type(elements) == set
self.__elements = elements
self.__name = name
self.__set_dna(dna)
def __get_dna(self):
return self.__dna
def __set_dna(self, dna):
assert type(dna) == list
assert all(elem in self.__elements for elem in dna)
self.__dna = dna
def __get_dna_length(self):
return len(self.__dna)
def __get_gene_name(self):
return self.__name
def __get_gene_elements(self):
return self.__elements
dna = property(__get_dna, __set_dna)
length = property(__get_dna_length)
name = property(__get_gene_name)
elements = property(__get_gene_elements)
def mutate(self, prob=0.5):
new_dna = []
for elem in self.dna:
if random.random() < prob:
elem = random.choice(list(self.__elements.difference([elem])))
new_dna.append(elem)
self.dna = new_dna
|
<commit_before><commit_msg>Add a basic Gene class to contain a sequence of dna and support mutation.<commit_after>
|
import random
HUMAN_DNA_ELEMENTS = set(["A","C","G","T"])
def clone_gene(g):
return Gene(g.dna, g.elements, g.name)
class Gene:
# Gene - a sequence of DNAs.
# dna - a list which contains items from elements.
# elements - a set of element which is the basic component of dna.
def __init__(self, dna, elements=HUMAN_DNA_ELEMENTS, name=""):
assert type(dna) == list
assert type(elements) == set
self.__elements = elements
self.__name = name
self.__set_dna(dna)
def __get_dna(self):
return self.__dna
def __set_dna(self, dna):
assert type(dna) == list
assert all(elem in self.__elements for elem in dna)
self.__dna = dna
def __get_dna_length(self):
return len(self.__dna)
def __get_gene_name(self):
return self.__name
def __get_gene_elements(self):
return self.__elements
dna = property(__get_dna, __set_dna)
length = property(__get_dna_length)
name = property(__get_gene_name)
elements = property(__get_gene_elements)
def mutate(self, prob=0.5):
new_dna = []
for elem in self.dna:
if random.random() < prob:
elem = random.choice(list(self.__elements.difference([elem])))
new_dna.append(elem)
self.dna = new_dna
|
Add a basic Gene class to contain a sequence of dna and support mutation.
import random
HUMAN_DNA_ELEMENTS = set(["A","C","G","T"])
def clone_gene(g):
return Gene(g.dna, g.elements, g.name)
class Gene:
# Gene - a sequence of DNAs.
# dna - a list which contains items from elements.
# elements - a set of element which is the basic component of dna.
def __init__(self, dna, elements=HUMAN_DNA_ELEMENTS, name=""):
assert type(dna) == list
assert type(elements) == set
self.__elements = elements
self.__name = name
self.__set_dna(dna)
def __get_dna(self):
return self.__dna
def __set_dna(self, dna):
assert type(dna) == list
assert all(elem in self.__elements for elem in dna)
self.__dna = dna
def __get_dna_length(self):
return len(self.__dna)
def __get_gene_name(self):
return self.__name
def __get_gene_elements(self):
return self.__elements
dna = property(__get_dna, __set_dna)
length = property(__get_dna_length)
name = property(__get_gene_name)
elements = property(__get_gene_elements)
def mutate(self, prob=0.5):
new_dna = []
for elem in self.dna:
if random.random() < prob:
elem = random.choice(list(self.__elements.difference([elem])))
new_dna.append(elem)
self.dna = new_dna
|
<commit_before><commit_msg>Add a basic Gene class to contain a sequence of dna and support mutation.<commit_after>import random
HUMAN_DNA_ELEMENTS = set(["A","C","G","T"])
def clone_gene(g):
return Gene(g.dna, g.elements, g.name)
class Gene:
# Gene - a sequence of DNAs.
# dna - a list which contains items from elements.
# elements - a set of element which is the basic component of dna.
def __init__(self, dna, elements=HUMAN_DNA_ELEMENTS, name=""):
assert type(dna) == list
assert type(elements) == set
self.__elements = elements
self.__name = name
self.__set_dna(dna)
def __get_dna(self):
return self.__dna
def __set_dna(self, dna):
assert type(dna) == list
assert all(elem in self.__elements for elem in dna)
self.__dna = dna
def __get_dna_length(self):
return len(self.__dna)
def __get_gene_name(self):
return self.__name
def __get_gene_elements(self):
return self.__elements
dna = property(__get_dna, __set_dna)
length = property(__get_dna_length)
name = property(__get_gene_name)
elements = property(__get_gene_elements)
def mutate(self, prob=0.5):
new_dna = []
for elem in self.dna:
if random.random() < prob:
elem = random.choice(list(self.__elements.difference([elem])))
new_dna.append(elem)
self.dna = new_dna
|
|
df8239b846b2f61b59751071c402f5d6b626abb1
|
scripts/mbedtls_dev/bignum_mod_raw.py
|
scripts/mbedtls_dev/bignum_mod_raw.py
|
"""Framework classes for generation of bignum mod_raw test cases."""
# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
from abc import ABCMeta
from typing import Dict, Iterator, List, Tuple
from . import test_data_generation
from . import bignum_common
class BignumCoreTarget(test_data_generation.BaseTarget, metaclass=ABCMeta):
#pylint: disable=abstract-method
"""Target for bignum mod_raw test case generation."""
target_basename = 'test_suite_bignum_mod_raw.generated'
|
Add script for generating mod_raw test cases
|
Add script for generating mod_raw test cases
This commit only adds the boilerplate, no actual tests are added.
Signed-off-by: Janos Follath <b175c17abe0474719672be1367bb75ee28bd5c71@arm.com>
|
Python
|
apache-2.0
|
Mbed-TLS/mbedtls,ARMmbed/mbedtls,Mbed-TLS/mbedtls,Mbed-TLS/mbedtls,ARMmbed/mbedtls,ARMmbed/mbedtls,Mbed-TLS/mbedtls,ARMmbed/mbedtls
|
Add script for generating mod_raw test cases
This commit only adds the boilerplate, no actual tests are added.
Signed-off-by: Janos Follath <b175c17abe0474719672be1367bb75ee28bd5c71@arm.com>
|
"""Framework classes for generation of bignum mod_raw test cases."""
# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
from abc import ABCMeta
from typing import Dict, Iterator, List, Tuple
from . import test_data_generation
from . import bignum_common
class BignumCoreTarget(test_data_generation.BaseTarget, metaclass=ABCMeta):
#pylint: disable=abstract-method
"""Target for bignum mod_raw test case generation."""
target_basename = 'test_suite_bignum_mod_raw.generated'
|
<commit_before><commit_msg>Add script for generating mod_raw test cases
This commit only adds the boilerplate, no actual tests are added.
Signed-off-by: Janos Follath <b175c17abe0474719672be1367bb75ee28bd5c71@arm.com><commit_after>
|
"""Framework classes for generation of bignum mod_raw test cases."""
# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
from abc import ABCMeta
from typing import Dict, Iterator, List, Tuple
from . import test_data_generation
from . import bignum_common
class BignumCoreTarget(test_data_generation.BaseTarget, metaclass=ABCMeta):
#pylint: disable=abstract-method
"""Target for bignum mod_raw test case generation."""
target_basename = 'test_suite_bignum_mod_raw.generated'
|
Add script for generating mod_raw test cases
This commit only adds the boilerplate, no actual tests are added.
Signed-off-by: Janos Follath <b175c17abe0474719672be1367bb75ee28bd5c71@arm.com>
"""Framework classes for generation of bignum mod_raw test cases."""
# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
from abc import ABCMeta
from typing import Dict, Iterator, List, Tuple
from . import test_data_generation
from . import bignum_common
class BignumCoreTarget(test_data_generation.BaseTarget, metaclass=ABCMeta):
#pylint: disable=abstract-method
"""Target for bignum mod_raw test case generation."""
target_basename = 'test_suite_bignum_mod_raw.generated'
|
<commit_before><commit_msg>Add script for generating mod_raw test cases
This commit only adds the boilerplate, no actual tests are added.
Signed-off-by: Janos Follath <b175c17abe0474719672be1367bb75ee28bd5c71@arm.com><commit_after>"""Framework classes for generation of bignum mod_raw test cases."""
# Copyright The Mbed TLS Contributors
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import random
from abc import ABCMeta
from typing import Dict, Iterator, List, Tuple
from . import test_data_generation
from . import bignum_common
class BignumCoreTarget(test_data_generation.BaseTarget, metaclass=ABCMeta):
#pylint: disable=abstract-method
"""Target for bignum mod_raw test case generation."""
target_basename = 'test_suite_bignum_mod_raw.generated'
|
|
ef6d88b1caa830a2b095d80dff7d32f624517149
|
src/Scripts/endtoend-constants.py
|
src/Scripts/endtoend-constants.py
|
from num2words import num2words
# Generate BitFunnel chunk data, with documents consisting of:
# {'one'}, {'one', 'two'}, {'one', 'two', 'three'}, ...
def print_numbers(num):
all_numbers = ""
for i in range(1, num+1):
english_number = num2words(i).replace(' ','-')
all_numbers += english_number + "\\0"
print(all_numbers)
def print_doc(id, name):
print('\"{0:0>16x}\\0\"'.format(id))
print('\"01\\0{}\\0\" \"{}\\0\\0\"'.format(name, id))
print('\"00\\0\"')
print_numbers(id)
print('\"\\0\\0\"')
for i in range (1, 64+1):
print_doc(i, 'Sequential')
|
Add script for generating simple chunk data.
|
Add script for generating simple chunk data.
|
Python
|
mit
|
danluu/BitFunnel,danluu/BitFunnel,BitFunnel/BitFunnel,danluu/BitFunnel,BitFunnel/BitFunnel,BitFunnel/BitFunnel,BitFunnel/BitFunnel,BitFunnel/BitFunnel,danluu/BitFunnel,BitFunnel/BitFunnel,danluu/BitFunnel,danluu/BitFunnel
|
Add script for generating simple chunk data.
|
from num2words import num2words
# Generate BitFunnel chunk data, with documents consisting of:
# {'one'}, {'one', 'two'}, {'one', 'two', 'three'}, ...
def print_numbers(num):
all_numbers = ""
for i in range(1, num+1):
english_number = num2words(i).replace(' ','-')
all_numbers += english_number + "\\0"
print(all_numbers)
def print_doc(id, name):
print('\"{0:0>16x}\\0\"'.format(id))
print('\"01\\0{}\\0\" \"{}\\0\\0\"'.format(name, id))
print('\"00\\0\"')
print_numbers(id)
print('\"\\0\\0\"')
for i in range (1, 64+1):
print_doc(i, 'Sequential')
|
<commit_before><commit_msg>Add script for generating simple chunk data.<commit_after>
|
from num2words import num2words
# Generate BitFunnel chunk data, with documents consisting of:
# {'one'}, {'one', 'two'}, {'one', 'two', 'three'}, ...
def print_numbers(num):
all_numbers = ""
for i in range(1, num+1):
english_number = num2words(i).replace(' ','-')
all_numbers += english_number + "\\0"
print(all_numbers)
def print_doc(id, name):
print('\"{0:0>16x}\\0\"'.format(id))
print('\"01\\0{}\\0\" \"{}\\0\\0\"'.format(name, id))
print('\"00\\0\"')
print_numbers(id)
print('\"\\0\\0\"')
for i in range (1, 64+1):
print_doc(i, 'Sequential')
|
Add script for generating simple chunk data.
from num2words import num2words
# Generate BitFunnel chunk data, with documents consisting of:
# {'one'}, {'one', 'two'}, {'one', 'two', 'three'}, ...
def print_numbers(num):
all_numbers = ""
for i in range(1, num+1):
english_number = num2words(i).replace(' ','-')
all_numbers += english_number + "\\0"
print(all_numbers)
def print_doc(id, name):
print('\"{0:0>16x}\\0\"'.format(id))
print('\"01\\0{}\\0\" \"{}\\0\\0\"'.format(name, id))
print('\"00\\0\"')
print_numbers(id)
print('\"\\0\\0\"')
for i in range (1, 64+1):
print_doc(i, 'Sequential')
|
<commit_before><commit_msg>Add script for generating simple chunk data.<commit_after>from num2words import num2words
# Generate BitFunnel chunk data, with documents consisting of:
# {'one'}, {'one', 'two'}, {'one', 'two', 'three'}, ...
def print_numbers(num):
all_numbers = ""
for i in range(1, num+1):
english_number = num2words(i).replace(' ','-')
all_numbers += english_number + "\\0"
print(all_numbers)
def print_doc(id, name):
print('\"{0:0>16x}\\0\"'.format(id))
print('\"01\\0{}\\0\" \"{}\\0\\0\"'.format(name, id))
print('\"00\\0\"')
print_numbers(id)
print('\"\\0\\0\"')
for i in range (1, 64+1):
print_doc(i, 'Sequential')
|
|
dad3a70d6fe3677f8324c1f177831d7eedd3fac5
|
tests/processes/test_command.py
|
tests/processes/test_command.py
|
# Built-in module #
# Internal modules #
from plumbing.processes import prll_map
def add(xy):
x,y = xy
return x+y
inputs = [(5,5), (2,2), (20,30)]
results = prll_map(add, inputs)
print results
|
Test file for parallel map
|
Test file for parallel map
|
Python
|
mit
|
xapple/plumbing
|
Test file for parallel map
|
# Built-in module #
# Internal modules #
from plumbing.processes import prll_map
def add(xy):
x,y = xy
return x+y
inputs = [(5,5), (2,2), (20,30)]
results = prll_map(add, inputs)
print results
|
<commit_before><commit_msg>Test file for parallel map<commit_after>
|
# Built-in module #
# Internal modules #
from plumbing.processes import prll_map
def add(xy):
x,y = xy
return x+y
inputs = [(5,5), (2,2), (20,30)]
results = prll_map(add, inputs)
print results
|
Test file for parallel map
# Built-in module #
# Internal modules #
from plumbing.processes import prll_map
def add(xy):
x,y = xy
return x+y
inputs = [(5,5), (2,2), (20,30)]
results = prll_map(add, inputs)
print results
|
<commit_before><commit_msg>Test file for parallel map<commit_after># Built-in module #
# Internal modules #
from plumbing.processes import prll_map
def add(xy):
x,y = xy
return x+y
inputs = [(5,5), (2,2), (20,30)]
results = prll_map(add, inputs)
print results
|
|
e8baa80d9e958ce59c46c0cf4c43bdadac2707cd
|
joommf/minimisation_example.py
|
joommf/minimisation_example.py
|
def main():
from sim import Sim
from mesh import Mesh
from energies.exchange import Exchange
from energies.demag import Demag
from energies.zeeman import Zeeman
from drivers import evolver
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example_min', debug=True)
# Add energies.
sim.add_energy(Exchange(A))
sim.add_energy(Demag())
sim.add_energy(Zeeman(H))
sim.add_evolver(evolver.Minimiser(m_init, Ms, 'test'))
# Set initial magnetisation.
# Run simulation.
sim.minimise()
print("Done")
if __name__ == "__main__":
main()
|
def main():
from sim import Sim
from mesh import Mesh
from energies.exchange import Exchange
from energies.demag import Demag
from energies.zeeman import Zeeman
from drivers import evolver
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='minimisation_example', debug=True)
# Add energies.
sim.add_energy(Exchange(A))
sim.add_energy(Demag())
sim.add_energy(Zeeman(H))
sim.add_evolver(evolver.Minimiser(m_init, Ms, 'test'))
# Set initial magnetisation.
# Run simulation.
sim.minimise()
print("Done")
if __name__ == "__main__":
main()
|
Change minimisation example mif name
|
Change minimisation example mif name
|
Python
|
bsd-2-clause
|
ryanpepper/oommf-python,fangohr/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,fangohr/oommf-python,ryanpepper/oommf-python,ryanpepper/oommf-python
|
def main():
from sim import Sim
from mesh import Mesh
from energies.exchange import Exchange
from energies.demag import Demag
from energies.zeeman import Zeeman
from drivers import evolver
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example_min', debug=True)
# Add energies.
sim.add_energy(Exchange(A))
sim.add_energy(Demag())
sim.add_energy(Zeeman(H))
sim.add_evolver(evolver.Minimiser(m_init, Ms, 'test'))
# Set initial magnetisation.
# Run simulation.
sim.minimise()
print("Done")
if __name__ == "__main__":
main()
Change minimisation example mif name
|
def main():
from sim import Sim
from mesh import Mesh
from energies.exchange import Exchange
from energies.demag import Demag
from energies.zeeman import Zeeman
from drivers import evolver
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='minimisation_example', debug=True)
# Add energies.
sim.add_energy(Exchange(A))
sim.add_energy(Demag())
sim.add_energy(Zeeman(H))
sim.add_evolver(evolver.Minimiser(m_init, Ms, 'test'))
# Set initial magnetisation.
# Run simulation.
sim.minimise()
print("Done")
if __name__ == "__main__":
main()
|
<commit_before>def main():
from sim import Sim
from mesh import Mesh
from energies.exchange import Exchange
from energies.demag import Demag
from energies.zeeman import Zeeman
from drivers import evolver
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example_min', debug=True)
# Add energies.
sim.add_energy(Exchange(A))
sim.add_energy(Demag())
sim.add_energy(Zeeman(H))
sim.add_evolver(evolver.Minimiser(m_init, Ms, 'test'))
# Set initial magnetisation.
# Run simulation.
sim.minimise()
print("Done")
if __name__ == "__main__":
main()
<commit_msg>Change minimisation example mif name<commit_after>
|
def main():
from sim import Sim
from mesh import Mesh
from energies.exchange import Exchange
from energies.demag import Demag
from energies.zeeman import Zeeman
from drivers import evolver
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='minimisation_example', debug=True)
# Add energies.
sim.add_energy(Exchange(A))
sim.add_energy(Demag())
sim.add_energy(Zeeman(H))
sim.add_evolver(evolver.Minimiser(m_init, Ms, 'test'))
# Set initial magnetisation.
# Run simulation.
sim.minimise()
print("Done")
if __name__ == "__main__":
main()
|
def main():
from sim import Sim
from mesh import Mesh
from energies.exchange import Exchange
from energies.demag import Demag
from energies.zeeman import Zeeman
from drivers import evolver
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example_min', debug=True)
# Add energies.
sim.add_energy(Exchange(A))
sim.add_energy(Demag())
sim.add_energy(Zeeman(H))
sim.add_evolver(evolver.Minimiser(m_init, Ms, 'test'))
# Set initial magnetisation.
# Run simulation.
sim.minimise()
print("Done")
if __name__ == "__main__":
main()
Change minimisation example mif name
def main():
from sim import Sim
from mesh import Mesh
from energies.exchange import Exchange
from energies.demag import Demag
from energies.zeeman import Zeeman
from drivers import evolver
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='minimisation_example', debug=True)
# Add energies.
sim.add_energy(Exchange(A))
sim.add_energy(Demag())
sim.add_energy(Zeeman(H))
sim.add_evolver(evolver.Minimiser(m_init, Ms, 'test'))
# Set initial magnetisation.
# Run simulation.
sim.minimise()
print("Done")
if __name__ == "__main__":
main()
|
<commit_before>def main():
from sim import Sim
from mesh import Mesh
from energies.exchange import Exchange
from energies.demag import Demag
from energies.zeeman import Zeeman
from drivers import evolver
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='small_example_min', debug=True)
# Add energies.
sim.add_energy(Exchange(A))
sim.add_energy(Demag())
sim.add_energy(Zeeman(H))
sim.add_evolver(evolver.Minimiser(m_init, Ms, 'test'))
# Set initial magnetisation.
# Run simulation.
sim.minimise()
print("Done")
if __name__ == "__main__":
main()
<commit_msg>Change minimisation example mif name<commit_after>def main():
from sim import Sim
from mesh import Mesh
from energies.exchange import Exchange
from energies.demag import Demag
from energies.zeeman import Zeeman
from drivers import evolver
# Mesh specification.
lx = ly = lz = 50e-9 # x, y, and z dimensions (m)
dx = dy = dz = 5e-9 # x, y, and z cell dimensions (m)
Ms = 8e5 # saturation magnetisation (A/m)
A = 1e-11 # exchange energy constant (J/m)
H = (1e3, 0, 0) # external magnetic field (A/m)
m_init = (0, 0, 1) # initial magnetisation
# Create a mesh.
mesh = Mesh((lx, ly, lz), (dx, dy, dz))
# Create a simulation object.
sim = Sim(mesh, Ms, name='minimisation_example', debug=True)
# Add energies.
sim.add_energy(Exchange(A))
sim.add_energy(Demag())
sim.add_energy(Zeeman(H))
sim.add_evolver(evolver.Minimiser(m_init, Ms, 'test'))
# Set initial magnetisation.
# Run simulation.
sim.minimise()
print("Done")
if __name__ == "__main__":
main()
|
800beaf2cbff9fe42fe8879d5fbeaa650cb23980
|
tools/humann2/transform_json_to_pkl.py
|
tools/humann2/transform_json_to_pkl.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import cPickle as pickle
import bz2
import json
import argparse
def transform_json_to_pkl(args):
with open(args.json_input, 'r') as json_file:
json_str = json_file.read()
metadata = json.loads(json_str)
for marker in metadata["markers"]:
metadata["markers"][marker]["ext"] = set(metadata["markers"][marker]["ext"])
pkl_output = bz2.BZ2File(args.pkl_output, 'w')
pickle.dump(metadata, pkl_output, pickle.HIGHEST_PROTOCOL)
pkl_output.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--json_input', required=True)
parser.add_argument('--pkl_output', required=True)
args = parser.parse_args()
transform_json_to_pkl(args)
|
Add python script to manage custom metaphlan2 db
|
Add python script to manage custom metaphlan2 db
|
Python
|
apache-2.0
|
ASaiM/galaxytools,ASaiM/galaxytools
|
Add python script to manage custom metaphlan2 db
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import cPickle as pickle
import bz2
import json
import argparse
def transform_json_to_pkl(args):
with open(args.json_input, 'r') as json_file:
json_str = json_file.read()
metadata = json.loads(json_str)
for marker in metadata["markers"]:
metadata["markers"][marker]["ext"] = set(metadata["markers"][marker]["ext"])
pkl_output = bz2.BZ2File(args.pkl_output, 'w')
pickle.dump(metadata, pkl_output, pickle.HIGHEST_PROTOCOL)
pkl_output.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--json_input', required=True)
parser.add_argument('--pkl_output', required=True)
args = parser.parse_args()
transform_json_to_pkl(args)
|
<commit_before><commit_msg>Add python script to manage custom metaphlan2 db<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import cPickle as pickle
import bz2
import json
import argparse
def transform_json_to_pkl(args):
with open(args.json_input, 'r') as json_file:
json_str = json_file.read()
metadata = json.loads(json_str)
for marker in metadata["markers"]:
metadata["markers"][marker]["ext"] = set(metadata["markers"][marker]["ext"])
pkl_output = bz2.BZ2File(args.pkl_output, 'w')
pickle.dump(metadata, pkl_output, pickle.HIGHEST_PROTOCOL)
pkl_output.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--json_input', required=True)
parser.add_argument('--pkl_output', required=True)
args = parser.parse_args()
transform_json_to_pkl(args)
|
Add python script to manage custom metaphlan2 db
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import cPickle as pickle
import bz2
import json
import argparse
def transform_json_to_pkl(args):
with open(args.json_input, 'r') as json_file:
json_str = json_file.read()
metadata = json.loads(json_str)
for marker in metadata["markers"]:
metadata["markers"][marker]["ext"] = set(metadata["markers"][marker]["ext"])
pkl_output = bz2.BZ2File(args.pkl_output, 'w')
pickle.dump(metadata, pkl_output, pickle.HIGHEST_PROTOCOL)
pkl_output.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--json_input', required=True)
parser.add_argument('--pkl_output', required=True)
args = parser.parse_args()
transform_json_to_pkl(args)
|
<commit_before><commit_msg>Add python script to manage custom metaphlan2 db<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import cPickle as pickle
import bz2
import json
import argparse
def transform_json_to_pkl(args):
with open(args.json_input, 'r') as json_file:
json_str = json_file.read()
metadata = json.loads(json_str)
for marker in metadata["markers"]:
metadata["markers"][marker]["ext"] = set(metadata["markers"][marker]["ext"])
pkl_output = bz2.BZ2File(args.pkl_output, 'w')
pickle.dump(metadata, pkl_output, pickle.HIGHEST_PROTOCOL)
pkl_output.close()
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--json_input', required=True)
parser.add_argument('--pkl_output', required=True)
args = parser.parse_args()
transform_json_to_pkl(args)
|
|
027c107df3deb0a3d6c7b913eb6841f43adeb3e1
|
wagtail/wagtaildocs/migrations/0006_copy_document_permissions_to_collections.py
|
wagtail/wagtaildocs/migrations/0006_copy_document_permissions_to_collections.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def get_document_permissions(apps):
# return a queryset of the 'add_document' and 'change_document' permissions
Permission = apps.get_model('auth.Permission')
ContentType = apps.get_model('contenttypes.ContentType')
document_content_type, _created = ContentType.objects.get_or_create(
model='document',
app_label='wagtaildocs',
)
return Permission.objects.filter(
content_type=document_content_type,
codename__in=['add_document', 'change_document']
)
def copy_document_permissions_to_collections(apps, schema_editor):
Collection = apps.get_model('wagtailcore.Collection')
Group = apps.get_model('auth.Group')
GroupCollectionPermission = apps.get_model('wagtailcore.GroupCollectionPermission')
root_collection = Collection.objects.get(depth=1)
for permission in get_document_permissions(apps):
for group in Group.objects.filter(permissions=permission):
GroupCollectionPermission.objects.create(
group=group,
collection=root_collection,
permission=permission
)
def remove_document_permissions_from_collections(apps, schema_editor):
GroupCollectionPermission = apps.get_model('wagtailcore.GroupCollectionPermission')
document_permissions = get_document_permissions(apps)
GroupCollectionPermission.objects.filter(permission__in=document_permissions).delete()
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0026_group_collection_permission'),
('wagtaildocs', '0005_document_collection'),
]
operations = [
migrations.RunPython(
copy_document_permissions_to_collections,
remove_document_permissions_from_collections),
]
|
Add migration to copy global document permissions to the default collection
|
Add migration to copy global document permissions to the default collection
|
Python
|
bsd-3-clause
|
rsalmaso/wagtail,timorieber/wagtail,kurtrwall/wagtail,rsalmaso/wagtail,FlipperPA/wagtail,torchbox/wagtail,gasman/wagtail,Toshakins/wagtail,mikedingjan/wagtail,iansprice/wagtail,mikedingjan/wagtail,thenewguy/wagtail,hamsterbacke23/wagtail,kurtw/wagtail,zerolab/wagtail,chrxr/wagtail,quru/wagtail,quru/wagtail,zerolab/wagtail,gogobook/wagtail,kurtw/wagtail,davecranwell/wagtail,wagtail/wagtail,Toshakins/wagtail,nimasmi/wagtail,nealtodd/wagtail,quru/wagtail,gasman/wagtail,takeflight/wagtail,gogobook/wagtail,kaedroho/wagtail,quru/wagtail,jnns/wagtail,chrxr/wagtail,kurtw/wagtail,davecranwell/wagtail,torchbox/wagtail,mixxorz/wagtail,nilnvoid/wagtail,nimasmi/wagtail,mixxorz/wagtail,nutztherookie/wagtail,chrxr/wagtail,nimasmi/wagtail,jnns/wagtail,iansprice/wagtail,hamsterbacke23/wagtail,thenewguy/wagtail,FlipperPA/wagtail,nilnvoid/wagtail,rsalmaso/wagtail,takeflight/wagtail,gogobook/wagtail,gasman/wagtail,takeflight/wagtail,mixxorz/wagtail,wagtail/wagtail,chrxr/wagtail,iansprice/wagtail,zerolab/wagtail,zerolab/wagtail,mikedingjan/wagtail,kaedroho/wagtail,kaedroho/wagtail,gogobook/wagtail,kaedroho/wagtail,iansprice/wagtail,mixxorz/wagtail,Toshakins/wagtail,torchbox/wagtail,kurtrwall/wagtail,gasman/wagtail,davecranwell/wagtail,nimasmi/wagtail,nutztherookie/wagtail,thenewguy/wagtail,nutztherookie/wagtail,rsalmaso/wagtail,torchbox/wagtail,FlipperPA/wagtail,rsalmaso/wagtail,gasman/wagtail,nutztherookie/wagtail,hamsterbacke23/wagtail,nealtodd/wagtail,jnns/wagtail,mixxorz/wagtail,Toshakins/wagtail,takeflight/wagtail,kurtw/wagtail,wagtail/wagtail,thenewguy/wagtail,wagtail/wagtail,nealtodd/wagtail,thenewguy/wagtail,mikedingjan/wagtail,nilnvoid/wagtail,nealtodd/wagtail,timorieber/wagtail,timorieber/wagtail,hamsterbacke23/wagtail,wagtail/wagtail,kurtrwall/wagtail,timorieber/wagtail,nilnvoid/wagtail,jnns/wagtail,zerolab/wagtail,kurtrwall/wagtail,FlipperPA/wagtail,davecranwell/wagtail,kaedroho/wagtail
|
Add migration to copy global document permissions to the default collection
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def get_document_permissions(apps):
# return a queryset of the 'add_document' and 'change_document' permissions
Permission = apps.get_model('auth.Permission')
ContentType = apps.get_model('contenttypes.ContentType')
document_content_type, _created = ContentType.objects.get_or_create(
model='document',
app_label='wagtaildocs',
)
return Permission.objects.filter(
content_type=document_content_type,
codename__in=['add_document', 'change_document']
)
def copy_document_permissions_to_collections(apps, schema_editor):
Collection = apps.get_model('wagtailcore.Collection')
Group = apps.get_model('auth.Group')
GroupCollectionPermission = apps.get_model('wagtailcore.GroupCollectionPermission')
root_collection = Collection.objects.get(depth=1)
for permission in get_document_permissions(apps):
for group in Group.objects.filter(permissions=permission):
GroupCollectionPermission.objects.create(
group=group,
collection=root_collection,
permission=permission
)
def remove_document_permissions_from_collections(apps, schema_editor):
GroupCollectionPermission = apps.get_model('wagtailcore.GroupCollectionPermission')
document_permissions = get_document_permissions(apps)
GroupCollectionPermission.objects.filter(permission__in=document_permissions).delete()
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0026_group_collection_permission'),
('wagtaildocs', '0005_document_collection'),
]
operations = [
migrations.RunPython(
copy_document_permissions_to_collections,
remove_document_permissions_from_collections),
]
|
<commit_before><commit_msg>Add migration to copy global document permissions to the default collection<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def get_document_permissions(apps):
# return a queryset of the 'add_document' and 'change_document' permissions
Permission = apps.get_model('auth.Permission')
ContentType = apps.get_model('contenttypes.ContentType')
document_content_type, _created = ContentType.objects.get_or_create(
model='document',
app_label='wagtaildocs',
)
return Permission.objects.filter(
content_type=document_content_type,
codename__in=['add_document', 'change_document']
)
def copy_document_permissions_to_collections(apps, schema_editor):
Collection = apps.get_model('wagtailcore.Collection')
Group = apps.get_model('auth.Group')
GroupCollectionPermission = apps.get_model('wagtailcore.GroupCollectionPermission')
root_collection = Collection.objects.get(depth=1)
for permission in get_document_permissions(apps):
for group in Group.objects.filter(permissions=permission):
GroupCollectionPermission.objects.create(
group=group,
collection=root_collection,
permission=permission
)
def remove_document_permissions_from_collections(apps, schema_editor):
GroupCollectionPermission = apps.get_model('wagtailcore.GroupCollectionPermission')
document_permissions = get_document_permissions(apps)
GroupCollectionPermission.objects.filter(permission__in=document_permissions).delete()
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0026_group_collection_permission'),
('wagtaildocs', '0005_document_collection'),
]
operations = [
migrations.RunPython(
copy_document_permissions_to_collections,
remove_document_permissions_from_collections),
]
|
Add migration to copy global document permissions to the default collection# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def get_document_permissions(apps):
# return a queryset of the 'add_document' and 'change_document' permissions
Permission = apps.get_model('auth.Permission')
ContentType = apps.get_model('contenttypes.ContentType')
document_content_type, _created = ContentType.objects.get_or_create(
model='document',
app_label='wagtaildocs',
)
return Permission.objects.filter(
content_type=document_content_type,
codename__in=['add_document', 'change_document']
)
def copy_document_permissions_to_collections(apps, schema_editor):
Collection = apps.get_model('wagtailcore.Collection')
Group = apps.get_model('auth.Group')
GroupCollectionPermission = apps.get_model('wagtailcore.GroupCollectionPermission')
root_collection = Collection.objects.get(depth=1)
for permission in get_document_permissions(apps):
for group in Group.objects.filter(permissions=permission):
GroupCollectionPermission.objects.create(
group=group,
collection=root_collection,
permission=permission
)
def remove_document_permissions_from_collections(apps, schema_editor):
GroupCollectionPermission = apps.get_model('wagtailcore.GroupCollectionPermission')
document_permissions = get_document_permissions(apps)
GroupCollectionPermission.objects.filter(permission__in=document_permissions).delete()
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0026_group_collection_permission'),
('wagtaildocs', '0005_document_collection'),
]
operations = [
migrations.RunPython(
copy_document_permissions_to_collections,
remove_document_permissions_from_collections),
]
|
<commit_before><commit_msg>Add migration to copy global document permissions to the default collection<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
def get_document_permissions(apps):
# return a queryset of the 'add_document' and 'change_document' permissions
Permission = apps.get_model('auth.Permission')
ContentType = apps.get_model('contenttypes.ContentType')
document_content_type, _created = ContentType.objects.get_or_create(
model='document',
app_label='wagtaildocs',
)
return Permission.objects.filter(
content_type=document_content_type,
codename__in=['add_document', 'change_document']
)
def copy_document_permissions_to_collections(apps, schema_editor):
Collection = apps.get_model('wagtailcore.Collection')
Group = apps.get_model('auth.Group')
GroupCollectionPermission = apps.get_model('wagtailcore.GroupCollectionPermission')
root_collection = Collection.objects.get(depth=1)
for permission in get_document_permissions(apps):
for group in Group.objects.filter(permissions=permission):
GroupCollectionPermission.objects.create(
group=group,
collection=root_collection,
permission=permission
)
def remove_document_permissions_from_collections(apps, schema_editor):
GroupCollectionPermission = apps.get_model('wagtailcore.GroupCollectionPermission')
document_permissions = get_document_permissions(apps)
GroupCollectionPermission.objects.filter(permission__in=document_permissions).delete()
class Migration(migrations.Migration):
dependencies = [
('wagtailcore', '0026_group_collection_permission'),
('wagtaildocs', '0005_document_collection'),
]
operations = [
migrations.RunPython(
copy_document_permissions_to_collections,
remove_document_permissions_from_collections),
]
|
|
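A note on the migration record above: RunPython pairs a forward callable with a reverse callable, and the reverse step must delete exactly the rows the forward step created so the migration can be unapplied cleanly. A framework-free sketch of that pairing, with plain dicts standing in for the Group/Collection/GroupCollectionPermission rows (illustrative only, not part of the dataset record):
# Forward/reverse pairing sketched without Django: the reverse step removes
# only the rows the forward step added, mirroring the RunPython migration above.
def copy_permissions_forward(groups, root_collection, permissions, table):
    """Mirror each group's matching global permission onto the root collection."""
    for permission in permissions:
        for group in groups:
            if permission in group["permissions"]:
                table.append({"group": group["name"],
                              "collection": root_collection,
                              "permission": permission})
def copy_permissions_backward(permissions, table):
    """Undo the forward step by dropping rows for the affected permissions."""
    table[:] = [row for row in table if row["permission"] not in permissions]
if __name__ == "__main__":
    groups = [{"name": "Editors", "permissions": ["add_document"]},
              {"name": "Viewers", "permissions": []}]
    table = []
    copy_permissions_forward(groups, "root", ["add_document", "change_document"], table)
    assert table == [{"group": "Editors", "collection": "root", "permission": "add_document"}]
    copy_permissions_backward(["add_document", "change_document"], table)
    assert table == []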
c796603748c1382688293d7df2947942f1c0b157
|
wsgi.py
|
wsgi.py
|
#!/usr/bin/python
import sys
import logging
logging.basicConfig(stream=sys.stderr)
sys.path.insert(0,"/var/www/grand-bargain-monitoring/")
from src.app import app as application
application.secret_key = 'Add your secret key'
|
Add WSGI file for deployment with apache
|
Add WSGI file for deployment with apache
Note using this means that the Apache config files (typically in /etc/apache2/sites-available)
should be configured to point to this WSGI file. See Digital Ocean tutorial for reference:
https://www.digitalocean.com/community/tutorials/how-to-deploy-a-flask-application-on-an-ubuntu-vps
|
Python
|
mit
|
devinit/grand-bargain-monitoring,devinit/grand-bargain-monitoring,devinit/grand-bargain-monitoring
|
Add WSGI file for deployment with apache
Note using this means that the Apache config files (typically in /etc/apache2/sites-available)
should be configured to point to this WSGI file. See Digital Ocean tutorial for reference:
https://www.digitalocean.com/community/tutorials/how-to-deploy-a-flask-application-on-an-ubuntu-vps
|
#!/usr/bin/python
import sys
import logging
logging.basicConfig(stream=sys.stderr)
sys.path.insert(0,"/var/www/grand-bargain-monitoring/")
from src.app import app as application
application.secret_key = 'Add your secret key'
|
<commit_before><commit_msg>Add WSGI file for deployment with apache
Note using this means that the Apache config files (typically in /etc/apache2/sites-available)
should be configured to point to this WSGI file. See Digital Ocean tutorial for reference:
https://www.digitalocean.com/community/tutorials/how-to-deploy-a-flask-application-on-an-ubuntu-vps<commit_after>
|
#!/usr/bin/python
import sys
import logging
logging.basicConfig(stream=sys.stderr)
sys.path.insert(0,"/var/www/grand-bargain-monitoring/")
from src.app import app as application
application.secret_key = 'Add your secret key'
|
Add WSGI file for deployment with apache
Note using this means that the Apache config files (typically in /etc/apache2/sites-available)
should be configured to point to this WSGI file. See Digital Ocean tutorial for reference:
https://www.digitalocean.com/community/tutorials/how-to-deploy-a-flask-application-on-an-ubuntu-vps#!/usr/bin/python
import sys
import logging
logging.basicConfig(stream=sys.stderr)
sys.path.insert(0,"/var/www/grand-bargain-monitoring/")
from src.app import app as application
application.secret_key = 'Add your secret key'
|
<commit_before><commit_msg>Add WSGI file for deployment with apache
Note using this means that the Apache config files (typically in /etc/apache2/sites-available)
should be configured to point to this WSGI file. See Digital Ocean tutorial for reference:
https://www.digitalocean.com/community/tutorials/how-to-deploy-a-flask-application-on-an-ubuntu-vps<commit_after>#!/usr/bin/python
import sys
import logging
logging.basicConfig(stream=sys.stderr)
sys.path.insert(0,"/var/www/grand-bargain-monitoring/")
from src.app import app as application
application.secret_key = 'Add your secret key'
|
|
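Before pointing an Apache VirtualHost at a wsgi.py like the one above, it helps to confirm that the WSGI callable itself answers requests. A stdlib-only smoke test follows; the trivial application below is a stand-in, and in practice you would import the callable defined in wsgi.py instead:
# Serve a WSGI callable with wsgiref to verify the entry point independently of Apache.
from wsgiref.simple_server import make_server
def application(environ, start_response):
    # Stand-in for `from wsgi import application` (the Flask app in src.app).
    start_response("200 OK", [("Content-Type", "text/plain")])
    return [b"wsgi entry point is alive\n"]
if __name__ == "__main__":
    with make_server("127.0.0.1", 8051, application) as httpd:
        print("Serving on http://127.0.0.1:8051 (Ctrl+C to stop)")
        httpd.serve_forever()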
a730bb6d273dcc4355ae783af96a2368098c366f
|
tests/sentry/receivers/test_core.py
|
tests/sentry/receivers/test_core.py
|
# coding: utf-8
from __future__ import absolute_import
from django.conf import settings
from sentry.models import Project, Team, User
from sentry.receivers.core import create_default_project
from sentry.testutils import TestCase
class CreateDefaultProjectTest(TestCase):
def test_simple(self):
user, _ = User.objects.get_or_create(is_superuser=True, defaults={
'username': 'test'
})
Team.objects.filter(project__id=settings.SENTRY_PROJECT).delete()
Project.objects.filter(id=settings.SENTRY_PROJECT).delete()
create_default_project(created_models=[Project])
project = Project.objects.filter(id=settings.SENTRY_PROJECT)
assert project.exists() is True
project = project.get()
assert project.owner == user
assert project.public is False
assert project.name == 'Sentry (Internal)'
assert project.slug == 'sentry'
team = project.team
assert team.owner == user
assert team.slug == 'sentry'
|
# coding: utf-8
from __future__ import absolute_import
from django.conf import settings
from sentry.models import Project, ProjectKey, User
from sentry.receivers.core import create_default_project
from sentry.testutils import TestCase
class CreateDefaultProjectTest(TestCase):
def test_simple(self):
user, _ = User.objects.get_or_create(is_superuser=True, defaults={
'username': 'test'
})
Project.objects.filter(id=settings.SENTRY_PROJECT).delete()
create_default_project(created_models=[Project])
project = Project.objects.get(id=settings.SENTRY_PROJECT)
assert project.owner == user
assert project.public is False
assert project.name == 'Sentry (Internal)'
assert project.slug == 'sentry'
team = project.team
assert team.owner == user
assert team.slug == 'sentry'
pk = ProjectKey.objects.get(project=project)
assert not pk.roles.api
assert pk.roles.store
assert pk.user is None
|
Add tests to confirm store/not api is default
|
Add tests to confirm store/not api is default
|
Python
|
bsd-3-clause
|
boneyao/sentry,wujuguang/sentry,mitsuhiko/sentry,imankulov/sentry,gg7/sentry,imankulov/sentry,beeftornado/sentry,zenefits/sentry,JamesMura/sentry,JamesMura/sentry,looker/sentry,fuziontech/sentry,songyi199111/sentry,wong2/sentry,argonemyth/sentry,boneyao/sentry,ewdurbin/sentry,wujuguang/sentry,camilonova/sentry,mvaled/sentry,looker/sentry,nicholasserra/sentry,1tush/sentry,Kryz/sentry,gencer/sentry,kevinastone/sentry,alexm92/sentry,looker/sentry,JackDanger/sentry,jean/sentry,BuildingLink/sentry,alexm92/sentry,kevinastone/sentry,BayanGroup/sentry,daevaorn/sentry,TedaLIEz/sentry,fuziontech/sentry,BayanGroup/sentry,JackDanger/sentry,jean/sentry,gg7/sentry,nicholasserra/sentry,Kryz/sentry,ewdurbin/sentry,vperron/sentry,ifduyue/sentry,mvaled/sentry,JamesMura/sentry,fotinakis/sentry,pauloschilling/sentry,fuziontech/sentry,ngonzalvez/sentry,llonchj/sentry,jokey2k/sentry,camilonova/sentry,vperron/sentry,hongliang5623/sentry,BuildingLink/sentry,gg7/sentry,argonemyth/sentry,mvaled/sentry,jean/sentry,felixbuenemann/sentry,JTCunning/sentry,wong2/sentry,Kryz/sentry,jean/sentry,JackDanger/sentry,gencer/sentry,kevinlondon/sentry,hongliang5623/sentry,mvaled/sentry,ngonzalvez/sentry,kevinastone/sentry,kevinlondon/sentry,jean/sentry,JamesMura/sentry,pauloschilling/sentry,jokey2k/sentry,felixbuenemann/sentry,argonemyth/sentry,zenefits/sentry,gencer/sentry,drcapulet/sentry,hongliang5623/sentry,zenefits/sentry,daevaorn/sentry,korealerts1/sentry,songyi199111/sentry,beeftornado/sentry,Natim/sentry,BuildingLink/sentry,drcapulet/sentry,zenefits/sentry,wujuguang/sentry,boneyao/sentry,daevaorn/sentry,ifduyue/sentry,fotinakis/sentry,felixbuenemann/sentry,looker/sentry,mitsuhiko/sentry,ewdurbin/sentry,vperron/sentry,songyi199111/sentry,mvaled/sentry,TedaLIEz/sentry,JTCunning/sentry,gencer/sentry,fotinakis/sentry,TedaLIEz/sentry,1tush/sentry,jokey2k/sentry,ifduyue/sentry,nicholasserra/sentry,JTCunning/sentry,korealerts1/sentry,kevinlondon/sentry,llonchj/sentry,alexm92/sentry,ifduyue/sentry,BayanGroup/sentry,zenefits/sentry,ngonzalvez/sentry,daevaorn/sentry,1tush/sentry,BuildingLink/sentry,looker/sentry,wong2/sentry,beeftornado/sentry,Natim/sentry,pauloschilling/sentry,gencer/sentry,mvaled/sentry,BuildingLink/sentry,ifduyue/sentry,korealerts1/sentry,fotinakis/sentry,JamesMura/sentry,drcapulet/sentry,imankulov/sentry,camilonova/sentry,Natim/sentry,llonchj/sentry
|
# coding: utf-8
from __future__ import absolute_import
from django.conf import settings
from sentry.models import Project, Team, User
from sentry.receivers.core import create_default_project
from sentry.testutils import TestCase
class CreateDefaultProjectTest(TestCase):
def test_simple(self):
user, _ = User.objects.get_or_create(is_superuser=True, defaults={
'username': 'test'
})
Team.objects.filter(project__id=settings.SENTRY_PROJECT).delete()
Project.objects.filter(id=settings.SENTRY_PROJECT).delete()
create_default_project(created_models=[Project])
project = Project.objects.filter(id=settings.SENTRY_PROJECT)
assert project.exists() is True
project = project.get()
assert project.owner == user
assert project.public is False
assert project.name == 'Sentry (Internal)'
assert project.slug == 'sentry'
team = project.team
assert team.owner == user
assert team.slug == 'sentry'
Add tests to confirm store/not api is default
|
# coding: utf-8
from __future__ import absolute_import
from django.conf import settings
from sentry.models import Project, ProjectKey, User
from sentry.receivers.core import create_default_project
from sentry.testutils import TestCase
class CreateDefaultProjectTest(TestCase):
def test_simple(self):
user, _ = User.objects.get_or_create(is_superuser=True, defaults={
'username': 'test'
})
Project.objects.filter(id=settings.SENTRY_PROJECT).delete()
create_default_project(created_models=[Project])
project = Project.objects.get(id=settings.SENTRY_PROJECT)
assert project.owner == user
assert project.public is False
assert project.name == 'Sentry (Internal)'
assert project.slug == 'sentry'
team = project.team
assert team.owner == user
assert team.slug == 'sentry'
pk = ProjectKey.objects.get(project=project)
assert not pk.roles.api
assert pk.roles.store
assert pk.user is None
|
<commit_before># coding: utf-8
from __future__ import absolute_import
from django.conf import settings
from sentry.models import Project, Team, User
from sentry.receivers.core import create_default_project
from sentry.testutils import TestCase
class CreateDefaultProjectTest(TestCase):
def test_simple(self):
user, _ = User.objects.get_or_create(is_superuser=True, defaults={
'username': 'test'
})
Team.objects.filter(project__id=settings.SENTRY_PROJECT).delete()
Project.objects.filter(id=settings.SENTRY_PROJECT).delete()
create_default_project(created_models=[Project])
project = Project.objects.filter(id=settings.SENTRY_PROJECT)
assert project.exists() is True
project = project.get()
assert project.owner == user
assert project.public is False
assert project.name == 'Sentry (Internal)'
assert project.slug == 'sentry'
team = project.team
assert team.owner == user
assert team.slug == 'sentry'
<commit_msg>Add tests to confirm store/not api is default<commit_after>
|
# coding: utf-8
from __future__ import absolute_import
from django.conf import settings
from sentry.models import Project, ProjectKey, User
from sentry.receivers.core import create_default_project
from sentry.testutils import TestCase
class CreateDefaultProjectTest(TestCase):
def test_simple(self):
user, _ = User.objects.get_or_create(is_superuser=True, defaults={
'username': 'test'
})
Project.objects.filter(id=settings.SENTRY_PROJECT).delete()
create_default_project(created_models=[Project])
project = Project.objects.get(id=settings.SENTRY_PROJECT)
assert project.owner == user
assert project.public is False
assert project.name == 'Sentry (Internal)'
assert project.slug == 'sentry'
team = project.team
assert team.owner == user
assert team.slug == 'sentry'
pk = ProjectKey.objects.get(project=project)
assert not pk.roles.api
assert pk.roles.store
assert pk.user is None
|
# coding: utf-8
from __future__ import absolute_import
from django.conf import settings
from sentry.models import Project, Team, User
from sentry.receivers.core import create_default_project
from sentry.testutils import TestCase
class CreateDefaultProjectTest(TestCase):
def test_simple(self):
user, _ = User.objects.get_or_create(is_superuser=True, defaults={
'username': 'test'
})
Team.objects.filter(project__id=settings.SENTRY_PROJECT).delete()
Project.objects.filter(id=settings.SENTRY_PROJECT).delete()
create_default_project(created_models=[Project])
project = Project.objects.filter(id=settings.SENTRY_PROJECT)
assert project.exists() is True
project = project.get()
assert project.owner == user
assert project.public is False
assert project.name == 'Sentry (Internal)'
assert project.slug == 'sentry'
team = project.team
assert team.owner == user
assert team.slug == 'sentry'
Add tests to confirm store/not api is default# coding: utf-8
from __future__ import absolute_import
from django.conf import settings
from sentry.models import Project, ProjectKey, User
from sentry.receivers.core import create_default_project
from sentry.testutils import TestCase
class CreateDefaultProjectTest(TestCase):
def test_simple(self):
user, _ = User.objects.get_or_create(is_superuser=True, defaults={
'username': 'test'
})
Project.objects.filter(id=settings.SENTRY_PROJECT).delete()
create_default_project(created_models=[Project])
project = Project.objects.get(id=settings.SENTRY_PROJECT)
assert project.owner == user
assert project.public is False
assert project.name == 'Sentry (Internal)'
assert project.slug == 'sentry'
team = project.team
assert team.owner == user
assert team.slug == 'sentry'
pk = ProjectKey.objects.get(project=project)
assert not pk.roles.api
assert pk.roles.store
assert pk.user is None
|
<commit_before># coding: utf-8
from __future__ import absolute_import
from django.conf import settings
from sentry.models import Project, Team, User
from sentry.receivers.core import create_default_project
from sentry.testutils import TestCase
class CreateDefaultProjectTest(TestCase):
def test_simple(self):
user, _ = User.objects.get_or_create(is_superuser=True, defaults={
'username': 'test'
})
Team.objects.filter(project__id=settings.SENTRY_PROJECT).delete()
Project.objects.filter(id=settings.SENTRY_PROJECT).delete()
create_default_project(created_models=[Project])
project = Project.objects.filter(id=settings.SENTRY_PROJECT)
assert project.exists() is True
project = project.get()
assert project.owner == user
assert project.public is False
assert project.name == 'Sentry (Internal)'
assert project.slug == 'sentry'
team = project.team
assert team.owner == user
assert team.slug == 'sentry'
<commit_msg>Add tests to confirm store/not api is default<commit_after># coding: utf-8
from __future__ import absolute_import
from django.conf import settings
from sentry.models import Project, ProjectKey, User
from sentry.receivers.core import create_default_project
from sentry.testutils import TestCase
class CreateDefaultProjectTest(TestCase):
def test_simple(self):
user, _ = User.objects.get_or_create(is_superuser=True, defaults={
'username': 'test'
})
Project.objects.filter(id=settings.SENTRY_PROJECT).delete()
create_default_project(created_models=[Project])
project = Project.objects.get(id=settings.SENTRY_PROJECT)
assert project.owner == user
assert project.public is False
assert project.name == 'Sentry (Internal)'
assert project.slug == 'sentry'
team = project.team
assert team.owner == user
assert team.slug == 'sentry'
pk = ProjectKey.objects.get(project=project)
assert not pk.roles.api
assert pk.roles.store
assert pk.user is None
|
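The updated test above pins down two behaviours of the default-project receiver: creation is keyed on settings.SENTRY_PROJECT, and the provisioned ProjectKey allows store but not api access. As a rough, framework-free sketch of that "create defaults once, with a store-only key" pattern (Sentry's real receiver is more involved and lives in sentry.receivers.core):
# Idempotent default-record creation, with the key roles the test asserts.
# Sketch only; it does not reproduce Sentry's actual receiver.
def create_default_project(store, project_id=1):
    """Create the internal project and its store-only key exactly once."""
    if project_id not in store:
        store[project_id] = {"name": "Sentry (Internal)",
                             "slug": "sentry",
                             "key_roles": {"store": True, "api": False}}
    return store[project_id]
if __name__ == "__main__":
    projects = {}
    first = create_default_project(projects)
    second = create_default_project(projects)  # second call creates nothing new
    assert first is second
    assert first["key_roles"] == {"store": True, "api": False}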
5257f0531dcb95f1520950a3ab54e384dcfe888e
|
tests/wrappers_tests/test_render.py
|
tests/wrappers_tests/test_render.py
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import unittest
from chainer import testing
import mock
import chainerrl
@testing.parameterize(*testing.product({
'render_kwargs': [
{},
{'mode': 'human'},
{'mode': 'rgb_array'},
]
}))
class TestRender(unittest.TestCase):
def test(self):
orig_env = mock.Mock()
# Reaches the terminal state after five actions
orig_env.reset.side_effect = [
('state', 0),
('state', 3),
]
orig_env.step.side_effect = [
(('state', 1), 0, False, {}),
(('state', 2), 1, True, {}),
]
env = chainerrl.wrappers.Render(orig_env, **self.render_kwargs)
# Not called env.render yet
self.assertEqual(orig_env.render.call_count, 0)
obs = env.reset()
self.assertEqual(obs, ('state', 0))
# Called once
self.assertEqual(orig_env.render.call_count, 1)
obs, reward, done, info = env.step(0)
self.assertEqual(obs, ('state', 1))
self.assertEqual(reward, 0)
self.assertEqual(done, False)
self.assertEqual(info, {})
# Called twice
self.assertEqual(orig_env.render.call_count, 2)
obs, reward, done, info = env.step(0)
self.assertEqual(obs, ('state', 2))
self.assertEqual(reward, 1)
self.assertEqual(done, True)
self.assertEqual(info, {})
# Called thrice
self.assertEqual(orig_env.render.call_count, 3)
obs = env.reset()
self.assertEqual(obs, ('state', 3))
# Called four times
self.assertEqual(orig_env.render.call_count, 4)
# All the calls should receive correct kwargs
for call in orig_env.render.call_args_list:
args, kwargs = call
self.assertEqual(len(args), 0)
self.assertEqual(kwargs, self.render_kwargs)
|
Add tests for Render wrapper
|
Add tests for Render wrapper
|
Python
|
mit
|
toslunar/chainerrl,toslunar/chainerrl
|
Add tests for Render wrapper
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import unittest
from chainer import testing
import mock
import chainerrl
@testing.parameterize(*testing.product({
'render_kwargs': [
{},
{'mode': 'human'},
{'mode': 'rgb_array'},
]
}))
class TestRender(unittest.TestCase):
def test(self):
orig_env = mock.Mock()
# Reaches the terminal state after five actions
orig_env.reset.side_effect = [
('state', 0),
('state', 3),
]
orig_env.step.side_effect = [
(('state', 1), 0, False, {}),
(('state', 2), 1, True, {}),
]
env = chainerrl.wrappers.Render(orig_env, **self.render_kwargs)
# Not called env.render yet
self.assertEqual(orig_env.render.call_count, 0)
obs = env.reset()
self.assertEqual(obs, ('state', 0))
# Called once
self.assertEqual(orig_env.render.call_count, 1)
obs, reward, done, info = env.step(0)
self.assertEqual(obs, ('state', 1))
self.assertEqual(reward, 0)
self.assertEqual(done, False)
self.assertEqual(info, {})
# Called twice
self.assertEqual(orig_env.render.call_count, 2)
obs, reward, done, info = env.step(0)
self.assertEqual(obs, ('state', 2))
self.assertEqual(reward, 1)
self.assertEqual(done, True)
self.assertEqual(info, {})
# Called thrice
self.assertEqual(orig_env.render.call_count, 3)
obs = env.reset()
self.assertEqual(obs, ('state', 3))
# Called four times
self.assertEqual(orig_env.render.call_count, 4)
# All the calls should receive correct kwargs
for call in orig_env.render.call_args_list:
args, kwargs = call
self.assertEqual(len(args), 0)
self.assertEqual(kwargs, self.render_kwargs)
|
<commit_before><commit_msg>Add tests for Render wrapper<commit_after>
|
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import unittest
from chainer import testing
import mock
import chainerrl
@testing.parameterize(*testing.product({
'render_kwargs': [
{},
{'mode': 'human'},
{'mode': 'rgb_array'},
]
}))
class TestRender(unittest.TestCase):
def test(self):
orig_env = mock.Mock()
# Reaches the terminal state after five actions
orig_env.reset.side_effect = [
('state', 0),
('state', 3),
]
orig_env.step.side_effect = [
(('state', 1), 0, False, {}),
(('state', 2), 1, True, {}),
]
env = chainerrl.wrappers.Render(orig_env, **self.render_kwargs)
# Not called env.render yet
self.assertEqual(orig_env.render.call_count, 0)
obs = env.reset()
self.assertEqual(obs, ('state', 0))
# Called once
self.assertEqual(orig_env.render.call_count, 1)
obs, reward, done, info = env.step(0)
self.assertEqual(obs, ('state', 1))
self.assertEqual(reward, 0)
self.assertEqual(done, False)
self.assertEqual(info, {})
# Called twice
self.assertEqual(orig_env.render.call_count, 2)
obs, reward, done, info = env.step(0)
self.assertEqual(obs, ('state', 2))
self.assertEqual(reward, 1)
self.assertEqual(done, True)
self.assertEqual(info, {})
# Called thrice
self.assertEqual(orig_env.render.call_count, 3)
obs = env.reset()
self.assertEqual(obs, ('state', 3))
# Called four times
self.assertEqual(orig_env.render.call_count, 4)
# All the calls should receive correct kwargs
for call in orig_env.render.call_args_list:
args, kwargs = call
self.assertEqual(len(args), 0)
self.assertEqual(kwargs, self.render_kwargs)
|
Add tests for Render wrapperfrom __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import unittest
from chainer import testing
import mock
import chainerrl
@testing.parameterize(*testing.product({
'render_kwargs': [
{},
{'mode': 'human'},
{'mode': 'rgb_array'},
]
}))
class TestRender(unittest.TestCase):
def test(self):
orig_env = mock.Mock()
# Reaches the terminal state after five actions
orig_env.reset.side_effect = [
('state', 0),
('state', 3),
]
orig_env.step.side_effect = [
(('state', 1), 0, False, {}),
(('state', 2), 1, True, {}),
]
env = chainerrl.wrappers.Render(orig_env, **self.render_kwargs)
# Not called env.render yet
self.assertEqual(orig_env.render.call_count, 0)
obs = env.reset()
self.assertEqual(obs, ('state', 0))
# Called once
self.assertEqual(orig_env.render.call_count, 1)
obs, reward, done, info = env.step(0)
self.assertEqual(obs, ('state', 1))
self.assertEqual(reward, 0)
self.assertEqual(done, False)
self.assertEqual(info, {})
# Called twice
self.assertEqual(orig_env.render.call_count, 2)
obs, reward, done, info = env.step(0)
self.assertEqual(obs, ('state', 2))
self.assertEqual(reward, 1)
self.assertEqual(done, True)
self.assertEqual(info, {})
# Called thrice
self.assertEqual(orig_env.render.call_count, 3)
obs = env.reset()
self.assertEqual(obs, ('state', 3))
# Called four times
self.assertEqual(orig_env.render.call_count, 4)
# All the calls should receive correct kwargs
for call in orig_env.render.call_args_list:
args, kwargs = call
self.assertEqual(len(args), 0)
self.assertEqual(kwargs, self.render_kwargs)
|
<commit_before><commit_msg>Add tests for Render wrapper<commit_after>from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from __future__ import absolute_import
from builtins import * # NOQA
from future import standard_library
standard_library.install_aliases() # NOQA
import unittest
from chainer import testing
import mock
import chainerrl
@testing.parameterize(*testing.product({
'render_kwargs': [
{},
{'mode': 'human'},
{'mode': 'rgb_array'},
]
}))
class TestRender(unittest.TestCase):
def test(self):
orig_env = mock.Mock()
# Reaches the terminal state after five actions
orig_env.reset.side_effect = [
('state', 0),
('state', 3),
]
orig_env.step.side_effect = [
(('state', 1), 0, False, {}),
(('state', 2), 1, True, {}),
]
env = chainerrl.wrappers.Render(orig_env, **self.render_kwargs)
# Not called env.render yet
self.assertEqual(orig_env.render.call_count, 0)
obs = env.reset()
self.assertEqual(obs, ('state', 0))
# Called once
self.assertEqual(orig_env.render.call_count, 1)
obs, reward, done, info = env.step(0)
self.assertEqual(obs, ('state', 1))
self.assertEqual(reward, 0)
self.assertEqual(done, False)
self.assertEqual(info, {})
# Called twice
self.assertEqual(orig_env.render.call_count, 2)
obs, reward, done, info = env.step(0)
self.assertEqual(obs, ('state', 2))
self.assertEqual(reward, 1)
self.assertEqual(done, True)
self.assertEqual(info, {})
# Called thrice
self.assertEqual(orig_env.render.call_count, 3)
obs = env.reset()
self.assertEqual(obs, ('state', 3))
# Called four times
self.assertEqual(orig_env.render.call_count, 4)
# All the calls should receive correct kwargs
for call in orig_env.render.call_args_list:
args, kwargs = call
self.assertEqual(len(args), 0)
self.assertEqual(kwargs, self.render_kwargs)
|
|
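The test above only ever sees a mocked env, so the expected call counts read most easily against a concrete wrapper: render is invoked once after every reset and once after every step, always with the kwargs given to the constructor. A minimal sketch of such a wrapper (chainerrl's actual Render class may differ in details):
# Sketch of a render-on-every-transition wrapper; not chainerrl's real code.
class Render(object):
    """Call env.render(**render_kwargs) after every reset and step."""
    def __init__(self, env, **render_kwargs):
        self.env = env
        self.render_kwargs = render_kwargs
    def reset(self, **kwargs):
        obs = self.env.reset(**kwargs)
        self.env.render(**self.render_kwargs)
        return obs
    def step(self, action):
        ret = self.env.step(action)
        self.env.render(**self.render_kwargs)
        return ret
if __name__ == "__main__":
    class DummyEnv(object):
        render_calls = 0
        def reset(self):
            return ("state", 0)
        def step(self, action):
            return ("state", 1), 0.0, False, {}
        def render(self, **kwargs):
            self.render_calls += 1
    env = Render(DummyEnv(), mode="human")
    env.reset()
    env.step(0)
    assert env.env.render_calls == 2  # one call per reset, one per step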
4c6ee099aa06278d974ebe557e81f04885a1cf4e
|
glanerbeard/default_settings.py
|
glanerbeard/default_settings.py
|
DEV_LISTEN_HOST = '127.0.0.1'
DEV_LISTEN_PORT = 5000
SERVERS = {}
API_KEYS = {}
LOGLEVEL = 'info'
|
Add a file to hold default settings.
|
Add a file to hold default settings.
|
Python
|
apache-2.0
|
daenney/glanerbeard
|
Add a file to hold default settings.
|
DEV_LISTEN_HOST = '127.0.0.1'
DEV_LISTEN_PORT = 5000
SERVERS = {}
API_KEYS = {}
LOGLEVEL = 'info'
|
<commit_before><commit_msg>Add a file to hold default settings.<commit_after>
|
DEV_LISTEN_HOST = '127.0.0.1'
DEV_LISTEN_PORT = 5000
SERVERS = {}
API_KEYS = {}
LOGLEVEL = 'info'
|
Add a file to hold default settings.DEV_LISTEN_HOST = '127.0.0.1'
DEV_LISTEN_PORT = 5000
SERVERS = {}
API_KEYS = {}
LOGLEVEL = 'info'
|
<commit_before><commit_msg>Add a file to hold default settings.<commit_after>DEV_LISTEN_HOST = '127.0.0.1'
DEV_LISTEN_PORT = 5000
SERVERS = {}
API_KEYS = {}
LOGLEVEL = 'info'
|
|
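A defaults module like the one above is typically the bottom layer of configuration, with environment- or deployment-specific values applied on top. A small stdlib-only sketch of that layering; the SimpleNamespace stands in for importing glanerbeard.default_settings and is illustrative rather than how the project necessarily consumes it:
import types
def load_config(defaults, overrides=None):
    """Collect UPPERCASE attributes from a settings object, then apply overrides."""
    config = {name: getattr(defaults, name) for name in dir(defaults) if name.isupper()}
    config.update(overrides or {})
    return config
if __name__ == "__main__":
    # Stand-in for `import glanerbeard.default_settings as defaults`.
    defaults = types.SimpleNamespace(DEV_LISTEN_HOST="127.0.0.1",
                                     DEV_LISTEN_PORT=5000,
                                     LOGLEVEL="info")
    print(load_config(defaults, overrides={"LOGLEVEL": "debug"}))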
93be4fe47f01c0a02baefc39502af87b1d276bcc
|
download_data.py
|
download_data.py
|
import argparse
import subprocess as sp
import os
"""
Script for downloading the data sets from Kaggle
Usage:
python download_data.py
Run
python download_data.py --help
for help on the usage of command line arguments
Note: Kaggle requires a user to accept the rules of a competition
before they can download any data. So, downloading the data sets
from commandline requires Kaggle user specific data. Make sure
that DATA_DIRECTORY contains a "cookies.txt" from
"https://www.kaggle.com". You can get a copy of your cookies using
an extension like this:
"https://chrome.google.com/webstore/detail/cookiestxt/njabckikapfpffapmjgojcnbfjonfjfg?hl=en"
"""
COOKIES = 'cookies.txt'
TRAIN = 'train.csv'
TEST = 'test.csv'
def download(url, directory, cookie):
curl_bin = [
'curl',
'-L',
'--cookie', cookie,
'-o', directory,
url
]
sp.run(curl_bin)
def get_dataset(source, directory):
train_path = os.path.join(directory, TRAIN)
if os.path.exists(train_path):
print('%s already exists' % train_path)
else:
url = os.path.join(source, TRAIN)
print('downloading %s ...' % url)
download(url, train_path, os.path.join(directory, COOKIES))
print('done')
test_path = os.path.join(directory, TEST)
if os.path.exists(test_path):
print('%s already exists' % test_path)
else:
url = os.path.join(source, TEST)
print('downloading %s ...' % url)
download(url, test_path, os.path.join(directory, COOKIES))
print('done')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-D', '--data_directory',
default='./data/',
help='directory to download the data sets')
parser.add_argument('-L', '--source_url',
default='https://www.kaggle.com/c/digit-recognizer/download/',
help='url to the data sets')
args = parser.parse_args()
get_dataset(args.source_url, args.data_directory)
|
Add script for downloading data sets
|
Add script for downloading data sets
|
Python
|
mit
|
priyathamkat/digit-recognizer
|
Add script for downloading data sets
|
import argparse
import subprocess as sp
import os
"""
Script for downloading the data sets from Kaggle
Usage:
python download_data.py
Run
python download_data.py --help
for help on the usage of command line arguments
Note: Kaggle requires a user to accept the rules of a competition
before they can download any data. So, downloading the data sets
from commandline requires Kaggle user specific data. Make sure
that DATA_DIRECTORY contains a "cookies.txt" from
"https://www.kaggle.com". You can get a copy of your cookies using
an extension like this:
"https://chrome.google.com/webstore/detail/cookiestxt/njabckikapfpffapmjgojcnbfjonfjfg?hl=en"
"""
COOKIES = 'cookies.txt'
TRAIN = 'train.csv'
TEST = 'test.csv'
def download(url, directory, cookie):
curl_bin = [
'curl',
'-L',
'--cookie', cookie,
'-o', directory,
url
]
sp.run(curl_bin)
def get_dataset(source, directory):
train_path = os.path.join(directory, TRAIN)
if os.path.exists(train_path):
print('%s already exists' % train_path)
else:
url = os.path.join(source, TRAIN)
print('downloading %s ...' % url)
download(url, train_path, os.path.join(directory, COOKIES))
print('done')
test_path = os.path.join(directory, TEST)
if os.path.exists(test_path):
print('%s already exists' % test_path)
else:
url = os.path.join(source, TEST)
print('downloading %s ...' % url)
download(url, test_path, os.path.join(directory, COOKIES))
print('done')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-D', '--data_directory',
default='./data/',
help='directory to download the data sets')
parser.add_argument('-L', '--source_url',
default='https://www.kaggle.com/c/digit-recognizer/download/',
help='url to the data sets')
args = parser.parse_args()
get_dataset(args.source_url, args.data_directory)
|
<commit_before><commit_msg>Add script for downloading data sets<commit_after>
|
import argparse
import subprocess as sp
import os
"""
Script for downloading the data sets from Kaggle
Usage:
python download_data.py
Run
python download_data.py --help
for help on the usage of command line arguments
Note: Kaggle requires a user to accept the rules of a competition
before they can download any data. So, downloading the data sets
from commandline requires Kaggle user specific data. Make sure
that DATA_DIRECTORY contains a "cookies.txt" from
"https://www.kaggle.com". You can get a copy of your cookies using
an extension like this:
"https://chrome.google.com/webstore/detail/cookiestxt/njabckikapfpffapmjgojcnbfjonfjfg?hl=en"
"""
COOKIES = 'cookies.txt'
TRAIN = 'train.csv'
TEST = 'test.csv'
def download(url, directory, cookie):
curl_bin = [
'curl',
'-L',
'--cookie', cookie,
'-o', directory,
url
]
sp.run(curl_bin)
def get_dataset(source, directory):
train_path = os.path.join(directory, TRAIN)
if os.path.exists(train_path):
print('%s already exists' % train_path)
else:
url = os.path.join(source, TRAIN)
print('downloading %s ...' % url)
download(url, train_path, os.path.join(directory, COOKIES))
print('done')
test_path = os.path.join(directory, TEST)
if os.path.exists(test_path):
print('%s already exists' % test_path)
else:
url = os.path.join(source, TEST)
print('downloading %s ...' % url)
download(url, test_path, os.path.join(directory, COOKIES))
print('done')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-D', '--data_directory',
default='./data/',
help='directory to download the data sets')
parser.add_argument('-L', '--source_url',
default='https://www.kaggle.com/c/digit-recognizer/download/',
help='url to the data sets')
args = parser.parse_args()
get_dataset(args.source_url, args.data_directory)
|
Add script for downloading data setsimport argparse
import subprocess as sp
import os
"""
Script for downloading the data sets from Kaggle
Usage:
python download_data.py
Run
python download_data.py --help
for help on the usage of command line arguments
Note: Kaggle requires a user to accept the rules of a competition
before they can download any data. So, downloading the data sets
from commandline requires Kaggle user specific data. Make sure
that DATA_DIRECTORY contains a "cookies.txt" from
"https://www.kaggle.com". You can get a copy of your cookies using
an extension like this:
"https://chrome.google.com/webstore/detail/cookiestxt/njabckikapfpffapmjgojcnbfjonfjfg?hl=en"
"""
COOKIES = 'cookies.txt'
TRAIN = 'train.csv'
TEST = 'test.csv'
def download(url, directory, cookie):
curl_bin = [
'curl',
'-L',
'--cookie', cookie,
'-o', directory,
url
]
sp.run(curl_bin)
def get_dataset(source, directory):
train_path = os.path.join(directory, TRAIN)
if os.path.exists(train_path):
print('%s already exists' % train_path)
else:
url = os.path.join(source, TRAIN)
print('downloading %s ...' % url)
download(url, train_path, os.path.join(directory, COOKIES))
print('done')
test_path = os.path.join(directory, TEST)
if os.path.exists(test_path):
print('%s already exists' % test_path)
else:
url = os.path.join(source, TEST)
print('downloading %s ...' % url)
download(url, test_path, os.path.join(directory, COOKIES))
print('done')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-D', '--data_directory',
default='./data/',
help='directory to download the data sets')
parser.add_argument('-L', '--source_url',
default='https://www.kaggle.com/c/digit-recognizer/download/',
help='url to the data sets')
args = parser.parse_args()
get_dataset(args.source_url, args.data_directory)
|
<commit_before><commit_msg>Add script for downloading data sets<commit_after>import argparse
import subprocess as sp
import os
"""
Script for downloading the data sets from Kaggle
Usage:
python download_data.py
Run
python download_data.py --help
for help on the usage of command line arguments
Note: Kaggle requires a user to accept the rules of a competition
before they can download any data. So, downloading the data sets
from commandline requires Kaggle user specific data. Make sure
that DATA_DIRECTORY contains a "cookies.txt" from
"https://www.kaggle.com". You can get a copy of your cookies using
an extension like this:
"https://chrome.google.com/webstore/detail/cookiestxt/njabckikapfpffapmjgojcnbfjonfjfg?hl=en"
"""
COOKIES = 'cookies.txt'
TRAIN = 'train.csv'
TEST = 'test.csv'
def download(url, directory, cookie):
curl_bin = [
'curl',
'-L',
'--cookie', cookie,
'-o', directory,
url
]
sp.run(curl_bin)
def get_dataset(source, directory):
train_path = os.path.join(directory, TRAIN)
if os.path.exists(train_path):
print('%s already exists' % train_path)
else:
url = os.path.join(source, TRAIN)
print('downloading %s ...' % url)
download(url, train_path, os.path.join(directory, COOKIES))
print('done')
test_path = os.path.join(directory, TEST)
if os.path.exists(test_path):
print('%s already exists' % test_path)
else:
url = os.path.join(source, TEST)
print('downloading %s ...' % url)
download(url, test_path, os.path.join(directory, COOKIES))
print('done')
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('-D', '--data_directory',
default='./data/',
help='directory to download the data sets')
parser.add_argument('-L', '--source_url',
default='https://www.kaggle.com/c/digit-recognizer/download/',
help='url to the data sets')
args = parser.parse_args()
get_dataset(args.source_url, args.data_directory)
|
|
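The script above shells out to curl because Kaggle only serves the files to an authenticated session carried in cookies.txt. The same Netscape-format cookie file can be consumed directly from the standard library, which removes the curl dependency; a sketch follows, where the URL layout is taken from the record and Kaggle's actual endpoint behaviour is assumed:
# Download a file using cookies exported in Netscape "cookies.txt" format.
# Sketch only; Kaggle's authentication flow is assumed to accept session cookies.
import shutil
import urllib.request
from http.cookiejar import MozillaCookieJar
def download_with_cookies(url, destination, cookie_file="cookies.txt"):
    jar = MozillaCookieJar(cookie_file)
    jar.load(ignore_discard=True, ignore_expires=True)
    opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(jar))
    with opener.open(url) as response, open(destination, "wb") as out:
        shutil.copyfileobj(response, out)
if __name__ == "__main__":
    download_with_cookies(
        "https://www.kaggle.com/c/digit-recognizer/download/train.csv",
        "data/train.csv",
        cookie_file="data/cookies.txt",
    )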
1160fc4f645ad36b106ddba6e248c81601f2cb26
|
lc0042_trapping_rain_water.py
|
lc0042_trapping_rain_water.py
|
"""Leetcode 42. Trapping Rain Water
Hard
URL: https://leetcode.com/problems/trapping-rain-water/
Given n non-negative integers representing an elevation map where the width of each
bar is 1, compute how much water it is able to trap after raining.
The above elevation map is represented by array [0,1,0,2,1,0,1,3,2,1,2,1].
In this case, 6 units of rain water (blue section) are being trapped.
Example:
Input: [0,1,0,2,1,0,1,3,2,1,2,1]
Output: 6
"""
class SolutionTwoPointersLeftmaxRightmaxHeights(object):
def trap(self, height):
"""
:type height: List[int]
:rtype: int
"""
# Apply two pointers from both sides with leftmax & rightmax heights.
n = len(height)
# Edge cases.
if not height or n < 3:
return 0
# Start checking max left & right heights to check walls from both sides.
h_leftmax, h_rightmax = height[0], height[n - 1]
# Start moving two pointers from 2nd last pos from both sides.
left, right = 1, n - 2
water = 0
# Iteratively move two pointers until they meet with each other.
while left <= right:
# Move the lower wall since the taller one may leak water.
if h_leftmax <= h_rightmax:
h_leftmax = max(h_leftmax, height[left])
water += h_leftmax - height[left]
left += 1
else:
h_rightmax = max(h_rightmax, height[right])
water += h_rightmax - height[right]
right -= 1
return water
def main():
height = [0,1,0,2,1,0,1,3,2,1,2,1]
print SolutionTwoPointersLeftmaxRightmaxHeights().trap(height)
if __name__ == '__main__':
main()
|
Complete two pointers w/ leftmax/rightmax heights sol
|
Complete two pointers w/ leftmax/rightmax heights sol
|
Python
|
bsd-2-clause
|
bowen0701/algorithms_data_structures
|
Complete two pointers w/ leftmax/rightmax heights sol
|
"""Leetcode 42. Trapping Rain Water
Hard
URL: https://leetcode.com/problems/trapping-rain-water/
Given n non-negative integers representing an elevation map where the width of each
bar is 1, compute how much water it is able to trap after raining.
The above elevation map is represented by array [0,1,0,2,1,0,1,3,2,1,2,1].
In this case, 6 units of rain water (blue section) are being trapped.
Example:
Input: [0,1,0,2,1,0,1,3,2,1,2,1]
Output: 6
"""
class SolutionTwoPointersLeftmaxRightmaxHeights(object):
def trap(self, height):
"""
:type height: List[int]
:rtype: int
"""
# Apply two pointers from both sides with leftmax & rightmax heights.
n = len(height)
# Edge cases.
if not height or n < 3:
return 0
# Start checking max left & right heights to check walls from both sides.
h_leftmax, h_rightmax = height[0], height[n - 1]
# Start moving two pointers from 2nd last pos from both sides.
left, right = 1, n - 2
water = 0
# Iteratively move two pointers until they meet with each other.
while left <= right:
# Move the lower wall since the taller one may leak water.
if h_leftmax <= h_rightmax:
h_leftmax = max(h_leftmax, height[left])
water += h_leftmax - height[left]
left += 1
else:
h_rightmax = max(h_rightmax, height[right])
water += h_rightmax - height[right]
right -= 1
return water
def main():
height = [0,1,0,2,1,0,1,3,2,1,2,1]
print SolutionTwoPointersLeftmaxRightmaxHeights().trap(height)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Complete two pointers w/ leftmax/rightmax heights sol<commit_after>
|
"""Leetcode 42. Trapping Rain Water
Hard
URL: https://leetcode.com/problems/trapping-rain-water/
Given n non-negative integers representing an elevation map where the width of each
bar is 1, compute how much water it is able to trap after raining.
The above elevation map is represented by array [0,1,0,2,1,0,1,3,2,1,2,1].
In this case, 6 units of rain water (blue section) are being trapped.
Example:
Input: [0,1,0,2,1,0,1,3,2,1,2,1]
Output: 6
"""
class SolutionTwoPointersLeftmaxRightmaxHeights(object):
def trap(self, height):
"""
:type height: List[int]
:rtype: int
"""
# Apply two pointers from both sides with leftmax & rightmax heights.
n = len(height)
# Edge cases.
if not height or n < 3:
return 0
# Start checking max left & right heights to check walls from both sides.
h_leftmax, h_rightmax = height[0], height[n - 1]
# Start moving two pointers from 2nd last pos from both sides.
left, right = 1, n - 2
water = 0
# Iteratively move two pointers until they meet with each other.
while left <= right:
# Move the lower wall since the taller one may leak water.
if h_leftmax <= h_rightmax:
h_leftmax = max(h_leftmax, height[left])
water += h_leftmax - height[left]
left += 1
else:
h_rightmax = max(h_rightmax, height[right])
water += h_rightmax - height[right]
right -= 1
return water
def main():
height = [0,1,0,2,1,0,1,3,2,1,2,1]
print SolutionTwoPointersLeftmaxRightmaxHeights().trap(height)
if __name__ == '__main__':
main()
|
Complete two pointers w/ leftmax/rightmax heights sol"""Leetcode 42. Trapping Rain Water
Hard
URL: https://leetcode.com/problems/trapping-rain-water/
Given n non-negative integers representing an elevation map where the width of each
bar is 1, compute how much water it is able to trap after raining.
The above elevation map is represented by array [0,1,0,2,1,0,1,3,2,1,2,1].
In this case, 6 units of rain water (blue section) are being trapped.
Example:
Input: [0,1,0,2,1,0,1,3,2,1,2,1]
Output: 6
"""
class SolutionTwoPointersLeftmaxRightmaxHeights(object):
def trap(self, height):
"""
:type height: List[int]
:rtype: int
"""
# Apply two pointers from both sides with leftmax & rightmax heights.
n = len(height)
# Edge cases.
if not height or n < 3:
return 0
# Start checking max left & right heights to check walls from both sides.
h_leftmax, h_rightmax = height[0], height[n - 1]
# Start moving two pointers from 2nd last pos from both sides.
left, right = 1, n - 2
water = 0
# Iteratively move two pointers until they meet with each other.
while left <= right:
# Move the lower wall since the taller one may leak water.
if h_leftmax <= h_rightmax:
h_leftmax = max(h_leftmax, height[left])
water += h_leftmax - height[left]
left += 1
else:
h_rightmax = max(h_rightmax, height[right])
water += h_rightmax - height[right]
right -= 1
return water
def main():
height = [0,1,0,2,1,0,1,3,2,1,2,1]
print SolutionTwoPointersLeftmaxRightmaxHeights().trap(height)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Complete two pointers w/ leftmax/rightmax heights sol<commit_after>"""Leetcode 42. Trapping Rain Water
Hard
URL: https://leetcode.com/problems/trapping-rain-water/
Given n non-negative integers representing an elevation map where the width of each
bar is 1, compute how much water it is able to trap after raining.
The above elevation map is represented by array [0,1,0,2,1,0,1,3,2,1,2,1].
In this case, 6 units of rain water (blue section) are being trapped.
Example:
Input: [0,1,0,2,1,0,1,3,2,1,2,1]
Output: 6
"""
class SolutionTwoPointersLeftmaxRightmaxHeights(object):
def trap(self, height):
"""
:type height: List[int]
:rtype: int
"""
# Apply two pointers from both sides with leftmax & rightmax heights.
n = len(height)
# Edge cases.
if not height or n < 3:
return 0
# Start checking max left & right heights to check walls from both sides.
h_leftmax, h_rightmax = height[0], height[n - 1]
# Start moving two pointers from 2nd last pos from both sides.
left, right = 1, n - 2
water = 0
# Iteratively move two pointers until they meet with each other.
while left <= right:
# Move the lower wall since the taller one may leak water.
if h_leftmax <= h_rightmax:
h_leftmax = max(h_leftmax, height[left])
water += h_leftmax - height[left]
left += 1
else:
h_rightmax = max(h_rightmax, height[right])
water += h_rightmax - height[right]
right -= 1
return water
def main():
height = [0,1,0,2,1,0,1,3,2,1,2,1]
print SolutionTwoPointersLeftmaxRightmaxHeights().trap(height)
if __name__ == '__main__':
main()
|
|
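The two-pointer method above relies on the identity water[i] = min(highest bar to the left of i, highest bar to the right of i) - height[i]. Writing that identity out directly with prefix and suffix maxima gives an easy reference implementation to cross-check against; this is an illustrative addition, not part of the record:
import itertools
import random
def trap_prefix_suffix(height):
    """Reference: water[i] = min(max(height[:i+1]), max(height[i:])) - height[i]."""
    if not height:
        return 0
    left_max = list(itertools.accumulate(height, max))
    right_max = list(itertools.accumulate(reversed(height), max))[::-1]
    return sum(min(l, r) - h for l, r, h in zip(left_max, right_max, height))
def trap_bruteforce(height):
    """Quadratic version of the same formula, used only as a cross-check."""
    return sum(
        min(max(height[: i + 1]), max(height[i:])) - h for i, h in enumerate(height)
    )
if __name__ == "__main__":
    sample = [0, 1, 0, 2, 1, 0, 1, 3, 2, 1, 2, 1]
    assert trap_prefix_suffix(sample) == trap_bruteforce(sample) == 6
    for _ in range(200):
        bars = [random.randint(0, 9) for _ in range(random.randint(1, 25))]
        assert trap_prefix_suffix(bars) == trap_bruteforce(bars)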
1bb610bdbb83586dc499c29934959db1feb39853
|
tests/unit/utils/test_jinja.py
|
tests/unit/utils/test_jinja.py
|
# -*- coding: utf-8 -*-
'''
Tests for salt.utils.jinja
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals, print_function
# Import Salt libs
import salt.utils.jinja
from tests.support.unit import TestCase
class JinjaTestCase(TestCase):
def test_tojson(self):
'''
Test the tojson filter for those using Jinja < 2.9. Non-ascii unicode
content should be dumped with ensure_ascii=True.
'''
data = {'Non-ascii words': ['süß', 'спам', 'яйца']}
result = salt.utils.jinja.tojson(data)
expected = '{"Non-ascii words": ["s\\u00fc\\u00df", "\\u0441\\u043f\\u0430\\u043c", "\\u044f\\u0439\\u0446\\u0430"]}'
assert result == expected, result
|
Add unit test for tojson filter
|
Add unit test for tojson filter
|
Python
|
apache-2.0
|
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
|
Add unit test for tojson filter
|
# -*- coding: utf-8 -*-
'''
Tests for salt.utils.jinja
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals, print_function
# Import Salt libs
import salt.utils.jinja
from tests.support.unit import TestCase
class JinjaTestCase(TestCase):
def test_tojson(self):
'''
Test the tojson filter for those using Jinja < 2.9. Non-ascii unicode
content should be dumped with ensure_ascii=True.
'''
data = {'Non-ascii words': ['süß', 'спам', 'яйца']}
result = salt.utils.jinja.tojson(data)
expected = '{"Non-ascii words": ["s\\u00fc\\u00df", "\\u0441\\u043f\\u0430\\u043c", "\\u044f\\u0439\\u0446\\u0430"]}'
assert result == expected, result
|
<commit_before><commit_msg>Add unit test for tojson filter<commit_after>
|
# -*- coding: utf-8 -*-
'''
Tests for salt.utils.jinja
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals, print_function
# Import Salt libs
import salt.utils.jinja
from tests.support.unit import TestCase
class JinjaTestCase(TestCase):
def test_tojson(self):
'''
Test the tojson filter for those using Jinja < 2.9. Non-ascii unicode
content should be dumped with ensure_ascii=True.
'''
data = {'Non-ascii words': ['süß', 'спам', 'яйца']}
result = salt.utils.jinja.tojson(data)
expected = '{"Non-ascii words": ["s\\u00fc\\u00df", "\\u0441\\u043f\\u0430\\u043c", "\\u044f\\u0439\\u0446\\u0430"]}'
assert result == expected, result
|
Add unit test for tojson filter# -*- coding: utf-8 -*-
'''
Tests for salt.utils.jinja
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals, print_function
# Import Salt libs
import salt.utils.jinja
from tests.support.unit import TestCase
class JinjaTestCase(TestCase):
def test_tojson(self):
'''
Test the tojson filter for those using Jinja < 2.9. Non-ascii unicode
content should be dumped with ensure_ascii=True.
'''
data = {'Non-ascii words': ['süß', 'спам', 'яйца']}
result = salt.utils.jinja.tojson(data)
expected = '{"Non-ascii words": ["s\\u00fc\\u00df", "\\u0441\\u043f\\u0430\\u043c", "\\u044f\\u0439\\u0446\\u0430"]}'
assert result == expected, result
|
<commit_before><commit_msg>Add unit test for tojson filter<commit_after># -*- coding: utf-8 -*-
'''
Tests for salt.utils.jinja
'''
# Import Python libs
from __future__ import absolute_import, unicode_literals, print_function
# Import Salt libs
import salt.utils.jinja
from tests.support.unit import TestCase
class JinjaTestCase(TestCase):
def test_tojson(self):
'''
Test the tojson filter for those using Jinja < 2.9. Non-ascii unicode
content should be dumped with ensure_ascii=True.
'''
data = {'Non-ascii words': ['süß', 'спам', 'яйца']}
result = salt.utils.jinja.tojson(data)
expected = '{"Non-ascii words": ["s\\u00fc\\u00df", "\\u0441\\u043f\\u0430\\u043c", "\\u044f\\u0439\\u0446\\u0430"]}'
assert result == expected, result
|
|
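The expected string in the test above is simply what json.dumps produces with its default ensure_ascii=True: every non-ASCII character is emitted as a \uXXXX escape. A stdlib-only illustration of the two behaviours the tojson filter has to bridge, assuming Python 3 string semantics:
# -*- coding: utf-8 -*-
import json
data = {"Non-ascii words": ["süß", "спам", "яйца"]}
escaped = json.dumps(data)                       # ensure_ascii=True is the default
verbatim = json.dumps(data, ensure_ascii=False)  # keeps the unicode characters
assert "\\u00fc" in escaped   # 'ü' becomes an escape sequence
assert "süß" in verbatim      # left intact when ensure_ascii is False
print(escaped)
print(verbatim)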
e4975030f82a79632e73810de3bcad8d68abd8a9
|
braid/dumper.py
|
braid/dumper.py
|
import os
from fabric.api import run, get
from braid import utils
from pipes import quote
def dump(spec, localfile):
"""
C{spec} is a dictionary of filenames/dirnames in the tarball to locations
on the disk from where the contents have to be retrieved.
Home relative (~/...) file names are not supported by the transformation
rules as they are converted by the shell before being passed to tar.
"""
_, ext = os.path.splitext(localfile)
with utils.tempfile(suffix=ext) as temp:
cmd = [
'tar',
'--create',
'--file={}'.format(temp),
'--auto-compress',
'-h', # Follow symbolic links
'--verbose',
'--show-transformed-names',
]
for destination, source in spec.iteritems():
cmd.extend([
'\\\n',
' {:30s}'.format(source),
'--transform',
quote('s!^{}!{}!'.format(source.lstrip('/'), destination)),
])
run(' '.join(cmd))
get(temp, localfile)
|
Support for creating tarballs of arbitrary locations on disk and download them locally.
|
Support for creating tarballs of arbitrary locations on disk and download them locally.
|
Python
|
mit
|
alex/braid,alex/braid
|
Support for creating tarballs of arbitrary locations on disk and download them locally.
|
import os
from fabric.api import run, get
from braid import utils
from pipes import quote
def dump(spec, localfile):
"""
C{spec} is a dictionary of filenames/dirnames in the tarball to locations
on the disk from where the contents have to be retrieved.
Home relative (~/...) file names are not supported by the transformation
rules as they are converted by the shell before being passed to tar.
"""
_, ext = os.path.splitext(localfile)
with utils.tempfile(suffix=ext) as temp:
cmd = [
'tar',
'--create',
'--file={}'.format(temp),
'--auto-compress',
'-h', # Follow symbolic links
'--verbose',
'--show-transformed-names',
]
for destination, source in spec.iteritems():
cmd.extend([
'\\\n',
' {:30s}'.format(source),
'--transform',
quote('s!^{}!{}!'.format(source.lstrip('/'), destination)),
])
run(' '.join(cmd))
get(temp, localfile)
|
<commit_before><commit_msg>Support for creating tarballs of arbitrary locations on disk and download them locally.<commit_after>
|
import os
from fabric.api import run, get
from braid import utils
from pipes import quote
def dump(spec, localfile):
"""
C{spec} is a dictionary of filenames/dirnames in the tarball to locations
on the disk from where the contents have to be retrieved.
Home relative (~/...) file names are not supported by the transformation
rules as they are converted by the shell before being passed to tar.
"""
_, ext = os.path.splitext(localfile)
with utils.tempfile(suffix=ext) as temp:
cmd = [
'tar',
'--create',
'--file={}'.format(temp),
'--auto-compress',
'-h', # Follow symbolic links
'--verbose',
'--show-transformed-names',
]
for destination, source in spec.iteritems():
cmd.extend([
'\\\n',
' {:30s}'.format(source),
'--transform',
quote('s!^{}!{}!'.format(source.lstrip('/'), destination)),
])
run(' '.join(cmd))
get(temp, localfile)
|
Support for creating tarballs of arbitrary locations on disk and download them locally.import os
from fabric.api import run, get
from braid import utils
from pipes import quote
def dump(spec, localfile):
"""
C{spec} is a dictionary of filenames/dirnames in the tarball to locations
on the disk from where the contents have to be retrieved.
Home relative (~/...) file names are not supported by the transformation
rules as they are converted by the shell before being passed to tar.
"""
_, ext = os.path.splitext(localfile)
with utils.tempfile(suffix=ext) as temp:
cmd = [
'tar',
'--create',
'--file={}'.format(temp),
'--auto-compress',
'-h', # Follow symbolic links
'--verbose',
'--show-transformed-names',
]
for destination, source in spec.iteritems():
cmd.extend([
'\\\n',
' {:30s}'.format(source),
'--transform',
quote('s!^{}!{}!'.format(source.lstrip('/'), destination)),
])
run(' '.join(cmd))
get(temp, localfile)
|
<commit_before><commit_msg>Support for creating tarballs of arbitrary locations on disk and download them locally.<commit_after>import os
from fabric.api import run, get
from braid import utils
from pipes import quote
def dump(spec, localfile):
"""
C{spec} is a dictionary of filenames/dirnames in the tarball to locations
on the disk from where the contents have to be retrieved.
Home relative (~/...) file names are not supported by the transformation
rules as they are converted by the shell before being passed to tar.
"""
_, ext = os.path.splitext(localfile)
with utils.tempfile(suffix=ext) as temp:
cmd = [
'tar',
'--create',
'--file={}'.format(temp),
'--auto-compress',
'-h', # Follow symbolic links
'--verbose',
'--show-transformed-names',
]
for destination, source in spec.iteritems():
cmd.extend([
'\\\n',
' {:30s}'.format(source),
'--transform',
quote('s!^{}!{}!'.format(source.lstrip('/'), destination)),
])
run(' '.join(cmd))
get(temp, localfile)
|
|
a39d02f0b42b99403171e929eca97eaa6293de4f
|
examples/dump_machine_status.py
|
examples/dump_machine_status.py
|
"""
Read some statistics from a connected machine, and dump them to the console.
Requires these modules:
* pySerial: http://pypi.python.org/pypi/pyserial
"""
# To use this example without installing s3g, we need this hack:
import os, sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import s3g
import serial
import optparse
parser = optparse.OptionParser()
parser.add_option("-s", "--serialport", dest="serialportname",
help="serial port (ex: /dev/ttyUSB0)", default="/dev/ttyACM0")
parser.add_option("-p", "--oscport", dest="oscport",
help="OSC port to listen on", default="10000")
(options, args) = parser.parse_args()
r = s3g.s3g()
r.file = serial.Serial(options.serialportname, 115200, timeout=0)
print "firmware version: %i"%(r.GetVersion())
print "build name: %s"%(r.GetBuildName())
print "SD Card name: " + r.GetNextFilename(True)
while True:
filename = r.GetNextFilename(False)
if filename == '\x00':
break
print ' ' + filename
|
Add stats dump example (used as a sanity check of the protocol against the machine)
|
Add stats dump example (used as a sanity check of the protocol against the machine)
|
Python
|
agpl-3.0
|
makerbot/s3g,makerbot/s3g,Jnesselr/s3g,makerbot/s3g,makerbot/s3g,Jnesselr/s3g
|
Add stats dump example (used as a sanity check of the protocol against the machine)
|
"""
Read some statistics from a connected machine, and dump them to the console.
Requires these modules:
* pySerial: http://pypi.python.org/pypi/pyserial
"""
# To use this example without installing s3g, we need this hack:
import os, sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import s3g
import serial
import optparse
parser = optparse.OptionParser()
parser.add_option("-s", "--serialport", dest="serialportname",
help="serial port (ex: /dev/ttyUSB0)", default="/dev/ttyACM0")
parser.add_option("-p", "--oscport", dest="oscport",
help="OSC port to listen on", default="10000")
(options, args) = parser.parse_args()
r = s3g.s3g()
r.file = serial.Serial(options.serialportname, 115200, timeout=0)
print "firmware version: %i"%(r.GetVersion())
print "build name: %s"%(r.GetBuildName())
print "SD Card name: " + r.GetNextFilename(True)
while True:
filename = r.GetNextFilename(False)
if filename == '\x00':
break
print ' ' + filename
|
<commit_before><commit_msg>Add stats dump example (used as a sanity check of the protocol against the machine)<commit_after>
|
"""
Read some statistics from a connected machine, and dump them to the console.
Requires these modules:
* pySerial: http://pypi.python.org/pypi/pyserial
"""
# To use this example without installing s3g, we need this hack:
import os, sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import s3g
import serial
import optparse
parser = optparse.OptionParser()
parser.add_option("-s", "--serialport", dest="serialportname",
help="serial port (ex: /dev/ttyUSB0)", default="/dev/ttyACM0")
parser.add_option("-p", "--oscport", dest="oscport",
help="OSC port to listen on", default="10000")
(options, args) = parser.parse_args()
r = s3g.s3g()
r.file = serial.Serial(options.serialportname, 115200, timeout=0)
print "firmware version: %i"%(r.GetVersion())
print "build name: %s"%(r.GetBuildName())
print "SD Card name: " + r.GetNextFilename(True)
while True:
filename = r.GetNextFilename(False)
if filename == '\x00':
break
print ' ' + filename
|
Add stats dump example (used as a sanity check of the protocol against the machine)"""
Read some statistics from a connected machine, and dump them to the console.
Requires these modules:
* pySerial: http://pypi.python.org/pypi/pyserial
"""
# To use this example without installing s3g, we need this hack:
import os, sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import s3g
import serial
import optparse
parser = optparse.OptionParser()
parser.add_option("-s", "--serialport", dest="serialportname",
help="serial port (ex: /dev/ttyUSB0)", default="/dev/ttyACM0")
parser.add_option("-p", "--oscport", dest="oscport",
help="OSC port to listen on", default="10000")
(options, args) = parser.parse_args()
r = s3g.s3g()
r.file = serial.Serial(options.serialportname, 115200, timeout=0)
print "firmware version: %i"%(r.GetVersion())
print "build name: %s"%(r.GetBuildName())
print "SD Card name: " + r.GetNextFilename(True)
while True:
filename = r.GetNextFilename(False)
if filename == '\x00':
break
print ' ' + filename
|
<commit_before><commit_msg>Add stats dump example (used as a sanity check of the protocol against the machine)<commit_after>"""
Read some statistics from a connected machine, and dump them to the console.
Requires these modules:
* pySerial: http://pypi.python.org/pypi/pyserial
"""
# To use this example without installing s3g, we need this hack:
import os, sys
lib_path = os.path.abspath('../')
sys.path.append(lib_path)
import s3g
import serial
import optparse
parser = optparse.OptionParser()
parser.add_option("-s", "--serialport", dest="serialportname",
help="serial port (ex: /dev/ttyUSB0)", default="/dev/ttyACM0")
parser.add_option("-p", "--oscport", dest="oscport",
help="OSC port to listen on", default="10000")
(options, args) = parser.parse_args()
r = s3g.s3g()
r.file = serial.Serial(options.serialportname, 115200, timeout=0)
print "firmware version: %i"%(r.GetVersion())
print "build name: %s"%(r.GetBuildName())
print "SD Card name: " + r.GetNextFilename(True)
while True:
filename = r.GetNextFilename(False)
if filename == '\x00':
break
print ' ' + filename
|
|
335d2fe16ca3402785f4b51b2dcc8195fe8a2906
|
kerastuner/engine/hypermodel.py
|
kerastuner/engine/hypermodel.py
|
# Copyright 2019 The Keras Tuner Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"HyperModel base class."
class HyperModel(object):
def __init__(self, name=None, tunable=True):
self.name = name
def build(self, hp):
raise NotImplementedError
class DefaultHyperModel(HyperModel):
def __init__(self, build, name=None, tunable=True):
super(DefaultHyperModel, self).__init__(name=name)
self.build = build
|
Add base HyperModel class and DefaultHyperModel.
|
Add base HyperModel class and DefaultHyperModel.
|
Python
|
apache-2.0
|
keras-team/keras-tuner,keras-team/keras-tuner
|
Add base HyperModel class and DefaultHyperModel.
|
# Copyright 2019 The Keras Tuner Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"HyperModel base class."
class HyperModel(object):
def __init__(self, name=None, tunable=True):
self.name = name
def build(self, hp):
raise NotImplementedError
class DefaultHyperModel(HyperModel):
def __init__(self, build, name=None, tunable=True):
super(DefaultHyperModel, self).__init__(name=name)
self.build = build
|
<commit_before><commit_msg>Add base HyperModel class and DefaultHyperModel.<commit_after>
|
# Copyright 2019 The Keras Tuner Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"HyperModel base class."
class HyperModel(object):
def __init__(self, name=None, tunable=True):
self.name = name
def build(self, hp):
raise NotImplementedError
class DefaultHyperModel(HyperModel):
def __init__(self, build, name=None, tunable=True):
super(DefaultHyperModel, self).__init__(name=name)
self.build = build
|
Add base HyperModel class and DefaultHyperModel.# Copyright 2019 The Keras Tuner Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"HyperModel base class."
class HyperModel(object):
def __init__(self, name=None, tunable=True):
self.name = name
def build(self, hp):
raise NotImplementedError
class DefaultHyperModel(HyperModel):
def __init__(self, build, name=None, tunable=True):
super(DefaultHyperModel, self).__init__(name=name)
self.build = build
|
<commit_before><commit_msg>Add base HyperModel class and DefaultHyperModel.<commit_after># Copyright 2019 The Keras Tuner Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"HyperModel base class."
class HyperModel(object):
def __init__(self, name=None, tunable=True):
self.name = name
def build(self, hp):
raise NotImplementedError
class DefaultHyperModel(HyperModel):
def __init__(self, build, name=None, tunable=True):
super(DefaultHyperModel, self).__init__(name=name)
self.build = build
|
|
c95b1d7be730f4f4dc39f73f0adb4574c99861fb
|
gpi/cdf97_GPI.py
|
gpi/cdf97_GPI.py
|
from __future__ import absolute_import, division, print_function
import os
# gpi, future
import gpi
from bart.gpi.borg import IFilePath, OFilePath, Command
# bart
import bart
base_path = bart.__path__[0] # library base for executables
import bart.python.cfl as cfl
class ExternalNode(gpi.NodeAPI):
"""Usage: cdf97 [-i] bitmask <input> <output>
Perform a wavelet (cdf97) transform.
-i inverse
-h help
"""
def initUI(self):
# Widgets
self.addWidget('SpinBox', 'bitmask', val=3)
self.addWidget('PushButton', 'compute', toggle=True)
self.addWidget(
'PushButton', 'direction', button_title='FORWARD', toggle=True)
# IO Ports
self.addInPort('input', 'NPYarray')
self.addOutPort('output', 'NPYarray')
return 0
def validate(self):
'''update the widget bounds based on the input data
'''
if 'direction' in self.widgetEvents():
direction = self.getVal('direction')
if direction:
self.setAttr('direction', button_title="INVERSE")
else:
self.setAttr('direction', button_title="FORWARD")
return 0
def compute(self):
if self.getVal('compute'):
direction = self.getVal('direction')
bm = self.getVal('bitmask')
kspace = self.getData('input')
# load up arguments list
args = [base_path+'/cdf97']
if direction != 0:
args += ['-i']
args += [str(bm)]
in1 = IFilePath(cfl.writecfl, kspace, asuffix=['.cfl', '.hdr'])
args += [in1]
# setup file for getting data from external command
out = OFilePath(cfl.readcfl, asuffix=['.cfl','.hdr'])
args += [out]
# run commandline
print(Command(*args))
self.setData('output', out.data())
out.close()
return 0
|
Add wavelet transform node from Andrew Li
|
Add wavelet transform node from Andrew Li
|
Python
|
bsd-3-clause
|
nckz/bart,nckz/bart,nckz/bart,nckz/bart,nckz/bart
|
Add wavelet transform node from Andrew Li
|
from __future__ import absolute_import, division, print_function
import os
# gpi, future
import gpi
from bart.gpi.borg import IFilePath, OFilePath, Command
# bart
import bart
base_path = bart.__path__[0] # library base for executables
import bart.python.cfl as cfl
class ExternalNode(gpi.NodeAPI):
"""Usage: cdf97 [-i] bitmask <input> <output>
Perform a wavelet (cdf97) transform.
-i inverse
-h help
"""
def initUI(self):
# Widgets
self.addWidget('SpinBox', 'bitmask', val=3)
self.addWidget('PushButton', 'compute', toggle=True)
self.addWidget(
'PushButton', 'direction', button_title='FORWARD', toggle=True)
# IO Ports
self.addInPort('input', 'NPYarray')
self.addOutPort('output', 'NPYarray')
return 0
def validate(self):
'''update the widget bounds based on the input data
'''
if 'direction' in self.widgetEvents():
direction = self.getVal('direction')
if direction:
self.setAttr('direction', button_title="INVERSE")
else:
self.setAttr('direction', button_title="FORWARD")
return 0
def compute(self):
if self.getVal('compute'):
direction = self.getVal('direction')
bm = self.getVal('bitmask')
kspace = self.getData('input')
# load up arguments list
args = [base_path+'/cdf97']
if direction != 0:
args += ['-i']
args += [str(bm)]
in1 = IFilePath(cfl.writecfl, kspace, asuffix=['.cfl', '.hdr'])
args += [in1]
# setup file for getting data from external command
out = OFilePath(cfl.readcfl, asuffix=['.cfl','.hdr'])
args += [out]
# run commandline
print(Command(*args))
self.setData('output', out.data())
out.close()
return 0
|
<commit_before><commit_msg>Add wavelet transform node from Andrew Li<commit_after>
|
from __future__ import absolute_import, division, print_function
import os
# gpi, future
import gpi
from bart.gpi.borg import IFilePath, OFilePath, Command
# bart
import bart
base_path = bart.__path__[0] # library base for executables
import bart.python.cfl as cfl
class ExternalNode(gpi.NodeAPI):
"""Usage: cdf97 [-i] bitmask <input> <output>
Perform a wavelet (cdf97) transform.
-i inverse
-h help
"""
def initUI(self):
# Widgets
self.addWidget('SpinBox', 'bitmask', val=3)
self.addWidget('PushButton', 'compute', toggle=True)
self.addWidget(
'PushButton', 'direction', button_title='FORWARD', toggle=True)
# IO Ports
self.addInPort('input', 'NPYarray')
self.addOutPort('output', 'NPYarray')
return 0
def validate(self):
'''update the widget bounds based on the input data
'''
if 'direction' in self.widgetEvents():
direction = self.getVal('direction')
if direction:
self.setAttr('direction', button_title="INVERSE")
else:
self.setAttr('direction', button_title="FORWARD")
return 0
def compute(self):
if self.getVal('compute'):
direction = self.getVal('direction')
bm = self.getVal('bitmask')
kspace = self.getData('input')
# load up arguments list
args = [base_path+'/cdf97']
if direction != 0:
args += ['-i']
args += [str(bm)]
in1 = IFilePath(cfl.writecfl, kspace, asuffix=['.cfl', '.hdr'])
args += [in1]
# setup file for getting data from external command
out = OFilePath(cfl.readcfl, asuffix=['.cfl','.hdr'])
args += [out]
# run commandline
print(Command(*args))
self.setData('output', out.data())
out.close()
return 0
|
Add wavelet transform node from Andrew Lifrom __future__ import absolute_import, division, print_function
import os
# gpi, future
import gpi
from bart.gpi.borg import IFilePath, OFilePath, Command
# bart
import bart
base_path = bart.__path__[0] # library base for executables
import bart.python.cfl as cfl
class ExternalNode(gpi.NodeAPI):
"""Usage: cdf97 [-i] bitmask <input> <output>
Perform a wavelet (cdf97) transform.
-i inverse
-h help
"""
def initUI(self):
# Widgets
self.addWidget('SpinBox', 'bitmask', val=3)
self.addWidget('PushButton', 'compute', toggle=True)
self.addWidget(
'PushButton', 'direction', button_title='FORWARD', toggle=True)
# IO Ports
self.addInPort('input', 'NPYarray')
self.addOutPort('output', 'NPYarray')
return 0
def validate(self):
'''update the widget bounds based on the input data
'''
if 'direction' in self.widgetEvents():
direction = self.getVal('direction')
if direction:
self.setAttr('direction', button_title="INVERSE")
else:
self.setAttr('direction', button_title="FORWARD")
return 0
def compute(self):
if self.getVal('compute'):
direction = self.getVal('direction')
bm = self.getVal('bitmask')
kspace = self.getData('input')
# load up arguments list
args = [base_path+'/cdf97']
if direction != 0:
args += ['-i']
args += [str(bm)]
in1 = IFilePath(cfl.writecfl, kspace, asuffix=['.cfl', '.hdr'])
args += [in1]
# setup file for getting data from external command
out = OFilePath(cfl.readcfl, asuffix=['.cfl','.hdr'])
args += [out]
# run commandline
print(Command(*args))
self.setData('output', out.data())
out.close()
return 0
|
<commit_before><commit_msg>Add wavelet transform node from Andrew Li<commit_after>from __future__ import absolute_import, division, print_function
import os
# gpi, future
import gpi
from bart.gpi.borg import IFilePath, OFilePath, Command
# bart
import bart
base_path = bart.__path__[0] # library base for executables
import bart.python.cfl as cfl
class ExternalNode(gpi.NodeAPI):
"""Usage: cdf97 [-i] bitmask <input> <output>
Perform a wavelet (cdf97) transform.
-i inverse
-h help
"""
def initUI(self):
# Widgets
self.addWidget('SpinBox', 'bitmask', val=3)
self.addWidget('PushButton', 'compute', toggle=True)
self.addWidget(
'PushButton', 'direction', button_title='FORWARD', toggle=True)
# IO Ports
self.addInPort('input', 'NPYarray')
self.addOutPort('output', 'NPYarray')
return 0
def validate(self):
'''update the widget bounds based on the input data
'''
if 'direction' in self.widgetEvents():
direction = self.getVal('direction')
if direction:
self.setAttr('direction', button_title="INVERSE")
else:
self.setAttr('direction', button_title="FORWARD")
return 0
def compute(self):
if self.getVal('compute'):
direction = self.getVal('direction')
bm = self.getVal('bitmask')
kspace = self.getData('input')
# load up arguments list
args = [base_path+'/cdf97']
if direction != 0:
args += ['-i']
args += [str(bm)]
in1 = IFilePath(cfl.writecfl, kspace, asuffix=['.cfl', '.hdr'])
args += [in1]
# setup file for getting data from external command
out = OFilePath(cfl.readcfl, asuffix=['.cfl','.hdr'])
args += [out]
# run commandline
print(Command(*args))
self.setData('output', out.data())
out.close()
return 0
|
|
681fdb7d94d662cfae7040df6960739cdff41ab6
|
firecares/firestation/management/commands/departments_to_es.py
|
firecares/firestation/management/commands/departments_to_es.py
|
import json
import elasticsearch
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
from elasticsearch import Elasticsearch, RequestsHttpConnection
from requests_aws4auth import AWS4Auth
from django.conf import settings
class Command(BaseCommand):
help = 'Loads all departments in ElasticSearch.'
def add_arguments(self, parser):
parser.add_argument('--host', dest='host', default='localhost:9200')
parser.add_argument('--region', dest='region', default='us-east-1')
def handle(self, *args, **options):
host = options.get('host')
region = options.get('region')
awsauth = AWS4Auth(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, region, 'es')
es = Elasticsearch(
hosts=[{'host': host, 'port': 443}],
http_auth=awsauth,
use_ssl=True,
verify_certs=True,
connection_class=RequestsHttpConnection
)
for fd in FireDepartment.objects.all():
out = dict(id=fd.id,
fd_id=fd.fdid,
name=fd.name,
address_line1=fd.headquarters_address.address_line1,
address_line2=fd.headquarters_address.address_line2,
city=fd.headquarters_address.city,
state=fd.headquarters_address.state_province,
postal_code=fd.headquarters_address.postal_code,
country=fd.headquarters_address.country.iso_code,
modified=fd.modified.isoformat())
res = es.index(index='firecares', doc_type='department', id=fd.id, body=json.dumps(out))
print res
print 'Wrote {} to ES'.format(fd.name)
|
Add script that exports departments to elastic search.
|
Add script that exports departments to elastic search.
|
Python
|
mit
|
FireCARES/firecares,HunterConnelly/firecares,FireCARES/firecares,FireCARES/firecares,HunterConnelly/firecares,HunterConnelly/firecares,HunterConnelly/firecares,FireCARES/firecares,FireCARES/firecares
|
Add script that exports departments to elastic search.
|
import json
import elasticsearch
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
from elasticsearch import Elasticsearch, RequestsHttpConnection
from requests_aws4auth import AWS4Auth
from django.conf import settings
class Command(BaseCommand):
help = 'Loads all departments in ElasticSearch.'
def add_arguments(self, parser):
parser.add_argument('--host', dest='host', default='localhost:9200')
parser.add_argument('--region', dest='region', default='us-east-1')
def handle(self, *args, **options):
host = options.get('host')
region = options.get('region')
awsauth = AWS4Auth(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, region, 'es')
es = Elasticsearch(
hosts=[{'host': host, 'port': 443}],
http_auth=awsauth,
use_ssl=True,
verify_certs=True,
connection_class=RequestsHttpConnection
)
for fd in FireDepartment.objects.all():
out = dict(id=fd.id,
fd_id=fd.fdid,
name=fd.name,
address_line1=fd.headquarters_address.address_line1,
address_line2=fd.headquarters_address.address_line2,
city=fd.headquarters_address.city,
state=fd.headquarters_address.state_province,
postal_code=fd.headquarters_address.postal_code,
country=fd.headquarters_address.country.iso_code,
modified=fd.modified.isoformat())
res = es.index(index='firecares', doc_type='department', id=fd.id, body=json.dumps(out))
print res
print 'Wrote {} to ES'.format(fd.name)
|
<commit_before><commit_msg>Add script that exports departments to elastic search.<commit_after>
|
import json
import elasticsearch
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
from elasticsearch import Elasticsearch, RequestsHttpConnection
from requests_aws4auth import AWS4Auth
from django.conf import settings
class Command(BaseCommand):
help = 'Loads all departments in ElasticSearch.'
def add_arguments(self, parser):
parser.add_argument('--host', dest='host', default='localhost:9200')
parser.add_argument('--region', dest='region', default='us-east-1')
def handle(self, *args, **options):
host = options.get('host')
region = options.get('region')
awsauth = AWS4Auth(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, region, 'es')
es = Elasticsearch(
hosts=[{'host': host, 'port': 443}],
http_auth=awsauth,
use_ssl=True,
verify_certs=True,
connection_class=RequestsHttpConnection
)
for fd in FireDepartment.objects.all():
out = dict(id=fd.id,
fd_id=fd.fdid,
name=fd.name,
address_line1=fd.headquarters_address.address_line1,
address_line2=fd.headquarters_address.address_line2,
city=fd.headquarters_address.city,
state=fd.headquarters_address.state_province,
postal_code=fd.headquarters_address.postal_code,
country=fd.headquarters_address.country.iso_code,
modified=fd.modified.isoformat())
res = es.index(index='firecares', doc_type='department', id=fd.id, body=json.dumps(out))
print res
print 'Wrote {} to ES'.format(fd.name)
|
Add script that exports departments to elastic search.import json
import elasticsearch
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
from elasticsearch import Elasticsearch, RequestsHttpConnection
from requests_aws4auth import AWS4Auth
from django.conf import settings
class Command(BaseCommand):
help = 'Loads all departments in ElasticSearch.'
def add_arguments(self, parser):
parser.add_argument('--host', dest='host', default='localhost:9200')
parser.add_argument('--region', dest='region', default='us-east-1')
def handle(self, *args, **options):
host = options.get('host')
region = options.get('region')
awsauth = AWS4Auth(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, region, 'es')
es = Elasticsearch(
hosts=[{'host': host, 'port': 443}],
http_auth=awsauth,
use_ssl=True,
verify_certs=True,
connection_class=RequestsHttpConnection
)
for fd in FireDepartment.objects.all():
out = dict(id=fd.id,
fd_id=fd.fdid,
name=fd.name,
address_line1=fd.headquarters_address.address_line1,
address_line2=fd.headquarters_address.address_line2,
city=fd.headquarters_address.city,
state=fd.headquarters_address.state_province,
postal_code=fd.headquarters_address.postal_code,
country=fd.headquarters_address.country.iso_code,
modified=fd.modified.isoformat())
res = es.index(index='firecares', doc_type='department', id=fd.id, body=json.dumps(out))
print res
print 'Wrote {} to ES'.format(fd.name)
|
<commit_before><commit_msg>Add script that exports departments to elastic search.<commit_after>import json
import elasticsearch
from django.core.management.base import BaseCommand
from firecares.firestation.models import FireDepartment
from elasticsearch import Elasticsearch, RequestsHttpConnection
from requests_aws4auth import AWS4Auth
from django.conf import settings
class Command(BaseCommand):
help = 'Loads all departments in ElasticSearch.'
def add_arguments(self, parser):
parser.add_argument('--host', dest='host', default='localhost:9200')
parser.add_argument('--region', dest='region', default='us-east-1')
def handle(self, *args, **options):
host = options.get('host')
region = options.get('region')
awsauth = AWS4Auth(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_ACCESS_KEY, region, 'es')
es = Elasticsearch(
hosts=[{'host': host, 'port': 443}],
http_auth=awsauth,
use_ssl=True,
verify_certs=True,
connection_class=RequestsHttpConnection
)
for fd in FireDepartment.objects.all():
out = dict(id=fd.id,
fd_id=fd.fdid,
name=fd.name,
address_line1=fd.headquarters_address.address_line1,
address_line2=fd.headquarters_address.address_line2,
city=fd.headquarters_address.city,
state=fd.headquarters_address.state_province,
postal_code=fd.headquarters_address.postal_code,
country=fd.headquarters_address.country.iso_code,
modified=fd.modified.isoformat())
res = es.index(index='firecares', doc_type='department', id=fd.id, body=json.dumps(out))
print res
print 'Wrote {} to ES'.format(fd.name)
|
|
4230c72fd66fde18b2d697206a60c0346f3df797
|
tests/visual_tests/test_python.py
|
tests/visual_tests/test_python.py
|
#!/usr/bin/env python
import mapnik
import sys
class MyText(mapnik.FormatingNode):
def __init__(self):
mapnik.FormatingNode.__init__(self)
self.expr = mapnik.Expression("[name]")
def apply(self, properties, feature, output):
colors = [mapnik.Color('red'),
mapnik.Color('green'),
mapnik.Color('blue')]
text = "Test" #self.expr.evaluate(feature)
i = 0
my_properties = properties #mapnik.CharProperties(properties)
for char in text:
my_properties.fill = colors[i % len(colors)]
output.append(my_properties, char)
i += 1
m = mapnik.Map(600,300)
m.background = mapnik.Color('white')
text = mapnik.TextSymbolizer()
text.face_name = 'DejaVu Sans Book'
point = mapnik.PointSymbolizer()
rule = mapnik.Rule()
rule.symbols.append(text)
rule.symbols.append(point)
style = mapnik.Style()
style.rules.append(rule)
m.append_style('Style', style)
layer = mapnik.Layer('Layer')
layer.datasource = mapnik.Shapefile(file="points.shp")
layer.styles.append('Style')
m.layers.append(layer)
m.zoom_all()
format_trees = [
('TextNode', mapnik.FormatingTextNode(mapnik.Expression("[name]"))),
('MyText', MyText())
]
for format_tree in format_trees:
text.placements.defaults.format_tree = format_tree[1]
mapnik.render_to_file(m, 'python-%s.png' % format_tree[0], 'png')
|
Test case for python bindings.
|
Test case for python bindings.
|
Python
|
lgpl-2.1
|
davenquinn/python-mapnik,rouault/mapnik,qianwenming/mapnik,Mappy/mapnik,davenquinn/python-mapnik,qianwenming/mapnik,yohanboniface/python-mapnik,stefanklug/mapnik,Airphrame/mapnik,rouault/mapnik,garnertb/python-mapnik,jwomeara/mapnik,mbrukman/mapnik,Airphrame/mapnik,pnorman/mapnik,lightmare/mapnik,mbrukman/mapnik,CartoDB/mapnik,mapycz/mapnik,Mappy/mapnik,Uli1/mapnik,manz/python-mapnik,tomhughes/python-mapnik,strk/mapnik,yiqingj/work,tomhughes/mapnik,cjmayo/mapnik,jwomeara/mapnik,kapouer/mapnik,cjmayo/mapnik,pnorman/mapnik,pramsey/mapnik,Uli1/mapnik,qianwenming/mapnik,garnertb/python-mapnik,mapycz/mapnik,CartoDB/mapnik,kapouer/mapnik,CartoDB/mapnik,yiqingj/work,mbrukman/mapnik,rouault/mapnik,tomhughes/python-mapnik,tomhughes/mapnik,rouault/mapnik,Airphrame/mapnik,mapycz/python-mapnik,strk/mapnik,Airphrame/mapnik,mapnik/mapnik,mapnik/mapnik,mbrukman/mapnik,mapycz/mapnik,pnorman/mapnik,lightmare/mapnik,manz/python-mapnik,lightmare/mapnik,cjmayo/mapnik,cjmayo/mapnik,qianwenming/mapnik,sebastic/python-mapnik,zerebubuth/mapnik,yohanboniface/python-mapnik,qianwenming/mapnik,sebastic/python-mapnik,mapnik/python-mapnik,mapnik/mapnik,jwomeara/mapnik,mapnik/mapnik,tomhughes/python-mapnik,naturalatlas/mapnik,yiqingj/work,Mappy/mapnik,Uli1/mapnik,manz/python-mapnik,strk/mapnik,mapnik/python-mapnik,sebastic/python-mapnik,mapycz/python-mapnik,pnorman/mapnik,whuaegeanse/mapnik,stefanklug/mapnik,whuaegeanse/mapnik,naturalatlas/mapnik,yiqingj/work,naturalatlas/mapnik,pramsey/mapnik,lightmare/mapnik,tomhughes/mapnik,garnertb/python-mapnik,jwomeara/mapnik,whuaegeanse/mapnik,kapouer/mapnik,pramsey/mapnik,Mappy/mapnik,stefanklug/mapnik,zerebubuth/mapnik,pramsey/mapnik,davenquinn/python-mapnik,tomhughes/mapnik,naturalatlas/mapnik,kapouer/mapnik,whuaegeanse/mapnik,Uli1/mapnik,yohanboniface/python-mapnik,strk/mapnik,stefanklug/mapnik,zerebubuth/mapnik,mapnik/python-mapnik
|
Test case for python bindings.
|
#!/usr/bin/env python
import mapnik
import sys
class MyText(mapnik.FormatingNode):
def __init__(self):
mapnik.FormatingNode.__init__(self)
self.expr = mapnik.Expression("[name]")
def apply(self, properties, feature, output):
colors = [mapnik.Color('red'),
mapnik.Color('green'),
mapnik.Color('blue')]
text = "Test" #self.expr.evaluate(feature)
i = 0
my_properties = properties #mapnik.CharProperties(properties)
for char in text:
my_properties.fill = colors[i % len(colors)]
output.append(my_properties, char)
i += 1
m = mapnik.Map(600,300)
m.background = mapnik.Color('white')
text = mapnik.TextSymbolizer()
text.face_name = 'DejaVu Sans Book'
point = mapnik.PointSymbolizer()
rule = mapnik.Rule()
rule.symbols.append(text)
rule.symbols.append(point)
style = mapnik.Style()
style.rules.append(rule)
m.append_style('Style', style)
layer = mapnik.Layer('Layer')
layer.datasource = mapnik.Shapefile(file="points.shp")
layer.styles.append('Style')
m.layers.append(layer)
m.zoom_all()
format_trees = [
('TextNode', mapnik.FormatingTextNode(mapnik.Expression("[name]"))),
('MyText', MyText())
]
for format_tree in format_trees:
text.placements.defaults.format_tree = format_tree[1]
mapnik.render_to_file(m, 'python-%s.png' % format_tree[0], 'png')
|
<commit_before><commit_msg>Test case for python bindings.<commit_after>
|
#!/usr/bin/env python
import mapnik
import sys
class MyText(mapnik.FormatingNode):
def __init__(self):
mapnik.FormatingNode.__init__(self)
self.expr = mapnik.Expression("[name]")
def apply(self, properties, feature, output):
colors = [mapnik.Color('red'),
mapnik.Color('green'),
mapnik.Color('blue')]
text = "Test" #self.expr.evaluate(feature)
i = 0
my_properties = properties #mapnik.CharProperties(properties)
for char in text:
my_properties.fill = colors[i % len(colors)]
output.append(my_properties, char)
i += 1
m = mapnik.Map(600,300)
m.background = mapnik.Color('white')
text = mapnik.TextSymbolizer()
text.face_name = 'DejaVu Sans Book'
point = mapnik.PointSymbolizer()
rule = mapnik.Rule()
rule.symbols.append(text)
rule.symbols.append(point)
style = mapnik.Style()
style.rules.append(rule)
m.append_style('Style', style)
layer = mapnik.Layer('Layer')
layer.datasource = mapnik.Shapefile(file="points.shp")
layer.styles.append('Style')
m.layers.append(layer)
m.zoom_all()
format_trees = [
('TextNode', mapnik.FormatingTextNode(mapnik.Expression("[name]"))),
('MyText', MyText())
]
for format_tree in format_trees:
text.placements.defaults.format_tree = format_tree[1]
mapnik.render_to_file(m, 'python-%s.png' % format_tree[0], 'png')
|
Test case for python bindings.#!/usr/bin/env python
import mapnik
import sys
class MyText(mapnik.FormatingNode):
def __init__(self):
mapnik.FormatingNode.__init__(self)
self.expr = mapnik.Expression("[name]")
def apply(self, properties, feature, output):
colors = [mapnik.Color('red'),
mapnik.Color('green'),
mapnik.Color('blue')]
text = "Test" #self.expr.evaluate(feature)
i = 0
my_properties = properties #mapnik.CharProperties(properties)
for char in text:
my_properties.fill = colors[i % len(colors)]
output.append(my_properties, char)
i += 1
m = mapnik.Map(600,300)
m.background = mapnik.Color('white')
text = mapnik.TextSymbolizer()
text.face_name = 'DejaVu Sans Book'
point = mapnik.PointSymbolizer()
rule = mapnik.Rule()
rule.symbols.append(text)
rule.symbols.append(point)
style = mapnik.Style()
style.rules.append(rule)
m.append_style('Style', style)
layer = mapnik.Layer('Layer')
layer.datasource = mapnik.Shapefile(file="points.shp")
layer.styles.append('Style')
m.layers.append(layer)
m.zoom_all()
format_trees = [
('TextNode', mapnik.FormatingTextNode(mapnik.Expression("[name]"))),
('MyText', MyText())
]
for format_tree in format_trees:
text.placements.defaults.format_tree = format_tree[1]
mapnik.render_to_file(m, 'python-%s.png' % format_tree[0], 'png')
|
<commit_before><commit_msg>Test case for python bindings.<commit_after>#!/usr/bin/env python
import mapnik
import sys
class MyText(mapnik.FormatingNode):
def __init__(self):
mapnik.FormatingNode.__init__(self)
self.expr = mapnik.Expression("[name]")
def apply(self, properties, feature, output):
colors = [mapnik.Color('red'),
mapnik.Color('green'),
mapnik.Color('blue')]
text = "Test" #self.expr.evaluate(feature)
i = 0
my_properties = properties #mapnik.CharProperties(properties)
for char in text:
my_properties.fill = colors[i % len(colors)]
output.append(my_properties, char)
i += 1
m = mapnik.Map(600,300)
m.background = mapnik.Color('white')
text = mapnik.TextSymbolizer()
text.face_name = 'DejaVu Sans Book'
point = mapnik.PointSymbolizer()
rule = mapnik.Rule()
rule.symbols.append(text)
rule.symbols.append(point)
style = mapnik.Style()
style.rules.append(rule)
m.append_style('Style', style)
layer = mapnik.Layer('Layer')
layer.datasource = mapnik.Shapefile(file="points.shp")
layer.styles.append('Style')
m.layers.append(layer)
m.zoom_all()
format_trees = [
('TextNode', mapnik.FormatingTextNode(mapnik.Expression("[name]"))),
('MyText', MyText())
]
for format_tree in format_trees:
text.placements.defaults.format_tree = format_tree[1]
mapnik.render_to_file(m, 'python-%s.png' % format_tree[0], 'png')
|
|
7cb08c04277a5d764d542e0718ad0e9ade1312e7
|
filterkeys.py
|
filterkeys.py
|
#!/usr/bin/env python
import pprint
import sys
import redis
import yajl
def main():
lines = [l.strip() for l in sys.stdin.xreadlines()]
r = redis.Redis()
keys = r.keys().split(' ')
assert r.dbsize() == len(keys), (len(keys), 'Not enough keys!')
print '>>> Exactly %d keys to iterate through' % len(keys)
for line in lines:
if not line:
continue
print '>> Filtering %s' % line
matches = []
for key in keys:
value = None
try:
value = yajl.loads(r.get(key))
if not value.get('url'):
continue
if value['url'].find(line) >= 0:
matches.append(key)
except ValueError:
pass
print '>>> Found %d bad keys' % len(matches)
for badkey in matches:
r.delete(badkey)
return 0
if __name__ == '__main__':
exit(main())
|
Add a really expensive script to iterate and obliterate spammy domains from the data store
|
Add a really expensive script to iterate and obliterate spammy domains from the data store
|
Python
|
mit
|
rtyler/urlenco.de
|
Add a really expensive script to iterate and obliterate spammy domains from the data store
|
#!/usr/bin/env python
import pprint
import sys
import redis
import yajl
def main():
lines = [l.strip() for l in sys.stdin.xreadlines()]
r = redis.Redis()
keys = r.keys().split(' ')
assert r.dbsize() == len(keys), (len(keys), 'Not enough keys!')
print '>>> Exactly %d keys to iterate through' % len(keys)
for line in lines:
if not line:
continue
print '>> Filtering %s' % line
matches = []
for key in keys:
value = None
try:
value = yajl.loads(r.get(key))
if not value.get('url'):
continue
if value['url'].find(line) >= 0:
matches.append(key)
except ValueError:
pass
print '>>> Found %d bad keys' % len(matches)
for badkey in matches:
r.delete(badkey)
return 0
if __name__ == '__main__':
exit(main())
|
<commit_before><commit_msg>Add a really expensive script to iterate and obliterate spammy domains from the data store<commit_after>
|
#!/usr/bin/env python
import pprint
import sys
import redis
import yajl
def main():
lines = [l.strip() for l in sys.stdin.xreadlines()]
r = redis.Redis()
keys = r.keys().split(' ')
assert r.dbsize() == len(keys), (len(keys), 'Not enough keys!')
print '>>> Exactly %d keys to iterate through' % len(keys)
for line in lines:
if not line:
continue
print '>> Filtering %s' % line
matches = []
for key in keys:
value = None
try:
value = yajl.loads(r.get(key))
if not value.get('url'):
continue
if value['url'].find(line) >= 0:
matches.append(key)
except ValueError:
pass
print '>>> Found %d bad keys' % len(matches)
for badkey in matches:
r.delete(badkey)
return 0
if __name__ == '__main__':
exit(main())
|
Add a really expensive script to iterate and obliterate spammy domains from the data store#!/usr/bin/env python
import pprint
import sys
import redis
import yajl
def main():
lines = [l.strip() for l in sys.stdin.xreadlines()]
r = redis.Redis()
keys = r.keys().split(' ')
assert r.dbsize() == len(keys), (len(keys), 'Not enough keys!')
print '>>> Exactly %d keys to iterate through' % len(keys)
for line in lines:
if not line:
continue
print '>> Filtering %s' % line
matches = []
for key in keys:
value = None
try:
value = yajl.loads(r.get(key))
if not value.get('url'):
continue
if value['url'].find(line) >= 0:
matches.append(key)
except ValueError:
pass
print '>>> Found %d bad keys' % len(matches)
for badkey in matches:
r.delete(badkey)
return 0
if __name__ == '__main__':
exit(main())
|
<commit_before><commit_msg>Add a really expensive script to iterate and obliterate spammy domains from the data store<commit_after>#!/usr/bin/env python
import pprint
import sys
import redis
import yajl
def main():
lines = [l.strip() for l in sys.stdin.xreadlines()]
r = redis.Redis()
keys = r.keys().split(' ')
assert r.dbsize() == len(keys), (len(keys), 'Not enough keys!')
print '>>> Exactly %d keys to iterate through' % len(keys)
for line in lines:
if not line:
continue
print '>> Filtering %s' % line
matches = []
for key in keys:
value = None
try:
value = yajl.loads(r.get(key))
if not value.get('url'):
continue
if value['url'].find(line) >= 0:
matches.append(key)
except ValueError:
pass
print '>>> Found %d bad keys' % len(matches)
for badkey in matches:
r.delete(badkey)
return 0
if __name__ == '__main__':
exit(main())
|
|
dd697f34c5f1877f91447397dc0f4d78472573a2
|
tests/test_runner.py
|
tests/test_runner.py
|
import unittest
from mo.runner import Variable
class TestVariable(unittest.TestCase):
def test_default(self):
v = Variable('name', {'default': 'default'})
self.assertEqual(v.value, 'default')
self.assertEqual(str(v), 'default')
def test_value(self):
v = Variable('name', {'default': 'default'}, 'value')
self.assertEqual(v.value, 'value')
self.assertEqual(str(v), 'value')
def test_str(self):
v = Variable('name', {'default': 'abc'})
self.assertEqual(str(v), v.value)
|
Add some tests for variables
|
Add some tests for variables
|
Python
|
mit
|
thomasleese/mo
|
Add some tests for variables
|
import unittest
from mo.runner import Variable
class TestVariable(unittest.TestCase):
def test_default(self):
v = Variable('name', {'default': 'default'})
self.assertEqual(v.value, 'default')
self.assertEqual(str(v), 'default')
def test_value(self):
v = Variable('name', {'default': 'default'}, 'value')
self.assertEqual(v.value, 'value')
self.assertEqual(str(v), 'value')
def test_str(self):
v = Variable('name', {'default': 'abc'})
self.assertEqual(str(v), v.value)
|
<commit_before><commit_msg>Add some tests for variables<commit_after>
|
import unittest
from mo.runner import Variable
class TestVariable(unittest.TestCase):
def test_default(self):
v = Variable('name', {'default': 'default'})
self.assertEqual(v.value, 'default')
self.assertEqual(str(v), 'default')
def test_value(self):
v = Variable('name', {'default': 'default'}, 'value')
self.assertEqual(v.value, 'value')
self.assertEqual(str(v), 'value')
def test_str(self):
v = Variable('name', {'default': 'abc'})
self.assertEqual(str(v), v.value)
|
Add some tests for variablesimport unittest
from mo.runner import Variable
class TestVariable(unittest.TestCase):
def test_default(self):
v = Variable('name', {'default': 'default'})
self.assertEqual(v.value, 'default')
self.assertEqual(str(v), 'default')
def test_value(self):
v = Variable('name', {'default': 'default'}, 'value')
self.assertEqual(v.value, 'value')
self.assertEqual(str(v), 'value')
def test_str(self):
v = Variable('name', {'default': 'abc'})
self.assertEqual(str(v), v.value)
|
<commit_before><commit_msg>Add some tests for variables<commit_after>import unittest
from mo.runner import Variable
class TestVariable(unittest.TestCase):
def test_default(self):
v = Variable('name', {'default': 'default'})
self.assertEqual(v.value, 'default')
self.assertEqual(str(v), 'default')
def test_value(self):
v = Variable('name', {'default': 'default'}, 'value')
self.assertEqual(v.value, 'value')
self.assertEqual(str(v), 'value')
def test_str(self):
v = Variable('name', {'default': 'abc'})
self.assertEqual(str(v), v.value)
|
|
1033fec3742160262be49652ae85ecbcd5e4c4f6
|
c_major_7.py
|
c_major_7.py
|
from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(1)
s.track2 = s.engine.track(2)
s.halfnote = (2, 0)
with s[0, 0]:
note = s.fm.note_on(n.C4, 10) | s.track1 | s
note.off() | s + s.halfnote
with s[0, 6]:
(s.fm.note_on(n.E4, 20) | s.track2 | s).off() | s + (4, 0)
with s[0, 12]:
for offset, note in enumerate([n.G4, n.B4, n.C5, n.E5, n.G5, n.B5, n.C6, n.E6, n.G6, n.B6]):
track = s.engine.track(3 + offset)
start = offset * 6
vel = (5 + offset) * 6
(s.fm.note_on(note, vel) | track | s + start).off() | s + start + s.halfnote
if __name__ == '__main__':
play(s)
input()
|
Add a s4ils example that uses more math
|
Add a s4ils example that uses more math
|
Python
|
unlicense
|
metrasynth/gallery,metrasynth/gallery,metrasynth/gallery
|
Add a s4ils example that uses more math
|
from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(1)
s.track2 = s.engine.track(2)
s.halfnote = (2, 0)
with s[0, 0]:
note = s.fm.note_on(n.C4, 10) | s.track1 | s
note.off() | s + s.halfnote
with s[0, 6]:
(s.fm.note_on(n.E4, 20) | s.track2 | s).off() | s + (4, 0)
with s[0, 12]:
for offset, note in enumerate([n.G4, n.B4, n.C5, n.E5, n.G5, n.B5, n.C6, n.E6, n.G6, n.B6]):
track = s.engine.track(3 + offset)
start = offset * 6
vel = (5 + offset) * 6
(s.fm.note_on(note, vel) | track | s + start).off() | s + start + s.halfnote
if __name__ == '__main__':
play(s)
input()
|
<commit_before><commit_msg>Add a s4ils example that uses more math<commit_after>
|
from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(1)
s.track2 = s.engine.track(2)
s.halfnote = (2, 0)
with s[0, 0]:
note = s.fm.note_on(n.C4, 10) | s.track1 | s
note.off() | s + s.halfnote
with s[0, 6]:
(s.fm.note_on(n.E4, 20) | s.track2 | s).off() | s + (4, 0)
with s[0, 12]:
for offset, note in enumerate([n.G4, n.B4, n.C5, n.E5, n.G5, n.B5, n.C6, n.E6, n.G6, n.B6]):
track = s.engine.track(3 + offset)
start = offset * 6
vel = (5 + offset) * 6
(s.fm.note_on(note, vel) | track | s + start).off() | s + start + s.halfnote
if __name__ == '__main__':
play(s)
input()
|
Add a s4ils example that uses more mathfrom s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(1)
s.track2 = s.engine.track(2)
s.halfnote = (2, 0)
with s[0, 0]:
note = s.fm.note_on(n.C4, 10) | s.track1 | s
note.off() | s + s.halfnote
with s[0, 6]:
(s.fm.note_on(n.E4, 20) | s.track2 | s).off() | s + (4, 0)
with s[0, 12]:
for offset, note in enumerate([n.G4, n.B4, n.C5, n.E5, n.G5, n.B5, n.C6, n.E6, n.G6, n.B6]):
track = s.engine.track(3 + offset)
start = offset * 6
vel = (5 + offset) * 6
(s.fm.note_on(note, vel) | track | s + start).off() | s + start + s.halfnote
if __name__ == '__main__':
play(s)
input()
|
<commit_before><commit_msg>Add a s4ils example that uses more math<commit_after>from s4ils import *
s = Session()
with s[INIT]:
s.engine = c.Engine() | s
s.fm = s.engine.new_module(rv.m.Fm) | s
s.engine.output << s.fm | s
s.track1 = s.engine.track(1)
s.track2 = s.engine.track(2)
s.halfnote = (2, 0)
with s[0, 0]:
note = s.fm.note_on(n.C4, 10) | s.track1 | s
note.off() | s + s.halfnote
with s[0, 6]:
(s.fm.note_on(n.E4, 20) | s.track2 | s).off() | s + (4, 0)
with s[0, 12]:
for offset, note in enumerate([n.G4, n.B4, n.C5, n.E5, n.G5, n.B5, n.C6, n.E6, n.G6, n.B6]):
track = s.engine.track(3 + offset)
start = offset * 6
vel = (5 + offset) * 6
(s.fm.note_on(note, vel) | track | s + start).off() | s + start + s.halfnote
if __name__ == '__main__':
play(s)
input()
|
|
0844ab636761b7fa47c63c15333546404466ac03
|
gevent_tools/tests/test_util.py
|
gevent_tools/tests/test_util.py
|
from nose.tools import with_setup, raises
from gevent_tools import util
from unittest import TestCase
class TestUtil(TestCase):
def setUp(self):
class Foo(object):
bar = util.defaultproperty(set)
def baz(self):
ret = []
for item in self.bar:
ret.append(item)
return ret
self.foo = Foo()
self.fuz = Foo()
def test_defaultproperty(self):
assert self.foo.bar == set(), "foo.bar is a set"
assert isinstance(self.foo.bar, set), "foo.bar is a set"
def test_default_property_mutable(self):
s = self.foo.bar
s2 = self.fuz.bar
s.add(3)
s2.add(5)
assert self.foo.bar == set([3]), "can mutate reference"
assert self.fuz.bar == set([5]), "but not all references"
def test_default_is_iterable(self):
assert self.foo.baz() == [], "can iterate over self reference"
def test_add_ass_first_dereference_works(self):
self.foo.bar.add(3)
assert self.foo.bar == set([3]), "previous line throws exception on fail"
|
Remove the need to call super.__init__ by lazily instantiating instance variables.
|
Remove the need to call super.__init__ by lazily instantiating instance variables.
|
Python
|
mit
|
progrium/ginkgo
|
Remove the need to call super.__init__ by lazily instantiating instance variables.
|
from nose.tools import with_setup, raises
from gevent_tools import util
from unittest import TestCase
class TestUtil(TestCase):
def setUp(self):
class Foo(object):
bar = util.defaultproperty(set)
def baz(self):
ret = []
for item in self.bar:
ret.append(item)
return ret
self.foo = Foo()
self.fuz = Foo()
def test_defaultproperty(self):
assert self.foo.bar == set(), "foo.bar is a set"
assert isinstance(self.foo.bar, set), "foo.bar is a set"
def test_default_property_mutable(self):
s = self.foo.bar
s2 = self.fuz.bar
s.add(3)
s2.add(5)
assert self.foo.bar == set([3]), "can mutate reference"
assert self.fuz.bar == set([5]), "but not all references"
def test_default_is_iterable(self):
assert self.foo.baz() == [], "can iterate over self reference"
def test_add_ass_first_dereference_works(self):
self.foo.bar.add(3)
assert self.foo.bar == set([3]), "previous line throws exception on fail"
|
<commit_before><commit_msg>Remove the need to call super.__init__ by lazily instantiating instance variables.<commit_after>
|
from nose.tools import with_setup, raises
from gevent_tools import util
from unittest import TestCase
class TestUtil(TestCase):
def setUp(self):
class Foo(object):
bar = util.defaultproperty(set)
def baz(self):
ret = []
for item in self.bar:
ret.append(item)
return ret
self.foo = Foo()
self.fuz = Foo()
def test_defaultproperty(self):
assert self.foo.bar == set(), "foo.bar is a set"
assert isinstance(self.foo.bar, set), "foo.bar is a set"
def test_default_property_mutable(self):
s = self.foo.bar
s2 = self.fuz.bar
s.add(3)
s2.add(5)
assert self.foo.bar == set([3]), "can mutate reference"
assert self.fuz.bar == set([5]), "but not all references"
def test_default_is_iterable(self):
assert self.foo.baz() == [], "can iterate over self reference"
def test_add_ass_first_dereference_works(self):
self.foo.bar.add(3)
assert self.foo.bar == set([3]), "previous line throws exception on fail"
|
Remove the need to call super.__init__ by lazily instantiating instance variables.from nose.tools import with_setup, raises
from gevent_tools import util
from unittest import TestCase
class TestUtil(TestCase):
def setUp(self):
class Foo(object):
bar = util.defaultproperty(set)
def baz(self):
ret = []
for item in self.bar:
ret.append(item)
return ret
self.foo = Foo()
self.fuz = Foo()
def test_defaultproperty(self):
assert self.foo.bar == set(), "foo.bar is a set"
assert isinstance(self.foo.bar, set), "foo.bar is a set"
def test_default_property_mutable(self):
s = self.foo.bar
s2 = self.fuz.bar
s.add(3)
s2.add(5)
assert self.foo.bar == set([3]), "can mutate reference"
assert self.fuz.bar == set([5]), "but not all references"
def test_default_is_iterable(self):
assert self.foo.baz() == [], "can iterate over self reference"
def test_add_ass_first_dereference_works(self):
self.foo.bar.add(3)
assert self.foo.bar == set([3]), "previous line throws exception on fail"
|
<commit_before><commit_msg>Remove the need to call super.__init__ by lazily instantiating instance variables.<commit_after>from nose.tools import with_setup, raises
from gevent_tools import util
from unittest import TestCase
class TestUtil(TestCase):
def setUp(self):
class Foo(object):
bar = util.defaultproperty(set)
def baz(self):
ret = []
for item in self.bar:
ret.append(item)
return ret
self.foo = Foo()
self.fuz = Foo()
def test_defaultproperty(self):
assert self.foo.bar == set(), "foo.bar is a set"
assert isinstance(self.foo.bar, set), "foo.bar is a set"
def test_default_property_mutable(self):
s = self.foo.bar
s2 = self.fuz.bar
s.add(3)
s2.add(5)
assert self.foo.bar == set([3]), "can mutate reference"
assert self.fuz.bar == set([5]), "but not all references"
def test_default_is_iterable(self):
assert self.foo.baz() == [], "can iterate over self reference"
def test_add_ass_first_dereference_works(self):
self.foo.bar.add(3)
assert self.foo.bar == set([3]), "previous line throws exception on fail"
|
|
56b6d98f0ad7cd9d074a9b372913dcbed693757b
|
scripts/migration/ensure_bookmark_uniqueness_legacy.py
|
scripts/migration/ensure_bookmark_uniqueness_legacy.py
|
import argparse
import logging
from modularodm import Q
from framework.auth.core import User
from framework.transactions.context import TokuTransaction
from scripts import utils as script_utils
from website.app import init_app
from website.project import Node
logger = logging.getLogger(__name__)
def get_targets():
logger.info('Acquiring targets...')
targets = [u for u in User.find() if Node.find(Q('is_bookmark_collection', 'eq', True) & Q('is_deleted', 'eq', False) & Q('contributors', 'eq', u._id)).count() > 1]
logger.info('Found {} target users.'.format(len(targets)))
return targets
def migrate():
targets = get_targets()
total = len(targets)
for i, user in enumerate(targets):
logger.info('({}/{}) Preparing to migrate User {}'.format(i + 1, total, user._id))
bookmarks = Node.find(Q('is_bookmark_collection', 'eq', True) & Q('contributors', 'eq', user._id))
if sum([bool(n.nodes) for n in bookmarks]) > 1:
raise Exception('Expected no users to have more than one bookmark with .nodes, {} violated'.format(user._id))
bookmark_to_keep = None
for n in bookmarks:
if n.nodes:
bookmark_to_keep = n
bookmark_to_keep = bookmark_to_keep or bookmarks[0]
logger.info('Marking Node {} as primary Bookmark Collection for User {}, preparing to delete others'.format(bookmark_to_keep._id, user._id))
for n in bookmarks:
if n._id != bookmark_to_keep._id:
n.is_deleted = True
n.save()
logger.info('Successfully migrated User {}'.format(user._id))
logger.info('Successfully migrated {} users'.format(total))
def main():
parser = argparse.ArgumentParser(
description='Ensures every confirmed user has only one bookmark collection.'
)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Run migration and roll back changes to db',
)
pargs = parser.parse_args()
if not pargs.dry_run:
script_utils.add_file_logger(logger, __file__)
init_app(set_backends=True, routes=False)
with TokuTransaction():
migrate()
if pargs.dry_run:
raise Exception('Dry Run -- Transaction aborted.')
if __name__ == '__main__':
main()
|
Add script from master to ensure bookmark uniqueness in modm
|
Add script from master to ensure bookmark uniqueness in modm
|
Python
|
apache-2.0
|
mattclark/osf.io,chrisseto/osf.io,crcresearch/osf.io,cslzchen/osf.io,erinspace/osf.io,binoculars/osf.io,leb2dg/osf.io,chennan47/osf.io,baylee-d/osf.io,laurenrevere/osf.io,laurenrevere/osf.io,aaxelb/osf.io,mattclark/osf.io,Nesiehr/osf.io,felliott/osf.io,Nesiehr/osf.io,felliott/osf.io,saradbowman/osf.io,cwisecarver/osf.io,cslzchen/osf.io,baylee-d/osf.io,HalcyonChimera/osf.io,cslzchen/osf.io,chennan47/osf.io,chrisseto/osf.io,mattclark/osf.io,hmoco/osf.io,caseyrollins/osf.io,TomBaxter/osf.io,Johnetordoff/osf.io,aaxelb/osf.io,adlius/osf.io,crcresearch/osf.io,brianjgeiger/osf.io,pattisdr/osf.io,leb2dg/osf.io,caneruguz/osf.io,Nesiehr/osf.io,aaxelb/osf.io,icereval/osf.io,Nesiehr/osf.io,HalcyonChimera/osf.io,icereval/osf.io,Johnetordoff/osf.io,HalcyonChimera/osf.io,brianjgeiger/osf.io,TomBaxter/osf.io,hmoco/osf.io,mfraezz/osf.io,adlius/osf.io,adlius/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,cwisecarver/osf.io,icereval/osf.io,CenterForOpenScience/osf.io,caseyrollins/osf.io,adlius/osf.io,chrisseto/osf.io,felliott/osf.io,crcresearch/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,leb2dg/osf.io,binoculars/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,laurenrevere/osf.io,caneruguz/osf.io,caneruguz/osf.io,saradbowman/osf.io,mfraezz/osf.io,erinspace/osf.io,pattisdr/osf.io,sloria/osf.io,erinspace/osf.io,mfraezz/osf.io,cwisecarver/osf.io,aaxelb/osf.io,mfraezz/osf.io,binoculars/osf.io,chennan47/osf.io,cwisecarver/osf.io,sloria/osf.io,leb2dg/osf.io,chrisseto/osf.io,Johnetordoff/osf.io,sloria/osf.io,pattisdr/osf.io,hmoco/osf.io,caseyrollins/osf.io,hmoco/osf.io,felliott/osf.io
|
Add script from master to ensure bookmark uniqueness in modm
|
import argparse
import logging
from modularodm import Q
from framework.auth.core import User
from framework.transactions.context import TokuTransaction
from scripts import utils as script_utils
from website.app import init_app
from website.project import Node
logger = logging.getLogger(__name__)
def get_targets():
logger.info('Acquiring targets...')
targets = [u for u in User.find() if Node.find(Q('is_bookmark_collection', 'eq', True) & Q('is_deleted', 'eq', False) & Q('contributors', 'eq', u._id)).count() > 1]
logger.info('Found {} target users.'.format(len(targets)))
return targets
def migrate():
targets = get_targets()
total = len(targets)
for i, user in enumerate(targets):
logger.info('({}/{}) Preparing to migrate User {}'.format(i + 1, total, user._id))
bookmarks = Node.find(Q('is_bookmark_collection', 'eq', True) & Q('contributors', 'eq', user._id))
if sum([bool(n.nodes) for n in bookmarks]) > 1:
raise Exception('Expected no users to have more than one bookmark with .nodes, {} violated'.format(user._id))
bookmark_to_keep = None
for n in bookmarks:
if n.nodes:
bookmark_to_keep = n
bookmark_to_keep = bookmark_to_keep or bookmarks[0]
logger.info('Marking Node {} as primary Bookmark Collection for User {}, preparing to delete others'.format(bookmark_to_keep._id, user._id))
for n in bookmarks:
if n._id != bookmark_to_keep._id:
n.is_deleted = True
n.save()
logger.info('Successfully migrated User {}'.format(user._id))
logger.info('Successfully migrated {} users'.format(total))
def main():
parser = argparse.ArgumentParser(
description='Ensures every confirmed user has only one bookmark collection.'
)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Run migration and roll back changes to db',
)
pargs = parser.parse_args()
if not pargs.dry_run:
script_utils.add_file_logger(logger, __file__)
init_app(set_backends=True, routes=False)
with TokuTransaction():
migrate()
if pargs.dry_run:
raise Exception('Dry Run -- Transaction aborted.')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script from master to ensure bookmark uniqueness in modm<commit_after>
|
import argparse
import logging
from modularodm import Q
from framework.auth.core import User
from framework.transactions.context import TokuTransaction
from scripts import utils as script_utils
from website.app import init_app
from website.project import Node
logger = logging.getLogger(__name__)
def get_targets():
logger.info('Acquiring targets...')
targets = [u for u in User.find() if Node.find(Q('is_bookmark_collection', 'eq', True) & Q('is_deleted', 'eq', False) & Q('contributors', 'eq', u._id)).count() > 1]
logger.info('Found {} target users.'.format(len(targets)))
return targets
def migrate():
targets = get_targets()
total = len(targets)
for i, user in enumerate(targets):
logger.info('({}/{}) Preparing to migrate User {}'.format(i + 1, total, user._id))
bookmarks = Node.find(Q('is_bookmark_collection', 'eq', True) & Q('contributors', 'eq', user._id))
if sum([bool(n.nodes) for n in bookmarks]) > 1:
raise Exception('Expected no users to have more than one bookmark with .nodes, {} violated'.format(user._id))
bookmark_to_keep = None
for n in bookmarks:
if n.nodes:
bookmark_to_keep = n
bookmark_to_keep = bookmark_to_keep or bookmarks[0]
logger.info('Marking Node {} as primary Bookmark Collection for User {}, preparing to delete others'.format(bookmark_to_keep._id, user._id))
for n in bookmarks:
if n._id != bookmark_to_keep._id:
n.is_deleted = True
n.save()
logger.info('Successfully migrated User {}'.format(user._id))
logger.info('Successfully migrated {} users'.format(total))
def main():
parser = argparse.ArgumentParser(
description='Ensures every confirmed user has only one bookmark collection.'
)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Run migration and roll back changes to db',
)
pargs = parser.parse_args()
if not pargs.dry_run:
script_utils.add_file_logger(logger, __file__)
init_app(set_backends=True, routes=False)
with TokuTransaction():
migrate()
if pargs.dry_run:
raise Exception('Dry Run -- Transaction aborted.')
if __name__ == '__main__':
main()
|
Add script from master to ensure bookmark uniqueness in modmimport argparse
import logging
from modularodm import Q
from framework.auth.core import User
from framework.transactions.context import TokuTransaction
from scripts import utils as script_utils
from website.app import init_app
from website.project import Node
logger = logging.getLogger(__name__)
def get_targets():
logger.info('Acquiring targets...')
targets = [u for u in User.find() if Node.find(Q('is_bookmark_collection', 'eq', True) & Q('is_deleted', 'eq', False) & Q('contributors', 'eq', u._id)).count() > 1]
logger.info('Found {} target users.'.format(len(targets)))
return targets
def migrate():
targets = get_targets()
total = len(targets)
for i, user in enumerate(targets):
logger.info('({}/{}) Preparing to migrate User {}'.format(i + 1, total, user._id))
bookmarks = Node.find(Q('is_bookmark_collection', 'eq', True) & Q('contributors', 'eq', user._id))
if sum([bool(n.nodes) for n in bookmarks]) > 1:
raise Exception('Expected no users to have more than one bookmark with .nodes, {} violated'.format(user._id))
bookmark_to_keep = None
for n in bookmarks:
if n.nodes:
bookmark_to_keep = n
bookmark_to_keep = bookmark_to_keep or bookmarks[0]
logger.info('Marking Node {} as primary Bookmark Collection for User {}, preparing to delete others'.format(bookmark_to_keep._id, user._id))
for n in bookmarks:
if n._id != bookmark_to_keep._id:
n.is_deleted = True
n.save()
logger.info('Successfully migrated User {}'.format(user._id))
logger.info('Successfully migrated {} users'.format(total))
def main():
parser = argparse.ArgumentParser(
description='Ensures every confirmed user has only one bookmark collection.'
)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Run migration and roll back changes to db',
)
pargs = parser.parse_args()
if not pargs.dry_run:
script_utils.add_file_logger(logger, __file__)
init_app(set_backends=True, routes=False)
with TokuTransaction():
migrate()
if pargs.dry_run:
raise Exception('Dry Run -- Transaction aborted.')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script from master to ensure bookmark uniqueness in modm<commit_after>import argparse
import logging
from modularodm import Q
from framework.auth.core import User
from framework.transactions.context import TokuTransaction
from scripts import utils as script_utils
from website.app import init_app
from website.project import Node
logger = logging.getLogger(__name__)
def get_targets():
logger.info('Acquiring targets...')
targets = [u for u in User.find() if Node.find(Q('is_bookmark_collection', 'eq', True) & Q('is_deleted', 'eq', False) & Q('contributors', 'eq', u._id)).count() > 1]
logger.info('Found {} target users.'.format(len(targets)))
return targets
def migrate():
targets = get_targets()
total = len(targets)
for i, user in enumerate(targets):
logger.info('({}/{}) Preparing to migrate User {}'.format(i + 1, total, user._id))
bookmarks = Node.find(Q('is_bookmark_collection', 'eq', True) & Q('contributors', 'eq', user._id))
if sum([bool(n.nodes) for n in bookmarks]) > 1:
raise Exception('Expected no users to have more than one bookmark with .nodes, {} violated'.format(user._id))
bookmark_to_keep = None
for n in bookmarks:
if n.nodes:
bookmark_to_keep = n
bookmark_to_keep = bookmark_to_keep or bookmarks[0]
logger.info('Marking Node {} as primary Bookmark Collection for User {}, preparing to delete others'.format(bookmark_to_keep._id, user._id))
for n in bookmarks:
if n._id != bookmark_to_keep._id:
n.is_deleted = True
n.save()
logger.info('Successfully migrated User {}'.format(user._id))
logger.info('Successfully migrated {} users'.format(total))
def main():
parser = argparse.ArgumentParser(
description='Ensures every confirmed user has only one bookmark collection.'
)
parser.add_argument(
'--dry',
action='store_true',
dest='dry_run',
help='Run migration and roll back changes to db',
)
pargs = parser.parse_args()
if not pargs.dry_run:
script_utils.add_file_logger(logger, __file__)
init_app(set_backends=True, routes=False)
with TokuTransaction():
migrate()
if pargs.dry_run:
raise Exception('Dry Run -- Transaction aborted.')
if __name__ == '__main__':
main()
|
|
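A natural follow-up to the migration above is a post-run check that the invariant it enforces now holds. A minimal sketch, reusing only the modm query style already shown in that script; this is an illustrative addition, not part of the original commit:
from modularodm import Q
from framework.auth.core import User
from website.project import Node

def verify_single_bookmark_collection():
    # After the migration, no user should still have more than one
    # active (non-deleted) bookmark collection.
    offenders = [
        user._id for user in User.find()
        if Node.find(Q('is_bookmark_collection', 'eq', True) &
                     Q('is_deleted', 'eq', False) &
                     Q('contributors', 'eq', user._id)).count() > 1
    ]
    assert not offenders, 'Users with multiple bookmark collections: {}'.format(offenders)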
afb09aa0c95c21f286ce6cecbb49e06e9aed4bbf
|
benchmark/profile_efm32_fast.py
|
benchmark/profile_efm32_fast.py
|
# Boards to test.
BOARDS = [
"stk3200",
"slstk3401a",
"stk3600",
"stk3700",
"stk3800",
"slwstk6220a",
]
# Applications to compile (relative to RIOT-OS root).
APPLICATIONS = [
"examples/default",
"examples/hello-world",
"tests/minimal",
"tests/periph_adc",
"tests/periph_cpuid",
"tests/periph_dac",
"tests/periph_gpio",
"tests/periph_i2c",
"tests/periph_pwm",
"tests/periph_rtc",
"tests/periph_rtt",
"tests/periph_spi",
"tests/periph_timer",
"tests/periph_uart",
"tests/shell"
]
# Compile settings (e.g. debug settings).
SETTINGS = [
[],
]
|
Add quick benchmark profile for EFM32 targets.
|
Add quick benchmark profile for EFM32 targets.
|
Python
|
mit
|
basilfx/EFM2Riot,basilfx/EFM2Riot,basilfx/EFM2Riot,basilfx/EFM2Riot,basilfx/EFM2Riot
|
Add quick benchmark profile for EFM32 targets.
|
# Boards to test.
BOARDS = [
"stk3200",
"slstk3401a",
"stk3600",
"stk3700",
"stk3800",
"slwstk6220a",
]
# Applications to compile (relative to RIOT-OS root).
APPLICATIONS = [
"examples/default",
"examples/hello-world",
"tests/minimal",
"tests/periph_adc",
"tests/periph_cpuid",
"tests/periph_dac",
"tests/periph_gpio",
"tests/periph_i2c",
"tests/periph_pwm",
"tests/periph_rtc",
"tests/periph_rtt",
"tests/periph_spi",
"tests/periph_timer",
"tests/periph_uart",
"tests/shell"
]
# Compile settings (e.g. debug settings).
SETTINGS = [
[],
]
|
<commit_before><commit_msg>Add quick benchmark profile for EFM32 targets.<commit_after>
|
# Boards to test.
BOARDS = [
"stk3200",
"slstk3401a",
"stk3600",
"stk3700",
"stk3800",
"slwstk6220a",
]
# Applications to compile (relative to RIOT-OS root).
APPLICATIONS = [
"examples/default",
"examples/hello-world",
"tests/minimal",
"tests/periph_adc",
"tests/periph_cpuid",
"tests/periph_dac",
"tests/periph_gpio",
"tests/periph_i2c",
"tests/periph_pwm",
"tests/periph_rtc",
"tests/periph_rtt",
"tests/periph_spi",
"tests/periph_timer",
"tests/periph_uart",
"tests/shell"
]
# Compile settings (e.g. debug settings).
SETTINGS = [
[],
]
|
Add quick benchmark profile for EFM32 targets.# Boards to test.
BOARDS = [
"stk3200",
"slstk3401a",
"stk3600",
"stk3700",
"stk3800",
"slwstk6220a",
]
# Applications to compile (relative to RIOT-OS root).
APPLICATIONS = [
"examples/default",
"examples/hello-world",
"tests/minimal",
"tests/periph_adc",
"tests/periph_cpuid",
"tests/periph_dac",
"tests/periph_gpio",
"tests/periph_i2c",
"tests/periph_pwm",
"tests/periph_rtc",
"tests/periph_rtt",
"tests/periph_spi",
"tests/periph_timer",
"tests/periph_uart",
"tests/shell"
]
# Compile settings (e.g. debug settings).
SETTINGS = [
[],
]
|
<commit_before><commit_msg>Add quick benchmark profile for EFM32 targets.<commit_after># Boards to test.
BOARDS = [
"stk3200",
"slstk3401a",
"stk3600",
"stk3700",
"stk3800",
"slwstk6220a",
]
# Applications to compile (relative to RIOT-OS root).
APPLICATIONS = [
"examples/default",
"examples/hello-world",
"tests/minimal",
"tests/periph_adc",
"tests/periph_cpuid",
"tests/periph_dac",
"tests/periph_gpio",
"tests/periph_i2c",
"tests/periph_pwm",
"tests/periph_rtc",
"tests/periph_rtt",
"tests/periph_spi",
"tests/periph_timer",
"tests/periph_uart",
"tests/shell"
]
# Compile settings (e.g. debug settings).
SETTINGS = [
[],
]
|
|
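The three lists above only describe a build matrix; a benchmark runner still has to expand it. A minimal sketch of how such a runner might iterate the matrix, assuming the usual RIOT-OS convention of building an application directory with make BOARD=<board>; the exact make invocation is an assumption, not taken from this repository:
import itertools
import subprocess

def build_matrix(riot_root, boards, applications, settings):
    # Expand BOARDS x APPLICATIONS x SETTINGS and build every combination.
    for board, app, extra in itertools.product(boards, applications, settings):
        cmd = ['make', '-C', '{}/{}'.format(riot_root, app),
               'BOARD={}'.format(board)] + list(extra)
        subprocess.check_call(cmd)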
6a5c2714a6d120c75ed65ae8d8d686ae692f7cb7
|
gnocchi/indexer/alembic/versions/ffc7bbeec0b0_migrate_legacy_resources_to_db2.py
|
gnocchi/indexer/alembic/versions/ffc7bbeec0b0_migrate_legacy_resources_to_db2.py
|
# Copyright 2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""migrate_legacy_resources_to_db2
Revision ID: ffc7bbeec0b0
Revises: 8f376189b9eb
Create Date: 2016-04-14 15:57:13.072128
"""
import json
from alembic import op
import sqlalchemy as sa
from gnocchi.indexer import sqlalchemy_legacy_resources as legacy
# revision identifiers, used by Alembic.
revision = 'ffc7bbeec0b0'
down_revision = '8f376189b9eb'
branch_labels = None
depends_on = None
def upgrade():
bind = op.get_bind()
resource_type = sa.Table(
'resource_type', sa.MetaData(),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('tablename', sa.String(18), nullable=False),
sa.Column('attributes', sa.Text, nullable=False)
)
resource_type_names = [rt.name for rt in
list(bind.execute(resource_type.select()))]
for name, attributes in legacy.ceilometer_resources.items():
if name in resource_type_names:
continue
tablename = legacy.ceilometer_tablenames.get(name, name)
text_attributes = json.dumps(attributes)
op.execute(resource_type.insert().values({
resource_type.c.attributes: text_attributes,
resource_type.c.name: name,
resource_type.c.tablename: tablename,
}))
|
Fix broken ceilometer resources migration script
|
Fix broken ceilometer resources migration script
Migration script 828c16f70cce has a bug, the following command doesn't
insert anything:
op.execute(resource_type.insert().from_select(
['name'], sa.select([resource.c.type]).distinct()))
So all migration scripts that manipulate resource_type afterwards do not
upgrade the resource_type for legacy ceilometer resources.
This change fixes that.
Change-Id: I5f8c47721c313eea3936132f3140793ec80ef3b1
|
Python
|
apache-2.0
|
leandroreox/gnocchi,gnocchixyz/gnocchi,gnocchixyz/gnocchi,sileht/gnocchi,sileht/gnocchi,leandroreox/gnocchi
|
Fix broken ceilometer resources migration script
Migration script 828c16f70cce has a bug, the following command doesn't
insert anything:
op.execute(resource_type.insert().from_select(
['name'], sa.select([resource.c.type]).distinct()))
So all migration scripts that manipulate resource_type afterwards do not
upgrade the resource_type for legacy ceilometer resources.
This change fixes that.
Change-Id: I5f8c47721c313eea3936132f3140793ec80ef3b1
|
# Copyright 2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""migrate_legacy_resources_to_db2
Revision ID: ffc7bbeec0b0
Revises: 8f376189b9eb
Create Date: 2016-04-14 15:57:13.072128
"""
import json
from alembic import op
import sqlalchemy as sa
from gnocchi.indexer import sqlalchemy_legacy_resources as legacy
# revision identifiers, used by Alembic.
revision = 'ffc7bbeec0b0'
down_revision = '8f376189b9eb'
branch_labels = None
depends_on = None
def upgrade():
bind = op.get_bind()
resource_type = sa.Table(
'resource_type', sa.MetaData(),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('tablename', sa.String(18), nullable=False),
sa.Column('attributes', sa.Text, nullable=False)
)
resource_type_names = [rt.name for rt in
list(bind.execute(resource_type.select()))]
for name, attributes in legacy.ceilometer_resources.items():
if name in resource_type_names:
continue
tablename = legacy.ceilometer_tablenames.get(name, name)
text_attributes = json.dumps(attributes)
op.execute(resource_type.insert().values({
resource_type.c.attributes: text_attributes,
resource_type.c.name: name,
resource_type.c.tablename: tablename,
}))
|
<commit_before><commit_msg>Fix broken ceilometer resources migration script
Migration script 828c16f70cce has a bug, the following command doesn't
insert anything:
op.execute(resource_type.insert().from_select(
['name'], sa.select([resource.c.type]).distinct()))
So all migration scripts that manipulate resource_type afterwards do not
upgrade the resource_type for legacy ceilometer resources.
This change fixes that.
Change-Id: I5f8c47721c313eea3936132f3140793ec80ef3b1<commit_after>
|
# Copyright 2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""migrate_legacy_resources_to_db2
Revision ID: ffc7bbeec0b0
Revises: 8f376189b9eb
Create Date: 2016-04-14 15:57:13.072128
"""
import json
from alembic import op
import sqlalchemy as sa
from gnocchi.indexer import sqlalchemy_legacy_resources as legacy
# revision identifiers, used by Alembic.
revision = 'ffc7bbeec0b0'
down_revision = '8f376189b9eb'
branch_labels = None
depends_on = None
def upgrade():
bind = op.get_bind()
resource_type = sa.Table(
'resource_type', sa.MetaData(),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('tablename', sa.String(18), nullable=False),
sa.Column('attributes', sa.Text, nullable=False)
)
resource_type_names = [rt.name for rt in
list(bind.execute(resource_type.select()))]
for name, attributes in legacy.ceilometer_resources.items():
if name in resource_type_names:
continue
tablename = legacy.ceilometer_tablenames.get(name, name)
text_attributes = json.dumps(attributes)
op.execute(resource_type.insert().values({
resource_type.c.attributes: text_attributes,
resource_type.c.name: name,
resource_type.c.tablename: tablename,
}))
|
Fix broken ceilometer resources migration script
Migration script 828c16f70cce has a bug, the following command doesn't
insert anything:
op.execute(resource_type.insert().from_select(
['name'], sa.select([resource.c.type]).distinct()))
So all migration scripts that manipulate resource_type afterwards do not
upgrade the resource_type for legacy ceilometer resources.
This change fixes that.
Change-Id: I5f8c47721c313eea3936132f3140793ec80ef3b1# Copyright 2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""migrate_legacy_resources_to_db2
Revision ID: ffc7bbeec0b0
Revises: 8f376189b9eb
Create Date: 2016-04-14 15:57:13.072128
"""
import json
from alembic import op
import sqlalchemy as sa
from gnocchi.indexer import sqlalchemy_legacy_resources as legacy
# revision identifiers, used by Alembic.
revision = 'ffc7bbeec0b0'
down_revision = '8f376189b9eb'
branch_labels = None
depends_on = None
def upgrade():
bind = op.get_bind()
resource_type = sa.Table(
'resource_type', sa.MetaData(),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('tablename', sa.String(18), nullable=False),
sa.Column('attributes', sa.Text, nullable=False)
)
resource_type_names = [rt.name for rt in
list(bind.execute(resource_type.select()))]
for name, attributes in legacy.ceilometer_resources.items():
if name in resource_type_names:
continue
tablename = legacy.ceilometer_tablenames.get(name, name)
text_attributes = json.dumps(attributes)
op.execute(resource_type.insert().values({
resource_type.c.attributes: text_attributes,
resource_type.c.name: name,
resource_type.c.tablename: tablename,
}))
|
<commit_before><commit_msg>Fix broken ceilometer resources migration script
Migration script 828c16f70cce has a bug, the following command doesn't
insert anything:
op.execute(resource_type.insert().from_select(
['name'], sa.select([resource.c.type]).distinct()))
So all migration scripts that manipulate resource_type afterwards do not
upgrade the resource_type for legacy ceilometer resources.
This change fixes that.
Change-Id: I5f8c47721c313eea3936132f3140793ec80ef3b1<commit_after># Copyright 2016 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""migrate_legacy_resources_to_db2
Revision ID: ffc7bbeec0b0
Revises: 8f376189b9eb
Create Date: 2016-04-14 15:57:13.072128
"""
import json
from alembic import op
import sqlalchemy as sa
from gnocchi.indexer import sqlalchemy_legacy_resources as legacy
# revision identifiers, used by Alembic.
revision = 'ffc7bbeec0b0'
down_revision = '8f376189b9eb'
branch_labels = None
depends_on = None
def upgrade():
bind = op.get_bind()
resource_type = sa.Table(
'resource_type', sa.MetaData(),
sa.Column('name', sa.String(255), nullable=False),
sa.Column('tablename', sa.String(18), nullable=False),
sa.Column('attributes', sa.Text, nullable=False)
)
resource_type_names = [rt.name for rt in
list(bind.execute(resource_type.select()))]
for name, attributes in legacy.ceilometer_resources.items():
if name in resource_type_names:
continue
tablename = legacy.ceilometer_tablenames.get(name, name)
text_attributes = json.dumps(attributes)
op.execute(resource_type.insert().values({
resource_type.c.attributes: text_attributes,
resource_type.c.name: name,
resource_type.c.tablename: tablename,
}))
|
|
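The commit message above pins the problem on the earlier insert-from-select statement, and the fix switches to explicit per-row inserts. A minimal sketch contrasting the two shapes with standalone SQLAlchemy table objects; this is an illustration of the pattern, not the original 828c16f70cce code:
import sqlalchemy as sa

metadata = sa.MetaData()
resource = sa.Table('resource', metadata,
                    sa.Column('type', sa.String(255), nullable=False))
resource_type = sa.Table('resource_type', metadata,
                         sa.Column('name', sa.String(255), nullable=False))

# Shape that reportedly inserted nothing: a single INSERT ... SELECT statement.
broken = resource_type.insert().from_select(
    ['name'], sa.select([resource.c.type]).distinct())

# Shape used by the fix: read what already exists, then insert each missing
# row with explicit values, which also makes the migration idempotent.
def rows_to_insert(existing_names, wanted_names):
    return [{'name': name} for name in wanted_names
            if name not in existing_names]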
64d6e641351580a88663b3a31b965e2a31910c57
|
crust/api.py
|
crust/api.py
|
from . import requests
class Api(object):
def __init__(self, url, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.url = url
self.session = session
|
Create the starts of an Api object
|
Create the starts of an Api object
|
Python
|
bsd-2-clause
|
dstufft/crust
|
Create the starts of an Api object
|
from . import requests
class Api(object):
def __init__(self, url, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.url = url
self.session = session
|
<commit_before><commit_msg>Create the starts of an Api object<commit_after>
|
from . import requests
class Api(object):
def __init__(self, url, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.url = url
self.session = session
|
Create the starts of an Api objectfrom . import requests
class Api(object):
def __init__(self, url, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.url = url
self.session = session
|
<commit_before><commit_msg>Create the starts of an Api object<commit_after>from . import requests
class Api(object):
def __init__(self, url, session=None, *args, **kwargs):
super(Api, self).__init__(*args, **kwargs)
if session is None:
session = requests.session()
self.url = url
self.session = session
|
|
11f29a3c01d3e9174ce977dd4b6e735e6ae5206f
|
web/premises/migrations/0008_auto_20141023_0133.py
|
web/premises/migrations/0008_auto_20141023_0133.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('premises', '0007_auto_20141020_2215'),
]
operations = [
migrations.AddField(
model_name='contention',
name='date_modification',
field=models.DateTimeField(default=datetime.datetime(2014, 10, 23, 1, 33, 8, 359184), auto_now=True, auto_now_add=True),
preserve_default=False,
),
migrations.AlterField(
model_name='premise',
name='is_approved',
field=models.BooleanField(default=False, verbose_name=b'Yay\xc4\xb1nla'),
),
migrations.AlterField(
model_name='premise',
name='parent',
field=models.ForeignKey(related_name=b'children', blank=True, to='premises.Premise', help_text=b'\xc3\x96nermenin \xc3\xb6nc\xc3\xbcl\xc3\xbc. E\xc4\x9fer bo\xc5\x9f b\xc4\xb1rak\xc4\xb1l\xc4\xb1rsaana arg\xc3\xbcman\xc4\xb1n bir \xc3\xb6nermesi olur.', null=True, verbose_name=b'\xc3\x96nc\xc3\xbcl\xc3\xbc'),
),
]
|
Add migration file for modification date
|
Add migration file for modification date
|
Python
|
mit
|
Arthur2e5/arguman.org,bahattincinic/arguman.org,Arthur2e5/arguman.org,donkawechico/arguman.org,arguman/arguman.org,donkawechico/arguman.org,bahattincinic/arguman.org,bahattincinic/arguman.org,Arthur2e5/arguman.org,beratdogan/arguman.org,taiansu/arguman.org,donkawechico/arguman.org,arguman/arguman.org,omeripek/arguman.org,taiansu/arguman.org,omeripek/arguman.org,bahattincinic/arguman.org,arguman/arguman.org,beratdogan/arguman.org,taiansu/arguman.org,arguman/arguman.org,taiansu/arguman.org,omeripek/arguman.org,donkawechico/arguman.org,Arthur2e5/arguman.org
|
Add migration file for modification date
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('premises', '0007_auto_20141020_2215'),
]
operations = [
migrations.AddField(
model_name='contention',
name='date_modification',
field=models.DateTimeField(default=datetime.datetime(2014, 10, 23, 1, 33, 8, 359184), auto_now=True, auto_now_add=True),
preserve_default=False,
),
migrations.AlterField(
model_name='premise',
name='is_approved',
field=models.BooleanField(default=False, verbose_name=b'Yay\xc4\xb1nla'),
),
migrations.AlterField(
model_name='premise',
name='parent',
field=models.ForeignKey(related_name=b'children', blank=True, to='premises.Premise', help_text=b'\xc3\x96nermenin \xc3\xb6nc\xc3\xbcl\xc3\xbc. E\xc4\x9fer bo\xc5\x9f b\xc4\xb1rak\xc4\xb1l\xc4\xb1rsaana arg\xc3\xbcman\xc4\xb1n bir \xc3\xb6nermesi olur.', null=True, verbose_name=b'\xc3\x96nc\xc3\xbcl\xc3\xbc'),
),
]
|
<commit_before><commit_msg>Add migration file for modification date<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('premises', '0007_auto_20141020_2215'),
]
operations = [
migrations.AddField(
model_name='contention',
name='date_modification',
field=models.DateTimeField(default=datetime.datetime(2014, 10, 23, 1, 33, 8, 359184), auto_now=True, auto_now_add=True),
preserve_default=False,
),
migrations.AlterField(
model_name='premise',
name='is_approved',
field=models.BooleanField(default=False, verbose_name=b'Yay\xc4\xb1nla'),
),
migrations.AlterField(
model_name='premise',
name='parent',
field=models.ForeignKey(related_name=b'children', blank=True, to='premises.Premise', help_text=b'\xc3\x96nermenin \xc3\xb6nc\xc3\xbcl\xc3\xbc. E\xc4\x9fer bo\xc5\x9f b\xc4\xb1rak\xc4\xb1l\xc4\xb1rsaana arg\xc3\xbcman\xc4\xb1n bir \xc3\xb6nermesi olur.', null=True, verbose_name=b'\xc3\x96nc\xc3\xbcl\xc3\xbc'),
),
]
|
Add migration file for modification date# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('premises', '0007_auto_20141020_2215'),
]
operations = [
migrations.AddField(
model_name='contention',
name='date_modification',
field=models.DateTimeField(default=datetime.datetime(2014, 10, 23, 1, 33, 8, 359184), auto_now=True, auto_now_add=True),
preserve_default=False,
),
migrations.AlterField(
model_name='premise',
name='is_approved',
field=models.BooleanField(default=False, verbose_name=b'Yay\xc4\xb1nla'),
),
migrations.AlterField(
model_name='premise',
name='parent',
field=models.ForeignKey(related_name=b'children', blank=True, to='premises.Premise', help_text=b'\xc3\x96nermenin \xc3\xb6nc\xc3\xbcl\xc3\xbc. E\xc4\x9fer bo\xc5\x9f b\xc4\xb1rak\xc4\xb1l\xc4\xb1rsaana arg\xc3\xbcman\xc4\xb1n bir \xc3\xb6nermesi olur.', null=True, verbose_name=b'\xc3\x96nc\xc3\xbcl\xc3\xbc'),
),
]
|
<commit_before><commit_msg>Add migration file for modification date<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import datetime
class Migration(migrations.Migration):
dependencies = [
('premises', '0007_auto_20141020_2215'),
]
operations = [
migrations.AddField(
model_name='contention',
name='date_modification',
field=models.DateTimeField(default=datetime.datetime(2014, 10, 23, 1, 33, 8, 359184), auto_now=True, auto_now_add=True),
preserve_default=False,
),
migrations.AlterField(
model_name='premise',
name='is_approved',
field=models.BooleanField(default=False, verbose_name=b'Yay\xc4\xb1nla'),
),
migrations.AlterField(
model_name='premise',
name='parent',
field=models.ForeignKey(related_name=b'children', blank=True, to='premises.Premise', help_text=b'\xc3\x96nermenin \xc3\xb6nc\xc3\xbcl\xc3\xbc. E\xc4\x9fer bo\xc5\x9f b\xc4\xb1rak\xc4\xb1l\xc4\xb1rsaana arg\xc3\xbcman\xc4\xb1n bir \xc3\xb6nermesi olur.', null=True, verbose_name=b'\xc3\x96nc\xc3\xbcl\xc3\xbc'),
),
]
|
|
f0d32fbd369be3c0109059cdf2cdb8ca6291320f
|
pull.py
|
pull.py
|
import gdata.youtube
import gdata.youtube.service
yt_service = gdata.youtube.service.YouTubeService()
yt_service.ssl = True
baseURL = "http://gdata.youtube.com/feeds/api/playlists/"
list_ids = ['RDrVqAdIMQZlk']
videos = []
#iterates through all playlist ids
for pid in list_ids:
#fetches the playlist
videoFeed = yt_service.GetYouTubePlaylistVideoFeed(playlist_id = pid)
#iterates through each video
for video in videoFeed.entry:
if video.link:
#gets http link and removes gdata tag
videos.append(video.link[0].href.replace("&feature=youtube_gdata", ""))
#order not preserved but ensures a video will only be downloaded one time
videos = list(set(videos))
print videos
|
Set up to take a list of playlist ids and return all urls
|
Set up to take a list of playlist ids and return all urls
|
Python
|
mit
|
dgriff03/playlist_puller
|
Set up to take a list of playlist ids and return all urls
|
import gdata.youtube
import gdata.youtube.service
yt_service = gdata.youtube.service.YouTubeService()
yt_service.ssl = True
baseURL = "http://gdata.youtube.com/feeds/api/playlists/"
list_ids = ['RDrVqAdIMQZlk']
videos = []
#iterates through all playlist ids
for pid in list_ids:
#fetches the playlist
videoFeed = yt_service.GetYouTubePlaylistVideoFeed(playlist_id = pid)
#iterates through each video
for video in videoFeed.entry:
if video.link:
#gets http link and removes gdata tag
videos.append(video.link[0].href.replace("&feature=youtube_gdata", ""))
#order not preserved but ensures a video will only be downloaded one time
videos = list(set(videos))
print videos
|
<commit_before><commit_msg>Set up to take a list of playlist ids and return all urls<commit_after>
|
import gdata.youtube
import gdata.youtube.service
yt_service = gdata.youtube.service.YouTubeService()
yt_service.ssl = True
baseURL = "http://gdata.youtube.com/feeds/api/playlists/"
list_ids = ['RDrVqAdIMQZlk']
videos = []
#iterates through all playlist ids
for pid in list_ids:
#fetches the playlist
videoFeed = yt_service.GetYouTubePlaylistVideoFeed(playlist_id = pid)
#iterates through each video
for video in videoFeed.entry:
if video.link:
#gets http link and removes gdata tag
videos.append(video.link[0].href.replace("&feature=youtube_gdata", ""))
#order not preserved but ensures a video will only be downloaded one time
videos = list(set(videos))
print videos
|
Set up to take a list of playlist ids and return all urlsimport gdata.youtube
import gdata.youtube.service
yt_service = gdata.youtube.service.YouTubeService()
yt_service.ssl = True
baseURL = "http://gdata.youtube.com/feeds/api/playlists/"
list_ids = ['RDrVqAdIMQZlk']
videos = []
#iterates through all playlist ids
for pid in list_ids:
#fetches the playlist
videoFeed = yt_service.GetYouTubePlaylistVideoFeed(playlist_id = pid)
#iterates through each video
for video in videoFeed.entry:
if video.link:
#gets http link and removes gdata tag
videos.append(video.link[0].href.replace("&feature=youtube_gdata", ""))
#order not preserved but ensures a video will only be downloaded one time
videos = list(set(videos))
print videos
|
<commit_before><commit_msg>Set up to take a list of playlist ids and return all urls<commit_after>import gdata.youtube
import gdata.youtube.service
yt_service = gdata.youtube.service.YouTubeService()
yt_service.ssl = True
baseURL = "http://gdata.youtube.com/feeds/api/playlists/"
list_ids = ['RDrVqAdIMQZlk']
videos = []
#iterates through all playlist ids
for pid in list_ids:
#fetches the playlist
videoFeed = yt_service.GetYouTubePlaylistVideoFeed(playlist_id = pid)
#iterates through each video
for video in videoFeed.entry:
if video.link:
#gets http link and removes gdata tag
videos.append(video.link[0].href.replace("&feature=youtube_gdata", ""))
#order not preserved but ensures a video will only be downloaded one time
videos = list(set(videos))
print videos
|
|
4cab8d8de1d72e662bb1116773b4196ffb44dac5
|
sample-code/examples/python/android_web_view.py
|
sample-code/examples/python/android_web_view.py
|
import os
import glob
import unittest
from time import sleep
from selenium import webdriver
class TestAndroidWebView(unittest.TestCase):
def setUp(self):
app = os.path.abspath(
glob.glob(os.path.join(os.path.dirname(__file__),
'../../apps/WebViewDemo/target')+'/*.apk')[0])
desired_caps = {
'device': 'selendroid',
'app': app,
'browserName':"native-android-driver",
'app-package': 'org.openqa.selendroid.testapp',
'app-activity': 'HomeScreenActivity'
}
self.driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
def test(self):
button = self.driver.find_element_by_name('buttonStartWebviewCD')
button.click()
self.driver.switch_to_window('WEBVIEW')
input_field = self.driver.find_element_by_id('name_input')
input_field.send_keys('Mathieu')
input_field.submit()
def tearDown(self):
self.driver.quit()
if __name__ == '__main__':
unittest.main()
|
Add new test for android web view.
|
Add new test for android web view.
|
Python
|
apache-2.0
|
appium/appium,appium/appium,appium/appium,appium/appium,Sw0rdstream/appium,appium/appium,appium/appium
|
Add new test for android web view.
|
import os
import glob
import unittest
from time import sleep
from selenium import webdriver
class TestAndroidWebView(unittest.TestCase):
def setUp(self):
app = os.path.abspath(
glob.glob(os.path.join(os.path.dirname(__file__),
'../../apps/WebViewDemo/target')+'/*.apk')[0])
desired_caps = {
'device': 'selendroid',
'app': app,
'browserName':"native-android-driver",
'app-package': 'org.openqa.selendroid.testapp',
'app-activity': 'HomeScreenActivity'
}
self.driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
def test(self):
button = self.driver.find_element_by_name('buttonStartWebviewCD')
button.click()
self.driver.switch_to_window('WEBVIEW')
input_field = self.driver.find_element_by_id('name_input')
input_field.send_keys('Mathieu')
input_field.submit()
def tearDown(self):
self.driver.quit()
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add new test for android web view.<commit_after>
|
import os
import glob
import unittest
from time import sleep
from selenium import webdriver
class TestAndroidWebView(unittest.TestCase):
def setUp(self):
app = os.path.abspath(
glob.glob(os.path.join(os.path.dirname(__file__),
'../../apps/WebViewDemo/target')+'/*.apk')[0])
desired_caps = {
'device': 'selendroid',
'app': app,
'browserName':"native-android-driver",
'app-package': 'org.openqa.selendroid.testapp',
'app-activity': 'HomeScreenActivity'
}
self.driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
def test(self):
button = self.driver.find_element_by_name('buttonStartWebviewCD')
button.click()
self.driver.switch_to_window('WEBVIEW')
input_field = self.driver.find_element_by_id('name_input')
input_field.send_keys('Mathieu')
input_field.submit()
def tearDown(self):
self.driver.quit()
if __name__ == '__main__':
unittest.main()
|
Add new test for android web view.import os
import glob
import unittest
from time import sleep
from selenium import webdriver
class TestAndroidWebView(unittest.TestCase):
def setUp(self):
app = os.path.abspath(
glob.glob(os.path.join(os.path.dirname(__file__),
'../../apps/WebViewDemo/target')+'/*.apk')[0])
desired_caps = {
'device': 'selendroid',
'app': app,
'browserName':"native-android-driver",
'app-package': 'org.openqa.selendroid.testapp',
'app-activity': 'HomeScreenActivity'
}
self.driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
def test(self):
button = self.driver.find_element_by_name('buttonStartWebviewCD')
button.click()
self.driver.switch_to_window('WEBVIEW')
input_field = self.driver.find_element_by_id('name_input')
input_field.send_keys('Mathieu')
input_field.submit()
def tearDown(self):
self.driver.quit()
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add new test for android web view.<commit_after>import os
import glob
import unittest
from time import sleep
from selenium import webdriver
class TestAndroidWebView(unittest.TestCase):
def setUp(self):
app = os.path.abspath(
glob.glob(os.path.join(os.path.dirname(__file__),
'../../apps/WebViewDemo/target')+'/*.apk')[0])
desired_caps = {
'device': 'selendroid',
'app': app,
'browserName':"native-android-driver",
'app-package': 'org.openqa.selendroid.testapp',
'app-activity': 'HomeScreenActivity'
}
self.driver = webdriver.Remote('http://localhost:4723/wd/hub', desired_caps)
def test(self):
button = self.driver.find_element_by_name('buttonStartWebviewCD')
button.click()
self.driver.switch_to_window('WEBVIEW')
input_field = self.driver.find_element_by_id('name_input')
input_field.send_keys('Mathieu')
input_field.submit()
def tearDown(self):
self.driver.quit()
if __name__ == '__main__':
unittest.main()
|
|
48de6e68b6c869f9249388f4c4a15eb29dcb1a47
|
scripts/migration/repoint_bad_googledoc_sfns.py
|
scripts/migration/repoint_bad_googledoc_sfns.py
|
# -*- coding: utf-8 -*-
"""Migrates GoogleDrive files that have unescaped paths and have a counterpart StoredFileNode. This repoints
the Guid for the unescaped StoredFileNode to the correct StoredFileNode.
This is a one-off script, run as a prerequisite to scripts.migration.migrate_googledoc_paths.
"""
import sys
import logging
from website.app import init_app
from framework.transactions.context import TokuTransaction
from scripts import utils as script_utils
from website.models import Guid, StoredFileNode
logger = logging.getLogger(__name__)
targets = [
{'guid': 'zcjr2', 'good': u'56a42d8f594d900182308a09', 'bad': '56a7cfc49ad5a1017af77922'},
{'guid': 'nv3xr', 'good': u'57347795594d9000492aaa9a', 'bad': '5734e7d99ad5a101fa57ce7d'},
{'guid': 'm5nxj', 'good': u'58089970594d9001f1622e35', 'bad': '58452885594d900046bac4db'},
]
def migrate():
for target in targets:
guid = Guid.load(target['guid'])
good_sfn = StoredFileNode.load(target['good'])
bad_sfn = StoredFileNode.load(target['bad'])
logger.info('Repointing Guid {} referent to StoredFileNode {}'.format(target['guid'], target['good']))
guid.referent = good_sfn
guid.save()
logger.info('Removing StoredFileNode {}'.format(target['bad']))
StoredFileNode.remove_one(bad_sfn)
def main():
dry = '--dry' in sys.argv
init_app(set_backends=True, routes=False)
if not dry:
script_utils.add_file_logger(logger, __file__)
with TokuTransaction():
migrate()
if dry:
raise RuntimeError('Dry Run -- Transaction rolled back')
if __name__ == '__main__':
main()
|
Add prerequisite script for gdrive migration
|
Add prerequisite script for gdrive migration
|
Python
|
apache-2.0
|
sloria/osf.io,caseyrollins/osf.io,mfraezz/osf.io,icereval/osf.io,chrisseto/osf.io,adlius/osf.io,TomBaxter/osf.io,chennan47/osf.io,acshi/osf.io,erinspace/osf.io,HalcyonChimera/osf.io,chennan47/osf.io,pattisdr/osf.io,mfraezz/osf.io,alexschiller/osf.io,sloria/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,saradbowman/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,crcresearch/osf.io,cslzchen/osf.io,alexschiller/osf.io,pattisdr/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,pattisdr/osf.io,icereval/osf.io,binoculars/osf.io,chrisseto/osf.io,crcresearch/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,felliott/osf.io,Johnetordoff/osf.io,baylee-d/osf.io,acshi/osf.io,crcresearch/osf.io,caneruguz/osf.io,hmoco/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,icereval/osf.io,aaxelb/osf.io,mluo613/osf.io,Nesiehr/osf.io,CenterForOpenScience/osf.io,laurenrevere/osf.io,leb2dg/osf.io,cwisecarver/osf.io,mluo613/osf.io,alexschiller/osf.io,felliott/osf.io,aaxelb/osf.io,sloria/osf.io,hmoco/osf.io,mluo613/osf.io,caneruguz/osf.io,laurenrevere/osf.io,saradbowman/osf.io,cwisecarver/osf.io,acshi/osf.io,binoculars/osf.io,alexschiller/osf.io,hmoco/osf.io,baylee-d/osf.io,mattclark/osf.io,Nesiehr/osf.io,mluo613/osf.io,monikagrabowska/osf.io,monikagrabowska/osf.io,leb2dg/osf.io,chrisseto/osf.io,adlius/osf.io,mattclark/osf.io,monikagrabowska/osf.io,felliott/osf.io,acshi/osf.io,mfraezz/osf.io,binoculars/osf.io,alexschiller/osf.io,adlius/osf.io,brianjgeiger/osf.io,erinspace/osf.io,brianjgeiger/osf.io,adlius/osf.io,chennan47/osf.io,caneruguz/osf.io,TomBaxter/osf.io,acshi/osf.io,aaxelb/osf.io,felliott/osf.io,cslzchen/osf.io,laurenrevere/osf.io,erinspace/osf.io,Nesiehr/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,Johnetordoff/osf.io,CenterForOpenScience/osf.io,mluo613/osf.io,monikagrabowska/osf.io,Johnetordoff/osf.io,leb2dg/osf.io,hmoco/osf.io,leb2dg/osf.io,Johnetordoff/osf.io,Nesiehr/osf.io,monikagrabowska/osf.io,cwisecarver/osf.io,HalcyonChimera/osf.io,baylee-d/osf.io,cwisecarver/osf.io,caneruguz/osf.io,TomBaxter/osf.io,mattclark/osf.io
|
Add prerequisite script for gdrive migration
|
# -*- coding: utf-8 -*-
"""Migrates GoogleDrive files that have unescaped paths and have a counterpart StoredFileNode. This repoints
the Guid for the unescaped StoredFileNode to the correct StoredFileNode.
This is a one-off script, run as a prerequisite to scripts.migration.migrate_googledoc_paths.
"""
import sys
import logging
from website.app import init_app
from framework.transactions.context import TokuTransaction
from scripts import utils as script_utils
from website.models import Guid, StoredFileNode
logger = logging.getLogger(__name__)
targets = [
{'guid': 'zcjr2', 'good': u'56a42d8f594d900182308a09', 'bad': '56a7cfc49ad5a1017af77922'},
{'guid': 'nv3xr', 'good': u'57347795594d9000492aaa9a', 'bad': '5734e7d99ad5a101fa57ce7d'},
{'guid': 'm5nxj', 'good': u'58089970594d9001f1622e35', 'bad': '58452885594d900046bac4db'},
]
def migrate():
for target in targets:
guid = Guid.load(target['guid'])
good_sfn = StoredFileNode.load(target['good'])
bad_sfn = StoredFileNode.load(target['bad'])
logger.info('Repointing Guid {} referent to StoredFileNode {}'.format(target['guid'], target['good']))
guid.referent = good_sfn
guid.save()
logger.info('Removing StoredFileNode {}'.format(target['bad']))
StoredFileNode.remove_one(bad_sfn)
def main():
dry = '--dry' in sys.argv
init_app(set_backends=True, routes=False)
if not dry:
script_utils.add_file_logger(logger, __file__)
with TokuTransaction():
migrate()
if dry:
raise RuntimeError('Dry Run -- Transaction rolled back')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add prerequisite script for gdrive migration<commit_after>
|
# -*- coding: utf-8 -*-
"""Migrates GoogleDrive files that have unescaped paths and have a counterpart StoredFileNode. This repoints
the Guid for the unescaped StoredFileNode to the correct StoredFileNode.
This is a one-off script, run as a prerequisite to scripts.migration.migrate_googledoc_paths.
"""
import sys
import logging
from website.app import init_app
from framework.transactions.context import TokuTransaction
from scripts import utils as script_utils
from website.models import Guid, StoredFileNode
logger = logging.getLogger(__name__)
targets = [
{'guid': 'zcjr2', 'good': u'56a42d8f594d900182308a09', 'bad': '56a7cfc49ad5a1017af77922'},
{'guid': 'nv3xr', 'good': u'57347795594d9000492aaa9a', 'bad': '5734e7d99ad5a101fa57ce7d'},
{'guid': 'm5nxj', 'good': u'58089970594d9001f1622e35', 'bad': '58452885594d900046bac4db'},
]
def migrate():
for target in targets:
guid = Guid.load(target['guid'])
good_sfn = StoredFileNode.load(target['good'])
bad_sfn = StoredFileNode.load(target['bad'])
logger.info('Repointing Guid {} referent to StoredFileNode {}'.format(target['guid'], target['good']))
guid.referent = good_sfn
guid.save()
logger.info('Removing StoredFileNode {}'.format(target['bad']))
StoredFileNode.remove_one(bad_sfn)
def main():
dry = '--dry' in sys.argv
init_app(set_backends=True, routes=False)
if not dry:
script_utils.add_file_logger(logger, __file__)
with TokuTransaction():
migrate()
if dry:
raise RuntimeError('Dry Run -- Transaction rolled back')
if __name__ == '__main__':
main()
|
Add prerequisite script for gdrive migration# -*- coding: utf-8 -*-
"""Migrates GoogleDrive files that have unescaped paths and have a counterpart StoredFileNode. This repoints
the Guid for the unescaped StoredFileNode to the correct StoredFileNode.
This is a one-off script, run as a prerequisite to scripts.migration.migrate_googledoc_paths.
"""
import sys
import logging
from website.app import init_app
from framework.transactions.context import TokuTransaction
from scripts import utils as script_utils
from website.models import Guid, StoredFileNode
logger = logging.getLogger(__name__)
targets = [
{'guid': 'zcjr2', 'good': u'56a42d8f594d900182308a09', 'bad': '56a7cfc49ad5a1017af77922'},
{'guid': 'nv3xr', 'good': u'57347795594d9000492aaa9a', 'bad': '5734e7d99ad5a101fa57ce7d'},
{'guid': 'm5nxj', 'good': u'58089970594d9001f1622e35', 'bad': '58452885594d900046bac4db'},
]
def migrate():
for target in targets:
guid = Guid.load(target['guid'])
good_sfn = StoredFileNode.load(target['good'])
bad_sfn = StoredFileNode.load(target['bad'])
logger.info('Repointing Guid {} referent to StoredFileNode {}'.format(target['guid'], target['good']))
guid.referent = good_sfn
guid.save()
logger.info('Removing StoredFileNode {}'.format(target['bad']))
StoredFileNode.remove_one(bad_sfn)
def main():
dry = '--dry' in sys.argv
init_app(set_backends=True, routes=False)
if not dry:
script_utils.add_file_logger(logger, __file__)
with TokuTransaction():
migrate()
if dry:
raise RuntimeError('Dry Run -- Transaction rolled back')
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add prerequisite script for gdrive migration<commit_after># -*- coding: utf-8 -*-
"""Migrates GoogleDrive files that have unescaped paths and have a counterpart StoredFileNode. This repoints
the Guid for the unescaped StoredFileNode to the correct StoredFileNode.
This is a one-off script, run as a prerequisite to scripts.migration.migrate_googledoc_paths.
"""
import sys
import logging
from website.app import init_app
from framework.transactions.context import TokuTransaction
from scripts import utils as script_utils
from website.models import Guid, StoredFileNode
logger = logging.getLogger(__name__)
targets = [
{'guid': 'zcjr2', 'good': u'56a42d8f594d900182308a09', 'bad': '56a7cfc49ad5a1017af77922'},
{'guid': 'nv3xr', 'good': u'57347795594d9000492aaa9a', 'bad': '5734e7d99ad5a101fa57ce7d'},
{'guid': 'm5nxj', 'good': u'58089970594d9001f1622e35', 'bad': '58452885594d900046bac4db'},
]
def migrate():
for target in targets:
guid = Guid.load(target['guid'])
good_sfn = StoredFileNode.load(target['good'])
bad_sfn = StoredFileNode.load(target['bad'])
logger.info('Repointing Guid {} referent to StoredFileNode {}'.format(target['guid'], target['good']))
guid.referent = good_sfn
guid.save()
logger.info('Removing StoredFileNode {}'.format(target['bad']))
StoredFileNode.remove_one(bad_sfn)
def main():
dry = '--dry' in sys.argv
init_app(set_backends=True, routes=False)
if not dry:
script_utils.add_file_logger(logger, __file__)
with TokuTransaction():
migrate()
if dry:
raise RuntimeError('Dry Run -- Transaction rolled back')
if __name__ == '__main__':
main()
|
|
a596781889a3991d14440dbf889767200e9d6681
|
examples/players/fish_player.py
|
examples/players/fish_player.py
|
from pypokerengine.players import BasePokerPlayer
class FishPlayer(BasePokerPlayer): # Do not forget to make the parent class "BasePokerPlayer"
# we define the logic to make an action through this method. (so this method would be the core of your AI)
def declare_action(self, valid_actions, hole_card, round_state):
# valid_actions format => [raise_action_info, call_action_info, fold_action_info]
call_action_info = valid_actions[1]
action, amount = call_action_info["action"], call_action_info["amount"]
return action, amount # action returned here is sent to the poker engine
def receive_game_start_message(self, game_info):
pass
def receive_round_start_message(self, round_count, hole_card, seats):
pass
def receive_street_start_message(self, street, round_state):
pass
def receive_game_update_message(self, action, round_state):
pass
def receive_round_result_message(self, winners, hand_info, round_state):
pass
|
Add simple example player for README tutorial
|
Add simple example player for README tutorial
|
Python
|
mit
|
ishikota/PyPokerEngine
|
Add simple example player for README tutorial
|
from pypokerengine.players import BasePokerPlayer
class FishPlayer(BasePokerPlayer): # Do not forget to make the parent class "BasePokerPlayer"
# we define the logic to make an action through this method. (so this method would be the core of your AI)
def declare_action(self, valid_actions, hole_card, round_state):
# valid_actions format => [raise_action_info, call_action_info, fold_action_info]
call_action_info = valid_actions[1]
action, amount = call_action_info["action"], call_action_info["amount"]
return action, amount # action returned here is sent to the poker engine
def receive_game_start_message(self, game_info):
pass
def receive_round_start_message(self, round_count, hole_card, seats):
pass
def receive_street_start_message(self, street, round_state):
pass
def receive_game_update_message(self, action, round_state):
pass
def receive_round_result_message(self, winners, hand_info, round_state):
pass
|
<commit_before><commit_msg>Add simple example player for README tutorial<commit_after>
|
from pypokerengine.players import BasePokerPlayer
class FishPlayer(BasePokerPlayer): # Do not forget to make the parent class "BasePokerPlayer"
# we define the logic to make an action through this method. (so this method would be the core of your AI)
def declare_action(self, valid_actions, hole_card, round_state):
# valid_actions format => [raise_action_info, call_action_info, fold_action_info]
call_action_info = valid_actions[1]
action, amount = call_action_info["action"], call_action_info["amount"]
return action, amount # action returned here is sent to the poker engine
def receive_game_start_message(self, game_info):
pass
def receive_round_start_message(self, round_count, hole_card, seats):
pass
def receive_street_start_message(self, street, round_state):
pass
def receive_game_update_message(self, action, round_state):
pass
def receive_round_result_message(self, winners, hand_info, round_state):
pass
|
Add simple example player for README tutorialfrom pypokerengine.players import BasePokerPlayer
class FishPlayer(BasePokerPlayer): # Do not forget to make the parent class "BasePokerPlayer"
# we define the logic to make an action through this method. (so this method would be the core of your AI)
def declare_action(self, valid_actions, hole_card, round_state):
# valid_actions format => [raise_action_info, call_action_info, fold_action_info]
call_action_info = valid_actions[1]
action, amount = call_action_info["action"], call_action_info["amount"]
return action, amount # action returned here is sent to the poker engine
def receive_game_start_message(self, game_info):
pass
def receive_round_start_message(self, round_count, hole_card, seats):
pass
def receive_street_start_message(self, street, round_state):
pass
def receive_game_update_message(self, action, round_state):
pass
def receive_round_result_message(self, winners, hand_info, round_state):
pass
|
<commit_before><commit_msg>Add simple example player for README tutorial<commit_after>from pypokerengine.players import BasePokerPlayer
class FishPlayer(BasePokerPlayer): # Do not forget to make the parent class "BasePokerPlayer"
# we define the logic to make an action through this method. (so this method would be the core of your AI)
def declare_action(self, valid_actions, hole_card, round_state):
# valid_actions format => [raise_action_info, call_action_info, fold_action_info]
call_action_info = valid_actions[1]
action, amount = call_action_info["action"], call_action_info["amount"]
return action, amount # action returned here is sent to the poker engine
def receive_game_start_message(self, game_info):
pass
def receive_round_start_message(self, round_count, hole_card, seats):
pass
def receive_street_start_message(self, street, round_state):
pass
def receive_game_update_message(self, action, round_state):
pass
def receive_round_result_message(self, winners, hand_info, round_state):
pass
|
|
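For completeness, the player above is normally exercised through the library's game API. A short usage sketch following the PyPokerEngine README conventions of that era; the import path and keyword names may differ between library versions:
from pypokerengine.api.game import setup_config, start_poker

# Two FishPlayer instances call every street against each other.
config = setup_config(max_round=10, initial_stack=100, small_blind_amount=5)
config.register_player(name="fish_1", algorithm=FishPlayer())
config.register_player(name="fish_2", algorithm=FishPlayer())
game_result = start_poker(config, verbose=1)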
5b434ae55d39075dad6c62ef48f8a133636d3f1e
|
robot-vision/findbin.py
|
robot-vision/findbin.py
|
#!/usr/bin/env python3
from networktables import NetworkTable
from optparse import OptionParser
import time
import logging
logging.basicConfig(level=logging.DEBUG)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('--on', action='store_true', default=False)
parser.add_option('--off', action='store_true', default=False)
options, args = parser.parse_args()
if len(args) == 0:
parser.error("Specify robot IP")
NetworkTable.setIPAddress(args[0])
NetworkTable.setClientMode()
NetworkTable.initialize()
sd = NetworkTable.getTable('SmartDashboard')
time.sleep(2)
if options.on:
sd.putBoolean('findBin', True)
elif options.off:
sd.putBoolean('findBin', False)
else:
parser.error("Specify either --on or --off")
time.sleep(1)
|
Add utility to turn findBin variable on/off
|
Add utility to turn findBin variable on/off
|
Python
|
apache-2.0
|
frc1418/2015-vision,frc1418/2015-vision,CarterFendley/2015-vision,CarterFendley/2015-vision
|
Add utility to turn findBin variable on/off
|
#!/usr/bin/env python3
from networktables import NetworkTable
from optparse import OptionParser
import time
import logging
logging.basicConfig(level=logging.DEBUG)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('--on', action='store_true', default=False)
parser.add_option('--off', action='store_true', default=False)
options, args = parser.parse_args()
if len(args) == 0:
parser.error("Specify robot IP")
NetworkTable.setIPAddress(args[0])
NetworkTable.setClientMode()
NetworkTable.initialize()
sd = NetworkTable.getTable('SmartDashboard')
time.sleep(2)
if options.on:
sd.putBoolean('findBin', True)
elif options.off:
sd.putBoolean('findBin', False)
else:
parser.error("Specify either --on or --off")
time.sleep(1)
|
<commit_before><commit_msg>Add utility to turn findBin variable on/off<commit_after>
|
#!/usr/bin/env python3
from networktables import NetworkTable
from optparse import OptionParser
import time
import logging
logging.basicConfig(level=logging.DEBUG)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('--on', action='store_true', default=False)
parser.add_option('--off', action='store_true', default=False)
options, args = parser.parse_args()
if len(args) == 0:
parser.error("Specify robot IP")
NetworkTable.setIPAddress(args[0])
NetworkTable.setClientMode()
NetworkTable.initialize()
sd = NetworkTable.getTable('SmartDashboard')
time.sleep(2)
if options.on:
sd.putBoolean('findBin', True)
elif options.off:
sd.putBoolean('findBin', False)
else:
parser.error("Specify either --on or --off")
time.sleep(1)
|
Add utility to turn findBin variable on/off#!/usr/bin/env python3
from networktables import NetworkTable
from optparse import OptionParser
import time
import logging
logging.basicConfig(level=logging.DEBUG)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('--on', action='store_true', default=False)
parser.add_option('--off', action='store_true', default=False)
options, args = parser.parse_args()
if len(args) == 0:
parser.error("Specify robot IP")
NetworkTable.setIPAddress(args[0])
NetworkTable.setClientMode()
NetworkTable.initialize()
sd = NetworkTable.getTable('SmartDashboard')
time.sleep(2)
if options.on:
sd.putBoolean('findBin', True)
elif options.off:
sd.putBoolean('findBin', False)
else:
parser.error("Specify either --on or --off")
time.sleep(1)
|
<commit_before><commit_msg>Add utility to turn findBin variable on/off<commit_after>#!/usr/bin/env python3
from networktables import NetworkTable
from optparse import OptionParser
import time
import logging
logging.basicConfig(level=logging.DEBUG)
if __name__ == '__main__':
parser = OptionParser()
parser.add_option('--on', action='store_true', default=False)
parser.add_option('--off', action='store_true', default=False)
options, args = parser.parse_args()
if len(args) == 0:
parser.error("Specify robot IP")
NetworkTable.setIPAddress(args[0])
NetworkTable.setClientMode()
NetworkTable.initialize()
sd = NetworkTable.getTable('SmartDashboard')
time.sleep(2)
if options.on:
sd.putBoolean('findBin', True)
elif options.off:
sd.putBoolean('findBin', False)
else:
parser.error("Specify either --on or --off")
time.sleep(1)
|
|
97bffbb336dbf71132171346fa948e5b9f59c60d
|
setup.py
|
setup.py
|
from setuptools import setup
setup(
name = "django-redis-cache",
url = "http://github.com/sebleier/django-redis-cache/",
author = "Sean Bleier",
author_email = "sebleier@gmail.com",
version = "0.9.7",
packages = ["redis_cache"],
description = "Redis Cache Backend for Django",
install_requires=['redis>=2.4.5',],
classifiers = [
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
"Environment :: Web Environment",
"Framework :: Django",
],
)
|
from setuptools import setup
setup(
name = "django-redis-cache",
url = "http://github.com/sebleier/django-redis-cache/",
author = "Sean Bleier",
author_email = "sebleier@gmail.com",
version = "0.9.7",
packages = ["redis_cache"],
description = "Redis Cache Backend for Django",
install_requires=['redis>=2.4.5',],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
"Environment :: Web Environment",
"Framework :: Django",
],
)
|
Add Python version trove classifiers.
|
Add Python version trove classifiers.
|
Python
|
bsd-3-clause
|
maikelwever/django-redis-cache,carltongibson/django-redis-cache,carltongibson/django-redis-cache,chripede/django-redis-cache,chripede/django-redis-cache,maikelwever/django-redis-cache
|
from setuptools import setup
setup(
name = "django-redis-cache",
url = "http://github.com/sebleier/django-redis-cache/",
author = "Sean Bleier",
author_email = "sebleier@gmail.com",
version = "0.9.7",
packages = ["redis_cache"],
description = "Redis Cache Backend for Django",
install_requires=['redis>=2.4.5',],
classifiers = [
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
"Environment :: Web Environment",
"Framework :: Django",
],
)
Add Python version trove classifiers.
|
from setuptools import setup
setup(
name = "django-redis-cache",
url = "http://github.com/sebleier/django-redis-cache/",
author = "Sean Bleier",
author_email = "sebleier@gmail.com",
version = "0.9.7",
packages = ["redis_cache"],
description = "Redis Cache Backend for Django",
install_requires=['redis>=2.4.5',],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
"Environment :: Web Environment",
"Framework :: Django",
],
)
|
<commit_before>from setuptools import setup
setup(
name = "django-redis-cache",
url = "http://github.com/sebleier/django-redis-cache/",
author = "Sean Bleier",
author_email = "sebleier@gmail.com",
version = "0.9.7",
packages = ["redis_cache"],
description = "Redis Cache Backend for Django",
install_requires=['redis>=2.4.5',],
classifiers = [
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
"Environment :: Web Environment",
"Framework :: Django",
],
)
<commit_msg>Add Python version trove classifiers.<commit_after>
|
from setuptools import setup
setup(
name = "django-redis-cache",
url = "http://github.com/sebleier/django-redis-cache/",
author = "Sean Bleier",
author_email = "sebleier@gmail.com",
version = "0.9.7",
packages = ["redis_cache"],
description = "Redis Cache Backend for Django",
install_requires=['redis>=2.4.5',],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
"Environment :: Web Environment",
"Framework :: Django",
],
)
|
from setuptools import setup
setup(
name = "django-redis-cache",
url = "http://github.com/sebleier/django-redis-cache/",
author = "Sean Bleier",
author_email = "sebleier@gmail.com",
version = "0.9.7",
packages = ["redis_cache"],
description = "Redis Cache Backend for Django",
install_requires=['redis>=2.4.5',],
classifiers = [
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
"Environment :: Web Environment",
"Framework :: Django",
],
)
Add Python version trove classifiers.from setuptools import setup
setup(
name = "django-redis-cache",
url = "http://github.com/sebleier/django-redis-cache/",
author = "Sean Bleier",
author_email = "sebleier@gmail.com",
version = "0.9.7",
packages = ["redis_cache"],
description = "Redis Cache Backend for Django",
install_requires=['redis>=2.4.5',],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
"Environment :: Web Environment",
"Framework :: Django",
],
)
|
<commit_before>from setuptools import setup
setup(
name = "django-redis-cache",
url = "http://github.com/sebleier/django-redis-cache/",
author = "Sean Bleier",
author_email = "sebleier@gmail.com",
version = "0.9.7",
packages = ["redis_cache"],
description = "Redis Cache Backend for Django",
install_requires=['redis>=2.4.5',],
classifiers = [
"Programming Language :: Python",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
"Environment :: Web Environment",
"Framework :: Django",
],
)
<commit_msg>Add Python version trove classifiers.<commit_after>from setuptools import setup
setup(
name = "django-redis-cache",
url = "http://github.com/sebleier/django-redis-cache/",
author = "Sean Bleier",
author_email = "sebleier@gmail.com",
version = "0.9.7",
packages = ["redis_cache"],
description = "Redis Cache Backend for Django",
install_requires=['redis>=2.4.5',],
classifiers = [
"Programming Language :: Python",
"Programming Language :: Python :: 2.6",
"Programming Language :: Python :: 2.7",
"Programming Language :: Python :: 3.3",
"Operating System :: OS Independent",
"Topic :: Software Development :: Libraries",
"Topic :: Utilities",
"Environment :: Web Environment",
"Framework :: Django",
],
)
|
ffdfa7729c865c8e999bdfd4af49e3beb6899970
|
test.py
|
test.py
|
import pdb
import time
import collections
import random
import itertools
import xmlrpclib
import dis
server_url = 'http://127.0.0.1:20738/RPC2'
server = xmlrpclib.Server(server_url)
G = server.ubigraph
def erdos ():
vert_ids = range(0,1000)
p = 0.001
G.clear()
pdb.set_trace()
for id in vert_ids:
G.new_vertex_w_id(id)
edge_count = 0
for i,j in itertools.combinations(vert_ids, 2):
r = random.random()
if r <= p:
edge_count += 1
id = G.new_edge(i, j)
#G.set_edge_attribute(id, 'oriented', 'true')
#G.set_edge_attribute(id, 'arrow', 'true')
#G.set_edge_attribute(id, 'showstrain', 'true')
#G.set_edge_attribute(id, 'strength', '0.0')
print edge_count
def groupbycount (ids):
return [len(list(li[1])) for li in itertools.groupby(sorted(ids))]
def preferential ():
G.clear()
size = 1000
G.new_vertex_w_id(0)
ids = [0]
for i in range(1, size):
G.new_vertex_w_id(i)
j = random.choice(ids)
G.new_edge(i,j)
ids += [i,j]
hist = groupbycount(ids)
histprime = collections.defaultdict(lambda:0)
for i in hist:
histprime[i] += 1
print sorted([(k,v) for k,v in histprime.items()])
if __name__ == '__main__':
#preferential()
erdos()
|
Add code to generate Erdos and preferential attachment graphs
|
Add code to generate Erdos and preferential attachment graphs
|
Python
|
mit
|
hausdorff/python-api-breakin
|
Add code to generate Erdos and preferential attachment graphs
|
import pdb
import time
import collections
import random
import itertools
import xmlrpclib
import dis
server_url = 'http://127.0.0.1:20738/RPC2'
server = xmlrpclib.Server(server_url)
G = server.ubigraph
def erdos ():
vert_ids = range(0,1000)
p = 0.001
G.clear()
pdb.set_trace()
for id in vert_ids:
G.new_vertex_w_id(id)
edge_count = 0
for i,j in itertools.combinations(vert_ids, 2):
r = random.random()
if r <= p:
edge_count += 1
id = G.new_edge(i, j)
#G.set_edge_attribute(id, 'oriented', 'true')
#G.set_edge_attribute(id, 'arrow', 'true')
#G.set_edge_attribute(id, 'showstrain', 'true')
#G.set_edge_attribute(id, 'strength', '0.0')
print edge_count
def groupbycount (ids):
return [len(list(li[1])) for li in itertools.groupby(sorted(ids))]
def preferential ():
G.clear()
size = 1000
G.new_vertex_w_id(0)
ids = [0]
for i in range(1, size):
G.new_vertex_w_id(i)
j = random.choice(ids)
G.new_edge(i,j)
ids += [i,j]
hist = groupbycount(ids)
histprime = collections.defaultdict(lambda:0)
for i in hist:
histprime[i] += 1
print sorted([(k,v) for k,v in histprime.items()])
if __name__ == '__main__':
#preferential()
erdos()
|
<commit_before><commit_msg>Add code to generate Erdos and preferential attachment graphs<commit_after>
|
import pdb
import time
import collections
import random
import itertools
import xmlrpclib
import dis
server_url = 'http://127.0.0.1:20738/RPC2'
server = xmlrpclib.Server(server_url)
G = server.ubigraph
def erdos ():
vert_ids = range(0,1000)
p = 0.001
G.clear()
pdb.set_trace()
for id in vert_ids:
G.new_vertex_w_id(id)
edge_count = 0
for i,j in itertools.combinations(vert_ids, 2):
r = random.random()
if r <= p:
edge_count += 1
id = G.new_edge(i, j)
#G.set_edge_attribute(id, 'oriented', 'true')
#G.set_edge_attribute(id, 'arrow', 'true')
#G.set_edge_attribute(id, 'showstrain', 'true')
#G.set_edge_attribute(id, 'strength', '0.0')
print edge_count
def groupbycount (ids):
return [len(list(li[1])) for li in itertools.groupby(sorted(ids))]
def preferential ():
G.clear()
size = 1000
G.new_vertex_w_id(0)
ids = [0]
for i in range(1, size):
G.new_vertex_w_id(i)
j = random.choice(ids)
G.new_edge(i,j)
ids += [i,j]
hist = groupbycount(ids)
histprime = collections.defaultdict(lambda:0)
for i in hist:
histprime[i] += 1
print sorted([(k,v) for k,v in histprime.items()])
if __name__ == '__main__':
#preferential()
erdos()
|
Add code to generate Erdos and preferential attachment graphsimport pdb
import time
import collections
import random
import itertools
import xmlrpclib
import dis
server_url = 'http://127.0.0.1:20738/RPC2'
server = xmlrpclib.Server(server_url)
G = server.ubigraph
def erdos ():
vert_ids = range(0,1000)
p = 0.001
G.clear()
pdb.set_trace()
for id in vert_ids:
G.new_vertex_w_id(id)
edge_count = 0
for i,j in itertools.combinations(vert_ids, 2):
r = random.random()
if r <= p:
edge_count += 1
id = G.new_edge(i, j)
#G.set_edge_attribute(id, 'oriented', 'true')
#G.set_edge_attribute(id, 'arrow', 'true')
#G.set_edge_attribute(id, 'showstrain', 'true')
#G.set_edge_attribute(id, 'strength', '0.0')
print edge_count
def groupbycount (ids):
return [len(list(li[1])) for li in itertools.groupby(sorted(ids))]
def preferential ():
G.clear()
size = 1000
G.new_vertex_w_id(0)
ids = [0]
for i in range(1, size):
G.new_vertex_w_id(i)
j = random.choice(ids)
G.new_edge(i,j)
ids += [i,j]
hist = groupbycount(ids)
histprime = collections.defaultdict(lambda:0)
for i in hist:
histprime[i] += 1
print sorted([(k,v) for k,v in histprime.items()])
if __name__ == '__main__':
#preferential()
erdos()
|
<commit_before><commit_msg>Add code to generate Erdos and preferential attachment graphs<commit_after>import pdb
import time
import collections
import random
import itertools
import xmlrpclib
import dis
server_url = 'http://127.0.0.1:20738/RPC2'
server = xmlrpclib.Server(server_url)
G = server.ubigraph
def erdos ():
vert_ids = range(0,1000)
p = 0.001
G.clear()
pdb.set_trace()
for id in vert_ids:
G.new_vertex_w_id(id)
edge_count = 0
for i,j in itertools.combinations(vert_ids, 2):
r = random.random()
if r <= p:
edge_count += 1
id = G.new_edge(i, j)
#G.set_edge_attribute(id, 'oriented', 'true')
#G.set_edge_attribute(id, 'arrow', 'true')
#G.set_edge_attribute(id, 'showstrain', 'true')
#G.set_edge_attribute(id, 'strength', '0.0')
print edge_count
def groupbycount (ids):
return [len(list(li[1])) for li in itertools.groupby(sorted(ids))]
def preferential ():
G.clear()
size = 1000
G.new_vertex_w_id(0)
ids = [0]
for i in range(1, size):
G.new_vertex_w_id(i)
j = random.choice(ids)
G.new_edge(i,j)
ids += [i,j]
hist = groupbycount(ids)
histprime = collections.defaultdict(lambda:0)
for i in hist:
histprime[i] += 1
print sorted([(k,v) for k,v in histprime.items()])
if __name__ == '__main__':
#preferential()
erdos()
|
|
a08b469c50557803bad6f5f7b432f8b332b49421
|
raiden/tests/unit/test_utils.py
|
raiden/tests/unit/test_utils.py
|
# -*- coding: utf-8 -*-
from raiden.utils import safe_equal_attributes
class Slotted(object):
__slots__ = (
'a',
'b',
)
def __eq__(self, other):
if isinstance(other, Slotted):
return (
safe_equal_attributes('a', self, other) and
safe_equal_attributes('b', self, other)
)
return False
def __ne__(self, other):
return not self.__eq__(other)
def test_safe_equal_attributes():
slotted = Slotted()
slotted.a = 1
slotted.b = 2
equal = Slotted()
equal.a = 1
equal.b = 2
notequal = Slotted()
notequal.a = 1
notequal.b = 3
none = Slotted()
none.a = 1
none.b = None
incomplete = Slotted()
incomplete.a = 1
equal_incomplete = Slotted()
equal_incomplete.a = 1
assert slotted == equal
assert slotted != notequal
assert slotted != incomplete
assert slotted != none
assert incomplete == equal_incomplete
|
Add unit test for safe_equal_attributes
|
Add unit test for safe_equal_attributes
|
Python
|
mit
|
tomashaber/raiden,hackaugusto/raiden,hackaugusto/raiden,tomashaber/raiden,tomashaber/raiden,tomashaber/raiden,tomashaber/raiden
|
Add unit test for safe_equal_attributes
|
# -*- coding: utf-8 -*-
from raiden.utils import safe_equal_attributes
class Slotted(object):
__slots__ = (
'a',
'b',
)
def __eq__(self, other):
if isinstance(other, Slotted):
return (
safe_equal_attributes('a', self, other) and
safe_equal_attributes('b', self, other)
)
return False
def __ne__(self, other):
return not self.__eq__(other)
def test_safe_equal_attributes():
slotted = Slotted()
slotted.a = 1
slotted.b = 2
equal = Slotted()
equal.a = 1
equal.b = 2
notequal = Slotted()
notequal.a = 1
notequal.b = 3
none = Slotted()
none.a = 1
none.b = None
incomplete = Slotted()
incomplete.a = 1
equal_incomplete = Slotted()
equal_incomplete.a = 1
assert slotted == equal
assert slotted != notequal
assert slotted != incomplete
assert slotted != none
assert incomplete == equal_incomplete
|
<commit_before><commit_msg>Add unit test for safe_equal_attributes<commit_after>
|
# -*- coding: utf-8 -*-
from raiden.utils import safe_equal_attributes
class Slotted(object):
__slots__ = (
'a',
'b',
)
def __eq__(self, other):
if isinstance(other, Slotted):
return (
safe_equal_attributes('a', self, other) and
safe_equal_attributes('b', self, other)
)
return False
def __ne__(self, other):
return not self.__eq__(other)
def test_safe_equal_attributes():
slotted = Slotted()
slotted.a = 1
slotted.b = 2
equal = Slotted()
equal.a = 1
equal.b = 2
notequal = Slotted()
notequal.a = 1
notequal.b = 3
none = Slotted()
none.a = 1
none.b = None
incomplete = Slotted()
incomplete.a = 1
equal_incomplete = Slotted()
equal_incomplete.a = 1
assert slotted == equal
assert slotted != notequal
assert slotted != incomplete
assert slotted != none
assert incomplete == equal_incomplete
|
Add unit test for safe_equal_attributes# -*- coding: utf-8 -*-
from raiden.utils import safe_equal_attributes
class Slotted(object):
__slots__ = (
'a',
'b',
)
def __eq__(self, other):
if isinstance(other, Slotted):
return (
safe_equal_attributes('a', self, other) and
safe_equal_attributes('b', self, other)
)
return False
def __ne__(self, other):
return not self.__eq__(other)
def test_safe_equal_attributes():
slotted = Slotted()
slotted.a = 1
slotted.b = 2
equal = Slotted()
equal.a = 1
equal.b = 2
notequal = Slotted()
notequal.a = 1
notequal.b = 3
none = Slotted()
none.a = 1
none.b = None
incomplete = Slotted()
incomplete.a = 1
equal_incomplete = Slotted()
equal_incomplete.a = 1
assert slotted == equal
assert slotted != notequal
assert slotted != incomplete
assert slotted != none
assert incomplete == equal_incomplete
|
<commit_before><commit_msg>Add unit test for safe_equal_attributes<commit_after># -*- coding: utf-8 -*-
from raiden.utils import safe_equal_attributes
class Slotted(object):
__slots__ = (
'a',
'b',
)
def __eq__(self, other):
if isinstance(other, Slotted):
return (
safe_equal_attributes('a', self, other) and
safe_equal_attributes('b', self, other)
)
return False
def __ne__(self, other):
return not self.__eq__(other)
def test_safe_equal_attributes():
slotted = Slotted()
slotted.a = 1
slotted.b = 2
equal = Slotted()
equal.a = 1
equal.b = 2
notequal = Slotted()
notequal.a = 1
notequal.b = 3
none = Slotted()
none.a = 1
none.b = None
incomplete = Slotted()
incomplete.a = 1
equal_incomplete = Slotted()
equal_incomplete.a = 1
assert slotted == equal
assert slotted != notequal
assert slotted != incomplete
assert slotted != none
assert incomplete == equal_incomplete
|
|
40adfc3aa00fddd55d138d5af8fd77e2347879c1
|
testrunner.py
|
testrunner.py
|
def run_tests(name, globals_dict):
import sys
RED = '\033[91m'
BOLD = '\033[1m'
END = '\033[0m'
print (BOLD + 'Running tests for ' + name + ' ...' + END)
tests = [globals_dict.get(fn) for fn in globals_dict if fn.startswith('test_')]
for each in tests:
if callable(each):
sys.stdout.write('\n-> ' + each.__name__[5:] + ' ... ')
result = each()
if result:
sys.stdout.write('OK')
else:
sys.stdout.write(RED + 'FAIL' + END)
|
Add basic unit testing functionality
|
Add basic unit testing functionality
|
Python
|
mit
|
kashifrazzaqui/again
|
Add basic unit testing functionality
|
def run_tests(name, globals_dict):
import sys
RED = '\033[91m'
BOLD = '\033[1m'
END = '\033[0m'
print (BOLD + 'Running tests for ' + name + ' ...' + END)
tests = [globals_dict.get(fn) for fn in globals_dict if fn.startswith('test_')]
for each in tests:
if callable(each):
sys.stdout.write('\n-> ' + each.__name__[5:] + ' ... ')
result = each()
if result:
sys.stdout.write('OK')
else:
sys.stdout.write(RED + 'FAIL' + END)
|
<commit_before><commit_msg>Add basic unit testing functionality<commit_after>
|
def run_tests(name, globals_dict):
import sys
RED = '\033[91m'
BOLD = '\033[1m'
END = '\033[0m'
print (BOLD + 'Running tests for ' + name + ' ...' + END)
tests = [globals_dict.get(fn) for fn in globals_dict if fn.startswith('test_')]
for each in tests:
if callable(each):
sys.stdout.write('\n-> ' + each.__name__[5:] + ' ... ')
result = each()
if result:
sys.stdout.write('OK')
else:
sys.stdout.write(RED + 'FAIL' + END)
|
Add basic unit testing functionality
def run_tests(name, globals_dict):
import sys
RED = '\033[91m'
BOLD = '\033[1m'
END = '\033[0m'
print (BOLD + 'Running tests for ' + name + ' ...' + END)
tests = [globals_dict.get(fn) for fn in globals_dict if fn.startswith('test_')]
for each in tests:
if callable(each):
sys.stdout.write('\n-> ' + each.__name__[5:] + ' ... ')
result = each()
if result:
sys.stdout.write('OK')
else:
sys.stdout.write(RED + 'FAIL' + END)
|
<commit_before><commit_msg>Add basic unit testing functionality<commit_after>
def run_tests(name, globals_dict):
import sys
RED = '\033[91m'
BOLD = '\033[1m'
END = '\033[0m'
print (BOLD + 'Running tests for ' + name + ' ...' + END)
tests = [globals_dict.get(fn) for fn in globals_dict if fn.startswith('test_')]
for each in tests:
if callable(each):
sys.stdout.write('\n-> ' + each.__name__[5:] + ' ... ')
result = each()
if result:
sys.stdout.write('OK')
else:
sys.stdout.write(RED + 'FAIL' + END)
|
|
7837709de7de97ecad91fb0eed6abe0d1c3be9d1
|
rst2pdf/tests/input/test_180.py
|
rst2pdf/tests/input/test_180.py
|
# -*- coding: utf-8 -*-
from reportlab.platypus import SimpleDocTemplate
from reportlab.platypus.paragraph import Paragraph
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.colors import Color
from reportlab.platypus.flowables import _listWrapOn, _FUZZ
from wordaxe.rl.NewParagraph import Paragraph
from wordaxe.rl.styles import ParagraphStyle, getSampleStyleSheet
def go():
styles = getSampleStyleSheet()
style=styles['Normal']
p1 = Paragraph('This is a paragraph', style )
print p1.wrap(500,701)
print p1._cache['avail']
print len(p1.split(500,701))
print len(p1.split(500,700))
go()
|
Test case for wordaxe bug
|
Test case for wordaxe bug
|
Python
|
mit
|
liuyi1112/rst2pdf,rst2pdf/rst2pdf,rst2pdf/rst2pdf,pombreda/rst2pdf,liuyi1112/rst2pdf,pombreda/rst2pdf
|
Test case for wordaxe bug
|
# -*- coding: utf-8 -*-
from reportlab.platypus import SimpleDocTemplate
from reportlab.platypus.paragraph import Paragraph
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.colors import Color
from reportlab.platypus.flowables import _listWrapOn, _FUZZ
from wordaxe.rl.NewParagraph import Paragraph
from wordaxe.rl.styles import ParagraphStyle, getSampleStyleSheet
def go():
styles = getSampleStyleSheet()
style=styles['Normal']
p1 = Paragraph('This is a paragraph', style )
print p1.wrap(500,701)
print p1._cache['avail']
print len(p1.split(500,701))
print len(p1.split(500,700))
go()
|
<commit_before><commit_msg>Test case for wordaxe bug<commit_after>
|
# -*- coding: utf-8 -*-
from reportlab.platypus import SimpleDocTemplate
from reportlab.platypus.paragraph import Paragraph
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.colors import Color
from reportlab.platypus.flowables import _listWrapOn, _FUZZ
from wordaxe.rl.NewParagraph import Paragraph
from wordaxe.rl.styles import ParagraphStyle, getSampleStyleSheet
def go():
styles = getSampleStyleSheet()
style=styles['Normal']
p1 = Paragraph('This is a paragraph', style )
print p1.wrap(500,701)
print p1._cache['avail']
print len(p1.split(500,701))
print len(p1.split(500,700))
go()
|
Test case for wordaxe bug# -*- coding: utf-8 -*-
from reportlab.platypus import SimpleDocTemplate
from reportlab.platypus.paragraph import Paragraph
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.colors import Color
from reportlab.platypus.flowables import _listWrapOn, _FUZZ
from wordaxe.rl.NewParagraph import Paragraph
from wordaxe.rl.styles import ParagraphStyle, getSampleStyleSheet
def go():
styles = getSampleStyleSheet()
style=styles['Normal']
p1 = Paragraph('This is a paragraph', style )
print p1.wrap(500,701)
print p1._cache['avail']
print len(p1.split(500,701))
print len(p1.split(500,700))
go()
|
<commit_before><commit_msg>Test case for wordaxe bug<commit_after># -*- coding: utf-8 -*-
from reportlab.platypus import SimpleDocTemplate
from reportlab.platypus.paragraph import Paragraph
from reportlab.lib.styles import getSampleStyleSheet, ParagraphStyle
from reportlab.lib.colors import Color
from reportlab.platypus.flowables import _listWrapOn, _FUZZ
from wordaxe.rl.NewParagraph import Paragraph
from wordaxe.rl.styles import ParagraphStyle, getSampleStyleSheet
def go():
styles = getSampleStyleSheet()
style=styles['Normal']
p1 = Paragraph('This is a paragraph', style )
print p1.wrap(500,701)
print p1._cache['avail']
print len(p1.split(500,701))
print len(p1.split(500,700))
go()
|
|
cfe8a492c55779d0b4d998b6e3d6104c64794954
|
prob3.py
|
prob3.py
|
# projecteuler.net/problem=3
def LargestPrimeFactor():
res = FindLargestPrimeNumber(600851475143)
print(res)
def FindLargestPrimeNumber(n):
i = n
while i > 1:
i = i - 1
if n % i == 0:
j = i
prime = True
while j > 2:
j = j - 1
if i % j == 0:
prime = False
break
if prime:
return i
if __name__ == "__main__":
LargestPrimeFactor()
|
Add euler project 3 problem
|
Add euler project 3 problem
|
Python
|
apache-2.0
|
yuriyshapovalov/Prototypes,yuriyshapovalov/Prototypes,yuriyshapovalov/Prototypes
|
Add euler project 3 problem
|
# projecteuler.net/problem=3
def LargestPrimeFactor():
res = FindLargestPrimeNumber(600851475143)
print(res)
def FindLargestPrimeNumber(n):
i = n
while i > 1:
i = i - 1
if n % i == 0:
j = i
prime = True
while j > 2:
j = j - 1
if i % j == 0:
prime = False
break
if prime:
return i
if __name__ == "__main__":
LargestPrimeFactor()
|
<commit_before><commit_msg>Add euler project 3 problem<commit_after>
|
# projecteuler.net/problem=3
def LargestPrimeFactor():
res = FindLargestPrimeNumber(600851475143)
print(res)
def FindLargestPrimeNumber(n):
i = n
while i > 1:
i = i - 1
if n % i == 0:
j = i
prime = True
while j > 2:
j = j - 1
if i % j == 0:
prime = False
break
if prime:
return i
if __name__ == "__main__":
LargestPrimeFactor()
|
Add euler project 3 problem# projecteuler.net/problem=3
def LargestPrimeFactor():
res = FindLargestPrimeNumber(600851475143)
print(res)
def FindLargestPrimeNumber(n):
i = n
while i > 1:
i = i - 1
if n % i == 0:
j = i
prime = True
while j > 2:
j = j - 1
if i % j == 0:
prime = False
break
if prime:
return i
if __name__ == "__main__":
LargestPrimeFactor()
|
<commit_before><commit_msg>Add euler project 3 problem<commit_after># projecteuler.net/problem=3
def LargestPrimeFactor():
res = FindLargestPrimeNumber(600851475143)
print(res)
def FindLargestPrimeNumber(n):
i = n
while i > 1:
i = i - 1
if n % i == 0:
j = i
prime = True
while j > 2:
j = j - 1
if i % j == 0:
prime = False
break
if prime:
return i
if __name__ == "__main__":
LargestPrimeFactor()
|
|
13f5eea9472f19c19c082cbb4a0be3509b0d083f
|
astropy/utils/tests/test_parsing.py
|
astropy/utils/tests/test_parsing.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import importlib
import sys
from textwrap import dedent
import pytest
from astropy.utils.parsing import lex, yacc, TAB_HEADER
def _docstring_canary():
"""Docstring that's here just to check for -OO."""
@pytest.mark.skipif(not _docstring_canary.__doc__, reason="Test cannot be run with -OO")
def test_generate_parser(tmp_path, monkeypatch):
# Write Python code into the temporary directory, so that the
# generated tables will also go into the temporary directory.
lexer_file = tmp_path / 'test_parsing_lexer.py'
lexer_file.write_text(dedent(r"""
from astropy.utils.parsing import lex
def make_lexer():
tokens = ('NUMBER', 'PLUS')
t_PLUS = r'\+'
def t_NUMBER(t):
r'\d+'
t.value = int(t.value)
return t
return lex('test_parsing_lextab', 'test_parsing_lexer')
"""))
parser_file = tmp_path / 'test_parsing_parser.py'
parser_file.write_text(dedent(r"""
from astropy.utils.parsing import yacc
def make_parser():
tokens = ('NUMBER', 'PLUS')
def p_expression_number(p):
'expression : NUMBER'
p[0] = p[1]
def p_expression_plus(p):
'expression : expression PLUS NUMBER'
p[0] = p[1] + p[3]
return yacc('test_parsing_parsetab', 'test_parsing_parser')
"""))
monkeypatch.syspath_prepend(tmp_path)
lexer_mod = importlib.import_module('test_parsing_lexer')
lexer = lexer_mod.make_lexer()
parser_mod = importlib.import_module('test_parsing_parser')
parser = parser_mod.make_parser()
result = parser.parse('1+2+3', lexer=lexer)
assert result == 6
lextab = (tmp_path / 'test_parsing_lextab.py').read_text()
assert lextab.startswith(TAB_HEADER.format(package='test_parsing_lexer'))
parsetab = (tmp_path / 'test_parsing_parsetab.py').read_text()
assert parsetab.startswith(TAB_HEADER.format(package='test_parsing_parser'))
|
Add a unit test for utils.parsing
|
Add a unit test for utils.parsing
Most of the coverage comes from the main test suite, but this covers the
case where the lextab/parsetab does not yet exist and has to be
generated.
|
Python
|
bsd-3-clause
|
astropy/astropy,aleksandr-bakanov/astropy,saimn/astropy,mhvk/astropy,dhomeier/astropy,dhomeier/astropy,astropy/astropy,StuartLittlefair/astropy,aleksandr-bakanov/astropy,larrybradley/astropy,saimn/astropy,mhvk/astropy,saimn/astropy,StuartLittlefair/astropy,dhomeier/astropy,dhomeier/astropy,aleksandr-bakanov/astropy,aleksandr-bakanov/astropy,lpsinger/astropy,astropy/astropy,astropy/astropy,pllim/astropy,larrybradley/astropy,lpsinger/astropy,lpsinger/astropy,saimn/astropy,pllim/astropy,lpsinger/astropy,mhvk/astropy,StuartLittlefair/astropy,astropy/astropy,pllim/astropy,pllim/astropy,larrybradley/astropy,lpsinger/astropy,mhvk/astropy,saimn/astropy,StuartLittlefair/astropy,pllim/astropy,dhomeier/astropy,larrybradley/astropy,mhvk/astropy,StuartLittlefair/astropy,larrybradley/astropy
|
Add a unit test for utils.parsing
Most of the coverage comes from the main test suite, but this covers the
case where the lextab/parsetab does not yet exist and has to be
generated.
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import importlib
import sys
from textwrap import dedent
import pytest
from astropy.utils.parsing import lex, yacc, TAB_HEADER
def _docstring_canary():
"""Docstring that's here just to check for -OO."""
@pytest.mark.skipif(not _docstring_canary.__doc__, reason="Test cannot be run with -OO")
def test_generate_parser(tmp_path, monkeypatch):
# Write Python code into the temporary directory, so that the
# generated tables will also go into the temporary directory.
lexer_file = tmp_path / 'test_parsing_lexer.py'
lexer_file.write_text(dedent(r"""
from astropy.utils.parsing import lex
def make_lexer():
tokens = ('NUMBER', 'PLUS')
t_PLUS = r'\+'
def t_NUMBER(t):
r'\d+'
t.value = int(t.value)
return t
return lex('test_parsing_lextab', 'test_parsing_lexer')
"""))
parser_file = tmp_path / 'test_parsing_parser.py'
parser_file.write_text(dedent(r"""
from astropy.utils.parsing import yacc
def make_parser():
tokens = ('NUMBER', 'PLUS')
def p_expression_number(p):
'expression : NUMBER'
p[0] = p[1]
def p_expression_plus(p):
'expression : expression PLUS NUMBER'
p[0] = p[1] + p[3]
return yacc('test_parsing_parsetab', 'test_parsing_parser')
"""))
monkeypatch.syspath_prepend(tmp_path)
lexer_mod = importlib.import_module('test_parsing_lexer')
lexer = lexer_mod.make_lexer()
parser_mod = importlib.import_module('test_parsing_parser')
parser = parser_mod.make_parser()
result = parser.parse('1+2+3', lexer=lexer)
assert result == 6
lextab = (tmp_path / 'test_parsing_lextab.py').read_text()
assert lextab.startswith(TAB_HEADER.format(package='test_parsing_lexer'))
parsetab = (tmp_path / 'test_parsing_parsetab.py').read_text()
assert parsetab.startswith(TAB_HEADER.format(package='test_parsing_parser'))
|
<commit_before><commit_msg>Add a unit test for utils.parsing
Most of the coverage comes from the main test suite, but this covers the
case where the lextab/parsetab does not yet exist and has to be
generated.<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import importlib
import sys
from textwrap import dedent
import pytest
from astropy.utils.parsing import lex, yacc, TAB_HEADER
def _docstring_canary():
"""Docstring that's here just to check for -OO."""
@pytest.mark.skipif(not _docstring_canary.__doc__, reason="Test cannot be run with -OO")
def test_generate_parser(tmp_path, monkeypatch):
# Write Python code into the temporary directory, so that the
# generated tables will also go into the temporary directory.
lexer_file = tmp_path / 'test_parsing_lexer.py'
lexer_file.write_text(dedent(r"""
from astropy.utils.parsing import lex
def make_lexer():
tokens = ('NUMBER', 'PLUS')
t_PLUS = r'\+'
def t_NUMBER(t):
r'\d+'
t.value = int(t.value)
return t
return lex('test_parsing_lextab', 'test_parsing_lexer')
"""))
parser_file = tmp_path / 'test_parsing_parser.py'
parser_file.write_text(dedent(r"""
from astropy.utils.parsing import yacc
def make_parser():
tokens = ('NUMBER', 'PLUS')
def p_expression_number(p):
'expression : NUMBER'
p[0] = p[1]
def p_expression_plus(p):
'expression : expression PLUS NUMBER'
p[0] = p[1] + p[3]
return yacc('test_parsing_parsetab', 'test_parsing_parser')
"""))
monkeypatch.syspath_prepend(tmp_path)
lexer_mod = importlib.import_module('test_parsing_lexer')
lexer = lexer_mod.make_lexer()
parser_mod = importlib.import_module('test_parsing_parser')
parser = parser_mod.make_parser()
result = parser.parse('1+2+3', lexer=lexer)
assert result == 6
lextab = (tmp_path / 'test_parsing_lextab.py').read_text()
assert lextab.startswith(TAB_HEADER.format(package='test_parsing_lexer'))
parsetab = (tmp_path / 'test_parsing_parsetab.py').read_text()
assert parsetab.startswith(TAB_HEADER.format(package='test_parsing_parser'))
|
Add a unit test for utils.parsing
Most of the coverage comes from the main test suite, but this covers the
case where the lextab/parsetab does not yet exist and has to be
generated.# Licensed under a 3-clause BSD style license - see LICENSE.rst
import importlib
import sys
from textwrap import dedent
import pytest
from astropy.utils.parsing import lex, yacc, TAB_HEADER
def _docstring_canary():
"""Docstring that's here just to check for -OO."""
@pytest.mark.skipif(not _docstring_canary.__doc__, reason="Test cannot be run with -OO")
def test_generate_parser(tmp_path, monkeypatch):
# Write Python code into the temporary directory, so that the
# generated tables will also go into the temporary directory.
lexer_file = tmp_path / 'test_parsing_lexer.py'
lexer_file.write_text(dedent(r"""
from astropy.utils.parsing import lex
def make_lexer():
tokens = ('NUMBER', 'PLUS')
t_PLUS = r'\+'
def t_NUMBER(t):
r'\d+'
t.value = int(t.value)
return t
return lex('test_parsing_lextab', 'test_parsing_lexer')
"""))
parser_file = tmp_path / 'test_parsing_parser.py'
parser_file.write_text(dedent(r"""
from astropy.utils.parsing import yacc
def make_parser():
tokens = ('NUMBER', 'PLUS')
def p_expression_number(p):
'expression : NUMBER'
p[0] = p[1]
def p_expression_plus(p):
'expression : expression PLUS NUMBER'
p[0] = p[1] + p[3]
return yacc('test_parsing_parsetab', 'test_parsing_parser')
"""))
monkeypatch.syspath_prepend(tmp_path)
lexer_mod = importlib.import_module('test_parsing_lexer')
lexer = lexer_mod.make_lexer()
parser_mod = importlib.import_module('test_parsing_parser')
parser = parser_mod.make_parser()
result = parser.parse('1+2+3', lexer=lexer)
assert result == 6
lextab = (tmp_path / 'test_parsing_lextab.py').read_text()
assert lextab.startswith(TAB_HEADER.format(package='test_parsing_lexer'))
parsetab = (tmp_path / 'test_parsing_parsetab.py').read_text()
assert parsetab.startswith(TAB_HEADER.format(package='test_parsing_parser'))
|
<commit_before><commit_msg>Add a unit test for utils.parsing
Most of the coverage comes from the main test suite, but this covers the
case where the lextab/parsetab does not yet exist and has to be
generated.<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
import importlib
import sys
from textwrap import dedent
import pytest
from astropy.utils.parsing import lex, yacc, TAB_HEADER
def _docstring_canary():
"""Docstring that's here just to check for -OO."""
@pytest.mark.skipif(not _docstring_canary.__doc__, reason="Test cannot be run with -OO")
def test_generate_parser(tmp_path, monkeypatch):
# Write Python code into the temporary directory, so that the
# generated tables will also go into the temporary directory.
lexer_file = tmp_path / 'test_parsing_lexer.py'
lexer_file.write_text(dedent(r"""
from astropy.utils.parsing import lex
def make_lexer():
tokens = ('NUMBER', 'PLUS')
t_PLUS = r'\+'
def t_NUMBER(t):
r'\d+'
t.value = int(t.value)
return t
return lex('test_parsing_lextab', 'test_parsing_lexer')
"""))
parser_file = tmp_path / 'test_parsing_parser.py'
parser_file.write_text(dedent(r"""
from astropy.utils.parsing import yacc
def make_parser():
tokens = ('NUMBER', 'PLUS')
def p_expression_number(p):
'expression : NUMBER'
p[0] = p[1]
def p_expression_plus(p):
'expression : expression PLUS NUMBER'
p[0] = p[1] + p[3]
return yacc('test_parsing_parsetab', 'test_parsing_parser')
"""))
monkeypatch.syspath_prepend(tmp_path)
lexer_mod = importlib.import_module('test_parsing_lexer')
lexer = lexer_mod.make_lexer()
parser_mod = importlib.import_module('test_parsing_parser')
parser = parser_mod.make_parser()
result = parser.parse('1+2+3', lexer=lexer)
assert result == 6
lextab = (tmp_path / 'test_parsing_lextab.py').read_text()
assert lextab.startswith(TAB_HEADER.format(package='test_parsing_lexer'))
parsetab = (tmp_path / 'test_parsing_parsetab.py').read_text()
assert parsetab.startswith(TAB_HEADER.format(package='test_parsing_parser'))
|
|
4138cc2833a267150dff994b65af59e85a038da4
|
migrations/versions/900_add_brief_is_a_copy.py
|
migrations/versions/900_add_brief_is_a_copy.py
|
"""Add Brief.is_a_copy boolean, default False, nullable False
Revision ID: 890
Revises: 880
Create Date: 2017-06-01 11:24:53.346954
"""
# revision identifiers, used by Alembic.
revision = '900'
down_revision = '890'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('briefs', sa.Column('is_a_copy', sa.Boolean(), server_default=sa.text(u'false'), nullable=False))
def downgrade():
op.drop_column('briefs', 'is_a_copy')
|
Add Briefs.is_a_copy to replace Briefs.copied_from_brief_id
|
Add Briefs.is_a_copy to replace Briefs.copied_from_brief_id
|
Python
|
mit
|
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
Add Briefs.is_a_copy to replace Briefs.copied_from_brief_id
|
"""Add Brief.is_a_copy boolean, default False, nullable False
Revision ID: 890
Revises: 880
Create Date: 2017-06-01 11:24:53.346954
"""
# revision identifiers, used by Alembic.
revision = '900'
down_revision = '890'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('briefs', sa.Column('is_a_copy', sa.Boolean(), server_default=sa.text(u'false'), nullable=False))
def downgrade():
op.drop_column('briefs', 'is_a_copy')
|
<commit_before><commit_msg>Add Briefs.is_a_copy to replace Briefs.copied_from_brief_id<commit_after>
|
"""Add Brief.is_a_copy boolean, default False, nullable False
Revision ID: 890
Revises: 880
Create Date: 2017-06-01 11:24:53.346954
"""
# revision identifiers, used by Alembic.
revision = '900'
down_revision = '890'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('briefs', sa.Column('is_a_copy', sa.Boolean(), server_default=sa.text(u'false'), nullable=False))
def downgrade():
op.drop_column('briefs', 'is_a_copy')
|
Add Briefs.is_a_copy to replace Briefs.copied_from_brief_id"""Add Brief.is_a_copy boolean, default False, nullable False
Revision ID: 890
Revises: 880
Create Date: 2017-06-01 11:24:53.346954
"""
# revision identifiers, used by Alembic.
revision = '900'
down_revision = '890'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('briefs', sa.Column('is_a_copy', sa.Boolean(), server_default=sa.text(u'false'), nullable=False))
def downgrade():
op.drop_column('briefs', 'is_a_copy')
|
<commit_before><commit_msg>Add Briefs.is_a_copy to replace Briefs.copied_from_brief_id<commit_after>"""Add Brief.is_a_copy boolean, default False, nullable False
Revision ID: 890
Revises: 880
Create Date: 2017-06-01 11:24:53.346954
"""
# revision identifiers, used by Alembic.
revision = '900'
down_revision = '890'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.add_column('briefs', sa.Column('is_a_copy', sa.Boolean(), server_default=sa.text(u'false'), nullable=False))
def downgrade():
op.drop_column('briefs', 'is_a_copy')
|
|
81d76fc7394a92e8495afe2aa616e55b000a2ddd
|
setup.py
|
setup.py
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2',
packages=['todoist'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.1',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
Fix problem with missing files from the PyPI package.
|
Fix problem with missing files from the PyPI package.
|
Python
|
mit
|
electronick1/todoist-python,Doist/todoist-python
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2',
packages=['todoist'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Fix problem with missing files from the PyPI package.
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.1',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2',
packages=['todoist'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Fix problem with missing files from the PyPI package.<commit_after>
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.1',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2',
packages=['todoist'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
Fix problem with missing files from the PyPI package.# -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.1',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
<commit_before># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2',
packages=['todoist'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
<commit_msg>Fix problem with missing files from the PyPI package.<commit_after># -*- coding: utf-8 -*-
import os
from setuptools import setup
def read(fname):
try:
return open(os.path.join(os.path.dirname(__file__), fname)).read()
except:
return ''
setup(
name='todoist-python',
version='0.2.1',
packages=['todoist', 'todoist.managers'],
author='Doist Team',
author_email='info@todoist.com',
license='BSD',
description='todoist-python - The official Todoist Python API library',
long_description = read('README.md'),
install_requires=[
'requests',
],
# see here for complete list of classifiers
# http://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=(
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
),
)
|
52c60eae9b750bcc85739b355d7b70eb238a0309
|
tasks.py
|
tasks.py
|
#!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not color output, unfortunately
nosetests = subprocess.Popen(['nosetests-3.4', '--rednose'])
nosetests.wait()
@task
def cover():
nosetests = subprocess.Popen(['nosetests-3.4', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
|
Add task file using invoke task runner
|
Add task file using invoke task runner
|
Python
|
mit
|
caleb531/ssh-wp-backup,caleb531/ssh-wp-backup
|
Add task file using invoke task runner
|
#!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not color output, unfortunately
nosetests = subprocess.Popen(['nosetests-3.4', '--rednose'])
nosetests.wait()
@task
def cover():
nosetests = subprocess.Popen(['nosetests-3.4', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
|
<commit_before><commit_msg>Add task file using invoke task runner<commit_after>
|
#!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not color output, unfortunately
nosetests = subprocess.Popen(['nosetests-3.4', '--rednose'])
nosetests.wait()
@task
def cover():
nosetests = subprocess.Popen(['nosetests-3.4', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
|
Add task file using invoke task runner#!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not color output, unfortunately
nosetests = subprocess.Popen(['nosetests-3.4', '--rednose'])
nosetests.wait()
@task
def cover():
nosetests = subprocess.Popen(['nosetests-3.4', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
|
<commit_before><commit_msg>Add task file using invoke task runner<commit_after>#!/usr/bin/env python3
import subprocess
from invoke import task
@task
def test():
# invoke.run() does not color output, unfortunately
nosetests = subprocess.Popen(['nosetests-3.4', '--rednose'])
nosetests.wait()
@task
def cover():
nosetests = subprocess.Popen(['nosetests-3.4', '--with-coverage',
'--cover-erase', '--cover-html'])
nosetests.wait()
|
|
d672a413b613e1f6772ca74d07cb680b3a55bdfa
|
iss.py
|
iss.py
|
import requests
from datetime import datetime
def get_next_pass(lat, lon):
iss_url = 'http://api.open-notify.org/iss-pass.json'
location = {'lat': lat, 'lon': lon}
response = requests.get(iss_url, params=location).json()
next_pass = response['response'][0]['risetime']
return datetime.fromtimestamp(next_pass)
|
Implement function for retrieving ISS timestamps
|
Implement function for retrieving ISS timestamps
|
Python
|
mit
|
sagnew/ISSNotifications,sagnew/ISSNotifications,sagnew/ISSNotifications
|
Implement function for retrieving ISS timestamps
|
import requests
from datetime import datetime
def get_next_pass(lat, lon):
iss_url = 'http://api.open-notify.org/iss-pass.json'
location = {'lat': lat, 'lon': lon}
response = requests.get(iss_url, params=location).json()
next_pass = response['response'][0]['risetime']
return datetime.fromtimestamp(next_pass)
|
<commit_before><commit_msg>Implement function for retrieving ISS timestamps<commit_after>
|
import requests
from datetime import datetime
def get_next_pass(lat, lon):
iss_url = 'http://api.open-notify.org/iss-pass.json'
location = {'lat': lat, 'lon': lon}
response = requests.get(iss_url, params=location).json()
next_pass = response['response'][0]['risetime']
return datetime.fromtimestamp(next_pass)
|
Implement function for retrieving ISS timestampsimport requests
from datetime import datetime
def get_next_pass(lat, lon):
iss_url = 'http://api.open-notify.org/iss-pass.json'
location = {'lat': lat, 'lon': lon}
response = requests.get(iss_url, params=location).json()
next_pass = response['response'][0]['risetime']
return datetime.fromtimestamp(next_pass)
|
<commit_before><commit_msg>Implement function for retrieving ISS timestamps<commit_after>import requests
from datetime import datetime
def get_next_pass(lat, lon):
iss_url = 'http://api.open-notify.org/iss-pass.json'
location = {'lat': lat, 'lon': lon}
response = requests.get(iss_url, params=location).json()
next_pass = response['response'][0]['risetime']
return datetime.fromtimestamp(next_pass)
|
|
513cc91530657173de6c5dd35a10a1b496d31042
|
mpy.py
|
mpy.py
|
with open('generic.txt') as old:
for line in old:
line.strip()
mlength = len(line)-1
with open('generic-'+str(mlength)+'.txt', 'a') as new:
new.write(line)
|
Add script to process domain name
|
Add script to process domain name
|
Python
|
mit
|
luongnv89/uri_proc,luongnv89/uri_proc
|
Add script to process domain name
|
with open('generic.txt') as old:
for line in old:
line.strip()
mlength = len(line)-1
with open('generic-'+str(mlength)+'.txt', 'a') as new:
new.write(line)
|
<commit_before><commit_msg>Add script to process domain name<commit_after>
|
with open('generic.txt') as old:
for line in old:
line.strip()
mlength = len(line)-1
with open('generic-'+str(mlength)+'.txt', 'a') as new:
new.write(line)
|
Add script to process domain namewith open('generic.txt') as old:
for line in old:
line.strip()
mlength = len(line)-1
with open('generic-'+str(mlength)+'.txt', 'a') as new:
new.write(line)
|
<commit_before><commit_msg>Add script to process domain name<commit_after>with open('generic.txt') as old:
for line in old:
line.strip()
mlength = len(line)-1
with open('generic-'+str(mlength)+'.txt', 'a') as new:
new.write(line)
|
|
9e01cc5c4a0e148842c2e7fc5726dc7ee4325472
|
utils.py
|
utils.py
|
""" Standard shared utilities. """
import time
class PhasedLoopLimitter:
""" Constrains a loop to running a particular number of iterations per second.
"""
def __init__(self, rate):
""" Args:
rate: How much time should elapse between each cycle. """
self.__ticks = rate
self.__last_run_time = 0
def set_rate(self, rate):
""" Sets the target rate for this loop.
Args:
rate: How much time should elapse between each cycle. """
self.__ticks = rate
def limit(self):
""" Should be called every iteration of a loop. It checks that the proper
amount of time has passed and delays execution until it has.
Args:
rate: The target number of iterations per second. """
new_time = self.__do_limit()
self.__last_run_time = new_time
def __do_limit(self):
""" Only does the waiting, does not update __last_run_time.
Returns:
The time it got when it started. """
new_time = time.time()
elapsed = new_time - self.__last_run_time
sleep_for = max(0, self.__ticks - elapsed)
time.sleep(sleep_for)
return new_time
|
Add phased loop limitter class.
|
Add phased loop limitter class.
This is needed for Aggregator to work.
|
Python
|
mit
|
NepalRobotics/Core
|
Add phased loop limitter class.
This is needed for Aggregator to work.
|
""" Standard shared utilities. """
import time
class PhasedLoopLimitter:
""" Constrains a loop to running a particular number of iterations per second.
"""
def __init__(self, rate):
""" Args:
rate: How much time should elapse between each cycle. """
self.__ticks = rate
self.__last_run_time = 0
def set_rate(self, rate):
""" Sets the target rate for this loop.
Args:
rate: How much time should elapse between each cycle. """
self.__ticks = rate
def limit(self):
""" Should be called every iteration of a loop. It checks that the proper
amount of time has passed and delays execution until it has.
Args:
rate: The target number of iterations per second. """
new_time = self.__do_limit()
self.__last_run_time = new_time
def __do_limit(self):
""" Only does the waiting, does not update __last_run_time.
Returns:
The time it got when it started. """
new_time = time.time()
elapsed = new_time - self.__last_run_time
sleep_for = max(0, self.__ticks - elapsed)
time.sleep(sleep_for)
return new_time
|
<commit_before><commit_msg>Add phased loop limitter class.
This is needed for Aggregator to work.<commit_after>
|
""" Standard shared utilities. """
import time
class PhasedLoopLimitter:
""" Constrains a loop to running a particular number of iterations per second.
"""
def __init__(self, rate):
""" Args:
rate: How much time should elapse between each cycle. """
self.__ticks = rate
self.__last_run_time = 0
def set_rate(self, rate):
""" Sets the target rate for this loop.
Args:
rate: How much time should elapse between each cycle. """
self.__ticks = rate
def limit(self):
""" Should be called every iteration of a loop. It checks that the proper
amount of time has passed and delays execution until it has.
Args:
rate: The target number of iterations per second. """
new_time = self.__do_limit()
self.__last_run_time = new_time
def __do_limit(self):
""" Only does the waiting, does not update __last_run_time.
Returns:
The time it got when it started. """
new_time = time.time()
elapsed = new_time - self.__last_run_time
sleep_for = max(0, self.__ticks - elapsed)
time.sleep(sleep_for)
return new_time
|
Add phased loop limitter class.
This is needed for Aggregator to work.""" Standard shared utilities. """
import time
class PhasedLoopLimitter:
""" Constrains a loop to running a particular number of iterations per second.
"""
def __init__(self, rate):
""" Args:
rate: How much time should elapse between each cycle. """
self.__ticks = rate
self.__last_run_time = 0
def set_rate(self, rate):
""" Sets the target rate for this loop.
Args:
rate: How much time should elapse between each cycle. """
self.__ticks = rate
def limit(self):
""" Should be called every iteration of a loop. It checks that the proper
        amount of time has passed and delays execution until it has. """
new_time = self.__do_limit()
self.__last_run_time = new_time
def __do_limit(self):
""" Only does the waiting, does not update __last_run_time.
Returns:
The time it got when it started. """
new_time = time.time()
elapsed = new_time - self.__last_run_time
sleep_for = max(0, self.__ticks - elapsed)
time.sleep(sleep_for)
return new_time
|
<commit_before><commit_msg>Add phased loop limitter class.
This is needed for Aggregator to work.<commit_after>""" Standard shared utilities. """
import time
class PhasedLoopLimitter:
""" Constrains a loop to running a particular number of iterations per second.
"""
def __init__(self, rate):
""" Args:
rate: How much time should elapse between each cycle. """
self.__ticks = rate
self.__last_run_time = 0
def set_rate(self, rate):
""" Sets the target rate for this loop.
Args:
rate: How much time should elapse between each cycle. """
self.__ticks = rate
def limit(self):
""" Should be called every iteration of a loop. It checks that the proper
        amount of time has passed and delays execution until it has. """
new_time = self.__do_limit()
self.__last_run_time = new_time
def __do_limit(self):
""" Only does the waiting, does not update __last_run_time.
Returns:
The time it got when it started. """
new_time = time.time()
elapsed = new_time - self.__last_run_time
sleep_for = max(0, self.__ticks - elapsed)
time.sleep(sleep_for)
return new_time
|
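A minimal usage sketch for the class above (illustrative only): the module path `util` is an assumption, since the record does not state the file name, and the implementation treats `rate` as the number of seconds each cycle should take.

import time
from util import PhasedLoopLimitter  # assumed module path; not given in the record

limiter = PhasedLoopLimitter(0.05)  # target roughly one iteration every 0.05 s
for i in range(10):
    start = time.time()
    # ... per-iteration work goes here ...
    limiter.limit()  # sleeps off whatever is left of the 0.05 s budget
    print("iteration %d took %.3f s" % (i, time.time() - start))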
|
f9aa5263f70658705e37ec3b1cc20b2a556f1089
|
views.py
|
views.py
|
from flask.ext.restful import Resource, abort
from finder.models import Card
from finder import api
#fields, marshal_with
#gen_fields = {
# 'key': fields.Raw,
# 'salt': fields.Raw,
# 'expiration': fields.Raw
#}
class Greet(Resource):
#@marshal_with(gen_fields)
def get(self, name):
if name in ['test']:
abort(403, message="Must provide a real name.")
return {'msg': 'Hello, %s' % name}
api.add_resource(Greet, '/hello/<string:name>')
|
Add test view for debugging
|
Add test view for debugging
|
Python
|
mit
|
numberoverzero/finder
|
Add test view for debugging
|
from flask.ext.restful import Resource, abort
from finder.models import Card
from finder import api
#fields, marshal_with
#gen_fields = {
# 'key': fields.Raw,
# 'salt': fields.Raw,
# 'expiration': fields.Raw
#}
class Greet(Resource):
#@marshal_with(gen_fields)
def get(self, name):
if name in ['test']:
abort(403, message="Must provide a real name.")
return {'msg': 'Hello, %s' % name}
api.add_resource(Greet, '/hello/<string:name>')
|
<commit_before><commit_msg>Add test view for debugging<commit_after>
|
from flask.ext.restful import Resource, abort
from finder.models import Card
from finder import api
#fields, marshal_with
#gen_fields = {
# 'key': fields.Raw,
# 'salt': fields.Raw,
# 'expiration': fields.Raw
#}
class Greet(Resource):
#@marshal_with(gen_fields)
def get(self, name):
if name in ['test']:
abort(403, message="Must provide a real name.")
return {'msg': 'Hello, %s' % name}
api.add_resource(Greet, '/hello/<string:name>')
|
Add test view for debuggingfrom flask.ext.restful import Resource, abort
from finder.models import Card
from finder import api
#fields, marshal_with
#gen_fields = {
# 'key': fields.Raw,
# 'salt': fields.Raw,
# 'expiration': fields.Raw
#}
class Greet(Resource):
#@marshal_with(gen_fields)
def get(self, name):
if name in ['test']:
abort(403, message="Must provide a real name.")
return {'msg': 'Hello, %s' % name}
api.add_resource(Greet, '/hello/<string:name>')
|
<commit_before><commit_msg>Add test view for debugging<commit_after>from flask.ext.restful import Resource, abort
from finder.models import Card
from finder import api
#fields, marshal_with
#gen_fields = {
# 'key': fields.Raw,
# 'salt': fields.Raw,
# 'expiration': fields.Raw
#}
class Greet(Resource):
#@marshal_with(gen_fields)
def get(self, name):
if name in ['test']:
abort(403, message="Must provide a real name.")
return {'msg': 'Hello, %s' % name}
api.add_resource(Greet, '/hello/<string:name>')
|
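A quick way to exercise the new resource (a sketch, assuming `finder` exposes the Flask `app` that `api` was built on; that import path is not shown in the record):

from finder import app  # assumed; only `api` is imported in the snippet above

client = app.test_client()
assert client.get('/hello/world').status_code == 200  # greets normally
assert client.get('/hello/test').status_code == 403   # rejected by the name check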
|
51566a873372b23b5c05d376d346dab063f87437
|
photutils/utils/tests/test_quantity_helpers.py
|
photutils/utils/tests/test_quantity_helpers.py
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the _quantity_helpers module.
"""
import astropy.units as u
import numpy as np
from numpy.testing import assert_equal
import pytest
from .._quantity_helpers import process_quantities
@pytest.mark.parametrize('all_units', (False, True))
def test_units(all_units):
if all_units:
unit = u.Jy
else:
unit = 1.0
arrs = (np.ones(3) * unit, np.ones(3) * unit, np.ones(3) * unit)
names = ('a', 'b', 'c')
arrs2, unit2 = process_quantities(arrs, names)
if all_units:
assert unit2 == unit
for (arr, arr2) in zip(arrs, arrs2):
assert_equal(arr.value, arr2)
else:
assert unit2 is None
assert arrs2 == arrs
def test_mixed_units():
arrs = (np.ones(3) * u.Jy, np.ones(3) * u.km)
names = ('a', 'b')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
arrs = (np.ones(3) * u.Jy, np.ones(3))
names = ('a', 'b')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
unit = u.Jy
arrs = (np.ones(3) * unit, np.ones(3), np.ones(3) * unit)
names = ('a', 'b', 'c')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
unit = u.Jy
arrs = (np.ones(3) * unit, np.ones(3), np.ones(3) * u.km)
names = ('a', 'b', 'c')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
|
Add tests for quantity helpers
|
Add tests for quantity helpers
|
Python
|
bsd-3-clause
|
larrybradley/photutils,astropy/photutils
|
Add tests for quantity helpers
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the _quantity_helpers module.
"""
import astropy.units as u
import numpy as np
from numpy.testing import assert_equal
import pytest
from .._quantity_helpers import process_quantities
@pytest.mark.parametrize('all_units', (False, True))
def test_units(all_units):
if all_units:
unit = u.Jy
else:
unit = 1.0
arrs = (np.ones(3) * unit, np.ones(3) * unit, np.ones(3) * unit)
names = ('a', 'b', 'c')
arrs2, unit2 = process_quantities(arrs, names)
if all_units:
assert unit2 == unit
for (arr, arr2) in zip(arrs, arrs2):
assert_equal(arr.value, arr2)
else:
assert unit2 is None
assert arrs2 == arrs
def test_mixed_units():
arrs = (np.ones(3) * u.Jy, np.ones(3) * u.km)
names = ('a', 'b')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
arrs = (np.ones(3) * u.Jy, np.ones(3))
names = ('a', 'b')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
unit = u.Jy
arrs = (np.ones(3) * unit, np.ones(3), np.ones(3) * unit)
names = ('a', 'b', 'c')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
unit = u.Jy
arrs = (np.ones(3) * unit, np.ones(3), np.ones(3) * u.km)
names = ('a', 'b', 'c')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
|
<commit_before><commit_msg>Add tests for quantity helpers<commit_after>
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the _quantity_helpers module.
"""
import astropy.units as u
import numpy as np
from numpy.testing import assert_equal
import pytest
from .._quantity_helpers import process_quantities
@pytest.mark.parametrize('all_units', (False, True))
def test_units(all_units):
if all_units:
unit = u.Jy
else:
unit = 1.0
arrs = (np.ones(3) * unit, np.ones(3) * unit, np.ones(3) * unit)
names = ('a', 'b', 'c')
arrs2, unit2 = process_quantities(arrs, names)
if all_units:
assert unit2 == unit
for (arr, arr2) in zip(arrs, arrs2):
assert_equal(arr.value, arr2)
else:
assert unit2 is None
assert arrs2 == arrs
def test_mixed_units():
arrs = (np.ones(3) * u.Jy, np.ones(3) * u.km)
names = ('a', 'b')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
arrs = (np.ones(3) * u.Jy, np.ones(3))
names = ('a', 'b')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
unit = u.Jy
arrs = (np.ones(3) * unit, np.ones(3), np.ones(3) * unit)
names = ('a', 'b', 'c')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
unit = u.Jy
arrs = (np.ones(3) * unit, np.ones(3), np.ones(3) * u.km)
names = ('a', 'b', 'c')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
|
Add tests for quantity helpers# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the _quantity_helpers module.
"""
import astropy.units as u
import numpy as np
from numpy.testing import assert_equal
import pytest
from .._quantity_helpers import process_quantities
@pytest.mark.parametrize('all_units', (False, True))
def test_units(all_units):
if all_units:
unit = u.Jy
else:
unit = 1.0
arrs = (np.ones(3) * unit, np.ones(3) * unit, np.ones(3) * unit)
names = ('a', 'b', 'c')
arrs2, unit2 = process_quantities(arrs, names)
if all_units:
assert unit2 == unit
for (arr, arr2) in zip(arrs, arrs2):
assert_equal(arr.value, arr2)
else:
assert unit2 is None
assert arrs2 == arrs
def test_mixed_units():
arrs = (np.ones(3) * u.Jy, np.ones(3) * u.km)
names = ('a', 'b')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
arrs = (np.ones(3) * u.Jy, np.ones(3))
names = ('a', 'b')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
unit = u.Jy
arrs = (np.ones(3) * unit, np.ones(3), np.ones(3) * unit)
names = ('a', 'b', 'c')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
unit = u.Jy
arrs = (np.ones(3) * unit, np.ones(3), np.ones(3) * u.km)
names = ('a', 'b', 'c')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
|
<commit_before><commit_msg>Add tests for quantity helpers<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests for the _quantity_helpers module.
"""
import astropy.units as u
import numpy as np
from numpy.testing import assert_equal
import pytest
from .._quantity_helpers import process_quantities
@pytest.mark.parametrize('all_units', (False, True))
def test_units(all_units):
if all_units:
unit = u.Jy
else:
unit = 1.0
arrs = (np.ones(3) * unit, np.ones(3) * unit, np.ones(3) * unit)
names = ('a', 'b', 'c')
arrs2, unit2 = process_quantities(arrs, names)
if all_units:
assert unit2 == unit
for (arr, arr2) in zip(arrs, arrs2):
assert_equal(arr.value, arr2)
else:
assert unit2 is None
assert arrs2 == arrs
def test_mixed_units():
arrs = (np.ones(3) * u.Jy, np.ones(3) * u.km)
names = ('a', 'b')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
arrs = (np.ones(3) * u.Jy, np.ones(3))
names = ('a', 'b')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
unit = u.Jy
arrs = (np.ones(3) * unit, np.ones(3), np.ones(3) * unit)
names = ('a', 'b', 'c')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
unit = u.Jy
arrs = (np.ones(3) * unit, np.ones(3), np.ones(3) * u.km)
names = ('a', 'b', 'c')
with pytest.raises(ValueError):
_, _ = process_quantities(arrs, names)
|
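The helper being tested is not included in the record. For orientation, here is a minimal sketch of a process_quantities(arrs, names) that would satisfy these tests; it is an illustration, not the actual photutils implementation:

import astropy.units as u


def process_quantities(arrs, names):
    """Strip a common unit from the inputs; all or none may be Quantities."""
    is_qty = [isinstance(arr, u.Quantity) for arr in arrs]
    if not any(is_qty):
        return arrs, None
    if not all(is_qty):
        bad = [name for name, qty in zip(names, is_qty) if not qty]
        raise ValueError(f'{bad} must also have units')
    unit = arrs[0].unit
    if any(arr.unit != unit for arr in arrs):
        raise ValueError(f'{names} must all have the same units')
    return tuple(arr.value for arr in arrs), unit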
|
6885eb0cb50f2af39075f6bec13e95c648f3a19f
|
py/construct-binary-tree-from-preorder-and-inorder-traversal.py
|
py/construct-binary-tree-from-preorder-and-inorder-traversal.py
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def buildTree(self, preorder, inorder):
"""
:type preorder: List[int]
:type inorder: List[int]
:rtype: TreeNode
"""
def build(pre, pre_start, pre_end, in_pos, in_start, in_end):
if pre_start == pre_end:
return
cur_root = pre[pre_start]
root_pos = in_pos[cur_root]
left_size = root_pos - in_start
cur = TreeNode(cur_root)
cur.left = build(pre, pre_start + 1, pre_start + 1 + left_size, in_pos, in_start, root_pos)
cur.right = build(pre, pre_start + 1 + left_size, pre_end, in_pos, root_pos + 1, in_end)
return cur
in_pos = {v: pos for (pos, v) in enumerate(inorder)}
return build(preorder, 0, len(preorder), in_pos, 0, len(inorder))
|
Add py solution for 105. Construct Binary Tree from Preorder and Inorder Traversal
|
Add py solution for 105. Construct Binary Tree from Preorder and Inorder Traversal
105. Construct Binary Tree from Preorder and Inorder Traversal: https://leetcode.com/problems/construct-binary-tree-from-preorder-and-inorder-traversal/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 105. Construct Binary Tree from Preorder and Inorder Traversal
105. Construct Binary Tree from Preorder and Inorder Traversal: https://leetcode.com/problems/construct-binary-tree-from-preorder-and-inorder-traversal/
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def buildTree(self, preorder, inorder):
"""
:type preorder: List[int]
:type inorder: List[int]
:rtype: TreeNode
"""
def build(pre, pre_start, pre_end, in_pos, in_start, in_end):
if pre_start == pre_end:
return
cur_root = pre[pre_start]
root_pos = in_pos[cur_root]
left_size = root_pos - in_start
cur = TreeNode(cur_root)
cur.left = build(pre, pre_start + 1, pre_start + 1 + left_size, in_pos, in_start, root_pos)
cur.right = build(pre, pre_start + 1 + left_size, pre_end, in_pos, root_pos + 1, in_end)
return cur
in_pos = {v: pos for (pos, v) in enumerate(inorder)}
return build(preorder, 0, len(preorder), in_pos, 0, len(inorder))
|
<commit_before><commit_msg>Add py solution for 105. Construct Binary Tree from Preorder and Inorder Traversal
105. Construct Binary Tree from Preorder and Inorder Traversal: https://leetcode.com/problems/construct-binary-tree-from-preorder-and-inorder-traversal/<commit_after>
|
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def buildTree(self, preorder, inorder):
"""
:type preorder: List[int]
:type inorder: List[int]
:rtype: TreeNode
"""
def build(pre, pre_start, pre_end, in_pos, in_start, in_end):
if pre_start == pre_end:
return
cur_root = pre[pre_start]
root_pos = in_pos[cur_root]
left_size = root_pos - in_start
cur = TreeNode(cur_root)
cur.left = build(pre, pre_start + 1, pre_start + 1 + left_size, in_pos, in_start, root_pos)
cur.right = build(pre, pre_start + 1 + left_size, pre_end, in_pos, root_pos + 1, in_end)
return cur
in_pos = {v: pos for (pos, v) in enumerate(inorder)}
return build(preorder, 0, len(preorder), in_pos, 0, len(inorder))
|
Add py solution for 105. Construct Binary Tree from Preorder and Inorder Traversal
105. Construct Binary Tree from Preorder and Inorder Traversal: https://leetcode.com/problems/construct-binary-tree-from-preorder-and-inorder-traversal/# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def buildTree(self, preorder, inorder):
"""
:type preorder: List[int]
:type inorder: List[int]
:rtype: TreeNode
"""
def build(pre, pre_start, pre_end, in_pos, in_start, in_end):
if pre_start == pre_end:
return
cur_root = pre[pre_start]
root_pos = in_pos[cur_root]
left_size = root_pos - in_start
cur = TreeNode(cur_root)
cur.left = build(pre, pre_start + 1, pre_start + 1 + left_size, in_pos, in_start, root_pos)
cur.right = build(pre, pre_start + 1 + left_size, pre_end, in_pos, root_pos + 1, in_end)
return cur
in_pos = {v: pos for (pos, v) in enumerate(inorder)}
return build(preorder, 0, len(preorder), in_pos, 0, len(inorder))
|
<commit_before><commit_msg>Add py solution for 105. Construct Binary Tree from Preorder and Inorder Traversal
105. Construct Binary Tree from Preorder and Inorder Traversal: https://leetcode.com/problems/construct-binary-tree-from-preorder-and-inorder-traversal/<commit_after># Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def buildTree(self, preorder, inorder):
"""
:type preorder: List[int]
:type inorder: List[int]
:rtype: TreeNode
"""
def build(pre, pre_start, pre_end, in_pos, in_start, in_end):
if pre_start == pre_end:
return
cur_root = pre[pre_start]
root_pos = in_pos[cur_root]
left_size = root_pos - in_start
cur = TreeNode(cur_root)
cur.left = build(pre, pre_start + 1, pre_start + 1 + left_size, in_pos, in_start, root_pos)
cur.right = build(pre, pre_start + 1 + left_size, pre_end, in_pos, root_pos + 1, in_end)
return cur
in_pos = {v: pos for (pos, v) in enumerate(inorder)}
return build(preorder, 0, len(preorder), in_pos, 0, len(inorder))
|
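A small self-check for the Solution class above, using the classic example from the problem statement; the TreeNode stub mirrors the commented-out definition that LeetCode normally supplies:

class TreeNode(object):
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


root = Solution().buildTree([3, 9, 20, 15, 7], [9, 3, 15, 20, 7])
assert root.val == 3 and root.left.val == 9 and root.right.val == 20
assert root.right.left.val == 15 and root.right.right.val == 7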
|
927d3666e45f8f1e34584f244de7bf5df42f82e2
|
AESECB.py
|
AESECB.py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
from Crypto.Cipher import AES
import base64
class ECB:
def __init__(self, key):
self.key = key.encode()
        self.ECBCipher = AES.new(self.key, AES.MODE_ECB)
    def encrypt(self, text):
        """
        Encrypt a plain text (byte string) using AES in ECB mode.
        """
        return self.ECBCipher.encrypt(text)
    def decrypt(self, ciphertext):
        """
        Decrypt a ciphertext (byte string) which has been encrypted using AES in ECB.
"""
return self.ECBCipher.decrypt(ciphertext)
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage:\t{} [path/]filename".format(sys.argv[0]))
sys.exit()
try:
KEY = "YELLOW SUBMARINE"
cipher = ECB(KEY)
with open(sys.argv[1]) as f:
ciphertext = base64.b64decode(f.read())
print(cipher.decrypt(ciphertext))
except IOError:
print("Couldn't open the file {}".format(sys.argv[1]))
|
Add encryption and decryption using ECB
|
Add encryption and decryption using ECB
|
Python
|
mit
|
lttviet/randomPy
|
Add encryption and decryption using ECB
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
from Crypto.Cipher import AES
import base64
class ECB:
def __init__(self, key):
self.key = key.encode()
        self.ECBCipher = AES.new(self.key, AES.MODE_ECB)
    def encrypt(self, text):
        """
        Encrypt a plain text (byte string) using AES in ECB mode.
        """
        return self.ECBCipher.encrypt(text)
    def decrypt(self, ciphertext):
        """
        Decrypt a ciphertext (byte string) which has been encrypted using AES in ECB.
"""
return self.ECBCipher.decrypt(ciphertext)
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage:\t{} [path/]filename".format(sys.argv[0]))
sys.exit()
try:
KEY = "YELLOW SUBMARINE"
cipher = ECB(KEY)
with open(sys.argv[1]) as f:
ciphertext = base64.b64decode(f.read())
print(cipher.decrypt(ciphertext))
except IOError:
print("Couldn't open the file {}".format(sys.argv[1]))
|
<commit_before><commit_msg>Add encryption and decryption using ECB<commit_after>
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
from Crypto.Cipher import AES
import base64
class ECB:
def __init__(self, key):
self.key = key.encode()
        self.ECBCipher = AES.new(self.key, AES.MODE_ECB)
    def encrypt(self, text):
        """
        Encrypt a plain text (byte string) using AES in ECB mode.
        """
        return self.ECBCipher.encrypt(text)
    def decrypt(self, ciphertext):
        """
        Decrypt a ciphertext (byte string) which has been encrypted using AES in ECB.
"""
return self.ECBCipher.decrypt(ciphertext)
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage:\t{} [path/]filename".format(sys.argv[0]))
sys.exit()
try:
KEY = "YELLOW SUBMARINE"
cipher = ECB(KEY)
with open(sys.argv[1]) as f:
ciphertext = base64.b64decode(f.read())
print(cipher.decrypt(ciphertext))
except IOError:
print("Couldn't open the file {}".format(sys.argv[1]))
|
Add encryption and decryption using ECB#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
from Crypto.Cipher import AES
import base64
class ECB:
def __init__(self, key):
self.key = key.encode()
        self.ECBCipher = AES.new(self.key, AES.MODE_ECB)
    def encrypt(self, text):
        """
        Encrypt a plain text (byte string) using AES in ECB mode.
        """
        return self.ECBCipher.encrypt(text)
    def decrypt(self, ciphertext):
        """
        Decrypt a ciphertext (byte string) which has been encrypted using AES in ECB.
"""
return self.ECBCipher.decrypt(ciphertext)
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage:\t{} [path/]filename".format(sys.argv[0]))
sys.exit()
try:
KEY = "YELLOW SUBMARINE"
cipher = ECB(KEY)
with open(sys.argv[1]) as f:
ciphertext = base64.b64decode(f.read())
print(cipher.decrypt(ciphertext))
except IOError:
print("Couldn't open the file {}".format(sys.argv[1]))
|
<commit_before><commit_msg>Add encryption and decryption using ECB<commit_after>#!/usr/bin/python3
# -*- coding: utf-8 -*-
import sys
from Crypto.Cipher import AES
import base64
class ECB:
def __init__(self, key):
self.key = key.encode()
        self.ECBCipher = AES.new(self.key, AES.MODE_ECB)
    def encrypt(self, text):
        """
        Encrypt a plain text (byte string) using AES in ECB mode.
        """
        return self.ECBCipher.encrypt(text)
    def decrypt(self, ciphertext):
        """
        Decrypt a ciphertext (byte string) which has been encrypted using AES in ECB.
"""
return self.ECBCipher.decrypt(ciphertext)
if __name__ == "__main__":
if len(sys.argv) != 2:
print("Usage:\t{} [path/]filename".format(sys.argv[0]))
sys.exit()
try:
KEY = "YELLOW SUBMARINE"
cipher = ECB(KEY)
with open(sys.argv[1]) as f:
ciphertext = base64.b64decode(f.read())
print(cipher.decrypt(ciphertext))
except IOError:
print("Couldn't open the file {}".format(sys.argv[1]))
|
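One caveat about the class above: AES in ECB mode only accepts input whose length is a multiple of 16 bytes, so encrypting arbitrary text needs padding (the script gets away without it because it only decrypts base64 data that is already block-aligned). A hedged sketch using PKCS#7-style padding together with the ECB class defined above:

def pkcs7_pad(data, block_size=16):
    """Pad bytes up to a multiple of block_size (PKCS#7)."""
    pad_len = block_size - (len(data) % block_size)
    return data + bytes([pad_len]) * pad_len


def pkcs7_unpad(data):
    """Strip the padding added by pkcs7_pad."""
    return data[:-data[-1]]


cipher = ECB("YELLOW SUBMARINE")
ciphertext = cipher.encrypt(pkcs7_pad(b"hello ECB mode"))
assert pkcs7_unpad(cipher.decrypt(ciphertext)) == b"hello ECB mode"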
|
4b399ae2e29a37e6707d0745e0c829976223f84d
|
falafel/mappers/tests/test_xfs_info.py
|
falafel/mappers/tests/test_xfs_info.py
|
from falafel.mappers import xfs_info
from falafel.tests import context_wrap
class TestXFSInfo():
def test_example_xfs_info(self):
        xfs = xfs_info.XFSInfo(context_wrap("""
meta-data=/dev/sda isize=256 agcount=32, agsize=16777184 blks
= sectsz=512 attr=2
data = bsize=4096 blocks=536869888, imaxpct=5
= sunit=32 swidth=128 blks
naming =version 2 bsize=4096
log =internal bsize=4096 blocks=32768, version=2
= sectsz=512 sunit=32 blks, lazy-count=1
realtime =none extsz=524288 blocks=0, rtextents=0
""".strip()))
# Section checks
        assert 'meta-data' in xfs
        assert 'data' in xfs
        assert 'naming' in xfs
        assert 'log' in xfs
        assert 'realtime' in xfs
        # Section specifier checks
        assert xfs['meta-data']['specifier'] == '/dev/sda'
        assert xfs['naming']['specifier'] == 'version'
        assert xfs['naming']['specifier value'] == '2'
        assert xfs['log']['specifier'] == 'internal'
        assert xfs['realtime']['specifier'] == 'none'
        # Data checks
        assert xfs['meta-data']['isize'] == 256
        assert xfs['meta-data']['agcount'] == 32
        assert xfs['meta-data']['agsize'] == '16777184 blks'
        assert xfs['meta-data']['sectsz'] == 512
        assert xfs['meta-data']['attr'] == 2
        assert xfs['data']['bsize'] == 4096
        assert xfs['data']['blocks'] == 536869888
        assert xfs['data']['imaxpct'] == 5
        assert xfs['data']['sunit'] == 32
        assert xfs['data']['swidth'] == '128 blks'
        assert xfs['naming']['bsize'] == 4096
        assert xfs['log']['bsize'] == 4096
        assert xfs['log']['blocks'] == 32768
        assert xfs['log']['version'] == 2
        assert xfs['log']['sectsz'] == 512
        assert xfs['log']['sunit'] == '32 blks'
        assert xfs['log']['lazy-count'] == 1
        assert xfs['realtime']['extsz'] == 524288
        assert xfs['realtime']['blocks'] == 0
        assert xfs['realtime']['rtextents'] == 0
|
Test of xfs_info mapper with info from 'man xfs_info'
|
Test of xfs_info mapper with info from 'man xfs_info'
|
Python
|
apache-2.0
|
RedHatInsights/insights-core,RedHatInsights/insights-core
|
Test of xfs_info mapper with info from 'man xfs_info'
|
from falafel.mappers import xfs_info
from falafel.tests import context_wrap
class TestXFSInfo():
def test_example_xfs_info(self):
        xfs = xfs_info.XFSInfo(context_wrap("""
meta-data=/dev/sda isize=256 agcount=32, agsize=16777184 blks
= sectsz=512 attr=2
data = bsize=4096 blocks=536869888, imaxpct=5
= sunit=32 swidth=128 blks
naming =version 2 bsize=4096
log =internal bsize=4096 blocks=32768, version=2
= sectsz=512 sunit=32 blks, lazy-count=1
realtime =none extsz=524288 blocks=0, rtextents=0
""".strip()))
# Section checks
        assert 'meta-data' in xfs
        assert 'data' in xfs
        assert 'naming' in xfs
        assert 'log' in xfs
        assert 'realtime' in xfs
        # Section specifier checks
        assert xfs['meta-data']['specifier'] == '/dev/sda'
        assert xfs['naming']['specifier'] == 'version'
        assert xfs['naming']['specifier value'] == '2'
        assert xfs['log']['specifier'] == 'internal'
        assert xfs['realtime']['specifier'] == 'none'
        # Data checks
        assert xfs['meta-data']['isize'] == 256
        assert xfs['meta-data']['agcount'] == 32
        assert xfs['meta-data']['agsize'] == '16777184 blks'
        assert xfs['meta-data']['sectsz'] == 512
        assert xfs['meta-data']['attr'] == 2
        assert xfs['data']['bsize'] == 4096
        assert xfs['data']['blocks'] == 536869888
        assert xfs['data']['imaxpct'] == 5
        assert xfs['data']['sunit'] == 32
        assert xfs['data']['swidth'] == '128 blks'
        assert xfs['naming']['bsize'] == 4096
        assert xfs['log']['bsize'] == 4096
        assert xfs['log']['blocks'] == 32768
        assert xfs['log']['version'] == 2
        assert xfs['log']['sectsz'] == 512
        assert xfs['log']['sunit'] == '32 blks'
        assert xfs['log']['lazy-count'] == 1
        assert xfs['realtime']['extsz'] == 524288
        assert xfs['realtime']['blocks'] == 0
        assert xfs['realtime']['rtextents'] == 0
|
<commit_before><commit_msg>Test of xfs_info mapper with info from 'man xfs_info'<commit_after>
|
from falafel.mappers import xfs_info
from falafel.tests import context_wrap
class TestXFSInfo():
def test_example_xfs_info(self):
        xfs = xfs_info.XFSInfo(context_wrap("""
meta-data=/dev/sda isize=256 agcount=32, agsize=16777184 blks
= sectsz=512 attr=2
data = bsize=4096 blocks=536869888, imaxpct=5
= sunit=32 swidth=128 blks
naming =version 2 bsize=4096
log =internal bsize=4096 blocks=32768, version=2
= sectsz=512 sunit=32 blks, lazy-count=1
realtime =none extsz=524288 blocks=0, rtextents=0
""".strip()))
# Section checks
        assert 'meta-data' in xfs
        assert 'data' in xfs
        assert 'naming' in xfs
        assert 'log' in xfs
        assert 'realtime' in xfs
        # Section specifier checks
        assert xfs['meta-data']['specifier'] == '/dev/sda'
        assert xfs['naming']['specifier'] == 'version'
        assert xfs['naming']['specifier value'] == '2'
        assert xfs['log']['specifier'] == 'internal'
        assert xfs['realtime']['specifier'] == 'none'
        # Data checks
        assert xfs['meta-data']['isize'] == 256
        assert xfs['meta-data']['agcount'] == 32
        assert xfs['meta-data']['agsize'] == '16777184 blks'
        assert xfs['meta-data']['sectsz'] == 512
        assert xfs['meta-data']['attr'] == 2
        assert xfs['data']['bsize'] == 4096
        assert xfs['data']['blocks'] == 536869888
        assert xfs['data']['imaxpct'] == 5
        assert xfs['data']['sunit'] == 32
        assert xfs['data']['swidth'] == '128 blks'
        assert xfs['naming']['bsize'] == 4096
        assert xfs['log']['bsize'] == 4096
        assert xfs['log']['blocks'] == 32768
        assert xfs['log']['version'] == 2
        assert xfs['log']['sectsz'] == 512
        assert xfs['log']['sunit'] == '32 blks'
        assert xfs['log']['lazy-count'] == 1
        assert xfs['realtime']['extsz'] == 524288
        assert xfs['realtime']['blocks'] == 0
        assert xfs['realtime']['rtextents'] == 0
|
Test of xfs_info mapper with info from 'man xfs_info'from falafel.mappers import xfs_info
from falafel.tests import context_wrap
class TestXFSInfo():
def test_example_xfs_info(self):
        xfs = xfs_info.XFSInfo(context_wrap("""
meta-data=/dev/sda isize=256 agcount=32, agsize=16777184 blks
= sectsz=512 attr=2
data = bsize=4096 blocks=536869888, imaxpct=5
= sunit=32 swidth=128 blks
naming =version 2 bsize=4096
log =internal bsize=4096 blocks=32768, version=2
= sectsz=512 sunit=32 blks, lazy-count=1
realtime =none extsz=524288 blocks=0, rtextents=0
""".strip()))
# Section checks
        assert 'meta-data' in xfs
        assert 'data' in xfs
        assert 'naming' in xfs
        assert 'log' in xfs
        assert 'realtime' in xfs
        # Section specifier checks
        assert xfs['meta-data']['specifier'] == '/dev/sda'
        assert xfs['naming']['specifier'] == 'version'
        assert xfs['naming']['specifier value'] == '2'
        assert xfs['log']['specifier'] == 'internal'
        assert xfs['realtime']['specifier'] == 'none'
        # Data checks
        assert xfs['meta-data']['isize'] == 256
        assert xfs['meta-data']['agcount'] == 32
        assert xfs['meta-data']['agsize'] == '16777184 blks'
        assert xfs['meta-data']['sectsz'] == 512
        assert xfs['meta-data']['attr'] == 2
        assert xfs['data']['bsize'] == 4096
        assert xfs['data']['blocks'] == 536869888
        assert xfs['data']['imaxpct'] == 5
        assert xfs['data']['sunit'] == 32
        assert xfs['data']['swidth'] == '128 blks'
        assert xfs['naming']['bsize'] == 4096
        assert xfs['log']['bsize'] == 4096
        assert xfs['log']['blocks'] == 32768
        assert xfs['log']['version'] == 2
        assert xfs['log']['sectsz'] == 512
        assert xfs['log']['sunit'] == '32 blks'
        assert xfs['log']['lazy-count'] == 1
        assert xfs['realtime']['extsz'] == 524288
        assert xfs['realtime']['blocks'] == 0
        assert xfs['realtime']['rtextents'] == 0
|
<commit_before><commit_msg>Test of xfs_info mapper with info from 'man xfs_info'<commit_after>from falafel.mappers import xfs_info
from falafel.tests import context_wrap
class TestXFSInfo():
def test_example_xfs_info(self):
        xfs = xfs_info.XFSInfo(context_wrap("""
meta-data=/dev/sda isize=256 agcount=32, agsize=16777184 blks
= sectsz=512 attr=2
data = bsize=4096 blocks=536869888, imaxpct=5
= sunit=32 swidth=128 blks
naming =version 2 bsize=4096
log =internal bsize=4096 blocks=32768, version=2
= sectsz=512 sunit=32 blks, lazy-count=1
realtime =none extsz=524288 blocks=0, rtextents=0
""".strip()))
# Section checks
        assert 'meta-data' in xfs
        assert 'data' in xfs
        assert 'naming' in xfs
        assert 'log' in xfs
        assert 'realtime' in xfs
        # Section specifier checks
        assert xfs['meta-data']['specifier'] == '/dev/sda'
        assert xfs['naming']['specifier'] == 'version'
        assert xfs['naming']['specifier value'] == '2'
        assert xfs['log']['specifier'] == 'internal'
        assert xfs['realtime']['specifier'] == 'none'
        # Data checks
        assert xfs['meta-data']['isize'] == 256
        assert xfs['meta-data']['agcount'] == 32
        assert xfs['meta-data']['agsize'] == '16777184 blks'
        assert xfs['meta-data']['sectsz'] == 512
        assert xfs['meta-data']['attr'] == 2
        assert xfs['data']['bsize'] == 4096
        assert xfs['data']['blocks'] == 536869888
        assert xfs['data']['imaxpct'] == 5
        assert xfs['data']['sunit'] == 32
        assert xfs['data']['swidth'] == '128 blks'
        assert xfs['naming']['bsize'] == 4096
        assert xfs['log']['bsize'] == 4096
        assert xfs['log']['blocks'] == 32768
        assert xfs['log']['version'] == 2
        assert xfs['log']['sectsz'] == 512
        assert xfs['log']['sunit'] == '32 blks'
        assert xfs['log']['lazy-count'] == 1
        assert xfs['realtime']['extsz'] == 524288
        assert xfs['realtime']['blocks'] == 0
        assert xfs['realtime']['rtextents'] == 0
|
|
6d5a715b4d6ee2a471cafb4f31eb9b1716f34884
|
snippets/base/migrations/0025_dailychannelmetrics_dailycountrymetrics_dailysnippetsmetrics.py
|
snippets/base/migrations/0025_dailychannelmetrics_dailycountrymetrics_dailysnippetsmetrics.py
|
# Generated by Django 2.2.6 on 2019-12-04 18:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('base', '0024_auto_20191119_1424'),
]
operations = [
migrations.CreateModel(
name='DailyCountryMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('country', models.CharField(max_length=255)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
],
options={
'verbose_name_plural': 'Daily Country Metrics',
'unique_together': {('country', 'date')},
},
),
migrations.CreateModel(
name='DailyChannelMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('channel', models.CharField(max_length=255)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
],
options={
'verbose_name_plural': 'Daily Channel Metrics',
'unique_together': {('channel', 'date')},
},
),
migrations.CreateModel(
name='DailySnippetsMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
('snippet', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='base.ASRSnippet')),
],
options={
'verbose_name_plural': 'Daily Snippets Metrics',
'unique_together': {('snippet', 'date')},
},
),
]
|
Add migrations for Daily{Channel,Country,Snippets}Metrics models
|
Add migrations for Daily{Channel,Country,Snippets}Metrics models
|
Python
|
mpl-2.0
|
glogiotatidis/snippets-service,glogiotatidis/snippets-service,glogiotatidis/snippets-service,glogiotatidis/snippets-service,mozmar/snippets-service,mozmar/snippets-service,mozmar/snippets-service,mozmar/snippets-service
|
Add migrations for Daily{Channel,Country,Snippets}Metrics models
|
# Generated by Django 2.2.6 on 2019-12-04 18:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('base', '0024_auto_20191119_1424'),
]
operations = [
migrations.CreateModel(
name='DailyCountryMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('country', models.CharField(max_length=255)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
],
options={
'verbose_name_plural': 'Daily Country Metrics',
'unique_together': {('country', 'date')},
},
),
migrations.CreateModel(
name='DailyChannelMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('channel', models.CharField(max_length=255)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
],
options={
'verbose_name_plural': 'Daily Channel Metrics',
'unique_together': {('channel', 'date')},
},
),
migrations.CreateModel(
name='DailySnippetsMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
('snippet', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='base.ASRSnippet')),
],
options={
'verbose_name_plural': 'Daily Snippets Metrics',
'unique_together': {('snippet', 'date')},
},
),
]
|
<commit_before><commit_msg>Add migrations for Daily{Channel,Country,Snippets}Metrics models<commit_after>
|
# Generated by Django 2.2.6 on 2019-12-04 18:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('base', '0024_auto_20191119_1424'),
]
operations = [
migrations.CreateModel(
name='DailyCountryMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('country', models.CharField(max_length=255)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
],
options={
'verbose_name_plural': 'Daily Country Metrics',
'unique_together': {('country', 'date')},
},
),
migrations.CreateModel(
name='DailyChannelMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('channel', models.CharField(max_length=255)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
],
options={
'verbose_name_plural': 'Daily Channel Metrics',
'unique_together': {('channel', 'date')},
},
),
migrations.CreateModel(
name='DailySnippetsMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
('snippet', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='base.ASRSnippet')),
],
options={
'verbose_name_plural': 'Daily Snippets Metrics',
'unique_together': {('snippet', 'date')},
},
),
]
|
Add migrations for Daily{Channel,Country,Snippets}Metrics models# Generated by Django 2.2.6 on 2019-12-04 18:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('base', '0024_auto_20191119_1424'),
]
operations = [
migrations.CreateModel(
name='DailyCountryMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('country', models.CharField(max_length=255)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
],
options={
'verbose_name_plural': 'Daily Country Metrics',
'unique_together': {('country', 'date')},
},
),
migrations.CreateModel(
name='DailyChannelMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('channel', models.CharField(max_length=255)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
],
options={
'verbose_name_plural': 'Daily Channel Metrics',
'unique_together': {('channel', 'date')},
},
),
migrations.CreateModel(
name='DailySnippetsMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
('snippet', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='base.ASRSnippet')),
],
options={
'verbose_name_plural': 'Daily Snippets Metrics',
'unique_together': {('snippet', 'date')},
},
),
]
|
<commit_before><commit_msg>Add migrations for Daily{Channel,Country,Snippets}Metrics models<commit_after># Generated by Django 2.2.6 on 2019-12-04 18:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('base', '0024_auto_20191119_1424'),
]
operations = [
migrations.CreateModel(
name='DailyCountryMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('country', models.CharField(max_length=255)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
],
options={
'verbose_name_plural': 'Daily Country Metrics',
'unique_together': {('country', 'date')},
},
),
migrations.CreateModel(
name='DailyChannelMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('channel', models.CharField(max_length=255)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
],
options={
'verbose_name_plural': 'Daily Channel Metrics',
'unique_together': {('channel', 'date')},
},
),
migrations.CreateModel(
name='DailySnippetsMetrics',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('data_fetched_on', models.DateTimeField(auto_now_add=True)),
('date', models.DateField(editable=False)),
('impressions', models.PositiveIntegerField(default=0, editable=False)),
('clicks', models.PositiveIntegerField(default=0, editable=False)),
('blocks', models.PositiveIntegerField(default=0, editable=False)),
('snippet', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='base.ASRSnippet')),
],
options={
'verbose_name_plural': 'Daily Snippets Metrics',
'unique_together': {('snippet', 'date')},
},
),
]
|
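Once this migration is applied, rows are written with exactly the fields it defines; a hedged example of recording one day's figures (the import path snippets.base.models is inferred from the migration's location and is not shown in the record):

from datetime import date

from snippets.base.models import DailyChannelMetrics  # assumed import path

DailyChannelMetrics.objects.update_or_create(
    channel='release', date=date(2019, 12, 4),
    defaults={'impressions': 1000, 'clicks': 10, 'blocks': 2},
)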
|
d7c8c43854f30d46f9a6d7367700e4fe5dc0b48d
|
setup.py
|
setup.py
|
#!/usr/bin/env python
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='dannyla@linux.com',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
}
)
|
#!/usr/bin/env python
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='dannyla@linux.com',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
},
extras_require = {
'asynchronous': ['gevent>=1.0.0'],
}
)
|
Add gevent as an extra requirement
|
Add gevent as an extra requirement
|
Python
|
apache-2.0
|
daniellawrence/graphitesend,numberly/graphitesend,PabloLefort/graphitesend
|
#!/usr/bin/env python
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='dannyla@linux.com',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
}
)
Add gevent as an extra requirement
|
#!/usr/bin/env python
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='dannyla@linux.com',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
},
extras_require = {
'asynchronous': ['gevent>=1.0.0'],
}
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='dannyla@linux.com',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
}
)
<commit_msg>Add gevent as an extra requirement<commit_after>
|
#!/usr/bin/env python
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='dannyla@linux.com',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
},
extras_require = {
'asynchronous': ['gevent>=1.0.0'],
}
)
|
#!/usr/bin/env python
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='dannyla@linux.com',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
}
)
Add gevent as an extra requirement#!/usr/bin/env python
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='dannyla@linux.com',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
},
extras_require = {
'asynchronous': ['gevent>=1.0.0'],
}
)
|
<commit_before>#!/usr/bin/env python
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='dannyla@linux.com',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
}
)
<commit_msg>Add gevent as an extra requirement<commit_after>#!/usr/bin/env python
from distutils.core import setup
# README = "/".join([os.path.dirname(__file__), "README.md"])
# with open(README) as file:
# long_description = file.read()
setup(
name='graphitesend',
version='0.4.0',
description='A simple interface for sending metrics to Graphite',
author='Danny Lawrence',
author_email='dannyla@linux.com',
url='https://github.com/daniellawrence/graphitesend',
# package_dir={'': ''},
packages=['graphitesend'],
long_description="https://github.com/daniellawrence/graphitesend",
entry_points={
'console_scripts': [
'graphitesend = graphitesend.graphitesend:cli',
],
},
extras_require = {
'asynchronous': ['gevent>=1.0.0'],
}
)
|
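For context on the change above: extras_require is a setuptools feature (a plain distutils setup() generally ignores unknown options, so projects normally import setup from setuptools for the extra to take effect). Assuming the package is installed with the extra enabled, for example pip install "graphitesend[asynchronous]", the optional dependency is typically consumed like this (illustrative pattern, not taken from the graphitesend source):

try:
    import gevent  # only present when the 'asynchronous' extra was requested
    HAVE_GEVENT = True
except ImportError:
    HAVE_GEVENT = False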
43bc683d80c620d9752e68a317d51891a00a060a
|
common/djangoapps/split_modulestore_django/migrations/0002_data_migration.py
|
common/djangoapps/split_modulestore_django/migrations/0002_data_migration.py
|
from django.db import migrations, models
from django.db.utils import IntegrityError
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from ..models import SplitModulestoreCourseIndex as SplitModulestoreCourseIndex_Real
def forwards_func(apps, schema_editor):
"""
Copy all course index data from MongoDB to MySQL, unless it's already present in MySQL.
This migration is used as part of an upgrade path from storing course indexes in MongoDB to storing them in MySQL.
On edX.org, we began writing to MySQL+MongoDB before we deployed this migration, so some courses are already in
MySQL. But any courses that haven't been modified recently would only be in MongoDB and need to be copied over to
MySQL before we can switch reading course indexes to MySQL.
"""
db_alias = schema_editor.connection.alias
SplitModulestoreCourseIndex = apps.get_model("split_modulestore_django", "SplitModulestoreCourseIndex")
split_modulestore = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.split)
for course_index in split_modulestore.db_connection.find_matching_course_indexes(force_mongo=True):
data = SplitModulestoreCourseIndex_Real.fields_from_v1_schema(course_index)
course_id = data["course_id"]
try:
mysql_entry = SplitModulestoreCourseIndex.objects.get(course_id=course_id)
# This course index ("active version") already exists in MySQL.
# Let's just make sure it's the latest version. If the MongoDB somehow contains a newer version, something
# has gone wrong and we should investigate to ensure we're not losing any data.
if mysql_entry.edited_on < data["edited_on"]:
raise ValueError(
f"Course {course_id} already exists in MySQL but the MongoDB version is newer. "
"That's unexpected because since the course index table was added to MySQL, there has never been a "
"time when we would write course_indexes updates only to MongoDB without also writing to MySQL."
)
except SplitModulestoreCourseIndex.DoesNotExist:
# This course exists in MongoDB but hasn't yet been migrated to MySQL. Do that now.
SplitModulestoreCourseIndex(**data).save(using=db_alias)
def reverse_func(apps, schema_editor):
"""
    Reversing the data migration is a no-op, because edX.org used a migration path that started with writing to
both MySQL+MongoDB while still reading from MongoDB, then later executed this data migration, then later cut over to
reading from MySQL only. If we reversed this by deleting all entries, it would undo any writes that took place
before this data migration, which are unrelated.
"""
pass
class Migration(migrations.Migration):
dependencies = [
('split_modulestore_django', '0001_initial'),
]
operations = [
migrations.RunPython(forwards_func, reverse_func),
]
|
Add a data migration to copy all course index data into MySQL
|
feat: Add a data migration to copy all course index data into MySQL
|
Python
|
agpl-3.0
|
arbrandes/edx-platform,angelapper/edx-platform,eduNEXT/edx-platform,edx/edx-platform,angelapper/edx-platform,arbrandes/edx-platform,arbrandes/edx-platform,eduNEXT/edx-platform,edx/edx-platform,edx/edx-platform,arbrandes/edx-platform,edx/edx-platform,eduNEXT/edx-platform,angelapper/edx-platform,angelapper/edx-platform,eduNEXT/edx-platform
|
feat: Add a data migration to copy all course index data into MySQL
|
from django.db import migrations, models
from django.db.utils import IntegrityError
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from ..models import SplitModulestoreCourseIndex as SplitModulestoreCourseIndex_Real
def forwards_func(apps, schema_editor):
"""
Copy all course index data from MongoDB to MySQL, unless it's already present in MySQL.
This migration is used as part of an upgrade path from storing course indexes in MongoDB to storing them in MySQL.
On edX.org, we began writing to MySQL+MongoDB before we deployed this migration, so some courses are already in
MySQL. But any courses that haven't been modified recently would only be in MongoDB and need to be copied over to
MySQL before we can switch reading course indexes to MySQL.
"""
db_alias = schema_editor.connection.alias
SplitModulestoreCourseIndex = apps.get_model("split_modulestore_django", "SplitModulestoreCourseIndex")
split_modulestore = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.split)
for course_index in split_modulestore.db_connection.find_matching_course_indexes(force_mongo=True):
data = SplitModulestoreCourseIndex_Real.fields_from_v1_schema(course_index)
course_id = data["course_id"]
try:
mysql_entry = SplitModulestoreCourseIndex.objects.get(course_id=course_id)
# This course index ("active version") already exists in MySQL.
# Let's just make sure it's the latest version. If the MongoDB somehow contains a newer version, something
# has gone wrong and we should investigate to ensure we're not losing any data.
if mysql_entry.edited_on < data["edited_on"]:
raise ValueError(
f"Course {course_id} already exists in MySQL but the MongoDB version is newer. "
"That's unexpected because since the course index table was added to MySQL, there has never been a "
"time when we would write course_indexes updates only to MongoDB without also writing to MySQL."
)
except SplitModulestoreCourseIndex.DoesNotExist:
# This course exists in MongoDB but hasn't yet been migrated to MySQL. Do that now.
SplitModulestoreCourseIndex(**data).save(using=db_alias)
def reverse_func(apps, schema_editor):
"""
    Reversing the data migration is a no-op, because edX.org used a migration path that started with writing to
both MySQL+MongoDB while still reading from MongoDB, then later executed this data migration, then later cut over to
reading from MySQL only. If we reversed this by deleting all entries, it would undo any writes that took place
before this data migration, which are unrelated.
"""
pass
class Migration(migrations.Migration):
dependencies = [
('split_modulestore_django', '0001_initial'),
]
operations = [
migrations.RunPython(forwards_func, reverse_func),
]
|
<commit_before><commit_msg>feat: Add a data migration to copy all course index data into MySQL<commit_after>
|
from django.db import migrations, models
from django.db.utils import IntegrityError
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from ..models import SplitModulestoreCourseIndex as SplitModulestoreCourseIndex_Real
def forwards_func(apps, schema_editor):
"""
Copy all course index data from MongoDB to MySQL, unless it's already present in MySQL.
This migration is used as part of an upgrade path from storing course indexes in MongoDB to storing them in MySQL.
On edX.org, we began writing to MySQL+MongoDB before we deployed this migration, so some courses are already in
MySQL. But any courses that haven't been modified recently would only be in MongoDB and need to be copied over to
MySQL before we can switch reading course indexes to MySQL.
"""
db_alias = schema_editor.connection.alias
SplitModulestoreCourseIndex = apps.get_model("split_modulestore_django", "SplitModulestoreCourseIndex")
split_modulestore = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.split)
for course_index in split_modulestore.db_connection.find_matching_course_indexes(force_mongo=True):
data = SplitModulestoreCourseIndex_Real.fields_from_v1_schema(course_index)
course_id = data["course_id"]
try:
mysql_entry = SplitModulestoreCourseIndex.objects.get(course_id=course_id)
# This course index ("active version") already exists in MySQL.
# Let's just make sure it's the latest version. If the MongoDB somehow contains a newer version, something
# has gone wrong and we should investigate to ensure we're not losing any data.
if mysql_entry.edited_on < data["edited_on"]:
raise ValueError(
f"Course {course_id} already exists in MySQL but the MongoDB version is newer. "
"That's unexpected because since the course index table was added to MySQL, there has never been a "
"time when we would write course_indexes updates only to MongoDB without also writing to MySQL."
)
except SplitModulestoreCourseIndex.DoesNotExist:
# This course exists in MongoDB but hasn't yet been migrated to MySQL. Do that now.
SplitModulestoreCourseIndex(**data).save(using=db_alias)
def reverse_func(apps, schema_editor):
"""
    Reversing the data migration is a no-op, because edX.org used a migration path that started with writing to
both MySQL+MongoDB while still reading from MongoDB, then later executed this data migration, then later cut over to
reading from MySQL only. If we reversed this by deleting all entries, it would undo any writes that took place
before this data migration, which are unrelated.
"""
pass
class Migration(migrations.Migration):
dependencies = [
('split_modulestore_django', '0001_initial'),
]
operations = [
migrations.RunPython(forwards_func, reverse_func),
]
|
feat: Add a data migration to copy all course index data into MySQLfrom django.db import migrations, models
from django.db.utils import IntegrityError
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from ..models import SplitModulestoreCourseIndex as SplitModulestoreCourseIndex_Real
def forwards_func(apps, schema_editor):
"""
Copy all course index data from MongoDB to MySQL, unless it's already present in MySQL.
This migration is used as part of an upgrade path from storing course indexes in MongoDB to storing them in MySQL.
On edX.org, we began writing to MySQL+MongoDB before we deployed this migration, so some courses are already in
MySQL. But any courses that haven't been modified recently would only be in MongoDB and need to be copied over to
MySQL before we can switch reading course indexes to MySQL.
"""
db_alias = schema_editor.connection.alias
SplitModulestoreCourseIndex = apps.get_model("split_modulestore_django", "SplitModulestoreCourseIndex")
split_modulestore = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.split)
for course_index in split_modulestore.db_connection.find_matching_course_indexes(force_mongo=True):
data = SplitModulestoreCourseIndex_Real.fields_from_v1_schema(course_index)
course_id = data["course_id"]
try:
mysql_entry = SplitModulestoreCourseIndex.objects.get(course_id=course_id)
# This course index ("active version") already exists in MySQL.
# Let's just make sure it's the latest version. If the MongoDB somehow contains a newer version, something
# has gone wrong and we should investigate to ensure we're not losing any data.
if mysql_entry.edited_on < data["edited_on"]:
raise ValueError(
f"Course {course_id} already exists in MySQL but the MongoDB version is newer. "
"That's unexpected because since the course index table was added to MySQL, there has never been a "
"time when we would write course_indexes updates only to MongoDB without also writing to MySQL."
)
except SplitModulestoreCourseIndex.DoesNotExist:
# This course exists in MongoDB but hasn't yet been migrated to MySQL. Do that now.
SplitModulestoreCourseIndex(**data).save(using=db_alias)
def reverse_func(apps, schema_editor):
"""
    Reversing the data migration is a no-op, because edX.org used a migration path that started with writing to
both MySQL+MongoDB while still reading from MongoDB, then later executed this data migration, then later cut over to
reading from MySQL only. If we reversed this by deleting all entries, it would undo any writes that took place
before this data migration, which are unrelated.
"""
pass
class Migration(migrations.Migration):
dependencies = [
('split_modulestore_django', '0001_initial'),
]
operations = [
migrations.RunPython(forwards_func, reverse_func),
]
|
<commit_before><commit_msg>feat: Add a data migration to copy all course index data into MySQL<commit_after>from django.db import migrations, models
from django.db.utils import IntegrityError
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.django import modulestore
from ..models import SplitModulestoreCourseIndex as SplitModulestoreCourseIndex_Real
def forwards_func(apps, schema_editor):
"""
Copy all course index data from MongoDB to MySQL, unless it's already present in MySQL.
This migration is used as part of an upgrade path from storing course indexes in MongoDB to storing them in MySQL.
On edX.org, we began writing to MySQL+MongoDB before we deployed this migration, so some courses are already in
MySQL. But any courses that haven't been modified recently would only be in MongoDB and need to be copied over to
MySQL before we can switch reading course indexes to MySQL.
"""
db_alias = schema_editor.connection.alias
SplitModulestoreCourseIndex = apps.get_model("split_modulestore_django", "SplitModulestoreCourseIndex")
split_modulestore = modulestore()._get_modulestore_by_type(ModuleStoreEnum.Type.split)
for course_index in split_modulestore.db_connection.find_matching_course_indexes(force_mongo=True):
data = SplitModulestoreCourseIndex_Real.fields_from_v1_schema(course_index)
course_id = data["course_id"]
try:
mysql_entry = SplitModulestoreCourseIndex.objects.get(course_id=course_id)
# This course index ("active version") already exists in MySQL.
# Let's just make sure it's the latest version. If the MongoDB somehow contains a newer version, something
# has gone wrong and we should investigate to ensure we're not losing any data.
if mysql_entry.edited_on < data["edited_on"]:
raise ValueError(
f"Course {course_id} already exists in MySQL but the MongoDB version is newer. "
"That's unexpected because since the course index table was added to MySQL, there has never been a "
"time when we would write course_indexes updates only to MongoDB without also writing to MySQL."
)
except SplitModulestoreCourseIndex.DoesNotExist:
# This course exists in MongoDB but hasn't yet been migrated to MySQL. Do that now.
SplitModulestoreCourseIndex(**data).save(using=db_alias)
def reverse_func(apps, schema_editor):
"""
    Reversing the data migration is a no-op, because edX.org used a migration path that started with writing to
both MySQL+MongoDB while still reading from MongoDB, then later executed this data migration, then later cut over to
reading from MySQL only. If we reversed this by deleting all entries, it would undo any writes that took place
before this data migration, which are unrelated.
"""
pass
class Migration(migrations.Migration):
dependencies = [
('split_modulestore_django', '0001_initial'),
]
operations = [
migrations.RunPython(forwards_func, reverse_func),
]
|
|
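Usage sketch for the data migration record above: it is applied like any other Django migration, and because reverse_func is a no-op it can be rolled back safely. The app label comes from the record itself; the rest is standard Django tooling, shown here purely as an illustration.

# Apply the data migration for this app (or roll back to 0001_initial);
# the reverse step is a no-op, so rolling back does not delete any rows.
#   python manage.py migrate split_modulestore_django
#   python manage.py migrate split_modulestore_django 0001_initial
from django.core.management import call_command

call_command("migrate", "split_modulestore_django")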
1704b5febfae5b3d980fc2a6bd599bbe6aee1147
|
zonegen.py
|
zonegen.py
|
#!/usr/bin/env python3
import lglass.generators.dns
import lglass.database
if __name__ == "__main__":
import argparse
argparser = argparse.ArgumentParser(description="Delegation-only zone file generator")
argparser.add_argument("--database", "--db", "-d", help="Whois database",
type=str, default=".")
argparser.add_argument("--zone", "-z", help="Zone", required=True)
argparser.add_argument("--nameserver", "-n", action="append", help="Nameserver")
argparser.add_argument("--master", "-m", help="Master nameserver")
argparser.add_argument("--email", "-e", help="Email address of zone maintainer", required=True)
args = argparser.parse_args()
db = lglass.database.FileDatabase(args.database)
domains = (db.get(*spec) for spec in db.list() if spec[0] == "dns" and spec[1].endswith("." + args.zone))
master_nameserver = args.master
if master_nameserver is None:
if args.nameserver:
master_nameserver = args.nameserver[0]
else:
master_nameserver = args.zone
soa = lglass.generators.dns.generate_soa(args.zone,
master_nameserver,
args.email)
zone = lglass.generators.dns.generate_zone(
args.zone,
domains,
soa=soa,
nameservers=args.nameserver
)
print("\n".join(zone))
|
Add simple zone generator script
|
Add simple zone generator script
|
Python
|
mit
|
fritz0705/lglass
|
Add simple zone generator script
|
#!/usr/bin/env python3
import lglass.generators.dns
import lglass.database
if __name__ == "__main__":
import argparse
argparser = argparse.ArgumentParser(description="Delegation-only zone file generator")
argparser.add_argument("--database", "--db", "-d", help="Whois database",
type=str, default=".")
argparser.add_argument("--zone", "-z", help="Zone", required=True)
argparser.add_argument("--nameserver", "-n", action="append", help="Nameserver")
argparser.add_argument("--master", "-m", help="Master nameserver")
argparser.add_argument("--email", "-e", help="Email address of zone maintainer", required=True)
args = argparser.parse_args()
db = lglass.database.FileDatabase(args.database)
domains = (db.get(*spec) for spec in db.list() if spec[0] == "dns" and spec[1].endswith("." + args.zone))
master_nameserver = args.master
if master_nameserver is None:
if args.nameserver:
master_nameserver = args.nameserver[0]
else:
master_nameserver = args.zone
soa = lglass.generators.dns.generate_soa(args.zone,
master_nameserver,
args.email)
zone = lglass.generators.dns.generate_zone(
args.zone,
domains,
soa=soa,
nameservers=args.nameserver
)
print("\n".join(zone))
|
<commit_before><commit_msg>Add simple zone generator script<commit_after>
|
#!/usr/bin/env python3
import lglass.generators.dns
import lglass.database
if __name__ == "__main__":
import argparse
argparser = argparse.ArgumentParser(description="Delegation-only zone file generator")
argparser.add_argument("--database", "--db", "-d", help="Whois database",
type=str, default=".")
argparser.add_argument("--zone", "-z", help="Zone", required=True)
argparser.add_argument("--nameserver", "-n", action="append", help="Nameserver")
argparser.add_argument("--master", "-m", help="Master nameserver")
argparser.add_argument("--email", "-e", help="Email address of zone maintainer", required=True)
args = argparser.parse_args()
db = lglass.database.FileDatabase(args.database)
domains = (db.get(*spec) for spec in db.list() if spec[0] == "dns" and spec[1].endswith("." + args.zone))
master_nameserver = args.master
if master_nameserver is None:
if args.nameserver:
master_nameserver = args.nameserver[0]
else:
master_nameserver = args.zone
soa = lglass.generators.dns.generate_soa(args.zone,
master_nameserver,
args.email)
zone = lglass.generators.dns.generate_zone(
args.zone,
domains,
soa=soa,
nameservers=args.nameserver
)
print("\n".join(zone))
|
Add simple zone generator script#!/usr/bin/env python3
import lglass.generators.dns
import lglass.database
if __name__ == "__main__":
import argparse
argparser = argparse.ArgumentParser(description="Delegation-only zone file generator")
argparser.add_argument("--database", "--db", "-d", help="Whois database",
type=str, default=".")
argparser.add_argument("--zone", "-z", help="Zone", required=True)
argparser.add_argument("--nameserver", "-n", action="append", help="Nameserver")
argparser.add_argument("--master", "-m", help="Master nameserver")
argparser.add_argument("--email", "-e", help="Email address of zone maintainer", required=True)
args = argparser.parse_args()
db = lglass.database.FileDatabase(args.database)
domains = (db.get(*spec) for spec in db.list() if spec[0] == "dns" and spec[1].endswith("." + args.zone))
master_nameserver = args.master
if master_nameserver is None:
if args.nameserver:
master_nameserver = args.nameserver[0]
else:
master_nameserver = args.zone
soa = lglass.generators.dns.generate_soa(args.zone,
master_nameserver,
args.email)
zone = lglass.generators.dns.generate_zone(
args.zone,
domains,
soa=soa,
nameservers=args.nameserver
)
print("\n".join(zone))
|
<commit_before><commit_msg>Add simple zone generator script<commit_after>#!/usr/bin/env python3
import lglass.generators.dns
import lglass.database
if __name__ == "__main__":
import argparse
argparser = argparse.ArgumentParser(description="Delegation-only zone file generator")
argparser.add_argument("--database", "--db", "-d", help="Whois database",
type=str, default=".")
argparser.add_argument("--zone", "-z", help="Zone", required=True)
argparser.add_argument("--nameserver", "-n", action="append", help="Nameserver")
argparser.add_argument("--master", "-m", help="Master nameserver")
argparser.add_argument("--email", "-e", help="Email address of zone maintainer", required=True)
args = argparser.parse_args()
db = lglass.database.FileDatabase(args.database)
domains = (db.get(*spec) for spec in db.list() if spec[0] == "dns" and spec[1].endswith("." + args.zone))
master_nameserver = args.master
if master_nameserver is None:
if args.nameserver:
master_nameserver = args.nameserver[0]
else:
master_nameserver = args.zone
soa = lglass.generators.dns.generate_soa(args.zone,
master_nameserver,
args.email)
zone = lglass.generators.dns.generate_zone(
args.zone,
domains,
soa=soa,
nameservers=args.nameserver
)
print("\n".join(zone))
|
|
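Illustrative invocation of the zone generator above — the flags are the ones registered with its argparse parser; the database path, zone name and hostnames are made-up placeholders, not values from the record.

import subprocess

# Generate a delegation-only zone for example.net from a local whois database.
subprocess.run([
    "./zonegen.py",
    "--database", "/srv/whois-db",
    "--zone", "example.net",
    "--email", "hostmaster@example.net",
    "--nameserver", "ns1.example.net",
    "--nameserver", "ns2.example.net",
])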
32d1e6981d92d19a58f7a56910bd8709a5bbfc27
|
python/src/twoSum/testTwoSum.py
|
python/src/twoSum/testTwoSum.py
|
import unittest
from twoSum import Solution
class TestTwoSum(unittest.TestCase):
def testLeetCodeExamplePasses(self):
solution = Solution()
numbers = [2, 7, 11, 15]
target = 9
self.assertEqual(solution.twoSum(numbers, target), (1, 2))
def testUnorderedList(self):
solution = Solution()
numbers = [3, 2, 4]
target = 6
self.assertEqual(solution.twoSum(numbers, target), (2, 3))
if __name__ == '__main__':
unittest.main()
|
Add two test cases for twoSum problem.
|
Add two test cases for twoSum problem.
|
Python
|
mit
|
TheGhostHuCodes/leetCode
|
Add two test cases for twoSum problem.
|
import unittest
from twoSum import Solution
class TestTwoSum(unittest.TestCase):
def testLeetCodeExamplePasses(self):
solution = Solution()
numbers = [2, 7, 11, 15]
target = 9
self.assertEqual(solution.twoSum(numbers, target), (1, 2))
def testUnorderedList(self):
solution = Solution()
numbers = [3, 2, 4]
target = 6
self.assertEqual(solution.twoSum(numbers, target), (2, 3))
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add two test cases for twoSum problem.<commit_after>
|
import unittest
from twoSum import Solution
class TestTwoSum(unittest.TestCase):
def testLeetCodeExamplePasses(self):
solution = Solution()
numbers = [2, 7, 11, 15]
target = 9
self.assertEqual(solution.twoSum(numbers, target), (1, 2))
def testUnorderedList(self):
solution = Solution()
numbers = [3, 2, 4]
target = 6
self.assertEqual(solution.twoSum(numbers, target), (2, 3))
if __name__ == '__main__':
unittest.main()
|
Add two test cases for twoSum problem.import unittest
from twoSum import Solution
class TestTwoSum(unittest.TestCase):
def testLeetCodeExamplePasses(self):
solution = Solution()
numbers = [2, 7, 11, 15]
target = 9
self.assertEqual(solution.twoSum(numbers, target), (1, 2))
def testUnorderedList(self):
solution = Solution()
numbers = [3, 2, 4]
target = 6
self.assertEqual(solution.twoSum(numbers, target), (2, 3))
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add two test cases for twoSum problem.<commit_after>import unittest
from twoSum import Solution
class TestTwoSum(unittest.TestCase):
def testLeetCodeExamplePasses(self):
solution = Solution()
numbers = [2, 7, 11, 15]
target = 9
self.assertEqual(solution.twoSum(numbers, target), (1, 2))
def testUnorderedList(self):
solution = Solution()
numbers = [3, 2, 4]
target = 6
self.assertEqual(solution.twoSum(numbers, target), (2, 3))
if __name__ == '__main__':
unittest.main()
|
|
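The tests above import a Solution class from twoSum that is not included in this record. Purely to illustrate the contract the tests encode (a tuple of 1-based indices of the two addends), a minimal implementation could look like the sketch below; it is not the repository's actual code.

class Solution(object):
    def twoSum(self, numbers, target):
        # Remember each value's 0-based position; return 1-based indices
        # of the first pair that sums to target, matching the tests.
        seen = {}
        for i, n in enumerate(numbers):
            if target - n in seen:
                return (seen[target - n] + 1, i + 1)
            seen[n] = i
        return None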
02ab3c0629100f5d55f9864f6d5183860a6c16fd
|
tests/cli/test_map_predict.py
|
tests/cli/test_map_predict.py
|
""" Test ``yatsm map predict ...``
"""
from click.testing import CliRunner
import numpy as np
from yatsm.cli.main import cli
# Truth for diagonals
diag = np.eye(5).astype(bool)
# SWIR answers
BAND_SWIR = 4
pred_swir = np.array([-9999, 723, 1279, 3261, 2885], dtype=np.int16)
def test_map_predict_pass_1(example_results, tmpdir, read_image):
""" Make a map of predictions
"""
image = tmpdir.join('predict.gtif').strpath
runner = CliRunner()
result = runner.invoke(
cli, [
'-v', 'map',
'--root', example_results['root'],
'--result', example_results['results_dir'],
'--image', example_results['example_img'],
'predict', '2005-06-01', image
]
)
img = read_image(image)
assert result.exit_code == 0
assert img.shape == (7, 5, 5)
np.testing.assert_equal(img[BAND_SWIR, diag], pred_swir)
|
Add test for mapping predictions
|
Add test for mapping predictions
|
Python
|
mit
|
valpasq/yatsm,c11/yatsm,ceholden/yatsm,c11/yatsm,valpasq/yatsm,ceholden/yatsm
|
Add test for mapping predictions
|
""" Test ``yatsm map predict ...``
"""
from click.testing import CliRunner
import numpy as np
from yatsm.cli.main import cli
# Truth for diagonals
diag = np.eye(5).astype(bool)
# SWIR answers
BAND_SWIR = 4
pred_swir = np.array([-9999, 723, 1279, 3261, 2885], dtype=np.int16)
def test_map_predict_pass_1(example_results, tmpdir, read_image):
""" Make a map of predictions
"""
image = tmpdir.join('predict.gtif').strpath
runner = CliRunner()
result = runner.invoke(
cli, [
'-v', 'map',
'--root', example_results['root'],
'--result', example_results['results_dir'],
'--image', example_results['example_img'],
'predict', '2005-06-01', image
]
)
img = read_image(image)
assert result.exit_code == 0
assert img.shape == (7, 5, 5)
np.testing.assert_equal(img[BAND_SWIR, diag], pred_swir)
|
<commit_before><commit_msg>Add test for mapping predictions<commit_after>
|
""" Test ``yatsm map predict ...``
"""
from click.testing import CliRunner
import numpy as np
from yatsm.cli.main import cli
# Truth for diagonals
diag = np.eye(5).astype(bool)
# SWIR answers
BAND_SWIR = 4
pred_swir = np.array([-9999, 723, 1279, 3261, 2885], dtype=np.int16)
def test_map_predict_pass_1(example_results, tmpdir, read_image):
""" Make a map of predictions
"""
image = tmpdir.join('predict.gtif').strpath
runner = CliRunner()
result = runner.invoke(
cli, [
'-v', 'map',
'--root', example_results['root'],
'--result', example_results['results_dir'],
'--image', example_results['example_img'],
'predict', '2005-06-01', image
]
)
img = read_image(image)
assert result.exit_code == 0
assert img.shape == (7, 5, 5)
np.testing.assert_equal(img[BAND_SWIR, diag], pred_swir)
|
Add test for mapping predictions""" Test ``yatsm map predict ...``
"""
from click.testing import CliRunner
import numpy as np
from yatsm.cli.main import cli
# Truth for diagonals
diag = np.eye(5).astype(bool)
# SWIR answers
BAND_SWIR = 4
pred_swir = np.array([-9999, 723, 1279, 3261, 2885], dtype=np.int16)
def test_map_predict_pass_1(example_results, tmpdir, read_image):
""" Make a map of predictions
"""
image = tmpdir.join('predict.gtif').strpath
runner = CliRunner()
result = runner.invoke(
cli, [
'-v', 'map',
'--root', example_results['root'],
'--result', example_results['results_dir'],
'--image', example_results['example_img'],
'predict', '2005-06-01', image
]
)
img = read_image(image)
assert result.exit_code == 0
assert img.shape == (7, 5, 5)
np.testing.assert_equal(img[BAND_SWIR, diag], pred_swir)
|
<commit_before><commit_msg>Add test for mapping predictions<commit_after>""" Test ``yatsm map predict ...``
"""
from click.testing import CliRunner
import numpy as np
from yatsm.cli.main import cli
# Truth for diagonals
diag = np.eye(5).astype(bool)
# SWIR answers
BAND_SWIR = 4
pred_swir = np.array([-9999, 723, 1279, 3261, 2885], dtype=np.int16)
def test_map_predict_pass_1(example_results, tmpdir, read_image):
""" Make a map of predictions
"""
image = tmpdir.join('predict.gtif').strpath
runner = CliRunner()
result = runner.invoke(
cli, [
'-v', 'map',
'--root', example_results['root'],
'--result', example_results['results_dir'],
'--image', example_results['example_img'],
'predict', '2005-06-01', image
]
)
img = read_image(image)
assert result.exit_code == 0
assert img.shape == (7, 5, 5)
np.testing.assert_equal(img[BAND_SWIR, diag], pred_swir)
|
|
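Assuming the usual pytest layout (the example_results, tmpdir and read_image fixtures come from the project's conftest, which is not part of this record), the new test runs like any other:

# Run just this module, or the single test, with pytest:
#   pytest tests/cli/test_map_predict.py -v
#   pytest tests/cli/test_map_predict.py::test_map_predict_pass_1 -v
import pytest

pytest.main(["tests/cli/test_map_predict.py", "-v"])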
93685b0c74ca6574f4e4db31edf8360b6c0cdca8
|
resolvebackup.py
|
resolvebackup.py
|
#! /usr/bin/env python
# Davinci Resolve 12 and above Disk Database project backup utility.
# Igor Ridanovic, HDhead.com
import os
import sys
import getpass
import time
from shutil import make_archive
from datetime import datetime
import tkMessageBox
# Report error and exit
def errorwindow(report):
tkMessageBox.showinfo('Resolve Project Backup', report)
sys.exit(report)
# User set number of minutes between backups
interval = 120
# End of User configuration
sleeptime = interval * 60
version = 1.0
currentUser = getpass.getuser()
# Determine the host operating system and set OS specific variables
hostOS = sys.platform
if hostOS == 'win32':
eol = '\r\n'
sourcePath = 'C:\ProgramData\Blackmagic Design\DaVinci Resolve\Support\Resolve Disk Database\Resolve Projects'
destPath = os.path.join('C:\Users', currentUser, 'Documents\ResolveProjectBackup')
elif hostOS == 'darwin':
eol = '\n'
    sourcePath = '/Library/Application Support/Blackmagic Design/DaVinci Resolve/Resolve Disk Database/Resolve Projects'
destPath = os.path.join('/Users', currentUser, 'Documents/ResolveProjectBackup')
else:
errorwindow('%s OS is not supported' %hostOS)
def wincompliance(ts):
"""remove space and colons from timestamp for Windows compliance"""
noSpace = 'T'.join(ts.split())
noColon = '-'.join(noSpace.split(':'))
return noColon
# Verify if paths are valid. Create destination directory if missing.
if not os.path.isdir(sourcePath):
errorwindow('The Resolve disk database root is not found at ' + sourcePath)
if not os.path.isdir(destPath):
os.makedirs(destPath)
# Create log file if missing or open if exists
logName = 'ResolveBackup.log'
logPath = os.path.join(destPath, logName)
if not os.path.isfile(logPath):
logfile = open(logPath, 'w')
logfile.write('Resolve Disk Database Backup Tool V%s. HDhead.com' %version)
logfile.write(eol)
logfile.close()
# Infinite backup loop
while 1 == 1:
# Create backup
timeStamp = str(datetime.now())[:-7]
backupName = 'ResolveProjBackup_' + wincompliance(timeStamp)
savePath = os.path.join(destPath, backupName)
make_archive(savePath, 'zip', sourcePath)
# Write a log entry
logfile = open(logPath, 'a')
logfile.write('Created %s.zip'%backupName)
logfile.write(eol)
logfile.close()
time.sleep(sleeptime)
|
Resolve Disk Database backup tool
|
Resolve Disk Database backup tool
|
Python
|
mit
|
IgorRidanovic/resolve-backup
|
Resolve Disk Database backup tool
|
#! /usr/bin/env python
# Davinci Resolve 12 and above Disk Database project backup utility.
# Igor Ridanovic, HDhead.com
import os
import sys
import getpass
import time
from shutil import make_archive
from datetime import datetime
import tkMessageBox
# Report error and exit
def errorwindow(report):
tkMessageBox.showinfo('Resolve Project Backup', report)
sys.exit(report)
# User set number of minutes between backups
interval = 120
# End of User configuration
sleeptime = interval * 60
version = 1.0
currentUser = getpass.getuser()
# Determine the host operating system and set OS specific variables
hostOS = sys.platform
if hostOS == 'win32':
eol = '\r\n'
sourcePath = 'C:\ProgramData\Blackmagic Design\DaVinci Resolve\Support\Resolve Disk Database\Resolve Projects'
destPath = os.path.join('C:\Users', currentUser, 'Documents\ResolveProjectBackup')
elif hostOS == 'darwin':
eol = '\n'
    sourcePath = '/Library/Application Support/Blackmagic Design/DaVinci Resolve/Resolve Disk Database/Resolve Projects'
destPath = os.path.join('/Users', currentUser, 'Documents/ResolveProjectBackup')
else:
errorwindow('%s OS is not supported' %hostOS)
def wincompliance(ts):
"""remove space and colons from timestamp for Windows compliance"""
noSpace = 'T'.join(ts.split())
noColon = '-'.join(noSpace.split(':'))
return noColon
# Verify if paths are valid. Create destination directory if missing.
if not os.path.isdir(sourcePath):
errorwindow('The Resolve disk database root is not found at ' + sourcePath)
if not os.path.isdir(destPath):
os.makedirs(destPath)
# Create log file if missing or open if exists
logName = 'ResolveBackup.log'
logPath = os.path.join(destPath, logName)
if not os.path.isfile(logPath):
logfile = open(logPath, 'w')
logfile.write('Resolve Disk Database Backup Tool V%s. HDhead.com' %version)
logfile.write(eol)
logfile.close()
# Infinite backup loop
while 1 == 1:
# Create backup
timeStamp = str(datetime.now())[:-7]
backupName = 'ResolveProjBackup_' + wincompliance(timeStamp)
savePath = os.path.join(destPath, backupName)
make_archive(savePath, 'zip', sourcePath)
# Write a log entry
logfile = open(logPath, 'a')
logfile.write('Created %s.zip'%backupName)
logfile.write(eol)
logfile.close()
time.sleep(sleeptime)
|
<commit_before><commit_msg>Resolve Disk Database backup tool<commit_after>
|
#! /usr/bin/env python
# Davinci Resolve 12 and above Disk Database project backup utility.
# Igor Ridanovic, HDhead.com
import os
import sys
import getpass
import time
from shutil import make_archive
from datetime import datetime
import tkMessageBox
# Report error and exit
def errorwindow(report):
tkMessageBox.showinfo('Resolve Project Backup', report)
sys.exit(report)
# User set number of minutes between backups
interval = 120
# End of User configuration
sleeptime = interval * 60
version = 1.0
currentUser = getpass.getuser()
# Determine the host operating system and set OS specific variables
hostOS = sys.platform
if hostOS == 'win32':
eol = '\r\n'
sourcePath = 'C:\ProgramData\Blackmagic Design\DaVinci Resolve\Support\Resolve Disk Database\Resolve Projects'
destPath = os.path.join('C:\Users', currentUser, 'Documents\ResolveProjectBackup')
elif hostOS == 'darwin':
eol = '\n'
    sourcePath = '/Library/Application Support/Blackmagic Design/DaVinci Resolve/Resolve Disk Database/Resolve Projects'
destPath = os.path.join('/Users', currentUser, 'Documents/ResolveProjectBackup')
else:
errorwindow('%s OS is not supported' %hostOS)
def wincompliance(ts):
"""remove space and colons from timestamp for Windows compliance"""
noSpace = 'T'.join(ts.split())
noColon = '-'.join(noSpace.split(':'))
return noColon
# Verify if paths are valid. Create destination directory if missing.
if not os.path.isdir(sourcePath):
errorwindow('The Resolve disk database root is not found at ' + sourcePath)
if not os.path.isdir(destPath):
os.makedirs(destPath)
# Create log file if missing or open if exists
logName = 'ResolveBackup.log'
logPath = os.path.join(destPath, logName)
if not os.path.isfile(logPath):
logfile = open(logPath, 'w')
logfile.write('Resolve Disk Database Backup Tool V%s. HDhead.com' %version)
logfile.write(eol)
logfile.close()
# Infinite backup loop
while 1 == 1:
# Create backup
timeStamp = str(datetime.now())[:-7]
backupName = 'ResolveProjBackup_' + wincompliance(timeStamp)
savePath = os.path.join(destPath, backupName)
make_archive(savePath, 'zip', sourcePath)
# Write a log entry
logfile = open(logPath, 'a')
logfile.write('Created %s.zip'%backupName)
logfile.write(eol)
logfile.close()
time.sleep(sleeptime)
|
Resolve Disk Database backup tool#! /usr/bin/env python
# Davinci Resolve 12 and above Disk Database project backup utility.
# Igor Ridanovic, HDhead.com
import os
import sys
import getpass
import time
from shutil import make_archive
from datetime import datetime
import tkMessageBox
# Report error and exit
def errorwindow(report):
tkMessageBox.showinfo('Resolve Project Backup', report)
sys.exit(report)
# User set number of minutes between backups
interval = 120
# End of User configuration
sleeptime = interval * 60
version = 1.0
currentUser = getpass.getuser()
# Determine the host operating system and set OS specific variables
hostOS = sys.platform
if hostOS == 'win32':
eol = '\r\n'
sourcePath = 'C:\ProgramData\Blackmagic Design\DaVinci Resolve\Support\Resolve Disk Database\Resolve Projects'
destPath = os.path.join('C:\Users', currentUser, 'Documents\ResolveProjectBackup')
elif hostOS == 'darwin':
eol = '\n'
    sourcePath = '/Library/Application Support/Blackmagic Design/DaVinci Resolve/Resolve Disk Database/Resolve Projects'
destPath = os.path.join('/Users', currentUser, 'Documents/ResolveProjectBackup')
else:
errorwindow('%s OS is not supported' %hostOS)
def wincompliance(ts):
"""remove space and colons from timestamp for Windows compliance"""
noSpace = 'T'.join(ts.split())
noColon = '-'.join(noSpace.split(':'))
return noColon
# Verify if paths are valid. Create destination directory if missing.
if not os.path.isdir(sourcePath):
errorwindow('The Resolve disk database root is not found at ' + sourcePath)
if not os.path.isdir(destPath):
os.makedirs(destPath)
# Create log file if missing or open if exists
logName = 'ResolveBackup.log'
logPath = os.path.join(destPath, logName)
if not os.path.isfile(logPath):
logfile = open(logPath, 'w')
logfile.write('Resolve Disk Database Backup Tool V%s. HDhead.com' %version)
logfile.write(eol)
logfile.close()
# Infinite backup loop
while 1 == 1:
# Create backup
timeStamp = str(datetime.now())[:-7]
backupName = 'ResolveProjBackup_' + wincompliance(timeStamp)
savePath = os.path.join(destPath, backupName)
make_archive(savePath, 'zip', sourcePath)
# Write a log entry
logfile = open(logPath, 'a')
logfile.write('Created %s.zip'%backupName)
logfile.write(eol)
logfile.close()
time.sleep(sleeptime)
|
<commit_before><commit_msg>Resolve Disk Database backup tool<commit_after>#! /usr/bin/env python
# Davinci Resolve 12 and above Disk Database project backup utility.
# Igor Ridanovic, HDhead.com
import os
import sys
import getpass
import time
from shutil import make_archive
from datetime import datetime
import tkMessageBox
# Report error and exit
def errorwindow(report):
tkMessageBox.showinfo('Resolve Project Backup', report)
sys.exit(report)
# User set number of minutes between backups
interval = 120
# End of User configuration
sleeptime = interval * 60
version = 1.0
currentUser = getpass.getuser()
# Determine the host operating system and set OS specific variables
hostOS = sys.platform
if hostOS == 'win32':
eol = '\r\n'
sourcePath = 'C:\ProgramData\Blackmagic Design\DaVinci Resolve\Support\Resolve Disk Database\Resolve Projects'
destPath = os.path.join('C:\Users', currentUser, 'Documents\ResolveProjectBackup')
elif hostOS == 'darwin':
eol = '\n'
    sourcePath = '/Library/Application Support/Blackmagic Design/DaVinci Resolve/Resolve Disk Database/Resolve Projects'
destPath = os.path.join('/Users', currentUser, 'Documents/ResolveProjectBackup')
else:
errorwindow('%s OS is not supported' %hostOS)
def wincompliance(ts):
"""remove space and colons from timestamp for Windows compliance"""
noSpace = 'T'.join(ts.split())
noColon = '-'.join(noSpace.split(':'))
return noColon
# Verify if paths are valid. Create destination directory if missing.
if not os.path.isdir(sourcePath):
errorwindow('The Resolve disk database root is not found at ' + sourcePath)
if not os.path.isdir(destPath):
os.makedirs(destPath)
# Create log file if missing or open if exists
logName = 'ResolveBackup.log'
logPath = os.path.join(destPath, logName)
if not os.path.isfile(logPath):
logfile = open(logPath, 'w')
logfile.write('Resolve Disk Database Backup Tool V%s. HDhead.com' %version)
logfile.write(eol)
logfile.close()
# Infinite backup loop
while 1 == 1:
# Create backup
timeStamp = str(datetime.now())[:-7]
backupName = 'ResolveProjBackup_' + wincompliance(timeStamp)
savePath = os.path.join(destPath, backupName)
make_archive(savePath, 'zip', sourcePath)
# Write a log entry
logfile = open(logPath, 'a')
logfile.write('Created %s.zip'%backupName)
logfile.write(eol)
logfile.close()
time.sleep(sleeptime)
|
|
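Restoring from one of the archives the backup tool above produces is just an unzip back into the database root. A hypothetical restore step — the user name and archive timestamp are made up; the paths mirror the Windows paths assumed in the script:

import zipfile

backup = r'C:\Users\me\Documents\ResolveProjectBackup\ResolveProjBackup_2016-01-01T12-00-00.zip'
target = r'C:\ProgramData\Blackmagic Design\DaVinci Resolve\Support\Resolve Disk Database\Resolve Projects'

# Unpack the chosen snapshot back over the Resolve disk database root.
with zipfile.ZipFile(backup) as zf:
    zf.extractall(target)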
d8bd544112ee268a58cc24be148d4b672a488128
|
parse_dump.py
|
parse_dump.py
|
#!/usr/bin/python
#
# Copyright (c) 2007-2008 The PyAMF Project.
# See LICENSE for details.
"""
Extracts and displays information for files that contain AMF data.
"""
import glob
from optparse import OptionParser
from fnmatch import fnmatch
import pyamf
from pyamf import remoting
def parse_options():
"""
Parse command-line arguments.
"""
parser = OptionParser()
parser.add_option("-d", "--debug", action="store_true", dest="debug",
default=False, help="Enable debugging")
parser.add_option("--dump", action="store_true", dest="dump",
default=False, help="Shows a hexdump of the file")
parser.add_option("--strict", action="store_true", dest="strict",
default=False, help="""Option to decode typed but unaliased classes """
"""without raising UnknownClassAlias""")
return parser.parse_args()
def read_file(fname):
"""
Read file containing AMF data.
"""
f = file(fname, "r")
data = f.read()
f.close()
return data
def main():
"""
Run AMF decoder on input file.
"""
(options, args) = parse_options()
print 'Using pyamf from: %s' % (pyamf,)
print 'Strict = ' + str(options.strict)
for arg in args:
for fname in glob.glob(arg):
if fnmatch(fname, '*.amf*'):
body = read_file(fname)
try:
print "\nDecoding file:", fname
request = remoting.decode(body, None, options.strict)
if options.debug:
for name, message in request:
print " %s: %s" % (name, message)
except pyamf.UnknownClassAlias, c:
if options.debug:
print '\n Warning: %s' % c
except pyamf.DecodeError, c:
if options.debug:
print '\n Warning: %s' % c
except:
raise
if options.dump:
print
print pyamf.util.hexdump(body)
print "-" * 80
if __name__ == '__main__':
main()
|
Move ParseDump script to dumps folder.
|
Move ParseDump script to dumps folder.
|
Python
|
mit
|
thijstriemstra/pyamf-dumps
|
Move ParseDump script to dumps folder.
|
#!/usr/bin/python
#
# Copyright (c) 2007-2008 The PyAMF Project.
# See LICENSE for details.
"""
Extracts and displays information for files that contain AMF data.
"""
import glob
from optparse import OptionParser
from fnmatch import fnmatch
import pyamf
from pyamf import remoting
def parse_options():
"""
Parse command-line arguments.
"""
parser = OptionParser()
parser.add_option("-d", "--debug", action="store_true", dest="debug",
default=False, help="Enable debugging")
parser.add_option("--dump", action="store_true", dest="dump",
default=False, help="Shows a hexdump of the file")
parser.add_option("--strict", action="store_true", dest="strict",
default=False, help="""Option to decode typed but unaliased classes """
"""without raising UnknownClassAlias""")
return parser.parse_args()
def read_file(fname):
"""
Read file containing AMF data.
"""
f = file(fname, "r")
data = f.read()
f.close()
return data
def main():
"""
Run AMF decoder on input file.
"""
(options, args) = parse_options()
print 'Using pyamf from: %s' % (pyamf,)
print 'Strict = ' + str(options.strict)
for arg in args:
for fname in glob.glob(arg):
if fnmatch(fname, '*.amf*'):
body = read_file(fname)
try:
print "\nDecoding file:", fname
request = remoting.decode(body, None, options.strict)
if options.debug:
for name, message in request:
print " %s: %s" % (name, message)
except pyamf.UnknownClassAlias, c:
if options.debug:
print '\n Warning: %s' % c
except pyamf.DecodeError, c:
if options.debug:
print '\n Warning: %s' % c
except:
raise
if options.dump:
print
print pyamf.util.hexdump(body)
print "-" * 80
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Move ParseDump script to dumps folder.<commit_after>
|
#!/usr/bin/python
#
# Copyright (c) 2007-2008 The PyAMF Project.
# See LICENSE for details.
"""
Extracts and displays information for files that contain AMF data.
"""
import glob
from optparse import OptionParser
from fnmatch import fnmatch
import pyamf
from pyamf import remoting
def parse_options():
"""
Parse command-line arguments.
"""
parser = OptionParser()
parser.add_option("-d", "--debug", action="store_true", dest="debug",
default=False, help="Enable debugging")
parser.add_option("--dump", action="store_true", dest="dump",
default=False, help="Shows a hexdump of the file")
parser.add_option("--strict", action="store_true", dest="strict",
default=False, help="""Option to decode typed but unaliased classes """
"""without raising UnknownClassAlias""")
return parser.parse_args()
def read_file(fname):
"""
Read file containing AMF data.
"""
f = file(fname, "r")
data = f.read()
f.close()
return data
def main():
"""
Run AMF decoder on input file.
"""
(options, args) = parse_options()
print 'Using pyamf from: %s' % (pyamf,)
print 'Strict = ' + str(options.strict)
for arg in args:
for fname in glob.glob(arg):
if fnmatch(fname, '*.amf*'):
body = read_file(fname)
try:
print "\nDecoding file:", fname
request = remoting.decode(body, None, options.strict)
if options.debug:
for name, message in request:
print " %s: %s" % (name, message)
except pyamf.UnknownClassAlias, c:
if options.debug:
print '\n Warning: %s' % c
except pyamf.DecodeError, c:
if options.debug:
print '\n Warning: %s' % c
except:
raise
if options.dump:
print
print pyamf.util.hexdump(body)
print "-" * 80
if __name__ == '__main__':
main()
|
Move ParseDump script to dumps folder.#!/usr/bin/python
#
# Copyright (c) 2007-2008 The PyAMF Project.
# See LICENSE for details.
"""
Extracts and displays information for files that contain AMF data.
"""
import glob
from optparse import OptionParser
from fnmatch import fnmatch
import pyamf
from pyamf import remoting
def parse_options():
"""
Parse command-line arguments.
"""
parser = OptionParser()
parser.add_option("-d", "--debug", action="store_true", dest="debug",
default=False, help="Enable debugging")
parser.add_option("--dump", action="store_true", dest="dump",
default=False, help="Shows a hexdump of the file")
parser.add_option("--strict", action="store_true", dest="strict",
default=False, help="""Option to decode typed but unaliased classes """
"""without raising UnknownClassAlias""")
return parser.parse_args()
def read_file(fname):
"""
Read file containing AMF data.
"""
f = file(fname, "r")
data = f.read()
f.close()
return data
def main():
"""
Run AMF decoder on input file.
"""
(options, args) = parse_options()
print 'Using pyamf from: %s' % (pyamf,)
print 'Strict = ' + str(options.strict)
for arg in args:
for fname in glob.glob(arg):
if fnmatch(fname, '*.amf*'):
body = read_file(fname)
try:
print "\nDecoding file:", fname
request = remoting.decode(body, None, options.strict)
if options.debug:
for name, message in request:
print " %s: %s" % (name, message)
except pyamf.UnknownClassAlias, c:
if options.debug:
print '\n Warning: %s' % c
except pyamf.DecodeError, c:
if options.debug:
print '\n Warning: %s' % c
except:
raise
if options.dump:
print
print pyamf.util.hexdump(body)
print "-" * 80
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Move ParseDump script to dumps folder.<commit_after>#!/usr/bin/python
#
# Copyright (c) 2007-2008 The PyAMF Project.
# See LICENSE for details.
"""
Extracts and displays information for files that contain AMF data.
"""
import glob
from optparse import OptionParser
from fnmatch import fnmatch
import pyamf
from pyamf import remoting
def parse_options():
"""
Parse command-line arguments.
"""
parser = OptionParser()
parser.add_option("-d", "--debug", action="store_true", dest="debug",
default=False, help="Enable debugging")
parser.add_option("--dump", action="store_true", dest="dump",
default=False, help="Shows a hexdump of the file")
parser.add_option("--strict", action="store_true", dest="strict",
default=False, help="""Option to decode typed but unaliased classes """
"""without raising UnknownClassAlias""")
return parser.parse_args()
def read_file(fname):
"""
Read file containing AMF data.
"""
f = file(fname, "r")
data = f.read()
f.close()
return data
def main():
"""
Run AMF decoder on input file.
"""
(options, args) = parse_options()
print 'Using pyamf from: %s' % (pyamf,)
print 'Strict = ' + str(options.strict)
for arg in args:
for fname in glob.glob(arg):
if fnmatch(fname, '*.amf*'):
body = read_file(fname)
try:
print "\nDecoding file:", fname
request = remoting.decode(body, None, options.strict)
if options.debug:
for name, message in request:
print " %s: %s" % (name, message)
except pyamf.UnknownClassAlias, c:
if options.debug:
print '\n Warning: %s' % c
except pyamf.DecodeError, c:
if options.debug:
print '\n Warning: %s' % c
except:
raise
if options.dump:
print
print pyamf.util.hexdump(body)
print "-" * 80
if __name__ == '__main__':
main()
|
|
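A hypothetical invocation of the AMF dump parser above — the dump directory is made up; the options are the ones the script registers with OptionParser, and the script globs its own arguments, so the pattern is passed through unexpanded:

import subprocess

# Decode every AMF dump under dumps/, with debug output and a hexdump.
# The script itself is Python 2 era code (print statements, old except syntax).
subprocess.call(["python2", "parse_dump.py", "--debug", "--dump", "dumps/*.amf3"])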
7c6dfd810e0567a0aa24515f8d57a5d609e38e95
|
ideascube/conf/kb_esp_avanti.py
|
ideascube/conf/kb_esp_avanti.py
|
# -*- coding: utf-8 -*-
"""KoomBook conf"""
from .kb import * # noqa
LANGUAGE_CODE = 'es'
IDEASCUBE_NAME = 'Avanti'
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'gutenberg',
'lang': 'es',
},
{
'id': 'wikipedia',
'languages': ['es']
},
{
'id': 'khanacademy',
},
]
|
Add conf file for Avanti KoomBook
|
Add conf file for Avanti KoomBook
|
Python
|
agpl-3.0
|
ideascube/ideascube,ideascube/ideascube,ideascube/ideascube,ideascube/ideascube
|
Add conf file for Avanti KoomBook
|
# -*- coding: utf-8 -*-
"""KoomBook conf"""
from .kb import * # noqa
LANGUAGE_CODE = 'es'
IDEASCUBE_NAME = 'Avanti'
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'gutenberg',
'lang': 'es',
},
{
'id': 'wikipedia',
'languages': ['es']
},
{
'id': 'khanacademy',
},
]
|
<commit_before><commit_msg>Add conf file for Avanti KoomBook<commit_after>
|
# -*- coding: utf-8 -*-
"""KoomBook conf"""
from .kb import * # noqa
LANGUAGE_CODE = 'es'
IDEASCUBE_NAME = 'Avanti'
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'gutenberg',
'lang': 'es',
},
{
'id': 'wikipedia',
'languages': ['es']
},
{
'id': 'khanacademy',
},
]
|
Add conf file for Avanti KoomBook# -*- coding: utf-8 -*-
"""KoomBook conf"""
from .kb import * # noqa
LANGUAGE_CODE = 'es'
IDEASCUBE_NAME = 'Avanti'
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'gutenberg',
'lang': 'es',
},
{
'id': 'wikipedia',
'languages': ['es']
},
{
'id': 'khanacademy',
},
]
|
<commit_before><commit_msg>Add conf file for Avanti KoomBook<commit_after># -*- coding: utf-8 -*-
"""KoomBook conf"""
from .kb import * # noqa
LANGUAGE_CODE = 'es'
IDEASCUBE_NAME = 'Avanti'
HOME_CARDS = STAFF_HOME_CARDS + [
{
'id': 'blog',
},
{
'id': 'mediacenter',
},
{
'id': 'gutenberg',
'lang': 'es',
},
{
'id': 'wikipedia',
'languages': ['es']
},
{
'id': 'khanacademy',
},
]
|
|
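If the project selects configuration modules the standard Django way — an assumption, since nothing in this record shows how ideascube wires its settings — the new profile would be activated roughly like this:

import os

# Point Django at the new KoomBook profile before anything imports settings.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ideascube.conf.kb_esp_avanti")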
328bb2cd8eb55d1a1627f96c7a8ab77972ab49c9
|
mistral/db/sqlalchemy/migration/alembic_migrations/versions/026_optimize_task_expression_func.py
|
mistral/db/sqlalchemy/migration/alembic_migrations/versions/026_optimize_task_expression_func.py
|
# Copyright 2018 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Optimize task expression function
Revision ID: 026
Revises: 025
Create Date: 2018-03-22 15:23:04.904251
"""
# revision identifiers, used by Alembic.
revision = '026'
down_revision = '025'
from alembic import op
def upgrade():
op.create_index('action_executions_v2_task_execution_id',
'action_executions_v2',
['task_execution_id'])
op.create_index('workflow_executions_v2_task_execution_id',
'workflow_executions_v2',
['task_execution_id'])
|
Add new indexes to optimize `task().result` expression function
|
Add new indexes to optimize `task().result` expression function
Change-Id: I94707368dbfe363a2b909985d3bd24dbae8e67f6
|
Python
|
apache-2.0
|
openstack/mistral,openstack/mistral
|
Add new indexes to optimize `task().result` expression function
Change-Id: I94707368dbfe363a2b909985d3bd24dbae8e67f6
|
# Copyright 2018 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Optimize task expression function
Revision ID: 026
Revises: 025
Create Date: 2018-03-22 15:23:04.904251
"""
# revision identifiers, used by Alembic.
revision = '026'
down_revision = '025'
from alembic import op
def upgrade():
op.create_index('action_executions_v2_task_execution_id',
'action_executions_v2',
['task_execution_id'])
op.create_index('workflow_executions_v2_task_execution_id',
'workflow_executions_v2',
['task_execution_id'])
|
<commit_before><commit_msg>Add new indexes to optimize `task().result` expression function
Change-Id: I94707368dbfe363a2b909985d3bd24dbae8e67f6<commit_after>
|
# Copyright 2018 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Optimize task expression function
Revision ID: 026
Revises: 025
Create Date: 2018-03-22 15:23:04.904251
"""
# revision identifiers, used by Alembic.
revision = '026'
down_revision = '025'
from alembic import op
def upgrade():
op.create_index('action_executions_v2_task_execution_id',
'action_executions_v2',
['task_execution_id'])
op.create_index('workflow_executions_v2_task_execution_id',
'workflow_executions_v2',
['task_execution_id'])
|
Add new indexes to optimize `task().result` expression function
Change-Id: I94707368dbfe363a2b909985d3bd24dbae8e67f6# Copyright 2018 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Optimize task expression function
Revision ID: 026
Revises: 025
Create Date: 2018-03-22 15:23:04.904251
"""
# revision identifiers, used by Alembic.
revision = '026'
down_revision = '025'
from alembic import op
def upgrade():
op.create_index('action_executions_v2_task_execution_id',
'action_executions_v2',
['task_execution_id'])
op.create_index('workflow_executions_v2_task_execution_id',
'workflow_executions_v2',
['task_execution_id'])
|
<commit_before><commit_msg>Add new indexes to optimize `task().result` expression function
Change-Id: I94707368dbfe363a2b909985d3bd24dbae8e67f6<commit_after># Copyright 2018 OpenStack Foundation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Optimize task expression function
Revision ID: 026
Revises: 025
Create Date: 2018-03-22 15:23:04.904251
"""
# revision identifiers, used by Alembic.
revision = '026'
down_revision = '025'
from alembic import op
def upgrade():
op.create_index('action_executions_v2_task_execution_id',
'action_executions_v2',
['task_execution_id'])
op.create_index('workflow_executions_v2_task_execution_id',
'workflow_executions_v2',
['task_execution_id'])
|
|
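The migration above defines only upgrade(). Whether Mistral's migration chain requires a downgrade is not shown in the record, but the symmetric operation would simply drop the two indexes — a sketch, not part of the actual commit:

def downgrade():
    # Symmetric to upgrade(): drop the indexes added on task_execution_id.
    op.drop_index('action_executions_v2_task_execution_id',
                  table_name='action_executions_v2')
    op.drop_index('workflow_executions_v2_task_execution_id',
                  table_name='workflow_executions_v2')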
86fdb5e1fbf473cd152769428e14dc1a6362ce39
|
ris_widget/qdelegates/tint_delegate.py
|
ris_widget/qdelegates/tint_delegate.py
|
# The MIT License (MIT)
#
# Copyright (c) 2015 WUSTL ZPLAB
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Authors: Erik Hvatum <ice.rikh@gmail.com>
from PyQt5 import Qt
class TintDelegate(Qt.QStyledItemDelegate):
def createEditor(self, parent, option, midx):
if midx.isValid():
e = Qt.QColorDialog(parent)
e.setOptions(Qt.QColorDialog.ShowAlphaChannel)# | Qt.QColorDialog.NoButtons)
return e
def setEditorData(self, e, midx):
d = midx.data()
if isinstance(d, Qt.QVariant):
d = d.value()
e.setCurrentColor(Qt.QColor(*(int(c*255) for c in d)))
def setModelData(self, e, model, midx):
color = e.currentColor()
model.setData(midx, (color.redF(), color.greenF(), color.blueF(), color.alphaF()))
|
Add accidentally omitted TintDelegate source file.
|
Add accidentally omitted TintDelegate source file.
|
Python
|
mit
|
zpincus/RisWidget,zpincus/RisWidget,erikhvatum/RisWidget,erikhvatum/RisWidget
|
Add accidentally omitted TintDelegate source file.
|
# The MIT License (MIT)
#
# Copyright (c) 2015 WUSTL ZPLAB
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Authors: Erik Hvatum <ice.rikh@gmail.com>
from PyQt5 import Qt
class TintDelegate(Qt.QStyledItemDelegate):
def createEditor(self, parent, option, midx):
if midx.isValid():
e = Qt.QColorDialog(parent)
e.setOptions(Qt.QColorDialog.ShowAlphaChannel)# | Qt.QColorDialog.NoButtons)
return e
def setEditorData(self, e, midx):
d = midx.data()
if isinstance(d, Qt.QVariant):
d = d.value()
e.setCurrentColor(Qt.QColor(*(int(c*255) for c in d)))
def setModelData(self, e, model, midx):
color = e.currentColor()
model.setData(midx, (color.redF(), color.greenF(), color.blueF(), color.alphaF()))
|
<commit_before><commit_msg>Add accidentally omitted TintDelegate source file.<commit_after>
|
# The MIT License (MIT)
#
# Copyright (c) 2015 WUSTL ZPLAB
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Authors: Erik Hvatum <ice.rikh@gmail.com>
from PyQt5 import Qt
class TintDelegate(Qt.QStyledItemDelegate):
def createEditor(self, parent, option, midx):
if midx.isValid():
e = Qt.QColorDialog(parent)
e.setOptions(Qt.QColorDialog.ShowAlphaChannel)# | Qt.QColorDialog.NoButtons)
return e
def setEditorData(self, e, midx):
d = midx.data()
if isinstance(d, Qt.QVariant):
d = d.value()
e.setCurrentColor(Qt.QColor(*(int(c*255) for c in d)))
def setModelData(self, e, model, midx):
color = e.currentColor()
model.setData(midx, (color.redF(), color.greenF(), color.blueF(), color.alphaF()))
|
Add accidentally omitted TintDelegate source file.# The MIT License (MIT)
#
# Copyright (c) 2015 WUSTL ZPLAB
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Authors: Erik Hvatum <ice.rikh@gmail.com>
from PyQt5 import Qt
class TintDelegate(Qt.QStyledItemDelegate):
def createEditor(self, parent, option, midx):
if midx.isValid():
e = Qt.QColorDialog(parent)
e.setOptions(Qt.QColorDialog.ShowAlphaChannel)# | Qt.QColorDialog.NoButtons)
return e
def setEditorData(self, e, midx):
d = midx.data()
if isinstance(d, Qt.QVariant):
d = d.value()
e.setCurrentColor(Qt.QColor(*(int(c*255) for c in d)))
def setModelData(self, e, model, midx):
color = e.currentColor()
model.setData(midx, (color.redF(), color.greenF(), color.blueF(), color.alphaF()))
|
<commit_before><commit_msg>Add accidentally omitted TintDelegate source file.<commit_after># The MIT License (MIT)
#
# Copyright (c) 2015 WUSTL ZPLAB
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# Authors: Erik Hvatum <ice.rikh@gmail.com>
from PyQt5 import Qt
class TintDelegate(Qt.QStyledItemDelegate):
def createEditor(self, parent, option, midx):
if midx.isValid():
e = Qt.QColorDialog(parent)
e.setOptions(Qt.QColorDialog.ShowAlphaChannel)# | Qt.QColorDialog.NoButtons)
return e
def setEditorData(self, e, midx):
d = midx.data()
if isinstance(d, Qt.QVariant):
d = d.value()
e.setCurrentColor(Qt.QColor(*(int(c*255) for c in d)))
def setModelData(self, e, model, midx):
color = e.currentColor()
model.setData(midx, (color.redF(), color.greenF(), color.blueF(), color.alphaF()))
|
|
be2c64f5e30196653f33c3b1bca4a093e7fdd2ff
|
recipes/skia.py
|
recipes/skia.py
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Skia(recipe_util.Recipe):
"""Basic Recipe class for the Skia repository."""
@staticmethod
def fetch_spec(_props):
solution = {
'name' : 'skia',
'url' : 'https://skia.googlesource.com/skia.git',
'deps_file': 'DEPS',
'managed' : False,
}
spec = {
'solutions': [solution]
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'skia'
def main(argv=None):
return Skia().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Add fetch recipe for the Skia repository.
|
Add fetch recipe for the Skia repository.
Tested with the following command lines:
$ cd somewhere
$ mkdir some-test-dir
$ cd some-test-dir
$ fetch skia
$ cd skia
# confirm it is what we expected.
BUG=None
TEST=see above
R=agable@chromium.org
Review URL: https://codereview.chromium.org/746363003
git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@293135 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
primiano/depot_tools,sarvex/depot-tools,HackFisher/depot_tools,HackFisher/depot_tools,Midrya/chromium,npe9/depot_tools,eatbyte/depot_tools,airtimemedia/depot_tools,fracting/depot_tools,disigma/depot_tools,gcodetogit/depot_tools,fracting/depot_tools,mlufei/depot_tools,duanwujie/depot_tools,azunite/chrome_build,aleonliao/depot_tools,liaorubei/depot_tools,npe9/depot_tools,cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools,eatbyte/depot_tools,Midrya/chromium,liaorubei/depot_tools,ajohnson23/depot_tools,CoherentLabs/depot_tools,duongbaoduy/gtools,gcodetogit/depot_tools,duongbaoduy/gtools,liaorubei/depot_tools,aleonliao/depot_tools,primiano/depot_tools,G-P-S/depot_tools,xuyuhan/depot_tools,npe9/depot_tools,liaorubei/depot_tools,SuYiling/chrome_depot_tools,duanwujie/depot_tools,withtone/depot_tools,sarvex/depot-tools,fanjunwei/depot_tools,HackFisher/depot_tools,chinmaygarde/depot_tools,Midrya/chromium,azureplus/chromium_depot_tools,azunite/chrome_build,mlufei/depot_tools,ajohnson23/depot_tools,ajohnson23/depot_tools,disigma/depot_tools,sarvex/depot-tools,azureplus/chromium_depot_tools,primiano/depot_tools,sarvex/depot-tools,cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools,fanjunwei/depot_tools,kaiix/depot_tools,azureplus/chromium_depot_tools,fanjunwei/depot_tools,azunite/chrome_build,kaiix/depot_tools,airtimemedia/depot_tools,xuyuhan/depot_tools,xuyuhan/depot_tools,G-P-S/depot_tools,G-P-S/depot_tools,gcodetogit/depot_tools,hsharsha/depot_tools,SuYiling/chrome_depot_tools,airtimemedia/depot_tools,HackFisher/depot_tools,eatbyte/depot_tools,withtone/depot_tools,kaiix/depot_tools,hsharsha/depot_tools,disigma/depot_tools,SuYiling/chrome_depot_tools,eatbyte/depot_tools,npe9/depot_tools,airtimemedia/depot_tools,mlufei/depot_tools,hsharsha/depot_tools,chinmaygarde/depot_tools,duongbaoduy/gtools,G-P-S/depot_tools,chinmaygarde/depot_tools,fracting/depot_tools,CoherentLabs/depot_tools,cpanelli/-git-clone-https-chromium.googlesource.com-chromium-tools-depot_tools,fanjunwei/depot_tools,aleonliao/depot_tools,xuyuhan/depot_tools,duanwujie/depot_tools,withtone/depot_tools
|
Add fetch recipe for the Skia repository.
Tested with the following command lines:
$ cd somewhere
$ mkdir some-test-dir
$ cd some-test-dir
$ fetch skia
$ cd skia
# confirm it is what we expected.
BUG=None
TEST=see above
R=agable@chromium.org
Review URL: https://codereview.chromium.org/746363003
git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@293135 0039d316-1c4b-4281-b951-d872f2087c98
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Skia(recipe_util.Recipe):
"""Basic Recipe class for the Skia repository."""
@staticmethod
def fetch_spec(_props):
solution = {
'name' : 'skia',
'url' : 'https://skia.googlesource.com/skia.git',
'deps_file': 'DEPS',
'managed' : False,
}
spec = {
'solutions': [solution]
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'skia'
def main(argv=None):
return Skia().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before><commit_msg>Add fetch recipe for the Skia repository.
Tested with the following command lines:
$ cd somewhere
$ mkdir some-test-dir
$ cd some-test-dir
$ fetch skia
$ cd skia
# confirm it is what we expected.
BUG=None
TEST=see above
R=agable@chromium.org
Review URL: https://codereview.chromium.org/746363003
git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@293135 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Skia(recipe_util.Recipe):
"""Basic Recipe class for the Skia repository."""
@staticmethod
def fetch_spec(_props):
solution = {
'name' : 'skia',
'url' : 'https://skia.googlesource.com/skia.git',
'deps_file': 'DEPS',
'managed' : False,
}
spec = {
'solutions': [solution]
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'skia'
def main(argv=None):
return Skia().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Add fetch recipe for the Skia repository.
Tested with the following command lines:
$ cd somewhere
$ mkdir some-test-dir
$ cd some-test-dir
$ fetch skia
$ cd skia
# confirm it is what we expected.
BUG=None
TEST=see above
R=agable@chromium.org
Review URL: https://codereview.chromium.org/746363003
git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@293135 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Skia(recipe_util.Recipe):
"""Basic Recipe class for the Skia repository."""
@staticmethod
def fetch_spec(_props):
solution = {
'name' : 'skia',
'url' : 'https://skia.googlesource.com/skia.git',
'deps_file': 'DEPS',
'managed' : False,
}
spec = {
'solutions': [solution]
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'skia'
def main(argv=None):
return Skia().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before><commit_msg>Add fetch recipe for the Skia repository.
Tested with the following command lines:
$ cd somewhere
$ mkdir some-test-dir
$ cd some-test-dir
$ fetch skia
$ cd skia
# confirm it is what we expected.
BUG=None
TEST=see above
R=agable@chromium.org
Review URL: https://codereview.chromium.org/746363003
git-svn-id: fd409f4bdeea2bb50a5d34bb4d4bfc2046a5a3dd@293135 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import recipe_util # pylint: disable=F0401
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=W0232
class Skia(recipe_util.Recipe):
"""Basic Recipe class for the Skia repository."""
@staticmethod
def fetch_spec(_props):
solution = {
'name' : 'skia',
'url' : 'https://skia.googlesource.com/skia.git',
'deps_file': 'DEPS',
'managed' : False,
}
spec = {
'solutions': [solution]
}
return {
'type': 'gclient_git',
'gclient_git_spec': spec,
}
@staticmethod
def expected_root(_props):
return 'skia'
def main(argv=None):
return Skia().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|