Column           Type           Min    Max
commit           stringlengths  40     40
old_file         stringlengths  4      118
new_file         stringlengths  4      118
old_contents     stringlengths  0      2.94k
new_contents     stringlengths  1      4.43k
subject          stringlengths  15     444
message          stringlengths  16     3.45k
lang             stringclasses  1 value
license          stringclasses  13 values
repos            stringlengths  5      43.2k
prompt           stringlengths  17     4.58k
response         stringlengths  1      4.43k
prompt_tagged    stringlengths  58     4.62k
response_tagged  stringlengths  1      4.43k
text             stringlengths  132    7.29k
text_tagged      stringlengths  173    7.33k
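As a quick orientation, here is a minimal sketch of reading a dump with this schema via the Hugging Face datasets library. The dataset id "user/python-commits" is a hypothetical placeholder, not the real Hub path; the column names come from the schema above.

from datasets import load_dataset

# "user/python-commits" is a placeholder id -- substitute the actual Hub path.
ds = load_dataset("user/python-commits", split="train")

row = ds[0]
print(row["commit"])        # 40-character commit hash
print(row["subject"])       # commit subject line
print(row["new_contents"])  # file contents after the commit

# prompt/response and the *_tagged / text columns are mechanical
# recombinations of the base fields above.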
5e5e58b705d30df62423ec8bb6018c6807114580
providers/io/osf/registrations/apps.py
providers/io/osf/registrations/apps.py
from share.provider import ProviderAppConfig

from .harvester import OSFRegistrationsHarvester


class AppConfig(ProviderAppConfig):
    name = 'providers.io.osf.registrations'
    version = '0.0.1'
    title = 'osf_registrations'
    long_title = 'Open Science Framework Registrations'
    home_page = 'http://api.osf.io/registrations/'
    harvester = OSFRegistrationsHarvester
Add the app config for osf registrations
Add the app config for osf registrations
Python
apache-2.0
laurenbarker/SHARE,aaxelb/SHARE,aaxelb/SHARE,zamattiac/SHARE,zamattiac/SHARE,laurenbarker/SHARE,CenterForOpenScience/SHARE,laurenbarker/SHARE,aaxelb/SHARE,zamattiac/SHARE,CenterForOpenScience/SHARE,CenterForOpenScience/SHARE
04fa680f4be4afc44dc0df3834b096d8fa7a05ac
nbgrader/tests/apps/test_nbgrader_update.py
nbgrader/tests/apps/test_nbgrader_update.py
from os.path import join

from .. import run_nbgrader
from .base import BaseTestApp


class TestNbGraderUpdate(BaseTestApp):

    def test_help(self):
        """Does the help display without error?"""
        run_nbgrader(["update", "--help-all"])

    def test_no_args(self):
        """Is there an error if no arguments are given?"""
        run_nbgrader(["update"], retcode=1)

    def test_update(self, db, course_dir):
        with open("nbgrader_config.py", "a") as fh:
            fh.write("""c.NbGrader.db_assignments = [dict(name='ps1', duedate='2015-02-02 14:58:23.948203 PST')]\n""")
            fh.write("""c.NbGrader.db_students = [dict(id="foo"), dict(id="bar")]""")

        self._copy_file(join("files", "test-v0.ipynb"), join(course_dir, "source", "ps1", "p1.ipynb"))
        run_nbgrader(["assign", "ps1", "--db", db], retcode=1)

        # now update the metadata
        run_nbgrader(["update", course_dir])

        # now assign should succeed
        run_nbgrader(["assign", "ps1", "--db", db])

        # autograde should fail on old metadata, too
        self._copy_file(join("files", "test-v0.ipynb"), join(course_dir, "submitted", "foo", "ps1", "p1.ipynb"))
        run_nbgrader(["autograde", "ps1", "--db", db], retcode=1)

        # now update the metadata
        run_nbgrader(["update", course_dir])

        # now autograde should succeed
        run_nbgrader(["autograde", "ps1", "--db", db])
Add tests for nbgrader update
Add tests for nbgrader update
Python
bsd-3-clause
jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jupyter/nbgrader,jupyter/nbgrader,jhamrick/nbgrader,jhamrick/nbgrader,jupyter/nbgrader
5ef616d0563f9e4f29ef7eaa3c163d24cf3e131f
spyder_memory_profiler/widgets/tests/test_memoryprofiler.py
spyder_memory_profiler/widgets/tests/test_memoryprofiler.py
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) Spyder Project Developers
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Tests for memoryprofiler.py."""

# Standard library imports
import os

# Third party imports
from pytestqt import qtbot
from qtpy.QtCore import Qt
from spyder.utils.qthelpers import qapplication
MAIN_APP = qapplication()

# Local imports
from spyder_memory_profiler.widgets.memoryprofiler import MemoryProfilerWidget

try:
    from unittest.mock import Mock
except ImportError:
    from mock import Mock  # Python 2

TEST_SCRIPT = \
"""@profile
def foo():
    a = [1] * (10 ** 6)
    b = [2] * (2 * 10 ** 7)
    del b
    return a
foo()"""


def test_profile_and_display_results(qtbot, tmpdir, monkeypatch):
    """Run profiler on simple script and check that results are okay."""
    os.chdir(tmpdir.strpath)
    testfilename = tmpdir.join('test_foo.py').strpath
    with open(testfilename, 'w') as f:
        f.write(TEST_SCRIPT)

    MockQMessageBox = Mock()
    monkeypatch.setattr(
        'spyder_memory_profiler.widgets.memoryprofiler.QMessageBox',
        MockQMessageBox)

    widget = MemoryProfilerWidget(None)
    qtbot.addWidget(widget)
    widget.analyze(testfilename)
    qtbot.wait(2000)  # wait for tests to run

    MockQMessageBox.assert_not_called()
    dt = widget.datatree
    assert dt.topLevelItemCount() == 1  # number of functions profiled
    top = dt.topLevelItem(0)
    assert top.data(0, Qt.DisplayRole).startswith('foo ')
    assert top.childCount() == 6
    for i in range(6):
        assert top.child(i).data(0, Qt.DisplayRole) == i + 1  # line no
    # column 2 has increment (in MiB); displayed as 'xxx MiB' so need to strip
    # last 4 characters
    assert float(top.child(2).data(2, Qt.DisplayRole)[:-4]) >= 7  # increment (MiB)
    assert float(top.child(2).data(2, Qt.DisplayRole)[:-4]) <= 8
    assert float(top.child(3).data(2, Qt.DisplayRole)[:-4]) >= 150
    assert float(top.child(3).data(2, Qt.DisplayRole)[:-4]) <= 160
    assert float(top.child(4).data(2, Qt.DisplayRole)[:-4]) >= -160
    assert float(top.child(4).data(2, Qt.DisplayRole)[:-4]) <= -150
    assert float(top.child(5).data(2, Qt.DisplayRole)[:-4]) == 0
Add simple integration test for memory profiler
Add simple integration test for memory profiler
Python
mit
jitseniesen/spyder-memory-profiler,jitseniesen/spyder-memory-profiler,spyder-ide/spyder.memory_profiler
efd3fbf51be25b46e3d5dd0cb76aaab60de7e4c8
tryit.py
tryit.py
import json

from ranch import Address, InvalidAddressException

filename = input('Read data from: [data/export.json] ')
if filename == '':
    filename = 'data/export.json'

with open(filename, 'r') as data:
    specs = json.load(data)

a = Address(specs)

while not a.is_valid():
    fields = a.get_field_types()
    last_field = fields[-1]
    if len(fields) > 1:
        for field in fields[:-1]:
            if field[0] not in a.fields:
                last_field = field
                break
    try:
        a.set_field(last_field[0], input(str(last_field[0]) + ': '))
    except InvalidAddressException as e:
        print('Error:', str(e))

print(a)
Add a file allowing you to try it manually
Add a file allowing you to try it manually
Python
apache-2.0
3DHubs/Ranch
d32414164552f48226842176e05229f6895e3c1d
wafer/tests/utils.py
wafer/tests/utils.py
"""Utilities for testing wafer.""" from django.contrib.auth import get_user_model from django.contrib.auth.models import Permission def create_user(username, email=None, superuser=False, perms=()): if superuser: create = get_user_model().objects.create_superuser else: create = get_user_model().objects.create_user if email is None: email = "%s@example.com" % username user = create(username, email, "%s_password" % username) for codename in perms: perm = Permission.objects.get(codename=codename) user.user_permissions.add(perm) if perms: user = get_user_model().objects.get(pk=user.pk) return user
Add test utility for creating users.
Add test utility for creating users.
Python
isc
CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer
f30d459b8527074e50de504695491ad17bb18f0e
tests/pytests/unit/states/test_saltmod.py
tests/pytests/unit/states/test_saltmod.py
import pytest

import salt.modules.saltutil as saltutil
import salt.states.saltmod as saltmod
from tests.support.mock import create_autospec, patch


@pytest.fixture(autouse=True)
def setup_loader(request):
    setup_loader_modules = {saltmod: {"__opts__": {"__role": "testsuite"}}}
    with pytest.helpers.loader_mock(request, setup_loader_modules) as loader_mock:
        yield loader_mock


@pytest.fixture
def fake_cmd():
    fake_cmd = create_autospec(saltutil.cmd)
    with patch.dict(saltmod.__salt__, {"saltutil.cmd": fake_cmd}):
        yield fake_cmd


@pytest.mark.parametrize(
    "exclude",
    [True, False],
)
def test_exclude_parameter_gets_passed(exclude, fake_cmd):
    """
    Smoke test for salt.states.saltmod.state(). Ensures that we don't
    take an exception if optional parameters are not specified in
    __opts__ or __env__.
    """
    args = ("webserver_setup", "webserver2")
    expected_exclude = exclude
    kwargs = {
        "tgt_type": "glob",
        "exclude": expected_exclude,
        "highstate": True,
    }

    saltmod.state(*args, **kwargs)

    call = fake_cmd.call_args[1]
    assert call["kwarg"]["exclude"] == expected_exclude


def test_exclude_parameter_is_not_passed_if_not_provided(fake_cmd):
    # Make sure we don't barf on existing behavior
    args = ("webserver_setup", "webserver2")
    kwargs_without_exclude = {
        "tgt_type": "glob",
        "highstate": True,
    }

    saltmod.state(*args, **kwargs_without_exclude)

    call = fake_cmd.call_args[1]
    assert "exclude" not in call["kwarg"]
Add tests for exclude passing
Add tests for exclude passing
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
6143408507468c1718999bc2bc16d7e394741e29
tests/unit/utils/test_timed_subprocess.py
tests/unit/utils/test_timed_subprocess.py
# -*- coding: utf-8 -*-

# Import python libs
from __future__ import absolute_import, print_function, unicode_literals

# Import Salt Testing libs
from tests.support.unit import TestCase

# Import salt libs
import salt.utils.timed_subprocess as timed_subprocess


class TestTimedSubprocess(TestCase):

    def test_timedproc_with_shell_true_and_list_args(self):
        '''
        This test confirms the fix for the regression introduced in 1f7d50d.
        The TimedProc dunder init would result in a traceback if the args were
        passed as a list and shell=True was set.
        '''
        p = timed_subprocess.TimedProc(['echo', 'foo'], shell=True)
        del p  # Don't need this anymore
Add unit test for TimedProc regression
Add unit test for TimedProc regression
Python
apache-2.0
saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt,saltstack/salt
39fc0fe91ca4bf787ceeab9ff594168f70fe0dba
src/python2/dump_source.py
src/python2/dump_source.py
# Import the JModelica.org Python packages
import pymodelica
from pymodelica.compiler_wrappers import ModelicaCompiler

# Create a compiler and compiler target object
mc = ModelicaCompiler()

# Build trees as if for an FMU or Model Exchange v 1.0
#target = mc.create_target_object("me", "1.0")

source = mc.parse_model("CauerLowPassAnalog.mo")

indent_amount = 2

def dump(src, fid, indent=0):
    ind = " " * (indent_amount * indent)
    try:
        fid.write(ind + src.getNodeName() + "\n")
    except:
        fid.write(ind + "exception: " + str(src) + "\n")
    try:
        for idx in range(src.numChild):
            dump(src.children[idx], fid, indent+1)
    except:
        fid.write(ind + "(exception)\n")

# dump the filter instance
with open('out.txt', 'w') as fid:
    dump(source, fid, 0)

print "DONE!"
Add a new version of dump source that actually marches along the nodes
Add a new version of dump source that actually marches along the nodes

The content of the nodes needs to be done better. However, this example does show that we can access the elements of the source AST, including annotations.
Python
mit
michael-okeefe/soep-sandbox
a50b96ece7db9b732a6dc96c6d981588a5760311
test_builder.py
test_builder.py
import re

try:
    import yaml
except:
    print('PyYAML not installed')

from pathlib import Path

def mkdir(path):
    try:
        path.mkdir(parents=True)
    except FileExistsError:
        pass

def test_loop(path, c_path, cpp_path):
    for file in path.glob('**/*.yaml'):
        each_test(path, file, c_path, cpp_path)

def each_test(path, file, c_path, cpp_path):
    with file.open() as istream:
        test = yaml.load(istream)
    test_keys = test.keys()
    try:
        tables = re.findall('([a-zA-Z\d\.\_\-]+)', test['table_variable_name'])
    except KeyError:
        pass
    else:
        test['table_variable_name'] = tables
    test_file = file.relative_to(path)
    c_file = (c_path / test_file).with_suffix('.cpp')
    if not c_file.exists():
        mkdir(c_file.parent)  # create the containing directory, then the file
        c_file.touch()
    with c_file.open('w') as ostream:
        yaml.dump(test, ostream)
    cpp_file = (cpp_path / test_file).with_suffix('.cpp')
    if not cpp_file.exists():
        mkdir(cpp_file.parent)
        cpp_file.touch()
    with cpp_file.open('w') as ostream:
        yaml.dump(test, ostream)

def main():
    cwd_path = Path('.')
    base_path = cwd_path / 'rethinkdb' / 'test'
    polyglot_path = base_path / 'rql_test' / 'src'
    tests_path = cwd_path / 'test'
    new_test_c_path = (tests_path / 'c' / 'polyglot')
    mkdir(new_test_c_path)
    new_test_c_path = new_test_c_path.resolve()
    new_test_cpp_path = (tests_path / 'cpp' / 'polyglot')
    mkdir(new_test_cpp_path)
    new_test_cpp_path = new_test_cpp_path.resolve()
    # pass the output directories through to the per-file test conversion
    test_loop(polyglot_path, new_test_c_path, new_test_cpp_path)

main()
Add script to convert tests from RethinkDB core.
Add script to convert tests from RethinkDB core.
Python
apache-2.0
grandquista/ReQL-Core,grandquista/ReQL-Core,grandquista/ReQL-Core,grandquista/ReQL-Core
e5ad0f3029df610a308c107a640de438f62eb00b
tests/chainer_tests/training_tests/triggers_tests/test_early_stopping_trigger.py
tests/chainer_tests/training_tests/triggers_tests/test_early_stopping_trigger.py
import unittest

import chainer
import numpy

from chainer import testing
from chainer import training
from chainer.training import triggers
from chainer.training import util


class DummyUpdater(training.Updater):

    def __init__(self):
        self.iteration = 0

    def finalize(self):
        pass

    def get_all_optimizers(self):
        return {}

    def update(self):
        self.iteration += 1

    @property
    def epoch(self):
        return 1

    @property
    def is_new_epoch(self):
        return False


def _test_trigger(self, trigger, key, accuracies, expected):
    updater = DummyUpdater()
    trainer = training.Trainer(updater)
    for accuracy, expected in zip(accuracies, expected):
        updater.update()
        trainer.observation = {key: accuracy}
        self.assertEqual(trigger(trainer), expected)


class TestEarlyStoppingTrigger(unittest.TestCase):

    def test_early_stopping_trigger(self):
        key = 'main/accuracy'
        trigger = triggers.EarlyStoppingTrigger(monitor=key, patients=3,
                                                trigger=(1, 'iteration'),
                                                verbose=False)
        trigger = util.get_trigger(trigger)

        accuracies = [0.5, 0.5, 0.6, 0.7, 0.6, 0.4, 0.3, 0.2]
        accuracies = numpy.asarray([
            chainer.Variable(numpy.asarray(acc, dtype=numpy.float32))
            for acc in accuracies])

        expected = [False, False, False, False, False, False, False, True]
        _test_trigger(self, trigger, key, accuracies, expected)


testing.run_module(__name__, __file__)
Add test for early stopping trigger
Add test for early stopping trigger
Python
mit
rezoo/chainer,keisuke-umezawa/chainer,keisuke-umezawa/chainer,aonotas/chainer,chainer/chainer,hvy/chainer,niboshi/chainer,ktnyt/chainer,tkerola/chainer,niboshi/chainer,chainer/chainer,niboshi/chainer,anaruse/chainer,wkentaro/chainer,okuta/chainer,ktnyt/chainer,ronekko/chainer,hvy/chainer,hvy/chainer,jnishi/chainer,okuta/chainer,chainer/chainer,niboshi/chainer,okuta/chainer,keisuke-umezawa/chainer,hvy/chainer,ktnyt/chainer,wkentaro/chainer,chainer/chainer,wkentaro/chainer,jnishi/chainer,wkentaro/chainer,jnishi/chainer,jnishi/chainer,keisuke-umezawa/chainer,okuta/chainer,ktnyt/chainer,pfnet/chainer
Add test for early stopping trigger
import unittest import chainer import numpy from chainer import testing from chainer import training from chainer.training import triggers from chainer.training import util class DummyUpdater(training.Updater): def __init__(self): self.iteration = 0 def finalize(self): pass def get_all_optimizers(self): return {} def update(self): self.iteration += 1 @property def epoch(self): return 1 @property def is_new_epoch(self): return False def _test_trigger(self, trigger, key, accuracies, expected): updater = DummyUpdater() trainer = training.Trainer(updater) for accuracy, expected in zip(accuracies, expected): updater.update() trainer.observation = {key: accuracy} self.assertEqual(trigger(trainer), expected) class TestEarlyStoppingTrigger(unittest.TestCase): def test_early_stopping_trigger(self): key = 'main/accuracy' trigger = triggers.EarlyStoppingTrigger(monitor=key, patients=3, trigger=(1, 'iteration'), verbose=False) trigger = util.get_trigger(trigger) accuracies = [0.5, 0.5, 0.6, 0.7, 0.6, 0.4, 0.3, 0.2] accuracies = numpy.asarray([ chainer.Variable(numpy.asarray(acc, dtype=numpy.float32)) for acc in accuracies]) expected = [False, False, False, False, False, False, False, True] _test_trigger(self, trigger, key, accuracies, expected) testing.run_module(__name__, __file__)
<commit_before><commit_msg>Add test for early stopping trigger<commit_after>
import unittest import chainer import numpy from chainer import testing from chainer import training from chainer.training import triggers from chainer.training import util class DummyUpdater(training.Updater): def __init__(self): self.iteration = 0 def finalize(self): pass def get_all_optimizers(self): return {} def update(self): self.iteration += 1 @property def epoch(self): return 1 @property def is_new_epoch(self): return False def _test_trigger(self, trigger, key, accuracies, expected): updater = DummyUpdater() trainer = training.Trainer(updater) for accuracy, expected in zip(accuracies, expected): updater.update() trainer.observation = {key: accuracy} self.assertEqual(trigger(trainer), expected) class TestEarlyStoppingTrigger(unittest.TestCase): def test_early_stopping_trigger(self): key = 'main/accuracy' trigger = triggers.EarlyStoppingTrigger(monitor=key, patients=3, trigger=(1, 'iteration'), verbose=False) trigger = util.get_trigger(trigger) accuracies = [0.5, 0.5, 0.6, 0.7, 0.6, 0.4, 0.3, 0.2] accuracies = numpy.asarray([ chainer.Variable(numpy.asarray(acc, dtype=numpy.float32)) for acc in accuracies]) expected = [False, False, False, False, False, False, False, True] _test_trigger(self, trigger, key, accuracies, expected) testing.run_module(__name__, __file__)
Add test for early stopping triggerimport unittest import chainer import numpy from chainer import testing from chainer import training from chainer.training import triggers from chainer.training import util class DummyUpdater(training.Updater): def __init__(self): self.iteration = 0 def finalize(self): pass def get_all_optimizers(self): return {} def update(self): self.iteration += 1 @property def epoch(self): return 1 @property def is_new_epoch(self): return False def _test_trigger(self, trigger, key, accuracies, expected): updater = DummyUpdater() trainer = training.Trainer(updater) for accuracy, expected in zip(accuracies, expected): updater.update() trainer.observation = {key: accuracy} self.assertEqual(trigger(trainer), expected) class TestEarlyStoppingTrigger(unittest.TestCase): def test_early_stopping_trigger(self): key = 'main/accuracy' trigger = triggers.EarlyStoppingTrigger(monitor=key, patients=3, trigger=(1, 'iteration'), verbose=False) trigger = util.get_trigger(trigger) accuracies = [0.5, 0.5, 0.6, 0.7, 0.6, 0.4, 0.3, 0.2] accuracies = numpy.asarray([ chainer.Variable(numpy.asarray(acc, dtype=numpy.float32)) for acc in accuracies]) expected = [False, False, False, False, False, False, False, True] _test_trigger(self, trigger, key, accuracies, expected) testing.run_module(__name__, __file__)
<commit_before><commit_msg>Add test for early stopping trigger<commit_after>import unittest import chainer import numpy from chainer import testing from chainer import training from chainer.training import triggers from chainer.training import util class DummyUpdater(training.Updater): def __init__(self): self.iteration = 0 def finalize(self): pass def get_all_optimizers(self): return {} def update(self): self.iteration += 1 @property def epoch(self): return 1 @property def is_new_epoch(self): return False def _test_trigger(self, trigger, key, accuracies, expected): updater = DummyUpdater() trainer = training.Trainer(updater) for accuracy, expected in zip(accuracies, expected): updater.update() trainer.observation = {key: accuracy} self.assertEqual(trigger(trainer), expected) class TestEarlyStoppingTrigger(unittest.TestCase): def test_early_stopping_trigger(self): key = 'main/accuracy' trigger = triggers.EarlyStoppingTrigger(monitor=key, patients=3, trigger=(1, 'iteration'), verbose=False) trigger = util.get_trigger(trigger) accuracies = [0.5, 0.5, 0.6, 0.7, 0.6, 0.4, 0.3, 0.2] accuracies = numpy.asarray([ chainer.Variable(numpy.asarray(acc, dtype=numpy.float32)) for acc in accuracies]) expected = [False, False, False, False, False, False, False, True] _test_trigger(self, trigger, key, accuracies, expected) testing.run_module(__name__, __file__)
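The test above drives Chainer's EarlyStoppingTrigger against a stub updater; in real training the same trigger is handed to a Trainer as its stop trigger. A minimal sketch, assuming the trigger keywords exactly as spelled in the test (including the historical `patients` spelling) and a pre-built `updater` that is not part of the commit:

from chainer import training
from chainer.training import triggers

# Stop once 'validation/main/accuracy' stops improving for 3 checks.
stop_trigger = triggers.EarlyStoppingTrigger(
    monitor='validation/main/accuracy',
    patients=3,               # historical Chainer spelling of "patience"
    trigger=(1, 'epoch'))

# `updater` stands in for a real training.StandardUpdater.
trainer = training.Trainer(updater, stop_trigger=stop_trigger)
trainer.run()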
3644df1b645d4fd607f22b24c5e676644be4a9da
Lib/test/test_bsddb.py
Lib/test/test_bsddb.py
#! /usr/bin/env python """Test script for the bsddb C module Roger E. Masse """ import bsddb import tempfile from test_support import verbose def test(openmethod, what): if verbose: print '\nTesting: ', what fname = tempfile.mktemp() f = openmethod(fname, 'c') if verbose: print 'creation...' f['0'] = '' f['a'] = 'Guido' f['b'] = 'van' f['c'] = 'Rossum' f['d'] = 'invented' f['f'] = 'Python' if verbose: print '%s %s %s' % (f['a'], f['b'], f['c']) if what == 'BTree' : if verbose: print 'key ordering...' f.set_location(f.first()[0]) while 1: try: rec = f.next() except KeyError: if rec <> f.last(): print 'Error, last <> last!' f.previous() break if verbose: print rec if not f.has_key('a'): print 'Error, missing key!' f.sync() f.close() if verbose: print 'modification...' f = openmethod(fname, 'w') f['d'] = 'discovered' if verbose: print 'access...' for key in f.keys(): word = f[key] if verbose: print word f.close() types = [(bsddb.btopen, 'BTree'), (bsddb.hashopen, 'Hash Table'), # (bsddb.rnopen,'Record Numbers'), 'put' for RECNO for bsddb 1.85 # appears broken... at least on # Solaris Intel - rmasse 1/97 ] for type in types: test(type[0], type[1])
Test script for the bsddb C extension module.
Test script for the bsddb C extension module.
Python
mit
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
Test script for the bsddb C extension module.
#! /usr/bin/env python """Test script for the bsddb C module Roger E. Masse """ import bsddb import tempfile from test_support import verbose def test(openmethod, what): if verbose: print '\nTesting: ', what fname = tempfile.mktemp() f = openmethod(fname, 'c') if verbose: print 'creation...' f['0'] = '' f['a'] = 'Guido' f['b'] = 'van' f['c'] = 'Rossum' f['d'] = 'invented' f['f'] = 'Python' if verbose: print '%s %s %s' % (f['a'], f['b'], f['c']) if what == 'BTree' : if verbose: print 'key ordering...' f.set_location(f.first()[0]) while 1: try: rec = f.next() except KeyError: if rec <> f.last(): print 'Error, last <> last!' f.previous() break if verbose: print rec if not f.has_key('a'): print 'Error, missing key!' f.sync() f.close() if verbose: print 'modification...' f = openmethod(fname, 'w') f['d'] = 'discovered' if verbose: print 'access...' for key in f.keys(): word = f[key] if verbose: print word f.close() types = [(bsddb.btopen, 'BTree'), (bsddb.hashopen, 'Hash Table'), # (bsddb.rnopen,'Record Numbers'), 'put' for RECNO for bsddb 1.85 # appears broken... at least on # Solaris Intel - rmasse 1/97 ] for type in types: test(type[0], type[1])
<commit_before><commit_msg>Test script for the bsddb C extension module.<commit_after>
#! /usr/bin/env python """Test script for the bsddb C module Roger E. Masse """ import bsddb import tempfile from test_support import verbose def test(openmethod, what): if verbose: print '\nTesting: ', what fname = tempfile.mktemp() f = openmethod(fname, 'c') if verbose: print 'creation...' f['0'] = '' f['a'] = 'Guido' f['b'] = 'van' f['c'] = 'Rossum' f['d'] = 'invented' f['f'] = 'Python' if verbose: print '%s %s %s' % (f['a'], f['b'], f['c']) if what == 'BTree' : if verbose: print 'key ordering...' f.set_location(f.first()[0]) while 1: try: rec = f.next() except KeyError: if rec <> f.last(): print 'Error, last <> last!' f.previous() break if verbose: print rec if not f.has_key('a'): print 'Error, missing key!' f.sync() f.close() if verbose: print 'modification...' f = openmethod(fname, 'w') f['d'] = 'discovered' if verbose: print 'access...' for key in f.keys(): word = f[key] if verbose: print word f.close() types = [(bsddb.btopen, 'BTree'), (bsddb.hashopen, 'Hash Table'), # (bsddb.rnopen,'Record Numbers'), 'put' for RECNO for bsddb 1.85 # appears broken... at least on # Solaris Intel - rmasse 1/97 ] for type in types: test(type[0], type[1])
Test script for the bsddb C extension module.#! /usr/bin/env python """Test script for the bsddb C module Roger E. Masse """ import bsddb import tempfile from test_support import verbose def test(openmethod, what): if verbose: print '\nTesting: ', what fname = tempfile.mktemp() f = openmethod(fname, 'c') if verbose: print 'creation...' f['0'] = '' f['a'] = 'Guido' f['b'] = 'van' f['c'] = 'Rossum' f['d'] = 'invented' f['f'] = 'Python' if verbose: print '%s %s %s' % (f['a'], f['b'], f['c']) if what == 'BTree' : if verbose: print 'key ordering...' f.set_location(f.first()[0]) while 1: try: rec = f.next() except KeyError: if rec <> f.last(): print 'Error, last <> last!' f.previous() break if verbose: print rec if not f.has_key('a'): print 'Error, missing key!' f.sync() f.close() if verbose: print 'modification...' f = openmethod(fname, 'w') f['d'] = 'discovered' if verbose: print 'access...' for key in f.keys(): word = f[key] if verbose: print word f.close() types = [(bsddb.btopen, 'BTree'), (bsddb.hashopen, 'Hash Table'), # (bsddb.rnopen,'Record Numbers'), 'put' for RECNO for bsddb 1.85 # appears broken... at least on # Solaris Intel - rmasse 1/97 ] for type in types: test(type[0], type[1])
<commit_before><commit_msg>Test script for the bsddb C extension module.<commit_after>#! /usr/bin/env python """Test script for the bsddb C module Roger E. Masse """ import bsddb import tempfile from test_support import verbose def test(openmethod, what): if verbose: print '\nTesting: ', what fname = tempfile.mktemp() f = openmethod(fname, 'c') if verbose: print 'creation...' f['0'] = '' f['a'] = 'Guido' f['b'] = 'van' f['c'] = 'Rossum' f['d'] = 'invented' f['f'] = 'Python' if verbose: print '%s %s %s' % (f['a'], f['b'], f['c']) if what == 'BTree' : if verbose: print 'key ordering...' f.set_location(f.first()[0]) while 1: try: rec = f.next() except KeyError: if rec <> f.last(): print 'Error, last <> last!' f.previous() break if verbose: print rec if not f.has_key('a'): print 'Error, missing key!' f.sync() f.close() if verbose: print 'modification...' f = openmethod(fname, 'w') f['d'] = 'discovered' if verbose: print 'access...' for key in f.keys(): word = f[key] if verbose: print word f.close() types = [(bsddb.btopen, 'BTree'), (bsddb.hashopen, 'Hash Table'), # (bsddb.rnopen,'Record Numbers'), 'put' for RECNO for bsddb 1.85 # appears broken... at least on # Solaris Intel - rmasse 1/97 ] for type in types: test(type[0], type[1])
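The file above targets the Python 1.x-era bsddb module (note the `<>` operator and print statements), which no longer ships with Python. A rough modern analogue of its hash-table path, sketched with the standard-library dbm module (the module choice and byte keys are assumptions, not from the commit):

import dbm
import os
import tempfile

path = os.path.join(tempfile.mkdtemp(), 'test.db')
db = dbm.open(path, 'c')          # 'c': create the database if missing
db[b'a'], db[b'b'], db[b'c'] = b'Guido', b'van', b'Rossum'
assert b'a' in db                 # has_key() equivalent
for key in db.keys():
    print(key, db[key])
db.close()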
7cb4a7290b2af1983e1e291073c0a740d9e1334e
failure/tests/test_finders.py
failure/tests/test_finders.py
# -*- coding: utf-8 -*- # Copyright (C) 2016 GoDaddy Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslotest import base class FindersTest(base.BaseTestCase): pass
Add some useful finder tests
Add some useful finder tests
Python
apache-2.0
harlowja/failure
Add some useful finder tests
# -*- coding: utf-8 -*- # Copyright (C) 2016 GoDaddy Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslotest import base class FindersTest(base.BaseTestCase): pass
<commit_before><commit_msg>Add some useful finder tests<commit_after>
# -*- coding: utf-8 -*- # Copyright (C) 2016 GoDaddy Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslotest import base class FindersTest(base.BaseTestCase): pass
Add some useful finder tests# -*- coding: utf-8 -*- # Copyright (C) 2016 GoDaddy Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslotest import base class FindersTest(base.BaseTestCase): pass
<commit_before><commit_msg>Add some useful finder tests<commit_after># -*- coding: utf-8 -*- # Copyright (C) 2016 GoDaddy Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslotest import base class FindersTest(base.BaseTestCase): pass
0f5716b10afff9ccbc17fb595cd7cc2f85b45f8f
tools/perf/page_sets/service_worker.py
tools/perf/page_sets/service_worker.py
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import page as page from telemetry.page import page_set as page_set archive_data_file_path = 'data/service_worker.json' class ServiceWorkerPageSet(page_set.PageSet): """Page set of applications using ServiceWorker""" def __init__(self): super(ServiceWorkerPageSet, self).__init__( archive_data_file=archive_data_file_path, bucket=page_set.PARTNER_BUCKET) # Why: the first application using ServiceWorker # 1st time: registration self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) # 2st time: 1st onfetch with caching self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) # 3rd time: 2nd onfetch from cache self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False))
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import page as page from telemetry.page import page_set as page_set archive_data_file_path = 'data/service_worker.json' class ServiceWorkerPageSet(page_set.PageSet): """Page set of applications using ServiceWorker""" def __init__(self): super(ServiceWorkerPageSet, self).__init__( archive_data_file=archive_data_file_path, bucket=page_set.PARTNER_BUCKET) # Why: the first application using ServiceWorker # 1st time: registration self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='first_load', make_javascript_deterministic=False)) # 2st time: 1st onfetch with caching self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='second_load', make_javascript_deterministic=False)) # 3rd time: 2nd onfetch from cache self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='third_load', make_javascript_deterministic=False))
Add name attribute to each Page in ServiceWorkerPageSet
Telemetry: Add name attribute to each Page in ServiceWorkerPageSet ServiceWorkerPerfTest loads the same page three times, but these should have different characteristics because ServiceWorker works differently. This patch gives each page load a name so we can track them separately. BUG= TEST=tools/perf/run_benchmark service_worker.service_worker Review URL: https://codereview.chromium.org/1086813003 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#325802}
Python
bsd-3-clause
Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Fireblend/chromium-crosswalk,Fireblend/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,Just-D/chromium-1,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,Fireblend/chromium-crosswalk,Chilledheart/chromium,TheTypoMaster/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Fireblend/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,axinging/chromium-crosswalk,Chilledheart/chromium,chuan9/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,Pluto-tv/chromium-crosswalk,Pluto-tv/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,Just-D/chromium-1,Fireblend/chromium-crosswalk,Chilledheart/chromium,Pluto-tv/chromium-crosswalk
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import page as page from telemetry.page import page_set as page_set archive_data_file_path = 'data/service_worker.json' class ServiceWorkerPageSet(page_set.PageSet): """Page set of applications using ServiceWorker""" def __init__(self): super(ServiceWorkerPageSet, self).__init__( archive_data_file=archive_data_file_path, bucket=page_set.PARTNER_BUCKET) # Why: the first application using ServiceWorker # 1st time: registration self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) # 2st time: 1st onfetch with caching self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) # 3rd time: 2nd onfetch from cache self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) Telemetry: Add name attribute to each Page in ServiceWorkerPageSet ServiceWorkerPerfTest loads the same page three times, but these should have different characteristics because ServiceWorker works differently. This patch gives each page load a name so we can track them separately. BUG= TEST=tools/perf/run_benchmark service_worker.service_worker Review URL: https://codereview.chromium.org/1086813003 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#325802}
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import page as page from telemetry.page import page_set as page_set archive_data_file_path = 'data/service_worker.json' class ServiceWorkerPageSet(page_set.PageSet): """Page set of applications using ServiceWorker""" def __init__(self): super(ServiceWorkerPageSet, self).__init__( archive_data_file=archive_data_file_path, bucket=page_set.PARTNER_BUCKET) # Why: the first application using ServiceWorker # 1st time: registration self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='first_load', make_javascript_deterministic=False)) # 2st time: 1st onfetch with caching self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='second_load', make_javascript_deterministic=False)) # 3rd time: 2nd onfetch from cache self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='third_load', make_javascript_deterministic=False))
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import page as page from telemetry.page import page_set as page_set archive_data_file_path = 'data/service_worker.json' class ServiceWorkerPageSet(page_set.PageSet): """Page set of applications using ServiceWorker""" def __init__(self): super(ServiceWorkerPageSet, self).__init__( archive_data_file=archive_data_file_path, bucket=page_set.PARTNER_BUCKET) # Why: the first application using ServiceWorker # 1st time: registration self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) # 2st time: 1st onfetch with caching self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) # 3rd time: 2nd onfetch from cache self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) <commit_msg>Telemetry: Add name attribute to each Page in ServiceWorkerPageSet ServiceWorkerPerfTest loads the same page three times, but these should have different characteristics because ServiceWorker works differently. This patch gives each page load a name so we can track them separately. BUG= TEST=tools/perf/run_benchmark service_worker.service_worker Review URL: https://codereview.chromium.org/1086813003 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#325802}<commit_after>
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import page as page from telemetry.page import page_set as page_set archive_data_file_path = 'data/service_worker.json' class ServiceWorkerPageSet(page_set.PageSet): """Page set of applications using ServiceWorker""" def __init__(self): super(ServiceWorkerPageSet, self).__init__( archive_data_file=archive_data_file_path, bucket=page_set.PARTNER_BUCKET) # Why: the first application using ServiceWorker # 1st time: registration self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='first_load', make_javascript_deterministic=False)) # 2st time: 1st onfetch with caching self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='second_load', make_javascript_deterministic=False)) # 3rd time: 2nd onfetch from cache self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='third_load', make_javascript_deterministic=False))
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import page as page from telemetry.page import page_set as page_set archive_data_file_path = 'data/service_worker.json' class ServiceWorkerPageSet(page_set.PageSet): """Page set of applications using ServiceWorker""" def __init__(self): super(ServiceWorkerPageSet, self).__init__( archive_data_file=archive_data_file_path, bucket=page_set.PARTNER_BUCKET) # Why: the first application using ServiceWorker # 1st time: registration self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) # 2st time: 1st onfetch with caching self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) # 3rd time: 2nd onfetch from cache self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) Telemetry: Add name attribute to each Page in ServiceWorkerPageSet ServiceWorkerPerfTest loads the same page three times, but these should have different characteristics because ServiceWorker works differently. This patch gives each page load a name so we can track them separately. BUG= TEST=tools/perf/run_benchmark service_worker.service_worker Review URL: https://codereview.chromium.org/1086813003 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#325802}# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import page as page from telemetry.page import page_set as page_set archive_data_file_path = 'data/service_worker.json' class ServiceWorkerPageSet(page_set.PageSet): """Page set of applications using ServiceWorker""" def __init__(self): super(ServiceWorkerPageSet, self).__init__( archive_data_file=archive_data_file_path, bucket=page_set.PARTNER_BUCKET) # Why: the first application using ServiceWorker # 1st time: registration self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='first_load', make_javascript_deterministic=False)) # 2st time: 1st onfetch with caching self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='second_load', make_javascript_deterministic=False)) # 3rd time: 2nd onfetch from cache self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='third_load', make_javascript_deterministic=False))
<commit_before># Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import page as page from telemetry.page import page_set as page_set archive_data_file_path = 'data/service_worker.json' class ServiceWorkerPageSet(page_set.PageSet): """Page set of applications using ServiceWorker""" def __init__(self): super(ServiceWorkerPageSet, self).__init__( archive_data_file=archive_data_file_path, bucket=page_set.PARTNER_BUCKET) # Why: the first application using ServiceWorker # 1st time: registration self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) # 2st time: 1st onfetch with caching self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) # 3rd time: 2nd onfetch from cache self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, make_javascript_deterministic=False)) <commit_msg>Telemetry: Add name attribute to each Page in ServiceWorkerPageSet ServiceWorkerPerfTest loads the same page three times, but these should have different characteristics because ServiceWorker works differently. This patch gives each page load a name so we can track them separately. BUG= TEST=tools/perf/run_benchmark service_worker.service_worker Review URL: https://codereview.chromium.org/1086813003 Cr-Commit-Position: 972c6d2dc6dd5efdad1377c0d224e03eb8f276f7@{#325802}<commit_after># Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.page import page as page from telemetry.page import page_set as page_set archive_data_file_path = 'data/service_worker.json' class ServiceWorkerPageSet(page_set.PageSet): """Page set of applications using ServiceWorker""" def __init__(self): super(ServiceWorkerPageSet, self).__init__( archive_data_file=archive_data_file_path, bucket=page_set.PARTNER_BUCKET) # Why: the first application using ServiceWorker # 1st time: registration self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='first_load', make_javascript_deterministic=False)) # 2st time: 1st onfetch with caching self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='second_load', make_javascript_deterministic=False)) # 3rd time: 2nd onfetch from cache self.AddUserStory(page.Page( 'https://jakearchibald.github.io/trained-to-thrill/', self, name='third_load', make_javascript_deterministic=False))
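The diff above differs from the old file only in the three added `name=` arguments, so each load of the same URL can be tracked separately. Since the three AddUserStory calls are otherwise identical, an equivalent loop-based form (a sketch, not part of the commit) makes that intent explicit:

url = 'https://jakearchibald.github.io/trained-to-thrill/'
# 1st load registers the ServiceWorker; later loads exercise onfetch/cache.
for name in ('first_load', 'second_load', 'third_load'):
    self.AddUserStory(page.Page(
        url, self, name=name, make_javascript_deterministic=False))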
b2a0247746756cc86074754bc993a757d6702b12
hw02/exercise-02-01.py
hw02/exercise-02-01.py
''' For Homework 02, Exercieses 01-02. EdX Learning From Data course. Jonathan Miller ''' import random # FUNCTIONS ########################### def runTrial(numCoins, numFlips): def flipCoin(): if random.random() > 0.5: return head else: return tail def findv1(vList): return vList[0] def findvrand(vList): return random.choice(vList) def findvmin(vList): vmin = 1. for v in vList: if v < vmin: vmin = v return vmin def sequencesToRatios(flipSequences): v1 = 0 vrand = 0 vmin = 0 vList = [] for sequence in flipSequences: numHeads = 0 #print sequence for flip in sequence: if flip == head: numHeads += 1. vList.append( numHeads / numFlips) #print vList v1 = findv1(vList) vrand = findvrand(vList) vmin = findvmin(vList) return v1, vrand, vmin flipSequences = [] v1 = 0 vrand = 0 vmin = 0 for coin in range(numCoins): coinFlipResults = "" for flip in range(numFlips): coinFlipResults += flipCoin() flipSequences.append(coinFlipResults) v1, vrand, vmin = sequencesToRatios(flipSequences) return v1, vrand, vmin # MAIN ########################### numTrials = 100000 #numTrials = 1 numCoins = 1000 numFlips = 10 v1Exp = 0 vrandExp = 0 vminExp = 0 head = "H" tail = 't' for trial in range(numTrials): v1Trial, vrandTrial, vminTrial = runTrial(numCoins,numFlips) #print v1Trial, vrandTrial, vminTrial v1Exp += v1Trial vrandExp += vrandTrial vminExp += vminTrial v1Exp /= numTrials vrandExp /= numTrials vminExp /= numTrials print v1Exp, vrandExp, vminExp
Add coin flip simulator (hw02)
Add coin flip simulator (hw02)
Python
apache-2.0
JMill/edX-Learning-From-Data-Programming
Add coin flip simulator (hw02)
''' For Homework 02, Exercieses 01-02. EdX Learning From Data course. Jonathan Miller ''' import random # FUNCTIONS ########################### def runTrial(numCoins, numFlips): def flipCoin(): if random.random() > 0.5: return head else: return tail def findv1(vList): return vList[0] def findvrand(vList): return random.choice(vList) def findvmin(vList): vmin = 1. for v in vList: if v < vmin: vmin = v return vmin def sequencesToRatios(flipSequences): v1 = 0 vrand = 0 vmin = 0 vList = [] for sequence in flipSequences: numHeads = 0 #print sequence for flip in sequence: if flip == head: numHeads += 1. vList.append( numHeads / numFlips) #print vList v1 = findv1(vList) vrand = findvrand(vList) vmin = findvmin(vList) return v1, vrand, vmin flipSequences = [] v1 = 0 vrand = 0 vmin = 0 for coin in range(numCoins): coinFlipResults = "" for flip in range(numFlips): coinFlipResults += flipCoin() flipSequences.append(coinFlipResults) v1, vrand, vmin = sequencesToRatios(flipSequences) return v1, vrand, vmin # MAIN ########################### numTrials = 100000 #numTrials = 1 numCoins = 1000 numFlips = 10 v1Exp = 0 vrandExp = 0 vminExp = 0 head = "H" tail = 't' for trial in range(numTrials): v1Trial, vrandTrial, vminTrial = runTrial(numCoins,numFlips) #print v1Trial, vrandTrial, vminTrial v1Exp += v1Trial vrandExp += vrandTrial vminExp += vminTrial v1Exp /= numTrials vrandExp /= numTrials vminExp /= numTrials print v1Exp, vrandExp, vminExp
<commit_before><commit_msg>Add coin flip simulator (hw02)<commit_after>
''' For Homework 02, Exercieses 01-02. EdX Learning From Data course. Jonathan Miller ''' import random # FUNCTIONS ########################### def runTrial(numCoins, numFlips): def flipCoin(): if random.random() > 0.5: return head else: return tail def findv1(vList): return vList[0] def findvrand(vList): return random.choice(vList) def findvmin(vList): vmin = 1. for v in vList: if v < vmin: vmin = v return vmin def sequencesToRatios(flipSequences): v1 = 0 vrand = 0 vmin = 0 vList = [] for sequence in flipSequences: numHeads = 0 #print sequence for flip in sequence: if flip == head: numHeads += 1. vList.append( numHeads / numFlips) #print vList v1 = findv1(vList) vrand = findvrand(vList) vmin = findvmin(vList) return v1, vrand, vmin flipSequences = [] v1 = 0 vrand = 0 vmin = 0 for coin in range(numCoins): coinFlipResults = "" for flip in range(numFlips): coinFlipResults += flipCoin() flipSequences.append(coinFlipResults) v1, vrand, vmin = sequencesToRatios(flipSequences) return v1, vrand, vmin # MAIN ########################### numTrials = 100000 #numTrials = 1 numCoins = 1000 numFlips = 10 v1Exp = 0 vrandExp = 0 vminExp = 0 head = "H" tail = 't' for trial in range(numTrials): v1Trial, vrandTrial, vminTrial = runTrial(numCoins,numFlips) #print v1Trial, vrandTrial, vminTrial v1Exp += v1Trial vrandExp += vrandTrial vminExp += vminTrial v1Exp /= numTrials vrandExp /= numTrials vminExp /= numTrials print v1Exp, vrandExp, vminExp
Add coin flip simulator (hw02)''' For Homework 02, Exercieses 01-02. EdX Learning From Data course. Jonathan Miller ''' import random # FUNCTIONS ########################### def runTrial(numCoins, numFlips): def flipCoin(): if random.random() > 0.5: return head else: return tail def findv1(vList): return vList[0] def findvrand(vList): return random.choice(vList) def findvmin(vList): vmin = 1. for v in vList: if v < vmin: vmin = v return vmin def sequencesToRatios(flipSequences): v1 = 0 vrand = 0 vmin = 0 vList = [] for sequence in flipSequences: numHeads = 0 #print sequence for flip in sequence: if flip == head: numHeads += 1. vList.append( numHeads / numFlips) #print vList v1 = findv1(vList) vrand = findvrand(vList) vmin = findvmin(vList) return v1, vrand, vmin flipSequences = [] v1 = 0 vrand = 0 vmin = 0 for coin in range(numCoins): coinFlipResults = "" for flip in range(numFlips): coinFlipResults += flipCoin() flipSequences.append(coinFlipResults) v1, vrand, vmin = sequencesToRatios(flipSequences) return v1, vrand, vmin # MAIN ########################### numTrials = 100000 #numTrials = 1 numCoins = 1000 numFlips = 10 v1Exp = 0 vrandExp = 0 vminExp = 0 head = "H" tail = 't' for trial in range(numTrials): v1Trial, vrandTrial, vminTrial = runTrial(numCoins,numFlips) #print v1Trial, vrandTrial, vminTrial v1Exp += v1Trial vrandExp += vrandTrial vminExp += vminTrial v1Exp /= numTrials vrandExp /= numTrials vminExp /= numTrials print v1Exp, vrandExp, vminExp
<commit_before><commit_msg>Add coin flip simulator (hw02)<commit_after>''' For Homework 02, Exercieses 01-02. EdX Learning From Data course. Jonathan Miller ''' import random # FUNCTIONS ########################### def runTrial(numCoins, numFlips): def flipCoin(): if random.random() > 0.5: return head else: return tail def findv1(vList): return vList[0] def findvrand(vList): return random.choice(vList) def findvmin(vList): vmin = 1. for v in vList: if v < vmin: vmin = v return vmin def sequencesToRatios(flipSequences): v1 = 0 vrand = 0 vmin = 0 vList = [] for sequence in flipSequences: numHeads = 0 #print sequence for flip in sequence: if flip == head: numHeads += 1. vList.append( numHeads / numFlips) #print vList v1 = findv1(vList) vrand = findvrand(vList) vmin = findvmin(vList) return v1, vrand, vmin flipSequences = [] v1 = 0 vrand = 0 vmin = 0 for coin in range(numCoins): coinFlipResults = "" for flip in range(numFlips): coinFlipResults += flipCoin() flipSequences.append(coinFlipResults) v1, vrand, vmin = sequencesToRatios(flipSequences) return v1, vrand, vmin # MAIN ########################### numTrials = 100000 #numTrials = 1 numCoins = 1000 numFlips = 10 v1Exp = 0 vrandExp = 0 vminExp = 0 head = "H" tail = 't' for trial in range(numTrials): v1Trial, vrandTrial, vminTrial = runTrial(numCoins,numFlips) #print v1Trial, vrandTrial, vminTrial v1Exp += v1Trial vrandExp += vrandTrial vminExp += vminTrial v1Exp /= numTrials vrandExp /= numTrials vminExp /= numTrials print v1Exp, vrandExp, vminExp
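The homework script above runs the classic Hoeffding coin-flip experiment by building 'H'/'t' strings one flip at a time, which is slow over 100,000 trials. A vectorized NumPy sketch of a single trial (function and variable names are illustrative, not from the commit):

import numpy as np

def run_trial(num_coins=1000, num_flips=10, rng=np.random.default_rng()):
    # Fraction of heads per coin, computed for all coins at once.
    heads = rng.integers(0, 2, size=(num_coins, num_flips)).mean(axis=1)
    return heads[0], rng.choice(heads), heads.min()  # v1, v_rand, v_min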
c67dc16e73eea093befaa03790bd8d6f1b452c9a
form_designer/tests/test_cms_plugin.py
form_designer/tests/test_cms_plugin.py
import pytest from cms import api from cms.page_rendering import render_page from django.contrib.auth.models import AnonymousUser from django.utils.crypto import get_random_string from form_designer.contrib.cms_plugins.form_designer_form.cms_plugins import FormDesignerPlugin from form_designer.models import FormDefinition, FormDefinitionField @pytest.mark.django_db def test_cms_plugin_renders_in_cms_page(rf): fd = FormDefinition.objects.create( mail_to='test@example.com', mail_subject='Someone sent you a greeting: {{ test }}' ) field = FormDefinitionField.objects.create( form_definition=fd, name='test', label=get_random_string(), field_class='django.forms.CharField', ) page = api.create_page("test", "page.html", "en") ph = page.get_placeholders()[0] api.add_plugin(ph, FormDesignerPlugin, "en", form_definition=fd) request = rf.get("/") request.user = AnonymousUser() request.current_page = page response = render_page(request, page, "fi", "test") response.render() content = response.content.decode("utf8") assert field.label in content assert "<form" in content
Add simple test for FormDesignerPlugin
Add simple test for FormDesignerPlugin
Python
bsd-3-clause
kcsry/django-form-designer,andersinno/django-form-designer,kcsry/django-form-designer,andersinno/django-form-designer,andersinno/django-form-designer-ai,andersinno/django-form-designer-ai
Add simple test for FormDesignerPlugin
import pytest from cms import api from cms.page_rendering import render_page from django.contrib.auth.models import AnonymousUser from django.utils.crypto import get_random_string from form_designer.contrib.cms_plugins.form_designer_form.cms_plugins import FormDesignerPlugin from form_designer.models import FormDefinition, FormDefinitionField @pytest.mark.django_db def test_cms_plugin_renders_in_cms_page(rf): fd = FormDefinition.objects.create( mail_to='test@example.com', mail_subject='Someone sent you a greeting: {{ test }}' ) field = FormDefinitionField.objects.create( form_definition=fd, name='test', label=get_random_string(), field_class='django.forms.CharField', ) page = api.create_page("test", "page.html", "en") ph = page.get_placeholders()[0] api.add_plugin(ph, FormDesignerPlugin, "en", form_definition=fd) request = rf.get("/") request.user = AnonymousUser() request.current_page = page response = render_page(request, page, "fi", "test") response.render() content = response.content.decode("utf8") assert field.label in content assert "<form" in content
<commit_before><commit_msg>Add simple test for FormDesignerPlugin<commit_after>
import pytest from cms import api from cms.page_rendering import render_page from django.contrib.auth.models import AnonymousUser from django.utils.crypto import get_random_string from form_designer.contrib.cms_plugins.form_designer_form.cms_plugins import FormDesignerPlugin from form_designer.models import FormDefinition, FormDefinitionField @pytest.mark.django_db def test_cms_plugin_renders_in_cms_page(rf): fd = FormDefinition.objects.create( mail_to='test@example.com', mail_subject='Someone sent you a greeting: {{ test }}' ) field = FormDefinitionField.objects.create( form_definition=fd, name='test', label=get_random_string(), field_class='django.forms.CharField', ) page = api.create_page("test", "page.html", "en") ph = page.get_placeholders()[0] api.add_plugin(ph, FormDesignerPlugin, "en", form_definition=fd) request = rf.get("/") request.user = AnonymousUser() request.current_page = page response = render_page(request, page, "fi", "test") response.render() content = response.content.decode("utf8") assert field.label in content assert "<form" in content
Add simple test for FormDesignerPluginimport pytest from cms import api from cms.page_rendering import render_page from django.contrib.auth.models import AnonymousUser from django.utils.crypto import get_random_string from form_designer.contrib.cms_plugins.form_designer_form.cms_plugins import FormDesignerPlugin from form_designer.models import FormDefinition, FormDefinitionField @pytest.mark.django_db def test_cms_plugin_renders_in_cms_page(rf): fd = FormDefinition.objects.create( mail_to='test@example.com', mail_subject='Someone sent you a greeting: {{ test }}' ) field = FormDefinitionField.objects.create( form_definition=fd, name='test', label=get_random_string(), field_class='django.forms.CharField', ) page = api.create_page("test", "page.html", "en") ph = page.get_placeholders()[0] api.add_plugin(ph, FormDesignerPlugin, "en", form_definition=fd) request = rf.get("/") request.user = AnonymousUser() request.current_page = page response = render_page(request, page, "fi", "test") response.render() content = response.content.decode("utf8") assert field.label in content assert "<form" in content
<commit_before><commit_msg>Add simple test for FormDesignerPlugin<commit_after>import pytest from cms import api from cms.page_rendering import render_page from django.contrib.auth.models import AnonymousUser from django.utils.crypto import get_random_string from form_designer.contrib.cms_plugins.form_designer_form.cms_plugins import FormDesignerPlugin from form_designer.models import FormDefinition, FormDefinitionField @pytest.mark.django_db def test_cms_plugin_renders_in_cms_page(rf): fd = FormDefinition.objects.create( mail_to='test@example.com', mail_subject='Someone sent you a greeting: {{ test }}' ) field = FormDefinitionField.objects.create( form_definition=fd, name='test', label=get_random_string(), field_class='django.forms.CharField', ) page = api.create_page("test", "page.html", "en") ph = page.get_placeholders()[0] api.add_plugin(ph, FormDesignerPlugin, "en", form_definition=fd) request = rf.get("/") request.user = AnonymousUser() request.current_page = page response = render_page(request, page, "fi", "test") response.render() content = response.content.decode("utf8") assert field.label in content assert "<form" in content
f908dd5fda528b4ce6ebaed082050348bf6f23a5
i8c/tests/test_entry_point.py
i8c/tests/test_entry_point.py
from i8c.tests import TestCase import i8c import sys class TestEntryPoint(TestCase): """Test the console scripts entry point.""" def setUp(self): self.saved_argv = sys.argv self.saved_stderr = sys.stderr def tearDown(self): sys.argv = self.saved_argv sys.stderr = self.saved_stderr def test_success_path(self): """Check the console scripts entry point success path.""" sys.argv[1:] = ["--version"] self.assertIs(i8c.run_compiler(), None) def test_failure_path(self): """Check the console scripts entry point failure path.""" sys.argv[1:] = ["-x"] sys.stderr = sys.stdout self.assertEqual(i8c.run_compiler(), 1)
Test the console scripts entry point
Test the console scripts entry point
Python
lgpl-2.1
gbenson/i8c
Test the console scripts entry point
from i8c.tests import TestCase import i8c import sys class TestEntryPoint(TestCase): """Test the console scripts entry point.""" def setUp(self): self.saved_argv = sys.argv self.saved_stderr = sys.stderr def tearDown(self): sys.argv = self.saved_argv sys.stderr = self.saved_stderr def test_success_path(self): """Check the console scripts entry point success path.""" sys.argv[1:] = ["--version"] self.assertIs(i8c.run_compiler(), None) def test_failure_path(self): """Check the console scripts entry point failure path.""" sys.argv[1:] = ["-x"] sys.stderr = sys.stdout self.assertEqual(i8c.run_compiler(), 1)
<commit_before><commit_msg>Test the console scripts entry point<commit_after>
from i8c.tests import TestCase import i8c import sys class TestEntryPoint(TestCase): """Test the console scripts entry point.""" def setUp(self): self.saved_argv = sys.argv self.saved_stderr = sys.stderr def tearDown(self): sys.argv = self.saved_argv sys.stderr = self.saved_stderr def test_success_path(self): """Check the console scripts entry point success path.""" sys.argv[1:] = ["--version"] self.assertIs(i8c.run_compiler(), None) def test_failure_path(self): """Check the console scripts entry point failure path.""" sys.argv[1:] = ["-x"] sys.stderr = sys.stdout self.assertEqual(i8c.run_compiler(), 1)
Test the console scripts entry pointfrom i8c.tests import TestCase import i8c import sys class TestEntryPoint(TestCase): """Test the console scripts entry point.""" def setUp(self): self.saved_argv = sys.argv self.saved_stderr = sys.stderr def tearDown(self): sys.argv = self.saved_argv sys.stderr = self.saved_stderr def test_success_path(self): """Check the console scripts entry point success path.""" sys.argv[1:] = ["--version"] self.assertIs(i8c.run_compiler(), None) def test_failure_path(self): """Check the console scripts entry point failure path.""" sys.argv[1:] = ["-x"] sys.stderr = sys.stdout self.assertEqual(i8c.run_compiler(), 1)
<commit_before><commit_msg>Test the console scripts entry point<commit_after>from i8c.tests import TestCase import i8c import sys class TestEntryPoint(TestCase): """Test the console scripts entry point.""" def setUp(self): self.saved_argv = sys.argv self.saved_stderr = sys.stderr def tearDown(self): sys.argv = self.saved_argv sys.stderr = self.saved_stderr def test_success_path(self): """Check the console scripts entry point success path.""" sys.argv[1:] = ["--version"] self.assertIs(i8c.run_compiler(), None) def test_failure_path(self): """Check the console scripts entry point failure path.""" sys.argv[1:] = ["-x"] sys.stderr = sys.stdout self.assertEqual(i8c.run_compiler(), 1)
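The setUp/tearDown pair above exists solely to restore sys.argv and sys.stderr between tests. Under pytest the same isolation comes from the monkeypatch fixture; a hypothetical fixture-based equivalent (not part of the commit):

import sys
import i8c

def test_failure_path(monkeypatch):
    monkeypatch.setattr(sys, 'argv', ['i8c', '-x'])
    monkeypatch.setattr(sys, 'stderr', sys.stdout)  # route the expected usage error
    assert i8c.run_compiler() == 1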
f7b875bb3d4b313e9c1e22297918d33e67633104
geotrek/altimetry/tests/test_models.py
geotrek/altimetry/tests/test_models.py
import os from django.test import TestCase from django.conf import settings from geotrek.trekking.factories import TrekFactory from geotrek.trekking.models import Trek class AltimetryMixinTest(TestCase): def test_get_elevation_chart_none(self): trek = TrekFactory.create(no_path=True) trek.get_elevation_chart_path() basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles') print(Trek._meta.model_name, trek.pk, 'en') self.assertTrue(os.path.exists(os.path.join(basefolder, '%s-%s-%s.png' % (Trek._meta.model_name, '1', 'en'))))
Add test for trek_detail_pdf with language none
Add test for trek_detail_pdf with language none
Python
bsd-2-clause
makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek
Add test for trek_detail_pdf with language none
import os from django.test import TestCase from django.conf import settings from geotrek.trekking.factories import TrekFactory from geotrek.trekking.models import Trek class AltimetryMixinTest(TestCase): def test_get_elevation_chart_none(self): trek = TrekFactory.create(no_path=True) trek.get_elevation_chart_path() basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles') print(Trek._meta.model_name, trek.pk, 'en') self.assertTrue(os.path.exists(os.path.join(basefolder, '%s-%s-%s.png' % (Trek._meta.model_name, '1', 'en'))))
<commit_before><commit_msg>Add test for trek_detail_pdf with language none<commit_after>
import os from django.test import TestCase from django.conf import settings from geotrek.trekking.factories import TrekFactory from geotrek.trekking.models import Trek class AltimetryMixinTest(TestCase): def test_get_elevation_chart_none(self): trek = TrekFactory.create(no_path=True) trek.get_elevation_chart_path() basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles') print(Trek._meta.model_name, trek.pk, 'en') self.assertTrue(os.path.exists(os.path.join(basefolder, '%s-%s-%s.png' % (Trek._meta.model_name, '1', 'en'))))
Add test for trek_detail_pdf with language noneimport os from django.test import TestCase from django.conf import settings from geotrek.trekking.factories import TrekFactory from geotrek.trekking.models import Trek class AltimetryMixinTest(TestCase): def test_get_elevation_chart_none(self): trek = TrekFactory.create(no_path=True) trek.get_elevation_chart_path() basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles') print(Trek._meta.model_name, trek.pk, 'en') self.assertTrue(os.path.exists(os.path.join(basefolder, '%s-%s-%s.png' % (Trek._meta.model_name, '1', 'en'))))
<commit_before><commit_msg>Add test for trek_detail_pdf with language none<commit_after>import os from django.test import TestCase from django.conf import settings from geotrek.trekking.factories import TrekFactory from geotrek.trekking.models import Trek class AltimetryMixinTest(TestCase): def test_get_elevation_chart_none(self): trek = TrekFactory.create(no_path=True) trek.get_elevation_chart_path() basefolder = os.path.join(settings.MEDIA_ROOT, 'profiles') print(Trek._meta.model_name, trek.pk, 'en') self.assertTrue(os.path.exists(os.path.join(basefolder, '%s-%s-%s.png' % (Trek._meta.model_name, '1', 'en'))))
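Two fragilities in the test above are worth flagging: the leftover debug print, and the hard-coded primary key '1' in the expected filename, which breaks as soon as the test database hands out a different id. A tightened assertion would use the created object's own pk (a sketch, not part of the commit):

expected = os.path.join(
    basefolder, '%s-%s-%s.png' % (Trek._meta.model_name, trek.pk, 'en'))
self.assertTrue(os.path.exists(expected))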
8a0d40f9874119084f5f7a1471cb565bb85d6938
match_to_wd.py
match_to_wd.py
import csv import pickle import sys def load_index(): with open('data/wd_issn.pkl') as inf: data = pickle.load(inf) return data wd_idx = load_index() def match(issn, eissn): for isn in [issn, eissn]: if issn != "": wd = wd_idx.get(issn) if wd is not None: return wd def main(): """ Check the WD index for each row. """ matches = 0 with open(sys.argv[2], 'wb') as csvfile: fields = ['wosid', 'title', 'issn', 'eissn', 'wikidata'] jwriter = csv.DictWriter(csvfile, fieldnames=fields) jwriter.writeheader() with open(sys.argv[1]) as infile: for n, row in enumerate(csv.DictReader(infile)): issn = row.get('issn') eissn = row.get('eissn') wd = match(issn, eissn) row['wikidata'] = wd jwriter.writerow(row) if wd is not None: matches += 1 print print '-' * 25 print "Total journals", n + 1 print "Wikidata matches", matches print "Matches ", round(matches / float(n) * 100, 2), "%" print if __name__ == "__main__": main()
Add match to wd script.
Add match to wd script.
Python
mit
lawlesst/c4l16-idhub
Add match to wd script.
import csv import pickle import sys def load_index(): with open('data/wd_issn.pkl') as inf: data = pickle.load(inf) return data wd_idx = load_index() def match(issn, eissn): for isn in [issn, eissn]: if issn != "": wd = wd_idx.get(issn) if wd is not None: return wd def main(): """ Check the WD index for each row. """ matches = 0 with open(sys.argv[2], 'wb') as csvfile: fields = ['wosid', 'title', 'issn', 'eissn', 'wikidata'] jwriter = csv.DictWriter(csvfile, fieldnames=fields) jwriter.writeheader() with open(sys.argv[1]) as infile: for n, row in enumerate(csv.DictReader(infile)): issn = row.get('issn') eissn = row.get('eissn') wd = match(issn, eissn) row['wikidata'] = wd jwriter.writerow(row) if wd is not None: matches += 1 print print '-' * 25 print "Total journals", n + 1 print "Wikidata matches", matches print "Matches ", round(matches / float(n) * 100, 2), "%" print if __name__ == "__main__": main()
<commit_before><commit_msg>Add match to wd script.<commit_after>
import csv import pickle import sys def load_index(): with open('data/wd_issn.pkl') as inf: data = pickle.load(inf) return data wd_idx = load_index() def match(issn, eissn): for isn in [issn, eissn]: if issn != "": wd = wd_idx.get(issn) if wd is not None: return wd def main(): """ Check the WD index for each row. """ matches = 0 with open(sys.argv[2], 'wb') as csvfile: fields = ['wosid', 'title', 'issn', 'eissn', 'wikidata'] jwriter = csv.DictWriter(csvfile, fieldnames=fields) jwriter.writeheader() with open(sys.argv[1]) as infile: for n, row in enumerate(csv.DictReader(infile)): issn = row.get('issn') eissn = row.get('eissn') wd = match(issn, eissn) row['wikidata'] = wd jwriter.writerow(row) if wd is not None: matches += 1 print print '-' * 25 print "Total journals", n + 1 print "Wikidata matches", matches print "Matches ", round(matches / float(n) * 100, 2), "%" print if __name__ == "__main__": main()
Add match to wd script.import csv import pickle import sys def load_index(): with open('data/wd_issn.pkl') as inf: data = pickle.load(inf) return data wd_idx = load_index() def match(issn, eissn): for isn in [issn, eissn]: if issn != "": wd = wd_idx.get(issn) if wd is not None: return wd def main(): """ Check the WD index for each row. """ matches = 0 with open(sys.argv[2], 'wb') as csvfile: fields = ['wosid', 'title', 'issn', 'eissn', 'wikidata'] jwriter = csv.DictWriter(csvfile, fieldnames=fields) jwriter.writeheader() with open(sys.argv[1]) as infile: for n, row in enumerate(csv.DictReader(infile)): issn = row.get('issn') eissn = row.get('eissn') wd = match(issn, eissn) row['wikidata'] = wd jwriter.writerow(row) if wd is not None: matches += 1 print print '-' * 25 print "Total journals", n + 1 print "Wikidata matches", matches print "Matches ", round(matches / float(n) * 100, 2), "%" print if __name__ == "__main__": main()
<commit_before><commit_msg>Add match to wd script.<commit_after>import csv import pickle import sys def load_index(): with open('data/wd_issn.pkl') as inf: data = pickle.load(inf) return data wd_idx = load_index() def match(issn, eissn): for isn in [issn, eissn]: if issn != "": wd = wd_idx.get(issn) if wd is not None: return wd def main(): """ Check the WD index for each row. """ matches = 0 with open(sys.argv[2], 'wb') as csvfile: fields = ['wosid', 'title', 'issn', 'eissn', 'wikidata'] jwriter = csv.DictWriter(csvfile, fieldnames=fields) jwriter.writeheader() with open(sys.argv[1]) as infile: for n, row in enumerate(csv.DictReader(infile)): issn = row.get('issn') eissn = row.get('eissn') wd = match(issn, eissn) row['wikidata'] = wd jwriter.writerow(row) if wd is not None: matches += 1 print print '-' * 25 print "Total journals", n + 1 print "Wikidata matches", matches print "Matches ", round(matches / float(n) * 100, 2), "%" print if __name__ == "__main__": main()
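Note a bug in the match() helper above: the loop variable is `isn`, but the body tests and looks up `issn`, so the eISSN column is never consulted; the final percentage also divides by `n`, the last zero-based row index, rather than the row count. A corrected sketch of the helper (same names as the script):

def match(issn, eissn):
    for isn in (issn, eissn):
        if isn:                      # skip empty strings
            wd = wd_idx.get(isn)     # look up the loop variable, not `issn`
            if wd is not None:
                return wd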
7716818beb0dba581cd3536e321676d756e282d9
course_discovery/apps/core/migrations/0011_remove_partner_lms_commerce_api_url.py
course_discovery/apps/core/migrations/0011_remove_partner_lms_commerce_api_url.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.15 on 2019-04-12 17:31 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('core', '0010_partner_lms_coursemode_api_url'), ] operations = [ migrations.RemoveField( model_name='partner', name='lms_commerce_api_url', ), ]
Remove the lms_commerce_api_url field from the Partner model
Remove the lms_commerce_api_url field from the Partner model
Python
agpl-3.0
edx/course-discovery,edx/course-discovery,edx/course-discovery,edx/course-discovery
Remove the lms_commerce_api_url field from the Partner model
# -*- coding: utf-8 -*- # Generated by Django 1.11.15 on 2019-04-12 17:31 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('core', '0010_partner_lms_coursemode_api_url'), ] operations = [ migrations.RemoveField( model_name='partner', name='lms_commerce_api_url', ), ]
<commit_before><commit_msg>Remove the lms_commerce_api_url field from the Partner model<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.11.15 on 2019-04-12 17:31 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('core', '0010_partner_lms_coursemode_api_url'), ] operations = [ migrations.RemoveField( model_name='partner', name='lms_commerce_api_url', ), ]
Remove the lms_commerce_api_url field from the Partner model# -*- coding: utf-8 -*- # Generated by Django 1.11.15 on 2019-04-12 17:31 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('core', '0010_partner_lms_coursemode_api_url'), ] operations = [ migrations.RemoveField( model_name='partner', name='lms_commerce_api_url', ), ]
<commit_before><commit_msg>Remove the lms_commerce_api_url field from the Partner model<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.11.15 on 2019-04-12 17:31 from __future__ import unicode_literals from django.db import migrations class Migration(migrations.Migration): dependencies = [ ('core', '0010_partner_lms_coursemode_api_url'), ] operations = [ migrations.RemoveField( model_name='partner', name='lms_commerce_api_url', ), ]
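RemoveField migrations like the one above are normally generated, not hand-written: the field is deleted from the model and makemigrations emits the operation. The paired model-side edit would look roughly like this (the field definition is assumed, not taken from the repo):

from django.db import models

class Partner(models.Model):
    ...
    # lms_commerce_api_url = models.URLField(blank=True)  # deleted field
    # Regenerate with: python manage.py makemigrations core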
bb1aafd72899d35bfcad5a84373281f732ad01ab
integration_tests/test_minerva_dump.py
integration_tests/test_minerva_dump.py
from contextlib import closing import subprocess import unittest from nose.tools import eq_ from minerva.test import connect class MinervaDump(unittest.TestCase): """ Use standard Python unittest TestCase here because of the assertMultiLineEqual function. """ def test_run(self): self.maxDiff = None with closing(connect()) as conn: with closing(conn.cursor()) as cursor: cursor.execute("DELETE FROM trend.trendstore") cursor.execute("DELETE FROM attribute_directory.attributestore") cursor.execute( "SELECT trend.create_trendstore(" " 'test-datasource'," " 'test-entitytype'," " '900'," " ARRAY[" " ('x', 'integer', 'test trend')," " ('y', 'double precision', 'another test trend')" " ]::trend.trend_descr[]" ")") cursor.execute( "SELECT attribute_directory.create_attributestore(" " 'test-datasource'," " 'test-entitytype'," " ARRAY[" " ('height', 'double precision', 'fictive attribute')," " ('power', 'integer', 'another fictive attribute')" " ]::attribute_directory.attribute_descr[]" ")" ) conn.commit() process = subprocess.Popen(['minerva-dump'], stdout=subprocess.PIPE) out, err = process.communicate() self.assertMultiLineEqual(out, """\ SELECT trend.create_trendstore( 'test-datasource', 'test-entitytype', '900', ARRAY[ ('x', 'integer', ''), ('y', 'double precision', '') ]::trend.trend_descr[] ); SELECT attribute_directory.create_attributestore( 'test-datasource', 'test-entitytype', ARRAY[ ('height', 'double precision', 'fictive attribute'), ('power', 'integer', 'another fictive attribute') ]::attribute_directory.attribute_descr[] ); """)
Add integration test for minerva-dump script
Add integration test for minerva-dump script
Python
agpl-3.0
hendrikx-itc/minerva,hendrikx-itc/minerva
Add integration test for minerva-dump script
from contextlib import closing import subprocess import unittest from nose.tools import eq_ from minerva.test import connect class MinervaDump(unittest.TestCase): """ Use standard Python unittest TestCase here because of the assertMultiLineEqual function. """ def test_run(self): self.maxDiff = None with closing(connect()) as conn: with closing(conn.cursor()) as cursor: cursor.execute("DELETE FROM trend.trendstore") cursor.execute("DELETE FROM attribute_directory.attributestore") cursor.execute( "SELECT trend.create_trendstore(" " 'test-datasource'," " 'test-entitytype'," " '900'," " ARRAY[" " ('x', 'integer', 'test trend')," " ('y', 'double precision', 'another test trend')" " ]::trend.trend_descr[]" ")") cursor.execute( "SELECT attribute_directory.create_attributestore(" " 'test-datasource'," " 'test-entitytype'," " ARRAY[" " ('height', 'double precision', 'fictive attribute')," " ('power', 'integer', 'another fictive attribute')" " ]::attribute_directory.attribute_descr[]" ")" ) conn.commit() process = subprocess.Popen(['minerva-dump'], stdout=subprocess.PIPE) out, err = process.communicate() self.assertMultiLineEqual(out, """\ SELECT trend.create_trendstore( 'test-datasource', 'test-entitytype', '900', ARRAY[ ('x', 'integer', ''), ('y', 'double precision', '') ]::trend.trend_descr[] ); SELECT attribute_directory.create_attributestore( 'test-datasource', 'test-entitytype', ARRAY[ ('height', 'double precision', 'fictive attribute'), ('power', 'integer', 'another fictive attribute') ]::attribute_directory.attribute_descr[] ); """)
<commit_before><commit_msg>Add integration test for minerva-dump script<commit_after>
from contextlib import closing import subprocess import unittest from nose.tools import eq_ from minerva.test import connect class MinervaDump(unittest.TestCase): """ Use standard Python unittest TestCase here because of the assertMultiLineEqual function. """ def test_run(self): self.maxDiff = None with closing(connect()) as conn: with closing(conn.cursor()) as cursor: cursor.execute("DELETE FROM trend.trendstore") cursor.execute("DELETE FROM attribute_directory.attributestore") cursor.execute( "SELECT trend.create_trendstore(" " 'test-datasource'," " 'test-entitytype'," " '900'," " ARRAY[" " ('x', 'integer', 'test trend')," " ('y', 'double precision', 'another test trend')" " ]::trend.trend_descr[]" ")") cursor.execute( "SELECT attribute_directory.create_attributestore(" " 'test-datasource'," " 'test-entitytype'," " ARRAY[" " ('height', 'double precision', 'fictive attribute')," " ('power', 'integer', 'another fictive attribute')" " ]::attribute_directory.attribute_descr[]" ")" ) conn.commit() process = subprocess.Popen(['minerva-dump'], stdout=subprocess.PIPE) out, err = process.communicate() self.assertMultiLineEqual(out, """\ SELECT trend.create_trendstore( 'test-datasource', 'test-entitytype', '900', ARRAY[ ('x', 'integer', ''), ('y', 'double precision', '') ]::trend.trend_descr[] ); SELECT attribute_directory.create_attributestore( 'test-datasource', 'test-entitytype', ARRAY[ ('height', 'double precision', 'fictive attribute'), ('power', 'integer', 'another fictive attribute') ]::attribute_directory.attribute_descr[] ); """)
Add integration test for minerva-dump scriptfrom contextlib import closing import subprocess import unittest from nose.tools import eq_ from minerva.test import connect class MinervaDump(unittest.TestCase): """ Use standard Python unittest TestCase here because of the assertMultiLineEqual function. """ def test_run(self): self.maxDiff = None with closing(connect()) as conn: with closing(conn.cursor()) as cursor: cursor.execute("DELETE FROM trend.trendstore") cursor.execute("DELETE FROM attribute_directory.attributestore") cursor.execute( "SELECT trend.create_trendstore(" " 'test-datasource'," " 'test-entitytype'," " '900'," " ARRAY[" " ('x', 'integer', 'test trend')," " ('y', 'double precision', 'another test trend')" " ]::trend.trend_descr[]" ")") cursor.execute( "SELECT attribute_directory.create_attributestore(" " 'test-datasource'," " 'test-entitytype'," " ARRAY[" " ('height', 'double precision', 'fictive attribute')," " ('power', 'integer', 'another fictive attribute')" " ]::attribute_directory.attribute_descr[]" ")" ) conn.commit() process = subprocess.Popen(['minerva-dump'], stdout=subprocess.PIPE) out, err = process.communicate() self.assertMultiLineEqual(out, """\ SELECT trend.create_trendstore( 'test-datasource', 'test-entitytype', '900', ARRAY[ ('x', 'integer', ''), ('y', 'double precision', '') ]::trend.trend_descr[] ); SELECT attribute_directory.create_attributestore( 'test-datasource', 'test-entitytype', ARRAY[ ('height', 'double precision', 'fictive attribute'), ('power', 'integer', 'another fictive attribute') ]::attribute_directory.attribute_descr[] ); """)
<commit_before><commit_msg>Add integration test for minerva-dump script<commit_after>from contextlib import closing import subprocess import unittest from nose.tools import eq_ from minerva.test import connect class MinervaDump(unittest.TestCase): """ Use standard Python unittest TestCase here because of the assertMultiLineEqual function. """ def test_run(self): self.maxDiff = None with closing(connect()) as conn: with closing(conn.cursor()) as cursor: cursor.execute("DELETE FROM trend.trendstore") cursor.execute("DELETE FROM attribute_directory.attributestore") cursor.execute( "SELECT trend.create_trendstore(" " 'test-datasource'," " 'test-entitytype'," " '900'," " ARRAY[" " ('x', 'integer', 'test trend')," " ('y', 'double precision', 'another test trend')" " ]::trend.trend_descr[]" ")") cursor.execute( "SELECT attribute_directory.create_attributestore(" " 'test-datasource'," " 'test-entitytype'," " ARRAY[" " ('height', 'double precision', 'fictive attribute')," " ('power', 'integer', 'another fictive attribute')" " ]::attribute_directory.attribute_descr[]" ")" ) conn.commit() process = subprocess.Popen(['minerva-dump'], stdout=subprocess.PIPE) out, err = process.communicate() self.assertMultiLineEqual(out, """\ SELECT trend.create_trendstore( 'test-datasource', 'test-entitytype', '900', ARRAY[ ('x', 'integer', ''), ('y', 'double precision', '') ]::trend.trend_descr[] ); SELECT attribute_directory.create_attributestore( 'test-datasource', 'test-entitytype', ARRAY[ ('height', 'double precision', 'fictive attribute'), ('power', 'integer', 'another fictive attribute') ]::attribute_directory.attribute_descr[] ); """)
186b231b7149b52dc95837aabd5f44b1c02c8e41
samples/sample_pyqtgraph_no_datetime.py
samples/sample_pyqtgraph_no_datetime.py
#!/usr/bin/env python # -*- coding: utf-8 -*- """ This example demonstrates a random walk with pyqtgraph. """ import sys import numpy as np import pyqtgraph as pg from pyqtgraph.Qt import QtGui, QtCore from numpy_buffer import RingBuffer # https://github.com/scls19fr/numpy-buffer class RandomWalkPlot: def __init__(self, win): #self.plot = pg.plot() self.plot = win.addPlot(title="Updating plot") self.ptr = 0 #pen = 'r' pen = pg.mkPen('b', style=QtCore.Qt.SolidLine) self.curve = self.plot.plot(pen=pen, symbol='+') self.timer = QtCore.QTimer() self.timer.timeout.connect(self.update) self.timer.start(50) self.value = 1000 # initial value N = 100 # number of elements into circular buffer self.data_y = RingBuffer(N, self.value) def update(self): self.value += np.random.uniform(-1, 1) self.data_y.append(self.value) self.curve.setData(y=self.data_y) # size is increasing up to N #self.curve.setData(y=self.data_y.all[::-1]) # size is always N #if self.ptr == 0: # self.plot.enableAutoRange('xy', False) ## stop auto-scaling after the first data set is plotted #self.ptr += 1 def main(): #QtGui.QApplication.setGraphicsSystem('raster') app = QtGui.QApplication(sys.argv) #mw = QtGui.QMainWindow() #mw.resize(800,800) pg.setConfigOption('background', 'w') pg.setConfigOption('foreground', 'k') win = pg.GraphicsWindow(title="Basic plotting examples") win.resize(1000, 600) win.setWindowTitle('plot') # Enable antialiasing for prettier plots pg.setConfigOptions(antialias=True) upl = RandomWalkPlot(win) ## Start Qt event loop unless running in interactive mode or using pyside. if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'): QtGui.QApplication.instance().exec_() if __name__ == '__main__': main()
Add PyQtGraph random walk without datetime
Add PyQtGraph random walk without datetime
Python
bsd-3-clause
scls19fr/numpy-buffer
Add PyQtGraph random walk without datetime
#!/usr/bin/env python # -*- coding: utf-8 -*- """ This example demonstrates a random walk with pyqtgraph. """ import sys import numpy as np import pyqtgraph as pg from pyqtgraph.Qt import QtGui, QtCore from numpy_buffer import RingBuffer # https://github.com/scls19fr/numpy-buffer class RandomWalkPlot: def __init__(self, win): #self.plot = pg.plot() self.plot = win.addPlot(title="Updating plot") self.ptr = 0 #pen = 'r' pen = pg.mkPen('b', style=QtCore.Qt.SolidLine) self.curve = self.plot.plot(pen=pen, symbol='+') self.timer = QtCore.QTimer() self.timer.timeout.connect(self.update) self.timer.start(50) self.value = 1000 # initial value N = 100 # number of elements into circular buffer self.data_y = RingBuffer(N, self.value) def update(self): self.value += np.random.uniform(-1, 1) self.data_y.append(self.value) self.curve.setData(y=self.data_y) # size is increasing up to N #self.curve.setData(y=self.data_y.all[::-1]) # size is always N #if self.ptr == 0: # self.plot.enableAutoRange('xy', False) ## stop auto-scaling after the first data set is plotted #self.ptr += 1 def main(): #QtGui.QApplication.setGraphicsSystem('raster') app = QtGui.QApplication(sys.argv) #mw = QtGui.QMainWindow() #mw.resize(800,800) pg.setConfigOption('background', 'w') pg.setConfigOption('foreground', 'k') win = pg.GraphicsWindow(title="Basic plotting examples") win.resize(1000, 600) win.setWindowTitle('plot') # Enable antialiasing for prettier plots pg.setConfigOptions(antialias=True) upl = RandomWalkPlot(win) ## Start Qt event loop unless running in interactive mode or using pyside. if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'): QtGui.QApplication.instance().exec_() if __name__ == '__main__': main()
<commit_before><commit_msg>Add PyQtGraph random walk without datetime<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- """ This example demonstrates a random walk with pyqtgraph. """ import sys import numpy as np import pyqtgraph as pg from pyqtgraph.Qt import QtGui, QtCore from numpy_buffer import RingBuffer # https://github.com/scls19fr/numpy-buffer class RandomWalkPlot: def __init__(self, win): #self.plot = pg.plot() self.plot = win.addPlot(title="Updating plot") self.ptr = 0 #pen = 'r' pen = pg.mkPen('b', style=QtCore.Qt.SolidLine) self.curve = self.plot.plot(pen=pen, symbol='+') self.timer = QtCore.QTimer() self.timer.timeout.connect(self.update) self.timer.start(50) self.value = 1000 # initial value N = 100 # number of elements into circular buffer self.data_y = RingBuffer(N, self.value) def update(self): self.value += np.random.uniform(-1, 1) self.data_y.append(self.value) self.curve.setData(y=self.data_y) # size is increasing up to N #self.curve.setData(y=self.data_y.all[::-1]) # size is always N #if self.ptr == 0: # self.plot.enableAutoRange('xy', False) ## stop auto-scaling after the first data set is plotted #self.ptr += 1 def main(): #QtGui.QApplication.setGraphicsSystem('raster') app = QtGui.QApplication(sys.argv) #mw = QtGui.QMainWindow() #mw.resize(800,800) pg.setConfigOption('background', 'w') pg.setConfigOption('foreground', 'k') win = pg.GraphicsWindow(title="Basic plotting examples") win.resize(1000, 600) win.setWindowTitle('plot') # Enable antialiasing for prettier plots pg.setConfigOptions(antialias=True) upl = RandomWalkPlot(win) ## Start Qt event loop unless running in interactive mode or using pyside. if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'): QtGui.QApplication.instance().exec_() if __name__ == '__main__': main()
Add PyQtGraph random walk without datetime#!/usr/bin/env python # -*- coding: utf-8 -*- """ This example demonstrates a random walk with pyqtgraph. """ import sys import numpy as np import pyqtgraph as pg from pyqtgraph.Qt import QtGui, QtCore from numpy_buffer import RingBuffer # https://github.com/scls19fr/numpy-buffer class RandomWalkPlot: def __init__(self, win): #self.plot = pg.plot() self.plot = win.addPlot(title="Updating plot") self.ptr = 0 #pen = 'r' pen = pg.mkPen('b', style=QtCore.Qt.SolidLine) self.curve = self.plot.plot(pen=pen, symbol='+') self.timer = QtCore.QTimer() self.timer.timeout.connect(self.update) self.timer.start(50) self.value = 1000 # initial value N = 100 # number of elements into circular buffer self.data_y = RingBuffer(N, self.value) def update(self): self.value += np.random.uniform(-1, 1) self.data_y.append(self.value) self.curve.setData(y=self.data_y) # size is increasing up to N #self.curve.setData(y=self.data_y.all[::-1]) # size is always N #if self.ptr == 0: # self.plot.enableAutoRange('xy', False) ## stop auto-scaling after the first data set is plotted #self.ptr += 1 def main(): #QtGui.QApplication.setGraphicsSystem('raster') app = QtGui.QApplication(sys.argv) #mw = QtGui.QMainWindow() #mw.resize(800,800) pg.setConfigOption('background', 'w') pg.setConfigOption('foreground', 'k') win = pg.GraphicsWindow(title="Basic plotting examples") win.resize(1000, 600) win.setWindowTitle('plot') # Enable antialiasing for prettier plots pg.setConfigOptions(antialias=True) upl = RandomWalkPlot(win) ## Start Qt event loop unless running in interactive mode or using pyside. if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'): QtGui.QApplication.instance().exec_() if __name__ == '__main__': main()
<commit_before><commit_msg>Add PyQtGraph random walk without datetime<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- """ This example demonstrates a random walk with pyqtgraph. """ import sys import numpy as np import pyqtgraph as pg from pyqtgraph.Qt import QtGui, QtCore from numpy_buffer import RingBuffer # https://github.com/scls19fr/numpy-buffer class RandomWalkPlot: def __init__(self, win): #self.plot = pg.plot() self.plot = win.addPlot(title="Updating plot") self.ptr = 0 #pen = 'r' pen = pg.mkPen('b', style=QtCore.Qt.SolidLine) self.curve = self.plot.plot(pen=pen, symbol='+') self.timer = QtCore.QTimer() self.timer.timeout.connect(self.update) self.timer.start(50) self.value = 1000 # initial value N = 100 # number of elements into circular buffer self.data_y = RingBuffer(N, self.value) def update(self): self.value += np.random.uniform(-1, 1) self.data_y.append(self.value) self.curve.setData(y=self.data_y) # size is increasing up to N #self.curve.setData(y=self.data_y.all[::-1]) # size is always N #if self.ptr == 0: # self.plot.enableAutoRange('xy', False) ## stop auto-scaling after the first data set is plotted #self.ptr += 1 def main(): #QtGui.QApplication.setGraphicsSystem('raster') app = QtGui.QApplication(sys.argv) #mw = QtGui.QMainWindow() #mw.resize(800,800) pg.setConfigOption('background', 'w') pg.setConfigOption('foreground', 'k') win = pg.GraphicsWindow(title="Basic plotting examples") win.resize(1000, 600) win.setWindowTitle('plot') # Enable antialiasing for prettier plots pg.setConfigOptions(antialias=True) upl = RandomWalkPlot(win) ## Start Qt event loop unless running in interactive mode or using pyside. if (sys.flags.interactive != 1) or not hasattr(QtCore, 'PYQT_VERSION'): QtGui.QApplication.instance().exec_() if __name__ == '__main__': main()
fc5714951bac61f17509eacf8ec2413e14a79ddc
txircd/modules/core/sno_oper.py
txircd/modules/core/sno_oper.py
from twisted.plugin import IPlugin from txircd.module_interface import IModuleData, ModuleData from zope.interface import implements class SnoOper(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeOper" core = True def hookIRCd(self, ircd): self.ircd = ircd def actions(self): return [ ("operreport", 1, self.sendOperNotice), ("servernoticetype", 1, self.checkSnoType) ] def sendOperNotice(self, user, reason): if reason: message = "Failed OPER attempt from {} ({}).".format(user.nick, reason) else: message = "{} has opered.".format(user.nick) snodata = { "mask": "oper", "message": message } self.ircd.runActionProcessing("sendservernotice", snodata) def checkSnoType(self, user, typename): return typename == "oper" snoOper = SnoOper()
Add a snomask for OPER attempts
Add a snomask for OPER attempts
Python
bsd-3-clause
ElementalAlchemist/txircd,Heufneutje/txircd
Add a snomask for OPER attempts
from twisted.plugin import IPlugin from txircd.module_interface import IModuleData, ModuleData from zope.interface import implements class SnoOper(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeOper" core = True def hookIRCd(self, ircd): self.ircd = ircd def actions(self): return [ ("operreport", 1, self.sendOperNotice), ("servernoticetype", 1, self.checkSnoType) ] def sendOperNotice(self, user, reason): if reason: message = "Failed OPER attempt from {} ({}).".format(user.nick, reason) else: message = "{} has opered.".format(user.nick) snodata = { "mask": "oper", "message": message } self.ircd.runActionProcessing("sendservernotice", snodata) def checkSnoType(self, user, typename): return typename == "oper" snoOper = SnoOper()
<commit_before><commit_msg>Add a snomask for OPER attempts<commit_after>
from twisted.plugin import IPlugin from txircd.module_interface import IModuleData, ModuleData from zope.interface import implements class SnoOper(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeOper" core = True def hookIRCd(self, ircd): self.ircd = ircd def actions(self): return [ ("operreport", 1, self.sendOperNotice), ("servernoticetype", 1, self.checkSnoType) ] def sendOperNotice(self, user, reason): if reason: message = "Failed OPER attempt from {} ({}).".format(user.nick, reason) else: message = "{} has opered.".format(user.nick) snodata = { "mask": "oper", "message": message } self.ircd.runActionProcessing("sendservernotice", snodata) def checkSnoType(self, user, typename): return typename == "oper" snoOper = SnoOper()
Add a snomask for OPER attemptsfrom twisted.plugin import IPlugin from txircd.module_interface import IModuleData, ModuleData from zope.interface import implements class SnoOper(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeOper" core = True def hookIRCd(self, ircd): self.ircd = ircd def actions(self): return [ ("operreport", 1, self.sendOperNotice), ("servernoticetype", 1, self.checkSnoType) ] def sendOperNotice(self, user, reason): if reason: message = "Failed OPER attempt from {} ({}).".format(user.nick, reason) else: message = "{} has opered.".format(user.nick) snodata = { "mask": "oper", "message": message } self.ircd.runActionProcessing("sendservernotice", snodata) def checkSnoType(self, user, typename): return typename == "oper" snoOper = SnoOper()
<commit_before><commit_msg>Add a snomask for OPER attempts<commit_after>from twisted.plugin import IPlugin from txircd.module_interface import IModuleData, ModuleData from zope.interface import implements class SnoOper(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeOper" core = True def hookIRCd(self, ircd): self.ircd = ircd def actions(self): return [ ("operreport", 1, self.sendOperNotice), ("servernoticetype", 1, self.checkSnoType) ] def sendOperNotice(self, user, reason): if reason: message = "Failed OPER attempt from {} ({}).".format(user.nick, reason) else: message = "{} has opered.".format(user.nick) snodata = { "mask": "oper", "message": message } self.ircd.runActionProcessing("sendservernotice", snodata) def checkSnoType(self, user, typename): return typename == "oper" snoOper = SnoOper()
8f3c5dc924b0a8ad35d99d66eb809a51b49fc178
zerver/management/commands/rename-stream.py
zerver/management/commands/rename-stream.py
from __future__ import absolute_import from django.core.management.base import BaseCommand from zerver.lib.actions import do_rename_stream from zerver.models import Realm, get_realm class Command(BaseCommand): help = """Change the stream name for a realm. Usage: python manage.py rename-stream <domain> <old name> <new name>""" def handle(self, *args, **options): if len(args) != 3: print "Please provide a domain and the old and new names." exit(1) domain, old_name, new_name = args try: realm = get_realm(domain) except Realm.DoesNotExist: print "Unknown domain %s" % (domain,) exit(1) do_rename_stream(realm, old_name, new_name)
Add a management command to rename a stream.
Add a management command to rename a stream. (imported from commit b3acadc09b289b48e2ac298e50a5427545b6a473)
Python
apache-2.0
eeshangarg/zulip,dhcrzf/zulip,wavelets/zulip,hengqujushi/zulip,praveenaki/zulip,armooo/zulip,JanzTam/zulip,glovebx/zulip,amanharitsh123/zulip,dxq-git/zulip,xuxiao/zulip,stamhe/zulip,pradiptad/zulip,akuseru/zulip,bitemyapp/zulip,JPJPJPOPOP/zulip,zorojean/zulip,kokoar/zulip,zwily/zulip,LAndreas/zulip,christi3k/zulip,natanovia/zulip,dwrpayne/zulip,ashwinirudrappa/zulip,dxq-git/zulip,lfranchi/zulip,susansls/zulip,dattatreya303/zulip,ryanbackman/zulip,codeKonami/zulip,ufosky-server/zulip,eastlhu/zulip,nicholasbs/zulip,shaunstanislaus/zulip,zhaoweigg/zulip,vikas-parashar/zulip,Vallher/zulip,andersk/zulip,pradiptad/zulip,peiwei/zulip,dotcool/zulip,johnny9/zulip,jackrzhang/zulip,udxxabp/zulip,saitodisse/zulip,sonali0901/zulip,dnmfarrell/zulip,themass/zulip,dawran6/zulip,RobotCaleb/zulip,deer-hope/zulip,zhaoweigg/zulip,suxinde2009/zulip,fw1121/zulip,DazWorrall/zulip,cosmicAsymmetry/zulip,vikas-parashar/zulip,ufosky-server/zulip,sup95/zulip,akuseru/zulip,gigawhitlocks/zulip,shubhamdhama/zulip,zulip/zulip,levixie/zulip,noroot/zulip,aliceriot/zulip,aps-sids/zulip,Jianchun1/zulip,paxapy/zulip,dwrpayne/zulip,guiquanz/zulip,brainwane/zulip,RobotCaleb/zulip,littledogboy/zulip,kokoar/zulip,themass/zulip,Diptanshu8/zulip,samatdav/zulip,shaunstanislaus/zulip,blaze225/zulip,m1ssou/zulip,ryansnowboarder/zulip,sharmaeklavya2/zulip,Gabriel0402/zulip,wangdeshui/zulip,souravbadami/zulip,MariaFaBella85/zulip,blaze225/zulip,shubhamdhama/zulip,aps-sids/zulip,MayB/zulip,brainwane/zulip,synicalsyntax/zulip,dawran6/zulip,ryansnowboarder/zulip,gkotian/zulip,bitemyapp/zulip,hj3938/zulip,so0k/zulip,zacps/zulip,esander91/zulip,Drooids/zulip,dnmfarrell/zulip,ApsOps/zulip,joyhchen/zulip,zofuthan/zulip,hj3938/zulip,Suninus/zulip,EasonYi/zulip,reyha/zulip,itnihao/zulip,wavelets/zulip,mansilladev/zulip,armooo/zulip,hengqujushi/zulip,hayderimran7/zulip,Diptanshu8/zulip,avastu/zulip,jeffcao/zulip,reyha/zulip,bitemyapp/zulip,moria/zulip,zachallaun/zulip,esander91/zulip,Drooids/zulip,dnmfarrell/zulip,ApsOps/zulip,xuxiao/zulip,littledogboy/zulip,Frouk/zulip,xuanhan863/zulip,ahmadassaf/zulip,guiquanz/zulip,wdaher/zulip,proliming/zulip,ryanbackman/zulip,amanharitsh123/zulip,mahim97/zulip,huangkebo/zulip,amallia/zulip,mdavid/zulip,schatt/zulip,jrowan/zulip,zachallaun/zulip,tbutter/zulip,rishig/zulip,itnihao/zulip,grave-w-grave/zulip,deer-hope/zulip,zhaoweigg/zulip,yuvipanda/zulip,samatdav/zulip,rishig/zulip,yuvipanda/zulip,natanovia/zulip,firstblade/zulip,punchagan/zulip,akuseru/zulip,synicalsyntax/zulip,kou/zulip,peiwei/zulip,jeffcao/zulip,eastlhu/zulip,mdavid/zulip,yocome/zulip,LAndreas/zulip,swinghu/zulip,itnihao/zulip,MayB/zulip,bssrdf/zulip,gkotian/zulip,zulip/zulip,Batterfii/zulip,tommyip/zulip,codeKonami/zulip,shrikrishnaholla/zulip,kokoar/zulip,m1ssou/zulip,proliming/zulip,glovebx/zulip,yuvipanda/zulip,luyifan/zulip,seapasulli/zulip,swinghu/zulip,jainayush975/zulip,Jianchun1/zulip,vaidap/zulip,showell/zulip,aakash-cr7/zulip,timabbott/zulip,hafeez3000/zulip,hengqujushi/zulip,aliceriot/zulip,aps-sids/zulip,punchagan/zulip,ericzhou2008/zulip,jimmy54/zulip,jessedhillon/zulip,willingc/zulip,eastlhu/zulip,brockwhittaker/zulip,jackrzhang/zulip,babbage/zulip,jimmy54/zulip,timabbott/zulip,jonesgithub/zulip,amyliu345/zulip,yuvipanda/zulip,he15his/zulip,timabbott/zulip,rht/zulip,tdr130/zulip,jerryge/zulip,noroot/zulip,zofuthan/zulip,SmartPeople/zulip,armooo/zulip,kou/zulip,jerryge/zulip,voidException/zulip,hj3938/zulip,nicholasbs/zulip,blaze225/zulip,kaiyuanheshang/zulip,andersk/zulip,firstblade/zulip,LAndreas/zulip,Drooids/zulip,christi3k/zulip,cosmicAsymmetry/zulip,seapasulli/zulip,hackerkid/zulip,johnnygaddarr/zulip,ApsOps/zulip,christi3k/zulip,shaunstanislaus/zulip,proliming/zulip,verma-varsha/zulip,dwrpayne/zulip,zhaoweigg/zulip,calvinleenyc/zulip,EasonYi/zulip,stamhe/zulip,punchagan/zulip,dnmfarrell/zulip,hengqujushi/zulip,tiansiyuan/zulip,levixie/zulip,Juanvulcano/zulip,Juanvulcano/zulip,aliceriot/zulip,KJin99/zulip,jonesgithub/zulip,bastianh/zulip,voidException/zulip,brockwhittaker/zulip,EasonYi/zulip,stamhe/zulip,punchagan/zulip,dnmfarrell/zulip,hengqujushi/zulip,tiansiyuan/zulip,levixie/zulip,Juanvulcano/zulip,aakash-cr7/zulip,zulip/zulip,sharmaeklavya2/zulip,cosmicAsymmetry/zulip,zachallaun/zulip,cosmicAsymmetry/zulip,christi3k/zulip,dhcrzf/zulip,krtkmj/zulip,Cheppers/zulip,thomasboyt/zulip,umkay/zulip,so0k/zulip,Cheppers/zulip,lfranchi/zulip,technicalpickles/zulip,so0k/zulip,Cheppers/zulip,rht/zulip,developerfm/zulip,dwrpayne/zulip,natanovia/zulip,stamhe/zulip,jonesgithub/zulip,codeKonami/zulip,shubhamdhama/zulip,jeffcao/zulip,ericzhou2008/zulip,jessedhillon/zulip,peiwei/zulip,pradiptad/zulip,andersk/zulip,Cheppers/zulip,luyifan/zulip,vakila/zulip,dhcrzf/zulip,peiwei/zulip,avastu/zulip,he15his/zulip,zorojean/zulip,udxxabp/zulip,synicalsyntax/zulip,punchagan/zulip,bastianh/zulip,JanzTam/zulip,TigorC/zulip,aliceriot/zulip,zulip/zulip,tbutter/zulip,dxq-git/zulip,rishig/zulip,PaulPetring/zulip,mdavid/zulip,Jianchun1/zulip,JPJPJPOPOP/zulip,adnanh/zulip,peiwei/zulip,kou/zulip,amallia/zulip,qq1012803704/zulip,firstblade/zulip,umkay/zulip,huangkebo/zulip,vabs22/zulip,karamcnair/zulip,ufosky-server/zulip,ahmadassaf/zulip,JanzTam/zulip,nicholasbs/zulip,shubhamdhama/zulip,swinghu/zulip,seapasulli/zulip,timabbott/zulip,aps-sids/zulip,xuanhan863/zulip,noroot/zulip,hackerkid/zulip,jerryge/zulip,wdaher/zulip,jeffcao/zulip,shubhamdhama/zulip,willingc/zulip,grave-w-grave/zulip,wweiradio/zulip,itnihao/zulip,karamcnair/zulip,themass/zulip,dhcrzf/zulip,proliming/zulip,lfranchi/zulip,jphilipsen05/zulip,themass/zulip,kaiyuanheshang/zulip,jimmy54/zulip,moria/zulip,KJin99/zulip,Drooids/zulip,cosmicAsymmetry/zulip,noroot/zulip,karamcnair/zulip,seapasulli/zulip,j831/zulip,krtkmj/zulip,hayderimran7/zulip,bluesea/zulip,ApsOps/zulip,yocome/zulip,dattatreya303/zulip,aakash-cr7/zulip,Qgap/zulip,xuanhan863/zulip,AZtheAsian/zulip,guiquanz/zulip,technicalpickles/zulip,yocome/zulip,dattatreya303/zulip,dwrpayne/zulip,hackerkid/zulip,susansls/zulip,so0k/zulip,RobotCaleb/zulip,m1ssou/zulip,wweiradio/zulip,gkotian/zulip,JanzTam/zulip,suxinde2009/zulip,Galexrt/zulip,stamhe/zulip,peguin40/zulip,amyliu345/zulip,bluesea/zulip,tdr130/zulip,ApsOps/zulip,jerryge/zulip,ryansnowboarder/zulip,armooo/zulip,amyliu345/zulip,ericzhou2008/zulip,ryanbackman/zulip,itnihao/zulip,hafeez3000/zulip,calvinleenyc/zulip,hustlzp/zulip,proliming/zulip,DazWorrall/zulip,firstblade/zulip,littledogboy/zulip,ahmadassaf/zulip,bssrdf/zulip,jeffcao/zulip,praveenaki/zulip,paxapy/zulip,reyha/zulip,mdavid/zulip,Frouk/zulip,synicalsyntax/zulip,peiwei/zulip,showell/zulip,vakila/zulip,deer-hope/zulip,johnny9/zulip,voidException/zulip,bastianh/zulip,codeKonami/zulip,pradiptad/zulip,xuxiao/zulip,PhilSk/zulip,bowlofstew/zulip,babbage/zulip,saitodisse/zulip,calvinleenyc/zulip,esander91/zulip,schatt/zulip,reyha/zulip,souravbadami/zulip,RobotCaleb/zulip,verma-varsha/zulip,ikasumiwt/zulip,eastlhu/zulip,zulip/zulip,amallia/zulip,huangkebo/zulip,avastu/zulip,deer-hope/zulip,vaidap/zulip,dxq-git/zulip,Gabriel0402/zulip,verma-varsha/zulip,easyfmxu/zulip,vakila/zulip,stamhe/zulip,luyifan/zulip,sharmaeklavya2/zulip,zacps/zulip,vabs22/zulip,cosmicAsymmetry/zulip,natanovia/zulip,wangdeshui/zulip,dhcrzf/zulip,MayB/zulip,KJin99/zulip,littledogboy/zulip,jessedhillon/zulip,alliejones/zulip,so0k/zulip,huangkebo/zulip,luyifan/zulip,ikasumiwt/zulip,PaulPetring/zulip,DazWorrall/zulip,zorojean/zulip,zofuthan/zulip,grave-w-grave/zulip,bssrdf/zulip,avastu/zulip,lfranchi/zulip,yuvipanda/zulip,xuanhan863/zulip,yocome/zulip,praveenaki/zulip,dattatreya303/zulip,schatt/zulip,jrowan/zulip,hengqujushi/zulip,paxapy/zulip,ufosky-server/zulip,saitodisse/zulip,schatt/zulip,mansilladev/zulip,jrowan/zulip,joshisa/zulip,zachallaun/zulip,susansls/zulip,adnanh/zulip,udxxabp/zulip,atomic-labs/zulip,Jianchun1/zulip,jimmy54/zulip,easyfmxu/zulip,avastu/zulip,jphilipsen05/zulip,lfranchi/zulip,punchagan/zulip,atomic-labs/zulip,joyhchen/zulip,niftynei/zulip,stamhe/zulip,johnnygaddarr/zulip,jimmy54/zulip,Batterfii/zulip,amanharitsh123/zulip,rht/zulip,arpitpanwar/zulip,jimmy54/zulip,deer-hope/zulip,ahmadassaf/zulip,mansilladev/zulip,bowlofstew/zulip,j831/zulip,hafeez3000/zulip,bluesea/zulip,zhaoweigg/zulip,MariaFaBella85/zulip,PhilSk/zulip,akuseru/zulip,LAndreas/zulip,Qgap/zulip,LeeRisk/zulip,jimmy54/zulip,hackerkid/zulip,jessedhillon/zulip,rishig/zulip,dhcrzf/zulip,amallia/zulip,levixie/zulip,shubhamdhama/zulip,brockwhittaker/zulip,mohsenSy/zulip,wdaher/zulip,Gabriel0402/zulip,DazWorrall/zulip,vikas-parashar/zulip,sonali0901/zulip,vikas-parashar/zulip,voidException/zulip,dotcool/zulip,andersk/zulip,ericzhou2008/zulip,technicalpickles/zulip,joyhchen/zulip,ikasumiwt/zulip,hj3938/zulip,niftynei/zulip,rht/zulip,dawran6/zulip,ryansnowboarder/zulip,so0k/zulip,hj3938/zulip,technicalpickles/zulip,eastlhu/zulip,jackrzhang/zulip,zwily/zulip,dhcrzf/zulip,amanharitsh123/zulip,udxxabp/zulip,niftynei/zulip,wweiradio/zulip,alliejones/zulip,verma-varsha/zulip,alliejones/zulip,brockwhittaker/zulip,akuseru/zulip,thomasboyt/zulip,SmartPeople/zulip,karamcnair/zulip,technicalpickles/zulip,shaunstanislaus/zulip,themass/zulip,zorojean/zulip,PhilSk/zulip,susansls/zulip,samatdav/zulip,mahim97/zulip,guiquanz/zulip,arpith/zulip,amyliu345/zulip,LeeRisk/zulip,adnanh/zulip,xuxiao/zulip,peguin40/zulip,fw1121/zulip,brockwhittaker/zulip,brainwane/zulip,firstblade/zulip,punchagan/zulip,pradiptad/zulip,zofuthan/zulip,mdavid/zulip,dattatreya303/zulip,MayB/zulip,SmartPeople/zulip,ahmadassaf/zulip,vakila/zulip,sharmaeklavya2/zulip,hafeez3000/zulip,praveenaki/zulip,babbage/zulip,timabbott/zulip,j831/zulip,tdr130/zulip,lfranchi/zulip,tbutter/zulip,glovebx/zulip,itnihao/zulip,jackrzhang/zulip,arpitpanwar/zulip,atomic-labs/zulip,dawran6/zulip,KingxBanana/zulip,adnanh/zulip,qq1012803704/zulip,dotcool/zulip,moria/zulip,qq1012803704/zulip,zulip/zulip,wweiradio/zulip,Cheppers/zulip,dawran6/zulip,synicalsyntax/zulip,udxxabp/zulip,wweiradio/zulip,kokoar/zulip,Vallher/zulip,gigawhitlocks/zulip,LAndreas/zulip,qq1012803704/zulip,esander91/zulip,moria/zulip,KingxBanana/zulip,zacps/zulip,isht3/zulip,johnnygaddarr/zulip,codeKonami/zulip,dnmfarrell/zulip,gigawhitlocks/zulip,jessedhillon/zulip,codeKonami/zulip,zhaoweigg/zulip,wangdeshui/zulip,Gabriel0402/zulip,dawran6/zulip,mahim97/zulip,JanzTam/zulip,showell/zulip,natanovia/zulip,m1ssou/zulip,wangdeshui/zulip,timabbott/zulip,sonali0901/zulip,TigorC/zulip,glovebx/zulip,moria/zulip,wweiradio/zulip,arpitpanwar/zulip,ipernet/zulip,bitemyapp/zulip,Galexrt/zulip,willingc/zulip,easyfmxu/zulip,schatt/zulip,suxinde2009/zulip,umkay/zulip,souravbadami/zulip,brainwane/zulip,sup95/zulip,zwily/zulip,dnmfarrell/zulip,yocome/zulip,avastu/zulip,huangkebo/zulip,littledogboy/zulip,jerryge/zulip,swinghu/zulip,christi3k/zulip,reyha/zulip,PaulPetring/zulip,Suninus/zulip,sup95/zulip,zofuthan/zulip,jackrzhang/zulip,ashwinirudrappa/zulip,samatdav/zulip,alliejones/zulip,Galexrt/zulip,joshisa/zulip,noroot/zulip,ashwinirudrappa/zulip,luyifan/zulip,tommyip/zulip,tbutter/zulip,bssrdf/zulip,showell/zulip,EasonYi/zulip,LAndreas/zulip,stamhe/zulip,PaulPetring/zulip,hj3938/zulip,tiansiyuan/zulip,MariaFaBella85/zulip,umkay/zulip,levixie/zulip,guiquanz/zulip,levixie/zulip,KingxBanana/zulip,showell/zulip,suxinde2009/zulip,levixie/zulip,johnnygaddarr/zulip,ipernet/zulip,eeshangarg/zulip,MariaFaBella85/zulip,punchagan/zulip,MariaFaBella85/zulip,ipernet/zulip,bastianh/zulip,amyliu345/zulip,ipernet/zulip,nicholasbs/zulip,PhilSk/zulip,kou/zulip,dattatreya303/zulip,mahim97/zulip,tbutter/zulip,bastianh/zulip,alliejones/zulip,aakash-cr7/zulip,yocome/zulip,wdaher/zulip,bowlofstew/zulip,qq1012803704/zulip,hackerkid/zulip,fw1121/zulip,developerfm/zulip,mahim97/zulip,brainwane/zulip,RobotCaleb/zulip,TigorC/zulip,kou/zulip,bastianh/zulip,arpitpanwar/zulip,vaidap/zulip,Qgap/zulip,hustlzp/zulip,andersk/zulip,calvinleenyc/zulip,guiquanz/zulip,Juanvulcano/zulip,thomasboyt/zulip,dnmfarrell/zulip,willingc/zulip,vaidap/zulip,joshisa/zulip,tommyip/zulip,jerryge/zulip,firstblade/zulip,ashwinirudrappa/zulip,synicalsyntax/zulip,eastlhu/zulip,proliming/zulip,amanharitsh123/zulip,peiwei/zulip,ashwinirudrappa/zulip,vikas-parashar/zulip,atomic-labs/zulip,arpith/zulip,LeeRisk/zulip,ericzhou2008/zulip,jrowan/zulip,babbage/zulip,mansilladev/zulip,rishig/zulip,rishig/zulip,m1ssou/zulip,arpith/zulip,calvinleenyc/zulip,gigawhitlocks/zulip,sharmaeklavya2/zulip,vikas-parashar/zulip,kokoar/zulip,kaiyuanheshang/zulip,amallia/zulip,he15his/zulip,dotcool/zulip,RobotCaleb/zulip,zofuthan/zulip,eeshangarg/zulip,kou/zulip,sonali0901/zulip,hackerkid/zulip,synicalsyntax/zulip,Batterfii/zulip,eeshangarg/zulip,tbutter/zulip,brockwhittaker/zulip,amanharitsh123/zulip,hustlzp/zulip,shrikrishnaholla/zulip,brainwane/zulip,gkotian/zulip,ufosky-server/zulip,vabs22/zulip,umkay/zulip,tommyip/zulip,ufosky-server/zulip,glovebx/zulip,arpith/zulip,blaze225/zulip,hackerkid/zulip,niftynei/zulip,kou/zulip,yuvipanda/zulip,swinghu/zulip,johnny9/zulip,hj3938/zulip,bluesea/zulip,hayderimran7/zulip,zachallaun/zulip,pradiptad/zulip,umkay/zulip,Gabriel0402/zulip,wavelets/zulip,joshisa/zulip,shrikrishnaholla/zulip,ryansnowboarder/zulip,zacps/zulip,voidException/zulip,DazWorrall/zulip,Galexrt/zulip,akuseru/zulip,shrikrishnaholla/zulip,Frouk/zulip,levixie/zulip,mohsenSy/zulip,glovebx/zulip,isht3/zulip,qq1012803704/zulip,Jianchun1/zulip,bowlofstew/zulip,wavelets/zulip,glovebx/zulip,johnnygaddarr/zulip,dwrpayne/zulip,hengqujushi/zulip,KingxBanana/zulip,hayderimran7/zulip,eeshangarg/zulip,PhilSk/zulip,atomic-labs/zulip,bluesea/zulip,voidException/zulip,peguin40/zulip,dwrpayne/zulip,bitemyapp/zulip,susansls/zulip,jphilipsen05/zulip,dnmfarrell/zulip,KJin99/zulip,dotcool/zulip,krtkmj/zulip,PaulPetring/zulip,huangkebo/zulip,alliejones/zulip,mohsenSy/zulip,tdr130/zulip,zwily/zulip,sup95/zulip,eeshangarg/zulip,TigorC/zulip,developerfm/zulip,KJin99/zulip,isht3/zulip,KingxBanana/zulip,niftynei/zulip,peguin40/zulip,Batterfii/zulip,bssrdf/zulip,atomic-labs/zulip,mahim97/zulip,zwily/zulip,zhaoweigg/zulip,voidException/zulip,dxq-git/zulip,MayB/zulip,PaulPetring/zulip,Jianchun1/zulip,vaidap/zulip,tiansiyuan/zulip,moria/zulip,rht/zulip,bluesea/zulip,Drooids/zulip,bowlofstew/zulip,grave-w-grave/zulip,RobotCaleb/zulip,jphilipsen05/zulip,saitodisse/zulip,xuxiao/zulip,hengqujushi/zulip,jonesgithub/zulip,technicalpickles/zulip,Qgap/zulip,aps-sids/zulip,ApsOps/zulip,suxinde2009/zulip,aliceriot/zulip,AZtheAsian/zulip,tbutter/zulip,Batterfii/zulip,jessedhillon/zulip,zofuthan/zulip,guiquanz/zulip,bluesea/zulip,paxapy/zulip,kaiyuanheshang/zulip,he15his/zulip,Juanvulcano/zulip,jerryge/zulip,praveenaki/zulip,babbage/zulip,LeeRisk/zulip,hayderimran7/zulip,xuanhan863/zulip,ahmadassaf/zulip,MariaFaBella85/zulip,johnny9/zulip,joshisa/zulip,mohsenSy/zulip,fw1121/zulip,johnnygaddarr/zulip,natanovia/zulip,Qgap/zulip,akuseru/zulip,samatdav/zulip,babbage/zulip,Drooids/zulip,kokoar/zulip,babbage/zulip,wavelets/zulip,christi3k/zulip,SmartPeople/zulip,arpitpanwar/zulip,isht3/zulip,calvinleenyc/zulip,jainayush975/zulip,mohsenSy/zulip,adnanh/zulip,AZtheAsian/zulip,bastianh/zulip,Diptanshu8/zulip,zwily/zulip,Gabriel0402/zulip,jackrzhang/zulip,avastu/zulip,hafeez3000/zulip,verma-varsha/zulip,wdaher/zulip,Frouk/zulip,Vallher/zulip,Qgap/zulip,amyliu345/zulip,vabs22/zulip,Vallher/zulip,JPJPJPOPOP/zulip,esander91/zulip,aliceriot/zulip,suxinde2009/zulip,mansilladev/zulip,j831/zulip,joyhchen/zulip,deer-hope/zulip,Suninus/zulip,jphilipsen05/zulip,karamcnair/zulip,so0k/zulip,adnanh/zulip,gigawhitlocks/zulip,amallia/zulip,rht/zulip,sharmaeklavya2/zulip,tommyip/zulip,wavelets/zulip,xuxiao/zulip,zorojean/zulip,isht3/zulip,paxapy/zulip,xuxiao/zulip,easyfmxu/zulip,EasonYi/zulip,ahmadassaf/zulip,krtkmj/zulip,tdr130/zulip,Cheppers/zulip,yocome/zulip,hayderimran7/zulip,jainayush975/zulip,wangdeshui/zulip,shaunstanislaus/zulip,jainayush975/zulip,timabbott/zulip,Batterfii/zulip,hustlzp/zulip,themass/zulip,noroot/zulip,tdr130/zulip,deer-hope/zulip,aakash-cr7/zulip,ikasumiwt/zulip,Galexrt/zulip,kaiyuanheshang/zulip,bssrdf/zulip,arpitpanwar/zulip,he15his/zulip,wangdeshui/zulip,wavelets/zulip,krtkmj/zulip,joyhchen/zulip,nicholasbs/zulip,bitemyapp/zulip,jonesgithub/zulip,wdaher/zulip,Juanvulcano/zulip,souravbadami/zulip,zacps/zulip,tiansiyuan/zulip,andersk/zulip,bitemyapp/zulip,schatt/zulip,Suninus/zulip,ipernet/zulip,m1ssou/zulip,jrowan/zulip,zacps/zulip,easyfmxu/zulip,atomic-labs/zulip,thomasboyt/zulip,krtkmj/zulip,ikasumiwt/zulip,ufosky-server/zulip,itnihao/zulip,AZtheAsian/zulip,developerfm/zulip,hafeez3000/zulip,shaunstanislaus/zulip,thomasboyt/zulip,yuvipanda/zulip,Cheppers/zulip,ryansnowboarder/zulip,johnny9/zulip,willingc/zulip,LeeRisk/zulip,jackrzhang/zulip,mansilladev/zulip,mdavid/zulip,developerfm/zulip,schatt/zulip,easyfmxu/zulip,shaunstanislaus/zulip,technicalpickles/zulip,AZtheAsian/zulip,Batterfii/zulip,noroot/zulip,blaze225/zulip,armooo/zulip,hustlzp/zulip,joyhchen/zulip,fw1121/zulip,souravbadami/zulip,jeffcao/zulip,Frouk/zulip,firstblade/zulip,samatdav/zulip,umkay/zulip,joshisa/zulip,SmartPeople/zulip,eeshangarg/zulip,codeKonami/zulip,verma-varsha/zulip,arpitpanwar/zulip,JanzTam/zulip,zachallaun/zulip,aps-sids/zulip,showell/zulip,tommyip/zulip,rht/zulip,andersk/zulip,ApsOps/zulip,ryanbackman/zulip,ikasumiwt/zulip,ericzhou2008/zulip,hustlzp/zulip,JPJPJPOPOP/zulip,tiansiyuan/zulip,esander91/zulip,dotcool/zulip,tdr130/zulip,jainayush975/zulip,peguin40/zulip,wweiradio/zulip,KJin99/zulip,grave-w-grave/zulip,vaidap/zulip,armooo/zulip,EasonYi/zulip,johnny9/zulip,praveenaki/zulip,Diptanshu8/zulip,moria/zulip,fw1121/zulip,AZtheAsian/zulip,brainwane/zulip,dotcool/zulip,eastlhu/zulip,bssrdf/zulip,mdavid/zulip,paxapy/zulip,dxq-git/zulip,shrikrishnaholla/zulip,Frouk/zulip,jonesgithub/zulip,zhaoweigg/zulip,aakash-cr7/zulip,m1ssou/zulip,reyha/zulip,vabs22/zulip,wdaher/zulip,littledogboy/zulip,amallia/zulip,Diptanshu8/zulip,jainayush975/zulip,lfranchi/zulip,TigorC/zulip,natanovia/zulip,developerfm/zulip,gkotian/zulip,Suninus/zulip,aliceriot/zulip,johnnygaddarr/zulip,he15his/zulip,hayderimran7/zulip,qq1012803704/zulip,vakila/zulip,karamcnair/zulip,nicholasbs/zulip,hafeez3000/zulip,grave-w-grave/zulip,bowlofstew/zulip,kaiyuanheshang/zulip,SmartPeople/zulip,sup95/zulip,themass/zulip,armooo/zulip,Suninus/zulip,seapasulli/zulip,dxq-git/zulip,saitodisse/zulip,jeffcao/zulip,luyifan/zulip,LeeRisk/zulip,tiansiyuan/zulip,gigawhitlocks/zulip,thomasboyt/zulip,ashwinirudrappa/zulip,seapasulli/zulip,thomasboyt/zulip,vakila/zulip,Galexrt/zulip,willingc/zulip,shubhamdhama/zulip,kokoar/zulip,tiansiyuan/zulip,willingc/zulip,Suninus/zulip,swinghu/zulip,zwily/zulip,susansls/zulip,tommyip/zulip,JanzTam/zulip,he15his/zulip,suxinde2009/zulip,rishig/zulip,Vallher/zulip,mohsenSy/zulip,krtkmj/zulip,Drooids/zulip,hustlzp/zulip,Vallher/zulip,praveenaki/zulip,j831/zulip,peguin40/zulip,alliejones/zulip,proliming/zulip,LAndreas/zulip,karamcnair/zulip,vabs22/zulip,Qgap/zulip,saitodisse/zulip,esander91/zulip
Add a management command to rename a stream. (imported from commit b3acadc09b289b48e2ac298e50a5427545b6a473)
from __future__ import absolute_import from django.core.management.base import BaseCommand from zerver.lib.actions import do_rename_stream from zerver.models import Realm, get_realm class Command(BaseCommand): help = """Change the stream name for a realm. Usage: python manage.py rename-stream <domain> <old name> <new name>""" def handle(self, *args, **options): if len(args) != 3: print "Please provide a domain and the old and new names." exit(1) domain, old_name, new_name = args try: realm = get_realm(domain) except Realm.DoesNotExist: print "Unknown domain %s" % (domain,) exit(1) do_rename_stream(realm, old_name, new_name)
<commit_before><commit_msg>Add a management command to rename a stream. (imported from commit b3acadc09b289b48e2ac298e50a5427545b6a473)<commit_after>
from __future__ import absolute_import from django.core.management.base import BaseCommand from zerver.lib.actions import do_rename_stream from zerver.models import Realm, get_realm class Command(BaseCommand): help = """Change the stream name for a realm. Usage: python manage.py rename-stream <domain> <old name> <new name>""" def handle(self, *args, **options): if len(args) != 3: print "Please provide a domain and the old and new names." exit(1) domain, old_name, new_name = args try: realm = get_realm(domain) except Realm.DoesNotExist: print "Unknown domain %s" % (domain,) exit(1) do_rename_stream(realm, old_name, new_name)
Add a management command to rename a stream. (imported from commit b3acadc09b289b48e2ac298e50a5427545b6a473)from __future__ import absolute_import from django.core.management.base import BaseCommand from zerver.lib.actions import do_rename_stream from zerver.models import Realm, get_realm class Command(BaseCommand): help = """Change the stream name for a realm. Usage: python manage.py rename-stream <domain> <old name> <new name>""" def handle(self, *args, **options): if len(args) != 3: print "Please provide a domain and the old and new names." exit(1) domain, old_name, new_name = args try: realm = get_realm(domain) except Realm.DoesNotExist: print "Unknown domain %s" % (domain,) exit(1) do_rename_stream(realm, old_name, new_name)
<commit_before><commit_msg>Add a management command to rename a stream. (imported from commit b3acadc09b289b48e2ac298e50a5427545b6a473)<commit_after>from __future__ import absolute_import from django.core.management.base import BaseCommand from zerver.lib.actions import do_rename_stream from zerver.models import Realm, get_realm class Command(BaseCommand): help = """Change the stream name for a realm. Usage: python manage.py rename-stream <domain> <old name> <new name>""" def handle(self, *args, **options): if len(args) != 3: print "Please provide a domain and the old and new names." exit(1) domain, old_name, new_name = args try: realm = get_realm(domain) except Realm.DoesNotExist: print "Unknown domain %s" % (domain,) exit(1) do_rename_stream(realm, old_name, new_name)
583155db6a85808c69fa25ba4959ebd370aa2fba
etc/config/check_modules.py
etc/config/check_modules.py
from subprocess import call import os modules = [ 'backend.unichat.eu', 'realtime.unichat.eu', 'presence.unichat.eu', 'matchmaker.unichat.eu', ] for m in modules: with open(os.devnull, 'w') as devnull: call( ['curl', '-m', '1', m], stdout=devnull, stderr=devnull )
Add script to check if modules up
Add script to check if modules up
Python
mit
dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet,dimkarakostas/unimeet
Add script to check if modules up
from subprocess import call import os modules = [ 'backend.unichat.eu', 'realtime.unichat.eu', 'presence.unichat.eu', 'matchmaker.unichat.eu', ] for m in modules: with open(os.devnull, 'w') as devnull: call( ['curl', '-m', '1', m], stdout=devnull, stderr=devnull )
<commit_before><commit_msg>Add script to check if modules up<commit_after>
from subprocess import call import os modules = [ 'backend.unichat.eu', 'realtime.unichat.eu', 'presence.unichat.eu', 'matchmaker.unichat.eu', ] for m in modules: with open(os.devnull, 'w') as devnull: call( ['curl', '-m', '1', m], stdout=devnull, stderr=devnull )
Add script to check if modules upfrom subprocess import call import os modules = [ 'backend.unichat.eu', 'realtime.unichat.eu', 'presence.unichat.eu', 'matchmaker.unichat.eu', ] for m in modules: with open(os.devnull, 'w') as devnull: call( ['curl', '-m', '1', m], stdout=devnull, stderr=devnull )
<commit_before><commit_msg>Add script to check if modules up<commit_after>from subprocess import call import os modules = [ 'backend.unichat.eu', 'realtime.unichat.eu', 'presence.unichat.eu', 'matchmaker.unichat.eu', ] for m in modules: with open(os.devnull, 'w') as devnull: call( ['curl', '-m', '1', m], stdout=devnull, stderr=devnull )
50db2fa37aab219c9273cf3f76269de11e2dc86b
src/ggrc_basic_permissions/migrations/versions/20131204014446_54b6efd65a93_add_mappings_to_audi.py
src/ggrc_basic_permissions/migrations/versions/20131204014446_54b6efd65a93_add_mappings_to_audi.py
"""Add mappings to Auditor role Revision ID: 54b6efd65a93 Revises: 13b49798db19 Create Date: 2013-12-04 01:44:46.023974 """ # revision identifiers, used by Alembic. revision = '54b6efd65a93' down_revision = '13b49798db19' import sqlalchemy as sa from alembic import op from datetime import datetime from sqlalchemy.sql import table, column, select import json roles_table = table('roles', column('id', sa.Integer), column('name', sa.String), column('permissions_json', sa.String) ) mapping_types = [ 'ObjectControl', 'ObjectDocument', 'ObjectObjective', 'ObjectPerson', 'ObjectSection', 'Relationship', ] def get_auditor_permissions(): connection = op.get_bind() auditor_role = connection.execute( select([roles_table.c.id, roles_table.c.permissions_json])\ .where(roles_table.c.name == 'Auditor')).fetchone() return json.loads(auditor_role.permissions_json) def update_auditor_permissions(permissions): op.execute(roles_table\ .update()\ .values(permissions_json = json.dumps(permissions))\ .where(roles_table.c.name == 'Auditor')) def upgrade(): permissions = get_auditor_permissions() permissions['read'].extend(mapping_types) update_auditor_permissions(permissions) def downgrade(): permissions = get_auditor_permissions() for resource_type in mapping_types: permissions['read'].remove(resource_type) update_auditor_permissions(permissions)
Add a migration to allow Auditor role to see mappings defined in the Audit context.
Add a migration to allow Auditor role to see mappings defined in the Audit context.
Python
apache-2.0
andrei-karalionak/ggrc-core,andrei-karalionak/ggrc-core,selahssea/ggrc-core,prasannav7/ggrc-core,josthkko/ggrc-core,hyperNURb/ggrc-core,jmakov/ggrc-core,josthkko/ggrc-core,edofic/ggrc-core,selahssea/ggrc-core,hyperNURb/ggrc-core,kr41/ggrc-core,NejcZupec/ggrc-core,jmakov/ggrc-core,prasannav7/ggrc-core,edofic/ggrc-core,VinnieJohns/ggrc-core,edofic/ggrc-core,uskudnik/ggrc-core,VinnieJohns/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,jmakov/ggrc-core,hasanalom/ggrc-core,VinnieJohns/ggrc-core,kr41/ggrc-core,selahssea/ggrc-core,vladan-m/ggrc-core,NejcZupec/ggrc-core,VinnieJohns/ggrc-core,hasanalom/ggrc-core,josthkko/ggrc-core,vladan-m/ggrc-core,josthkko/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core,AleksNeStu/ggrc-core,uskudnik/ggrc-core,prasannav7/ggrc-core,plamut/ggrc-core,uskudnik/ggrc-core,hasanalom/ggrc-core,j0gurt/ggrc-core,edofic/ggrc-core,uskudnik/ggrc-core,vladan-m/ggrc-core,kr41/ggrc-core,plamut/ggrc-core,vladan-m/ggrc-core,hasanalom/ggrc-core,uskudnik/ggrc-core,vladan-m/ggrc-core,hyperNURb/ggrc-core,j0gurt/ggrc-core,andrei-karalionak/ggrc-core,kr41/ggrc-core,AleksNeStu/ggrc-core,jmakov/ggrc-core,NejcZupec/ggrc-core,selahssea/ggrc-core,hasanalom/ggrc-core,hyperNURb/ggrc-core,AleksNeStu/ggrc-core,j0gurt/ggrc-core,hyperNURb/ggrc-core,NejcZupec/ggrc-core,plamut/ggrc-core,prasannav7/ggrc-core
Add a migration to allow Auditor role to see mappings defined in the Audit context.
"""Add mappings to Auditor role Revision ID: 54b6efd65a93 Revises: 13b49798db19 Create Date: 2013-12-04 01:44:46.023974 """ # revision identifiers, used by Alembic. revision = '54b6efd65a93' down_revision = '13b49798db19' import sqlalchemy as sa from alembic import op from datetime import datetime from sqlalchemy.sql import table, column, select import json roles_table = table('roles', column('id', sa.Integer), column('name', sa.String), column('permissions_json', sa.String) ) mapping_types = [ 'ObjectControl', 'ObjectDocument', 'ObjectObjective', 'ObjectPerson', 'ObjectSection', 'Relationship', ] def get_auditor_permissions(): connection = op.get_bind() auditor_role = connection.execute( select([roles_table.c.id, roles_table.c.permissions_json])\ .where(roles_table.c.name == 'Auditor')).fetchone() return json.loads(auditor_role.permissions_json) def update_auditor_permissions(permissions): op.execute(roles_table\ .update()\ .values(permissions_json = json.dumps(permissions))\ .where(roles_table.c.name == 'Auditor')) def upgrade(): permissions = get_auditor_permissions() permissions['read'].extend(mapping_types) update_auditor_permissions(permissions) def downgrade(): permissions = get_auditor_permissions() for resource_type in mapping_types: permissions['read'].remove(resource_type) update_auditor_permissions(permissions)
<commit_before><commit_msg>Add a migration to allow Auditor role to see mappings defined in the Audit context.<commit_after>
"""Add mappings to Auditor role Revision ID: 54b6efd65a93 Revises: 13b49798db19 Create Date: 2013-12-04 01:44:46.023974 """ # revision identifiers, used by Alembic. revision = '54b6efd65a93' down_revision = '13b49798db19' import sqlalchemy as sa from alembic import op from datetime import datetime from sqlalchemy.sql import table, column, select import json roles_table = table('roles', column('id', sa.Integer), column('name', sa.String), column('permissions_json', sa.String) ) mapping_types = [ 'ObjectControl', 'ObjectDocument', 'ObjectObjective', 'ObjectPerson', 'ObjectSection', 'Relationship', ] def get_auditor_permissions(): connection = op.get_bind() auditor_role = connection.execute( select([roles_table.c.id, roles_table.c.permissions_json])\ .where(roles_table.c.name == 'Auditor')).fetchone() return json.loads(auditor_role.permissions_json) def update_auditor_permissions(permissions): op.execute(roles_table\ .update()\ .values(permissions_json = json.dumps(permissions))\ .where(roles_table.c.name == 'Auditor')) def upgrade(): permissions = get_auditor_permissions() permissions['read'].extend(mapping_types) update_auditor_permissions(permissions) def downgrade(): permissions = get_auditor_permissions() for resource_type in mapping_types: permissions['read'].remove(resource_type) update_auditor_permissions(permissions)
Add a migration to allow Auditor role to see mappings defined in the Audit context. """Add mappings to Auditor role Revision ID: 54b6efd65a93 Revises: 13b49798db19 Create Date: 2013-12-04 01:44:46.023974 """ # revision identifiers, used by Alembic. revision = '54b6efd65a93' down_revision = '13b49798db19' import sqlalchemy as sa from alembic import op from datetime import datetime from sqlalchemy.sql import table, column, select import json roles_table = table('roles', column('id', sa.Integer), column('name', sa.String), column('permissions_json', sa.String) ) mapping_types = [ 'ObjectControl', 'ObjectDocument', 'ObjectObjective', 'ObjectPerson', 'ObjectSection', 'Relationship', ] def get_auditor_permissions(): connection = op.get_bind() auditor_role = connection.execute( select([roles_table.c.id, roles_table.c.permissions_json])\ .where(roles_table.c.name == 'Auditor')).fetchone() return json.loads(auditor_role.permissions_json) def update_auditor_permissions(permissions): op.execute(roles_table\ .update()\ .values(permissions_json = json.dumps(permissions))\ .where(roles_table.c.name == 'Auditor')) def upgrade(): permissions = get_auditor_permissions() permissions['read'].extend(mapping_types) update_auditor_permissions(permissions) def downgrade(): permissions = get_auditor_permissions() for resource_type in mapping_types: permissions['read'].remove(resource_type) update_auditor_permissions(permissions)
<commit_before><commit_msg>Add a migration to allow Auditor role to see mappings defined in the Audit context.<commit_after> """Add mappings to Auditor role Revision ID: 54b6efd65a93 Revises: 13b49798db19 Create Date: 2013-12-04 01:44:46.023974 """ # revision identifiers, used by Alembic. revision = '54b6efd65a93' down_revision = '13b49798db19' import sqlalchemy as sa from alembic import op from datetime import datetime from sqlalchemy.sql import table, column, select import json roles_table = table('roles', column('id', sa.Integer), column('name', sa.String), column('permissions_json', sa.String) ) mapping_types = [ 'ObjectControl', 'ObjectDocument', 'ObjectObjective', 'ObjectPerson', 'ObjectSection', 'Relationship', ] def get_auditor_permissions(): connection = op.get_bind() auditor_role = connection.execute( select([roles_table.c.id, roles_table.c.permissions_json])\ .where(roles_table.c.name == 'Auditor')).fetchone() return json.loads(auditor_role.permissions_json) def update_auditor_permissions(permissions): op.execute(roles_table\ .update()\ .values(permissions_json = json.dumps(permissions))\ .where(roles_table.c.name == 'Auditor')) def upgrade(): permissions = get_auditor_permissions() permissions['read'].extend(mapping_types) update_auditor_permissions(permissions) def downgrade(): permissions = get_auditor_permissions() for resource_type in mapping_types: permissions['read'].remove(resource_type) update_auditor_permissions(permissions)
c74ce3d4561a7367903863aaabe1af113d43aa0c
mgnemu/models/BaseModel.py
mgnemu/models/BaseModel.py
# -*- coding: utf-8 -*- """ Base model of project. Contains to basic methods: dumps(object_data) - writes object into json. loads(json_data) - converts json into model object. """ import json class BaseModel(): def dumps(object_data): return json.dumps(object_data) def loads(json_data): return json.loads(json_data)
Add base model obect to project.
Add base model obect to project.
Python
mit
0xporky/mgnemu-python
Add base model obect to project.
# -*- coding: utf-8 -*- """ Base model of project. Contains to basic methods: dumps(object_data) - writes object into json. loads(json_data) - converts json into model object. """ import json class BaseModel(): def dumps(object_data): return json.dumps(object_data) def loads(json_data): return json.loads(json_data)
<commit_before><commit_msg>Add base model obect to project.<commit_after>
# -*- coding: utf-8 -*- """ Base model of project. Contains to basic methods: dumps(object_data) - writes object into json. loads(json_data) - converts json into model object. """ import json class BaseModel(): def dumps(object_data): return json.dumps(object_data) def loads(json_data): return json.loads(json_data)
Add base model obect to project.# -*- coding: utf-8 -*- """ Base model of project. Contains to basic methods: dumps(object_data) - writes object into json. loads(json_data) - converts json into model object. """ import json class BaseModel(): def dumps(object_data): return json.dumps(object_data) def loads(json_data): return json.loads(json_data)
<commit_before><commit_msg>Add base model obect to project.<commit_after># -*- coding: utf-8 -*- """ Base model of project. Contains to basic methods: dumps(object_data) - writes object into json. loads(json_data) - converts json into model object. """ import json class BaseModel(): def dumps(object_data): return json.dumps(object_data) def loads(json_data): return json.loads(json_data)
17fe9d01b6771888a44d6a039b337a84c32e64e8
tests/test_common_utils.py
tests/test_common_utils.py
from bart.common import Utils import unittest import pandas as pd class TestCommonUtils(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestCommonUtils, self).__init__(*args, **kwargs) def test_interval_sum(self): """Test Utils Function: interval_sum""" array = [0, 0, 1, 1, 1, 1, 0, 0] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 3) array = [False, False, True, True, True, True, False, False] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series), 3) array = [0, 0, 1, 0, 0, 0] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 0) array = [0, 0, 1, 0, 1, 1] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 1)
Add test case for interval_sum
tests: Add test case for interval_sum Signed-off-by: Kapileshwar Singh <d373e2b6407ea84be359ce4a11e8631121819e79@arm.com>
Python
apache-2.0
JaviMerino/bart,ARM-software/bart
tests: Add test case for interval_sum Signed-off-by: Kapileshwar Singh <d373e2b6407ea84be359ce4a11e8631121819e79@arm.com>
from bart.common import Utils import unittest import pandas as pd class TestCommonUtils(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestCommonUtils, self).__init__(*args, **kwargs) def test_interval_sum(self): """Test Utils Function: interval_sum""" array = [0, 0, 1, 1, 1, 1, 0, 0] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 3) array = [False, False, True, True, True, True, False, False] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series), 3) array = [0, 0, 1, 0, 0, 0] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 0) array = [0, 0, 1, 0, 1, 1] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 1)
<commit_before><commit_msg>tests: Add test case for interval_sum Signed-off-by: Kapileshwar Singh <d373e2b6407ea84be359ce4a11e8631121819e79@arm.com><commit_after>
from bart.common import Utils import unittest import pandas as pd class TestCommonUtils(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestCommonUtils, self).__init__(*args, **kwargs) def test_interval_sum(self): """Test Utils Function: interval_sum""" array = [0, 0, 1, 1, 1, 1, 0, 0] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 3) array = [False, False, True, True, True, True, False, False] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series), 3) array = [0, 0, 1, 0, 0, 0] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 0) array = [0, 0, 1, 0, 1, 1] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 1)
tests: Add test case for interval_sum Signed-off-by: Kapileshwar Singh <d373e2b6407ea84be359ce4a11e8631121819e79@arm.com>from bart.common import Utils import unittest import pandas as pd class TestCommonUtils(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestCommonUtils, self).__init__(*args, **kwargs) def test_interval_sum(self): """Test Utils Function: interval_sum""" array = [0, 0, 1, 1, 1, 1, 0, 0] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 3) array = [False, False, True, True, True, True, False, False] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series), 3) array = [0, 0, 1, 0, 0, 0] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 0) array = [0, 0, 1, 0, 1, 1] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 1)
<commit_before><commit_msg>tests: Add test case for interval_sum Signed-off-by: Kapileshwar Singh <d373e2b6407ea84be359ce4a11e8631121819e79@arm.com><commit_after>from bart.common import Utils import unittest import pandas as pd class TestCommonUtils(unittest.TestCase): def __init__(self, *args, **kwargs): super(TestCommonUtils, self).__init__(*args, **kwargs) def test_interval_sum(self): """Test Utils Function: interval_sum""" array = [0, 0, 1, 1, 1, 1, 0, 0] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 3) array = [False, False, True, True, True, True, False, False] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series), 3) array = [0, 0, 1, 0, 0, 0] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 0) array = [0, 0, 1, 0, 1, 1] series = pd.Series(array) self.assertEqual(Utils.interval_sum(series, 1), 1)
e6a3e2ac8267ae3a0f361138bd8cb25f82b12b9d
benchexec/tools/avr.py
benchexec/tools/avr.py
# This file is part of BenchExec, a framework for reliable benchmarking: # https://github.com/sosy-lab/benchexec # # SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org> # # SPDX-License-Identifier: Apache-2.0 import benchexec.result as result import benchexec.tools.template class Tool(benchexec.tools.template.BaseTool2): """ Tool info for AVR -- Abstractly Verifying Reachability URL: https://github.com/aman-goel/avr """ def executable(self, tool_locator): return tool_locator.find_executable("avr.py") def name(self): return "AVR" def cmdline(self, executable, options, task, rlimits): return [executable] + options + [task.single_input_file] def determine_result(self, run): """ @return: status of AVR after executing a run """ if run.was_timeout: return "TIMEOUT" status = None for line in run.output: if "avr-h" in line: status = result.RESULT_TRUE_PROP if "avr-v" in line: status = result.RESULT_FALSE_PROP if not status: status = result.RESULT_ERROR return status
Create a tool-info module for AVR
Create a tool-info module for AVR
Python
apache-2.0
sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec
Create a tool-info module for AVR
# This file is part of BenchExec, a framework for reliable benchmarking: # https://github.com/sosy-lab/benchexec # # SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org> # # SPDX-License-Identifier: Apache-2.0 import benchexec.result as result import benchexec.tools.template class Tool(benchexec.tools.template.BaseTool2): """ Tool info for AVR -- Abstractly Verifying Reachability URL: https://github.com/aman-goel/avr """ def executable(self, tool_locator): return tool_locator.find_executable("avr.py") def name(self): return "AVR" def cmdline(self, executable, options, task, rlimits): return [executable] + options + [task.single_input_file] def determine_result(self, run): """ @return: status of AVR after executing a run """ if run.was_timeout: return "TIMEOUT" status = None for line in run.output: if "avr-h" in line: status = result.RESULT_TRUE_PROP if "avr-v" in line: status = result.RESULT_FALSE_PROP if not status: status = result.RESULT_ERROR return status
<commit_before><commit_msg>Create a tool-info module for AVR<commit_after>
# This file is part of BenchExec, a framework for reliable benchmarking: # https://github.com/sosy-lab/benchexec # # SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org> # # SPDX-License-Identifier: Apache-2.0 import benchexec.result as result import benchexec.tools.template class Tool(benchexec.tools.template.BaseTool2): """ Tool info for AVR -- Abstractly Verifying Reachability URL: https://github.com/aman-goel/avr """ def executable(self, tool_locator): return tool_locator.find_executable("avr.py") def name(self): return "AVR" def cmdline(self, executable, options, task, rlimits): return [executable] + options + [task.single_input_file] def determine_result(self, run): """ @return: status of AVR after executing a run """ if run.was_timeout: return "TIMEOUT" status = None for line in run.output: if "avr-h" in line: status = result.RESULT_TRUE_PROP if "avr-v" in line: status = result.RESULT_FALSE_PROP if not status: status = result.RESULT_ERROR return status
Create a tool-info module for AVR# This file is part of BenchExec, a framework for reliable benchmarking: # https://github.com/sosy-lab/benchexec # # SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org> # # SPDX-License-Identifier: Apache-2.0 import benchexec.result as result import benchexec.tools.template class Tool(benchexec.tools.template.BaseTool2): """ Tool info for AVR -- Abstractly Verifying Reachability URL: https://github.com/aman-goel/avr """ def executable(self, tool_locator): return tool_locator.find_executable("avr.py") def name(self): return "AVR" def cmdline(self, executable, options, task, rlimits): return [executable] + options + [task.single_input_file] def determine_result(self, run): """ @return: status of AVR after executing a run """ if run.was_timeout: return "TIMEOUT" status = None for line in run.output: if "avr-h" in line: status = result.RESULT_TRUE_PROP if "avr-v" in line: status = result.RESULT_FALSE_PROP if not status: status = result.RESULT_ERROR return status
<commit_before><commit_msg>Create a tool-info module for AVR<commit_after># This file is part of BenchExec, a framework for reliable benchmarking: # https://github.com/sosy-lab/benchexec # # SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org> # # SPDX-License-Identifier: Apache-2.0 import benchexec.result as result import benchexec.tools.template class Tool(benchexec.tools.template.BaseTool2): """ Tool info for AVR -- Abstractly Verifying Reachability URL: https://github.com/aman-goel/avr """ def executable(self, tool_locator): return tool_locator.find_executable("avr.py") def name(self): return "AVR" def cmdline(self, executable, options, task, rlimits): return [executable] + options + [task.single_input_file] def determine_result(self, run): """ @return: status of AVR after executing a run """ if run.was_timeout: return "TIMEOUT" status = None for line in run.output: if "avr-h" in line: status = result.RESULT_TRUE_PROP if "avr-v" in line: status = result.RESULT_FALSE_PROP if not status: status = result.RESULT_ERROR return status
b373eaab5918488292075c962f4374dc8815c395
test/integration/generate_partitions.py
test/integration/generate_partitions.py
import sys import random if len(sys.argv) != 3: print >> sys.stderr, "USAGE: python generate_partitions.py nodes partitions_per_node" sys.exit() FORMAT_WIDTH = 10 nodes = int(sys.argv[1]) partitions = int(sys.argv[2]) ids = range(nodes * partitions) # use known seed so this is repeatable random.seed(92873498274) random.shuffle(ids) for i in xrange(nodes): print print 'node', i print '<partitions>' print ' ', node_ids = sorted(ids[i*partitions:(i+1)*partitions]) for j in xrange(len(node_ids)): print str(node_ids[j]) + ',', if j % FORMAT_WIDTH == FORMAT_WIDTH - 1: print print ' ', print '</partitions>'
Add script to generate partition ids.
Add script to generate partition ids.
Python
apache-2.0
medallia/voldemort,HB-SI/voldemort,voldemort/voldemort,HB-SI/voldemort,cshaxu/voldemort,squarY/voldemort,medallia/voldemort,cshaxu/voldemort,stotch/voldemort,rickbw/voldemort,birendraa/voldemort,voldemort/voldemort,birendraa/voldemort,PratikDeshpande/voldemort,HB-SI/voldemort,null-exception/voldemort,mabh/voldemort,jalkjaer/voldemort,jalkjaer/voldemort,FelixGV/voldemort,bitti/voldemort,voldemort/voldemort,LeoYao/voldemort,squarY/voldemort,cshaxu/voldemort,arunthirupathi/voldemort,jeffpc/voldemort,dallasmarlow/voldemort,jeffpc/voldemort,birendraa/voldemort,bitti/voldemort,arunthirupathi/voldemort,bitti/voldemort,LeoYao/voldemort,arunthirupathi/voldemort,null-exception/voldemort,null-exception/voldemort,LeoYao/voldemort,dallasmarlow/voldemort,cshaxu/voldemort,squarY/voldemort,PratikDeshpande/voldemort,jeffpc/voldemort,voldemort/voldemort,dallasmarlow/voldemort,mabh/voldemort,stotch/voldemort,mabh/voldemort,jwlent55/voldemort,arunthirupathi/voldemort,dallasmarlow/voldemort,birendraa/voldemort,jwlent55/voldemort,medallia/voldemort,rickbw/voldemort,jalkjaer/voldemort,gnb/voldemort,stotch/voldemort,jalkjaer/voldemort,jalkjaer/voldemort,HB-SI/voldemort,bhasudha/voldemort,bhasudha/voldemort,cshaxu/voldemort,squarY/voldemort,jeffpc/voldemort,PratikDeshpande/voldemort,jalkjaer/voldemort,voldemort/voldemort,null-exception/voldemort,rickbw/voldemort,dallasmarlow/voldemort,arunthirupathi/voldemort,medallia/voldemort,medallia/voldemort,gnb/voldemort,birendraa/voldemort,FelixGV/voldemort,HB-SI/voldemort,medallia/voldemort,null-exception/voldemort,arunthirupathi/voldemort,bitti/voldemort,rickbw/voldemort,bhasudha/voldemort,mabh/voldemort,stotch/voldemort,jeffpc/voldemort,FelixGV/voldemort,PratikDeshpande/voldemort,gnb/voldemort,bhasudha/voldemort,jwlent55/voldemort,mabh/voldemort,squarY/voldemort,LeoYao/voldemort,jwlent55/voldemort,bitti/voldemort,FelixGV/voldemort,PratikDeshpande/voldemort,dallasmarlow/voldemort,FelixGV/voldemort,jalkjaer/voldemort,squarY/voldemort,gnb/voldemort,LeoYao/voldemort,PratikDeshpande/voldemort,birendraa/voldemort,LeoYao/voldemort,bhasudha/voldemort,bitti/voldemort,stotch/voldemort,gnb/voldemort,stotch/voldemort,FelixGV/voldemort,bitti/voldemort,arunthirupathi/voldemort,mabh/voldemort,jwlent55/voldemort,cshaxu/voldemort,bhasudha/voldemort,jeffpc/voldemort,HB-SI/voldemort,voldemort/voldemort,null-exception/voldemort,gnb/voldemort,squarY/voldemort,rickbw/voldemort,rickbw/voldemort,jwlent55/voldemort,voldemort/voldemort,FelixGV/voldemort
Add script to generate partition ids.
import sys import random if len(sys.argv) != 3: print >> sys.stderr, "USAGE: python generate_partitions.py nodes partitions_per_node" sys.exit() FORMAT_WIDTH = 10 nodes = int(sys.argv[1]) partitions = int(sys.argv[2]) ids = range(nodes * partitions) # use known seed so this is repeatable random.seed(92873498274) random.shuffle(ids) for i in xrange(nodes): print print 'node', i print '<partitions>' print ' ', node_ids = sorted(ids[i*partitions:(i+1)*partitions]) for j in xrange(len(node_ids)): print str(node_ids[j]) + ',', if j % FORMAT_WIDTH == FORMAT_WIDTH - 1: print print ' ', print '</partitions>'
<commit_before><commit_msg>Add script to generate partition ids.<commit_after>
import sys import random if len(sys.argv) != 3: print >> sys.stderr, "USAGE: python generate_partitions.py nodes partitions_per_node" sys.exit() FORMAT_WIDTH = 10 nodes = int(sys.argv[1]) partitions = int(sys.argv[2]) ids = range(nodes * partitions) # use known seed so this is repeatable random.seed(92873498274) random.shuffle(ids) for i in xrange(nodes): print print 'node', i print '<partitions>' print ' ', node_ids = sorted(ids[i*partitions:(i+1)*partitions]) for j in xrange(len(node_ids)): print str(node_ids[j]) + ',', if j % FORMAT_WIDTH == FORMAT_WIDTH - 1: print print ' ', print '</partitions>'
Add script to generate partition ids.import sys import random if len(sys.argv) != 3: print >> sys.stderr, "USAGE: python generate_partitions.py nodes partitions_per_node" sys.exit() FORMAT_WIDTH = 10 nodes = int(sys.argv[1]) partitions = int(sys.argv[2]) ids = range(nodes * partitions) # use known seed so this is repeatable random.seed(92873498274) random.shuffle(ids) for i in xrange(nodes): print print 'node', i print '<partitions>' print ' ', node_ids = sorted(ids[i*partitions:(i+1)*partitions]) for j in xrange(len(node_ids)): print str(node_ids[j]) + ',', if j % FORMAT_WIDTH == FORMAT_WIDTH - 1: print print ' ', print '</partitions>'
<commit_before><commit_msg>Add script to generate partition ids.<commit_after>import sys import random if len(sys.argv) != 3: print >> sys.stderr, "USAGE: python generate_partitions.py nodes partitions_per_node" sys.exit() FORMAT_WIDTH = 10 nodes = int(sys.argv[1]) partitions = int(sys.argv[2]) ids = range(nodes * partitions) # use known seed so this is repeatable random.seed(92873498274) random.shuffle(ids) for i in xrange(nodes): print print 'node', i print '<partitions>' print ' ', node_ids = sorted(ids[i*partitions:(i+1)*partitions]) for j in xrange(len(node_ids)): print str(node_ids[j]) + ',', if j % FORMAT_WIDTH == FORMAT_WIDTH - 1: print print ' ', print '</partitions>'
5caf67f27d82689babf5dcf0a234dfe3c261ff9c
zephyr/retention_policy.py
zephyr/retention_policy.py
""" Implements the per-domain data retention policy. The goal is to have a single place where the policy is defined. This is complicated by needing to apply this policy both to the database and to log files. Additionally, we want to use an efficient query for the database, rather than iterating through messages one by one. The code in this module does not actually remove anything; it just identifies which items should be kept or removed. """ import sys import operator from django.utils import timezone from django.db.models import Q from datetime import datetime, timedelta from zephyr.models import Realm, UserMessage # Each domain has a maximum age for retained messages. # # FIXME: Move this into the database. max_age = { 'customer1.invalid': timedelta(days=31), } def should_expunge_from_log(msg, now): """Should a particular log entry be expunged? msg: a log entry dict now: current time for purposes of determining log entry age""" # This function will be called many times, but we want to compare all # entries against a consistent "current time". So the caller passes # that time as a parameter. # FIXME: Yet another place where we compute the domain manually. # See #260. user = msg.get('sender_email') if user is None: user = msg.get('user') if user is None: # Avoid printing the entire message, but give enough information to find it later. print >>sys.stderr, "WARNING: Can't get user for message at", msg['timestamp'] return False domain = user.split('@', 1)[1] if domain not in max_age: # Keep forever. return False age = now - datetime.fromtimestamp(msg['timestamp']) return age > max_age[domain] def get_UserMessages_to_expunge(): """Fetch all UserMessages which should be expunged from the database. After deleting these, you may also want to call Message.remove_unreachable().""" # Unlike retain_in_log, this handles all messages at once, so we # use the actual current time. now = timezone.now() queries = [Q(user_profile__realm = realm, message__pub_date__lt = now - max_age[realm.domain]) for realm in Realm.objects.all() if realm.domain in max_age] if not queries: return UserMessage.objects.none() # Return all objects matching any of the queries in 'queries'. return UserMessage.objects.filter(reduce(operator.or_, queries))
Implement a framework for data retention policies
Implement a framework for data retention policies And add customer1.invalid to it. (imported from commit 32b0293bc48abf5d9a3bd36f14f6b16d48ea6ff2)
Python
apache-2.0
akuseru/zulip,AZtheAsian/zulip,KJin99/zulip,EasonYi/zulip,rishig/zulip,dotcool/zulip,wdaher/zulip,dattatreya303/zulip,gigawhitlocks/zulip,aps-sids/zulip,natanovia/zulip,wangdeshui/zulip,fw1121/zulip,JanzTam/zulip,natanovia/zulip,mansilladev/zulip,littledogboy/zulip,bowlofstew/zulip,dhcrzf/zulip,he15his/zulip,Suninus/zulip,shaunstanislaus/zulip,bitemyapp/zulip,zamattiac/SHARE,luyifan/zulip,stamhe/zulip,amallia/zulip,avastu/zulip,tiansiyuan/zulip,bitemyapp/zulip,luyifan/zulip,zachallaun/zulip,pradiptad/zulip,luyifan/zulip,krtkmj/zulip,stamhe/zulip,m1ssou/zulip,timabbott/zulip,fw1121/zulip,JanzTam/zulip,natanovia/zulip,mansilladev/zulip,hayderimran7/zulip,ericzhou2008/zulip,seapasulli/zulip,gigawhitlocks/zulip,dotcool/zulip,wdaher/zulip,Juanvulcano/zulip,guiquanz/zulip,itnihao/zulip,ApsOps/zulip,proliming/zulip,tbutter/zulip,zorojean/zulip,dawran6/zulip,proliming/zulip,sharmaeklavya2/zulip,krtkmj/zulip,bastianh/zulip,he15his/zulip,johnny9/zulip,kou/zulip,KJin99/zulip,bastianh/zulip,armooo/zulip,gigawhitlocks/zulip,mansilladev/zulip,krtkmj/zulip,joyhchen/zulip,aps-sids/zulip,gigawhitlocks/zulip,dnmfarrell/zulip,thomasboyt/zulip,levixie/zulip,babbage/zulip,punchagan/zulip,moria/zulip,jessedhillon/zulip,johnny9/zulip,shrikrishnaholla/zulip,dwrpayne/zulip,he15his/zulip,adnanh/zulip,mahim97/zulip,andersk/zulip,vaidap/zulip,ericzhou2008/zulip,PaulPetring/zulip,adnanh/zulip,aliceriot/zulip,wavelets/zulip,christi3k/zulip,he15his/zulip,bssrdf/zulip,arpitpanwar/zulip,shaunstanislaus/zulip,blaze225/zulip,ericzhou2008/zulip,kaiyuanheshang/zulip,eastlhu/zulip,shrikrishnaholla/zulip,KJin99/zulip,ApsOps/zulip,souravbadami/zulip,Drooids/zulip,Frouk/zulip,JanzTam/zulip,jackrzhang/zulip,dxq-git/zulip,suxinde2009/zulip,verma-varsha/zulip,developerfm/zulip,niftynei/zulip,zacps/zulip,umkay/zulip,KingxBanana/zulip,hackerkid/zulip,amyliu345/zulip,akuseru/zulip,KingxBanana/zulip,luyifan/zulip,zhaoweigg/zulip,dotcool/zulip,dattatreya303/zulip,zulip/zulip,DazWorrall/zulip,shrikrishnaholla/zulip,atomic-labs/zulip,jainayush975/zulip,deer-hope/zulip,dotcool/zulip,MariaFaBella85/zulip,atomic-labs/zulip,verma-varsha/zulip,themass/zulip,isht3/zulip,schatt/zulip,JanzTam/zulip,zhaoweigg/zulip,esander91/zulip,adnanh/zulip,vakila/zulip,aps-sids/zulip,littledogboy/zulip,sonali0901/zulip,dxq-git/zulip,showell/zulip,deer-hope/zulip,MariaFaBella85/zulip,guiquanz/zulip,nicholasbs/zulip,aps-sids/zulip,sharmaeklavya2/zulip,SmartPeople/zulip,noroot/zulip,hj3938/zulip,niftynei/zulip,rishig/zulip,souravbadami/zulip,nicholasbs/zulip,bowlofstew/zulip,easyfmxu/zulip,tommyip/zulip,timabbott/zulip,joshisa/zulip,tommyip/zulip,praveenaki/zulip,kokoar/zulip,amyliu345/zulip,calvinleenyc/zulip,peguin40/zulip,jerryge/zulip,krtkmj/zulip,zhaoweigg/zulip,udxxabp/zulip,synicalsyntax/zulip,bssrdf/zulip,stamhe/zulip,susansls/zulip,easyfmxu/zulip,eastlhu/zulip,zwily/zulip,timabbott/zulip,Jianchun1/zulip,Qgap/zulip,yocome/zulip,PhilSk/zulip,gkotian/zulip,umkay/zulip,hafeez3000/zulip,hj3938/zulip,guiquanz/zulip,punchagan/zulip,bitemyapp/zulip,MariaFaBella85/zulip,wangdeshui/zulip,technicalpickles/zulip,punchagan/zulip,aps-sids/zulip,sharmaeklavya2/zulip,jphilipsen05/zulip,armooo/zulip,levixie/zulip,arpitpanwar/zulip,pradiptad/zulip,karamcnair/zulip,seapasulli/zulip,mahim97/zulip,bitemyapp/zulip,vikas-parashar/zulip,LAndreas/zulip,grave-w-grave/zulip,shaunstanislaus/zulip,wweiradio/zulip,LeeRisk/zulip,Diptanshu8/zulip,hayderimran7/zulip,peiwei/zulip,jerryge/zulip,jimmy54/zulip,ikasumiwt/zulip,noroot/zulip,Suninus/zulip,karamcnair/zulip,gigawhitlocks/zulip,Drooids/zulip,synicalsyntax/zulip,vaidap/zulip,peiwei/zulip,showell/zulip,gigawhitlocks/zulip,Suninus/zulip,Batterfii/zulip,saitodisse/zulip,bowlofstew/zulip,aliceriot/zulip,ipernet/zulip,andersk/zulip,MariaFaBella85/zulip,johnny9/zulip,Gabriel0402/zulip,zulip/zulip,Drooids/zulip,KJin99/zulip,themass/zulip,itnihao/zulip,LAndreas/zulip,thomasboyt/zulip,qq1012803704/zulip,levixie/zulip,calvinleenyc/zulip,ashwinirudrappa/zulip,glovebx/zulip,Batterfii/zulip,sup95/zulip,sharmaeklavya2/zulip,ikasumiwt/zulip,babbage/zulip,guiquanz/zulip,eeshangarg/zulip,shaunstanislaus/zulip,lfranchi/zulip,proliming/zulip,andersk/zulip,praveenaki/zulip,kaiyuanheshang/zulip,saitodisse/zulip,saitodisse/zulip,amyliu345/zulip,hafeez3000/zulip,mdavid/zulip,dnmfarrell/zulip,LeeRisk/zulip,ahmadassaf/zulip,johnnygaddarr/zulip,bitemyapp/zulip,zorojean/zulip,zulip/zulip,punchagan/zulip,themass/zulip,jessedhillon/zulip,xuanhan863/zulip,natanovia/zulip,dxq-git/zulip,esander91/zulip,Frouk/zulip,themass/zulip,reyha/zulip,firstblade/zulip,PhilSk/zulip,tdr130/zulip,jeffcao/zulip,adnanh/zulip,wavelets/zulip,hj3938/zulip,dawran6/zulip,LAndreas/zulip,Batterfii/zulip,rht/zulip,tommyip/zulip,aakash-cr7/zulip,praveenaki/zulip,zachallaun/zulip,mansilladev/zulip,natanovia/zulip,AZtheAsian/zulip,isht3/zulip,vabs22/zulip,hj3938/zulip,aakash-cr7/zulip,timabbott/zulip,deer-hope/zulip,tiansiyuan/zulip,zorojean/zulip,willingc/zulip,tdr130/zulip,j831/zulip,ufosky-server/zulip,zorojean/zulip,tiansiyuan/zulip,verma-varsha/zulip,noroot/zulip,mdavid/zulip,JPJPJPOPOP/zulip,qq1012803704/zulip,hengqujushi/zulip,jonesgithub/zulip,hafeez3000/zulip,hafeez3000/zulip,hackerkid/zulip,glovebx/zulip,Juanvulcano/zulip,deer-hope/zulip,dawran6/zulip,joyhchen/zulip,babbage/zulip,dnmfarrell/zulip,xuanhan863/zulip,zofuthan/zulip,arpith/zulip,gkotian/zulip,easyfmxu/zulip,atomic-labs/zulip,tommyip/zulip,zorojean/zulip,susansls/zulip,hackerkid/zulip,zachallaun/zulip,tdr130/zulip,christi3k/zulip,easyfmxu/zulip,showell/zulip,xuxiao/zulip,arpitpanwar/zulip,TigorC/zulip,proliming/zulip,dhcrzf/zulip,aakash-cr7/zulip,bluesea/zulip,LAndreas/zulip,isht3/zulip,Gabriel0402/zulip,j831/zulip,mdavid/zulip,MariaFaBella85/zulip,huangkebo/zulip,bluesea/zulip,Cheppers/zulip,jonesgithub/zulip,dwrpayne/zulip,ryansnowboarder/zulip,punchagan/zulip,easyfmxu/zulip,bssrdf/zulip,MayB/zulip,mansilladev/zulip,dawran6/zulip,SmartPeople/zulip,Cheppers/zulip,mohsenSy/zulip,zulip/zulip,paxapy/zulip,natanovia/zulip,hengqujushi/zulip,bluesea/zulip,saitodisse/zulip,seapasulli/zulip,SmartPeople/zulip,yuvipanda/zulip,ryanbackman/zulip,EasonYi/zulip,qq1012803704/zulip,arpitpanwar/zulip,dnmfarrell/zulip,tiansiyuan/zulip,dxq-git/zulip,avastu/zulip,mdavid/zulip,bastianh/zulip,calvinleenyc/zulip,paxapy/zulip,vikas-parashar/zulip,qq1012803704/zulip,voidException/zulip,calvinleenyc/zulip,zofuthan/zulip,guiquanz/zulip,levixie/zulip,mdavid/zulip,jphilipsen05/zulip,jainayush975/zulip,zacps/zulip,sonali0901/zulip,voidException/zulip,glovebx/zulip,DazWorrall/zulip,rishig/zulip,technicalpickles/zulip,bitemyapp/zulip,ikasumiwt/zulip,huangkebo/zulip,moria/zulip,firstblade/zulip,reyha/zulip,tbutter/zulip,MayB/zulip,Batterfii/zulip,saitodisse/zulip,Gabriel0402/zulip,qq1012803704/zulip,lfranchi/zulip,jackrzhang/zulip,JPJPJPOPOP/zulip,krtkmj/zulip,hafeez3000/zulip,vabs22/zulip,dhcrzf/zulip,aliceriot/zulip,proliming/zulip,amallia/zulip,Qgap/zulip,avastu/zulip,hustlzp/zulip,willingc/zulip,bastianh/zulip,nicholasbs/zulip,glovebx/zulip,pradiptad/zulip,luyifan/zulip,xuxiao/zulip,kaiyuanheshang/zulip,vabs22/zulip,susansls/zulip,proliming/zulip,shubhamdhama/zulip,Qgap/zulip,brockwhittaker/zulip,so0k/zulip,babbage/zulip,kokoar/zulip,jessedhillon/zulip,thomasboyt/zulip,souravbadami/zulip,PaulPetring/zulip,dwrpayne/zulip,esander91/zulip,shrikrishnaholla/zulip,karamcnair/zulip,m1ssou/zulip,developerfm/zulip,alliejones/zulip,paxapy/zulip,Jianchun1/zulip,codeKonami/zulip,moria/zulip,fw1121/zulip,jackrzhang/zulip,lfranchi/zulip,AZtheAsian/zulip,samatdav/zulip,DazWorrall/zulip,arpith/zulip,DazWorrall/zulip,yuvipanda/zulip,Galexrt/zulip,DazWorrall/zulip,tdr130/zulip,bastianh/zulip,umkay/zulip,tbutter/zulip,moria/zulip,ryanbackman/zulip,TigorC/zulip,dattatreya303/zulip,jessedhillon/zulip,kou/zulip,he15his/zulip,jphilipsen05/zulip,schatt/zulip,mahim97/zulip,jrowan/zulip,seapasulli/zulip,zacps/zulip,fw1121/zulip,dnmfarrell/zulip,dhcrzf/zulip,akuseru/zulip,adnanh/zulip,PaulPetring/zulip,fw1121/zulip,mahim97/zulip,jainayush975/zulip,brockwhittaker/zulip,Diptanshu8/zulip,bluesea/zulip,synicalsyntax/zulip,ufosky-server/zulip,thomasboyt/zulip,Gabriel0402/zulip,TigorC/zulip,amanharitsh123/zulip,vikas-parashar/zulip,shubhamdhama/zulip,ipernet/zulip,samatdav/zulip,stamhe/zulip,EasonYi/zulip,cosmicAsymmetry/zulip,aps-sids/zulip,nicholasbs/zulip,ryanbackman/zulip,bowlofstew/zulip,itnihao/zulip,bowlofstew/zulip,KingxBanana/zulip,bitemyapp/zulip,zorojean/zulip,paxapy/zulip,m1ssou/zulip,Vallher/zulip,kokoar/zulip,hustlzp/zulip,hj3938/zulip,Vallher/zulip,andersk/zulip,nicholasbs/zulip,amallia/zulip,rishig/zulip,dawran6/zulip,saitodisse/zulip,dnmfarrell/zulip,ryanbackman/zulip,LAndreas/zulip,luyifan/zulip,itnihao/zulip,armooo/zulip,ufosky-server/zulip,hustlzp/zulip,swinghu/zulip,grave-w-grave/zulip,jimmy54/zulip,mohsenSy/zulip,brockwhittaker/zulip,littledogboy/zulip,amanharitsh123/zulip,dwrpayne/zulip,zofuthan/zulip,MayB/zulip,eastlhu/zulip,bssrdf/zulip,sup95/zulip,ikasumiwt/zulip,vakila/zulip,wdaher/zulip,gkotian/zulip,fw1121/zulip,synicalsyntax/zulip,AZtheAsian/zulip,yuvipanda/zulip,mansilladev/zulip,he15his/zulip,brainwane/zulip,sup95/zulip,amanharitsh123/zulip,alliejones/zulip,xuxiao/zulip,gigawhitlocks/zulip,moria/zulip,vakila/zulip,rht/zulip,ipernet/zulip,Drooids/zulip,stamhe/zulip,armooo/zulip,praveenaki/zulip,schatt/zulip,JPJPJPOPOP/zulip,praveenaki/zulip,JanzTam/zulip,blaze225/zulip,jeffcao/zulip,wweiradio/zulip,willingc/zulip,blaze225/zulip,gkotian/zulip,vaidap/zulip,m1ssou/zulip,calvinleenyc/zulip,xuanhan863/zulip,vaidap/zulip,RobotCaleb/zulip,MayB/zulip,peiwei/zulip,Frouk/zulip,Diptanshu8/zulip,verma-varsha/zulip,hengqujushi/zulip,avastu/zulip,wdaher/zulip,johnny9/zulip,sonali0901/zulip,shubhamdhama/zulip,dotcool/zulip,mohsenSy/zulip,itnihao/zulip,kokoar/zulip,technicalpickles/zulip,susansls/zulip,RobotCaleb/zulip,rishig/zulip,zofuthan/zulip,amanharitsh123/zulip,praveenaki/zulip,wdaher/zulip,seapasulli/zulip,JanzTam/zulip,peguin40/zulip,andersk/zulip,jphilipsen05/zulip,SmartPeople/zulip,Jianchun1/zulip,glovebx/zulip,RobotCaleb/zulip,esander91/zulip,eeshangarg/zulip,joyhchen/zulip,rht/zulip,eastlhu/zulip,gkotian/zulip,wavelets/zulip,grave-w-grave/zulip,tbutter/zulip,atomic-labs/zulip,jerryge/zulip,johnnygaddarr/zulip,johnnygaddarr/zulip,eastlhu/zulip,Vallher/zulip,wweiradio/zulip,Qgap/zulip,jeffcao/zulip,mansilladev/zulip,yocome/zulip,zulip/zulip,jerryge/zulip,yocome/zulip,dwrpayne/zulip,littledogboy/zulip,voidException/zulip,amallia/zulip,bowlofstew/zulip,joyhchen/zulip,jessedhillon/zulip,themass/zulip,dhcrzf/zulip,tdr130/zulip,LeeRisk/zulip,dotcool/zulip,LeeRisk/zulip,jessedhillon/zulip,sup95/zulip,mahim97/zulip,firstblade/zulip,cosmicAsymmetry/zulip,shubhamdhama/zulip,tbutter/zulip,niftynei/zulip,wavelets/zulip,vaidap/zulip,bssrdf/zulip,niftynei/zulip,so0k/zulip,EasonYi/zulip,isht3/zulip,ikasumiwt/zulip,ufosky-server/zulip,synicalsyntax/zulip,levixie/zulip,tommyip/zulip,souravbadami/zulip,shaunstanislaus/zulip,Frouk/zulip,vabs22/zulip,jackrzhang/zulip,JPJPJPOPOP/zulip,glovebx/zulip,jainayush975/zulip,firstblade/zulip,JPJPJPOPOP/zulip,RobotCaleb/zulip,developerfm/zulip,ApsOps/zulip,alliejones/zulip,m1ssou/zulip,sonali0901/zulip,jrowan/zulip,christi3k/zulip,aliceriot/zulip,codeKonami/zulip,atomic-labs/zulip,Vallher/zulip,xuxiao/zulip,Gabriel0402/zulip,vakila/zulip,jonesgithub/zulip,zofuthan/zulip,vaidap/zulip,rishig/zulip,suxinde2009/zulip,willingc/zulip,so0k/zulip,tbutter/zulip,KJin99/zulip,eeshangarg/zulip,MayB/zulip,udxxabp/zulip,udxxabp/zulip,Diptanshu8/zulip,codeKonami/zulip,ryansnowboarder/zulip,zachallaun/zulip,blaze225/zulip,Gabriel0402/zulip,KJin99/zulip,Suninus/zulip,so0k/zulip,hustlzp/zulip,hafeez3000/zulip,dxq-git/zulip,pradiptad/zulip,shrikrishnaholla/zulip,esander91/zulip,EasonYi/zulip,zachallaun/zulip,esander91/zulip,zulip/zulip,zhaoweigg/zulip,voidException/zulip,zhaoweigg/zulip,Cheppers/zulip,zulip/zulip,vakila/zulip,jonesgithub/zulip,Drooids/zulip,johnnygaddarr/zulip,karamcnair/zulip,KingxBanana/zulip,RobotCaleb/zulip,alliejones/zulip,zwily/zulip,wweiradio/zulip,ryansnowboarder/zulip,rht/zulip,ahmadassaf/zulip,rishig/zulip,Jianchun1/zulip,KingxBanana/zulip,j831/zulip,xuanhan863/zulip,dnmfarrell/zulip,arpitpanwar/zulip,zwily/zulip,DazWorrall/zulip,MayB/zulip,yuvipanda/zulip,atomic-labs/zulip,so0k/zulip,schatt/zulip,jackrzhang/zulip,ipernet/zulip,reyha/zulip,paxapy/zulip,amyliu345/zulip,niftynei/zulip,krtkmj/zulip,wangdeshui/zulip,Diptanshu8/zulip,deer-hope/zulip,proliming/zulip,Galexrt/zulip,peguin40/zulip,codeKonami/zulip,bssrdf/zulip,jimmy54/zulip,ufosky-server/zulip,aakash-cr7/zulip,technicalpickles/zulip,zwily/zulip,LeeRisk/zulip,ericzhou2008/zulip,wavelets/zulip,technicalpickles/zulip,brainwane/zulip,swinghu/zulip,hackerkid/zulip,brainwane/zulip,RobotCaleb/zulip,joshisa/zulip,hustlzp/zulip,tdr130/zulip,eeshangarg/zulip,Qgap/zulip,itnihao/zulip,arpith/zulip,tiansiyuan/zulip,willingc/zulip,johnny9/zulip,christi3k/zulip,qq1012803704/zulip,suxinde2009/zulip,JPJPJPOPOP/zulip,hackerkid/zulip,samatdav/zulip,ashwinirudrappa/zulip,JanzTam/zulip,AZtheAsian/zulip,xuxiao/zulip,zhaoweigg/zulip,vakila/zulip,dattatreya303/zulip,kokoar/zulip,LAndreas/zulip,armooo/zulip,Gabriel0402/zulip,xuxiao/zulip,krtkmj/zulip,vikas-parashar/zulip,udxxabp/zulip,grave-w-grave/zulip,adnanh/zulip,showell/zulip,codeKonami/zulip,mansilladev/zulip,yuvipanda/zulip,hafeez3000/zulip,themass/zulip,vakila/zulip,Jianchun1/zulip,akuseru/zulip,jimmy54/zulip,amallia/zulip,cosmicAsymmetry/zulip,peiwei/zulip,moria/zulip,bastianh/zulip,sup95/zulip,pradiptad/zulip,grave-w-grave/zulip,praveenaki/zulip,synicalsyntax/zulip,eastlhu/zulip,eastlhu/zulip,joshisa/zulip,ashwinirudrappa/zulip,glovebx/zulip,jerryge/zulip,arpith/zulip,isht3/zulip,rht/zulip,andersk/zulip,joshisa/zulip,jrowan/zulip,bastianh/zulip,reyha/zulip,LAndreas/zulip,gkotian/zulip,timabbott/zulip,tiansiyuan/zulip,niftynei/zulip,dattatreya303/zulip,cosmicAsymmetry/zulip,susansls/zulip,dxq-git/zulip,noroot/zulip,peiwei/zulip,wweiradio/zulip,showell/zulip,hj3938/zulip,technicalpickles/zulip,karamcnair/zulip,j831/zulip,amallia/zulip,yuvipanda/zulip,blaze225/zulip,brainwane/zulip,brainwane/zulip,bluesea/zulip,wdaher/zulip,showell/zulip,sharmaeklavya2/zulip,hengqujushi/zulip,punchagan/zulip,peguin40/zulip,lfranchi/zulip,ryansnowboarder/zulip,lfranchi/zulip,shubhamdhama/zulip,eeshangarg/zulip,levixie/zulip,so0k/zulip,wavelets/zulip,ahmadassaf/zulip,RobotCaleb/zulip,SmartPeople/zulip,noroot/zulip,jeffcao/zulip,karamcnair/zulip,aps-sids/zulip,thomasboyt/zulip,developerfm/zulip,wangdeshui/zulip,guiquanz/zulip,jerryge/zulip,ApsOps/zulip,hackerkid/zulip,zhaoweigg/zulip,wdaher/zulip,lfranchi/zulip,tommyip/zulip,ApsOps/zulip,hustlzp/zulip,gkotian/zulip,hayderimran7/zulip,timabbott/zulip,Juanvulcano/zulip,eeshangarg/zulip,isht3/zulip,kaiyuanheshang/zulip,ashwinirudrappa/zulip,dattatreya303/zulip,hayderimran7/zulip,ahmadassaf/zulip,samatdav/zulip,umkay/zulip,dhcrzf/zulip,jackrzhang/zulip,dwrpayne/zulip,TigorC/zulip,Cheppers/zulip,saitodisse/zulip,bluesea/zulip,brockwhittaker/zulip,developerfm/zulip,jrowan/zulip,Jianchun1/zulip,ufosky-server/zulip,huangkebo/zulip,littledogboy/zulip,PhilSk/zulip,Batterfii/zulip,cosmicAsymmetry/zulip,j831/zulip,ipernet/zulip,hustlzp/zulip,calvinleenyc/zulip,joshisa/zulip,xuanhan863/zulip,ryansnowboarder/zulip,ipernet/zulip,TigorC/zulip,luyifan/zulip,jimmy54/zulip,firstblade/zulip,babbage/zulip,voidException/zulip,tdr130/zulip,noroot/zulip,huangkebo/zulip,shubhamdhama/zulip,ericzhou2008/zulip,suxinde2009/zulip,hayderimran7/zulip,willingc/zulip,synicalsyntax/zulip,yocome/zulip,voidException/zulip,zachallaun/zulip,armooo/zulip,avastu/zulip,mohsenSy/zulip,joshisa/zulip,armooo/zulip,ryanbackman/zulip,xuxiao/zulip,suxinde2009/zulip,Juanvulcano/zulip,thomasboyt/zulip,akuseru/zulip,aliceriot/zulip,deer-hope/zulip,verma-varsha/zulip,luyifan/zulip,m1ssou/zulip,Drooids/zulip,swinghu/zulip,ashwinirudrappa/zulip,EasonYi/zulip,ryansnowboarder/zulip,technicalpickles/zulip,ahmadassaf/zulip,joyhchen/zulip,peguin40/zulip,yocome/zulip,ashwinirudrappa/zulip,mohsenSy/zulip,wweiradio/zulip,developerfm/zulip,easyfmxu/zulip,jonesgithub/zulip,voidException/zulip,vikas-parashar/zulip,ApsOps/zulip,amyliu345/zulip,mdavid/zulip,jonesgithub/zulip,amallia/zulip,littledogboy/zulip,Suninus/zulip,dxq-git/zulip,mahim97/zulip,LeeRisk/zulip,moria/zulip,alliejones/zulip,Vallher/zulip,jainayush975/zulip,j831/zulip,kou/zulip,swinghu/zulip,hengqujushi/zulip,udxxabp/zulip,xuanhan863/zulip,easyfmxu/zulip,aliceriot/zulip,zacps/zulip,amanharitsh123/zulip,rht/zulip,peiwei/zulip,Batterfii/zulip,vabs22/zulip,schatt/zulip,brainwane/zulip,ericzhou2008/zulip,christi3k/zulip,zwily/zulip,jackrzhang/zulip,swinghu/zulip,yocome/zulip,alliejones/zulip,sharmaeklavya2/zulip,bluesea/zulip,deer-hope/zulip,zachallaun/zulip,bitemyapp/zulip,xuanhan863/zulip,AZtheAsian/zulip,DazWorrall/zulip,arpitpanwar/zulip,dawran6/zulip,paxapy/zulip,hengqujushi/zulip,shubhamdhama/zulip,kou/zulip,yocome/zulip,dwrpayne/zulip,Galexrt/zulip,huangkebo/zulip,PhilSk/zulip,ufosky-server/zulip,fw1121/zulip,joshisa/zulip,wangdeshui/zulip,vikas-parashar/zulip,MayB/zulip,jphilipsen05/zulip,firstblade/zulip
Implement a framework for data retention policies And add customer1.invalid to it. (imported from commit 32b0293bc48abf5d9a3bd36f14f6b16d48ea6ff2)
""" Implements the per-domain data retention policy. The goal is to have a single place where the policy is defined. This is complicated by needing to apply this policy both to the database and to log files. Additionally, we want to use an efficient query for the database, rather than iterating through messages one by one. The code in this module does not actually remove anything; it just identifies which items should be kept or removed. """ import sys import operator from django.utils import timezone from django.db.models import Q from datetime import datetime, timedelta from zephyr.models import Realm, UserMessage # Each domain has a maximum age for retained messages. # # FIXME: Move this into the database. max_age = { 'customer1.invalid': timedelta(days=31), } def should_expunge_from_log(msg, now): """Should a particular log entry be expunged? msg: a log entry dict now: current time for purposes of determining log entry age""" # This function will be called many times, but we want to compare all # entries against a consistent "current time". So the caller passes # that time as a parameter. # FIXME: Yet another place where we compute the domain manually. # See #260. user = msg.get('sender_email') if user is None: user = msg.get('user') if user is None: # Avoid printing the entire message, but give enough information to find it later. print >>sys.stderr, "WARNING: Can't get user for message at", msg['timestamp'] return False domain = user.split('@', 1)[1] if domain not in max_age: # Keep forever. return False age = now - datetime.fromtimestamp(msg['timestamp']) return age > max_age[domain] def get_UserMessages_to_expunge(): """Fetch all UserMessages which should be expunged from the database. After deleting these, you may also want to call Message.remove_unreachable().""" # Unlike retain_in_log, this handles all messages at once, so we # use the actual current time. now = timezone.now() queries = [Q(user_profile__realm = realm, message__pub_date__lt = now - max_age[realm.domain]) for realm in Realm.objects.all() if realm.domain in max_age] if not queries: return UserMessage.objects.none() # Return all objects matching any of the queries in 'queries'. return UserMessage.objects.filter(reduce(operator.or_, queries))
<commit_before><commit_msg>Implement a framework for data retention policies And add customer1.invalid to it. (imported from commit 32b0293bc48abf5d9a3bd36f14f6b16d48ea6ff2)<commit_after>
""" Implements the per-domain data retention policy. The goal is to have a single place where the policy is defined. This is complicated by needing to apply this policy both to the database and to log files. Additionally, we want to use an efficient query for the database, rather than iterating through messages one by one. The code in this module does not actually remove anything; it just identifies which items should be kept or removed. """ import sys import operator from django.utils import timezone from django.db.models import Q from datetime import datetime, timedelta from zephyr.models import Realm, UserMessage # Each domain has a maximum age for retained messages. # # FIXME: Move this into the database. max_age = { 'customer1.invalid': timedelta(days=31), } def should_expunge_from_log(msg, now): """Should a particular log entry be expunged? msg: a log entry dict now: current time for purposes of determining log entry age""" # This function will be called many times, but we want to compare all # entries against a consistent "current time". So the caller passes # that time as a parameter. # FIXME: Yet another place where we compute the domain manually. # See #260. user = msg.get('sender_email') if user is None: user = msg.get('user') if user is None: # Avoid printing the entire message, but give enough information to find it later. print >>sys.stderr, "WARNING: Can't get user for message at", msg['timestamp'] return False domain = user.split('@', 1)[1] if domain not in max_age: # Keep forever. return False age = now - datetime.fromtimestamp(msg['timestamp']) return age > max_age[domain] def get_UserMessages_to_expunge(): """Fetch all UserMessages which should be expunged from the database. After deleting these, you may also want to call Message.remove_unreachable().""" # Unlike retain_in_log, this handles all messages at once, so we # use the actual current time. now = timezone.now() queries = [Q(user_profile__realm = realm, message__pub_date__lt = now - max_age[realm.domain]) for realm in Realm.objects.all() if realm.domain in max_age] if not queries: return UserMessage.objects.none() # Return all objects matching any of the queries in 'queries'. return UserMessage.objects.filter(reduce(operator.or_, queries))
Implement a framework for data retention policies And add customer1.invalid to it. (imported from commit 32b0293bc48abf5d9a3bd36f14f6b16d48ea6ff2)""" Implements the per-domain data retention policy. The goal is to have a single place where the policy is defined. This is complicated by needing to apply this policy both to the database and to log files. Additionally, we want to use an efficient query for the database, rather than iterating through messages one by one. The code in this module does not actually remove anything; it just identifies which items should be kept or removed. """ import sys import operator from django.utils import timezone from django.db.models import Q from datetime import datetime, timedelta from zephyr.models import Realm, UserMessage # Each domain has a maximum age for retained messages. # # FIXME: Move this into the database. max_age = { 'customer1.invalid': timedelta(days=31), } def should_expunge_from_log(msg, now): """Should a particular log entry be expunged? msg: a log entry dict now: current time for purposes of determining log entry age""" # This function will be called many times, but we want to compare all # entries against a consistent "current time". So the caller passes # that time as a parameter. # FIXME: Yet another place where we compute the domain manually. # See #260. user = msg.get('sender_email') if user is None: user = msg.get('user') if user is None: # Avoid printing the entire message, but give enough information to find it later. print >>sys.stderr, "WARNING: Can't get user for message at", msg['timestamp'] return False domain = user.split('@', 1)[1] if domain not in max_age: # Keep forever. return False age = now - datetime.fromtimestamp(msg['timestamp']) return age > max_age[domain] def get_UserMessages_to_expunge(): """Fetch all UserMessages which should be expunged from the database. After deleting these, you may also want to call Message.remove_unreachable().""" # Unlike retain_in_log, this handles all messages at once, so we # use the actual current time. now = timezone.now() queries = [Q(user_profile__realm = realm, message__pub_date__lt = now - max_age[realm.domain]) for realm in Realm.objects.all() if realm.domain in max_age] if not queries: return UserMessage.objects.none() # Return all objects matching any of the queries in 'queries'. return UserMessage.objects.filter(reduce(operator.or_, queries))
<commit_before><commit_msg>Implement a framework for data retention policies And add customer1.invalid to it. (imported from commit 32b0293bc48abf5d9a3bd36f14f6b16d48ea6ff2)<commit_after>""" Implements the per-domain data retention policy. The goal is to have a single place where the policy is defined. This is complicated by needing to apply this policy both to the database and to log files. Additionally, we want to use an efficient query for the database, rather than iterating through messages one by one. The code in this module does not actually remove anything; it just identifies which items should be kept or removed. """ import sys import operator from django.utils import timezone from django.db.models import Q from datetime import datetime, timedelta from zephyr.models import Realm, UserMessage # Each domain has a maximum age for retained messages. # # FIXME: Move this into the database. max_age = { 'customer1.invalid': timedelta(days=31), } def should_expunge_from_log(msg, now): """Should a particular log entry be expunged? msg: a log entry dict now: current time for purposes of determining log entry age""" # This function will be called many times, but we want to compare all # entries against a consistent "current time". So the caller passes # that time as a parameter. # FIXME: Yet another place where we compute the domain manually. # See #260. user = msg.get('sender_email') if user is None: user = msg.get('user') if user is None: # Avoid printing the entire message, but give enough information to find it later. print >>sys.stderr, "WARNING: Can't get user for message at", msg['timestamp'] return False domain = user.split('@', 1)[1] if domain not in max_age: # Keep forever. return False age = now - datetime.fromtimestamp(msg['timestamp']) return age > max_age[domain] def get_UserMessages_to_expunge(): """Fetch all UserMessages which should be expunged from the database. After deleting these, you may also want to call Message.remove_unreachable().""" # Unlike retain_in_log, this handles all messages at once, so we # use the actual current time. now = timezone.now() queries = [Q(user_profile__realm = realm, message__pub_date__lt = now - max_age[realm.domain]) for realm in Realm.objects.all() if realm.domain in max_age] if not queries: return UserMessage.objects.none() # Return all objects matching any of the queries in 'queries'. return UserMessage.objects.filter(reduce(operator.or_, queries))
8953b336dfcb8bd6c69b2af8e960a215a47838f8
Problems/reverseStringInPlace.py
Problems/reverseStringInPlace.py
#!/Applications/anaconda/envs/Python3/bin def main(): # Test suite tests = [ None, [''], ['f','o','o',' ','b','a','r']] for t in tests: print('Testing: {}'.format(t)) print('Result: {}'.format(reverse_string_in_place(t))) return 0 def reverse_string_in_place(chars): ''' Reverses a string (input as a list of chars) in place ''' if chars is None: return None for i in range(len(chars) // 2): chars[i], chars[-i-1] = chars[-i-1], chars[i] return chars if __name__ == '__main__': main()
Add reverse a list of characters in place
Add reverse a list of characters in place
Python
mit
HKuz/Test_Code
Add reverse a list of characters in place
#!/Applications/anaconda/envs/Python3/bin def main(): # Test suite tests = [ None, [''], ['f','o','o',' ','b','a','r']] for t in tests: print('Testing: {}'.format(t)) print('Result: {}'.format(reverse_string_in_place(t))) return 0 def reverse_string_in_place(chars): ''' Reverses a string (input as a list of chars) in place ''' if chars is None: return None for i in range(len(chars) // 2): chars[i], chars[-i-1] = chars[-i-1], chars[i] return chars if __name__ == '__main__': main()
<commit_before><commit_msg>Add reverse a list of characters in place<commit_after>
#!/Applications/anaconda/envs/Python3/bin def main(): # Test suite tests = [ None, [''], ['f','o','o',' ','b','a','r']] for t in tests: print('Testing: {}'.format(t)) print('Result: {}'.format(reverse_string_in_place(t))) return 0 def reverse_string_in_place(chars): ''' Reverses a string (input as a list of chars) in place ''' if chars is None: return None for i in range(len(chars) // 2): chars[i], chars[-i-1] = chars[-i-1], chars[i] return chars if __name__ == '__main__': main()
Add reverse a list of characters in place#!/Applications/anaconda/envs/Python3/bin def main(): # Test suite tests = [ None, [''], ['f','o','o',' ','b','a','r']] for t in tests: print('Testing: {}'.format(t)) print('Result: {}'.format(reverse_string_in_place(t))) return 0 def reverse_string_in_place(chars): ''' Reverses a string (input as a list of chars) in place ''' if chars is None: return None for i in range(len(chars) // 2): chars[i], chars[-i-1] = chars[-i-1], chars[i] return chars if __name__ == '__main__': main()
<commit_before><commit_msg>Add reverse a list of characters in place<commit_after>#!/Applications/anaconda/envs/Python3/bin def main(): # Test suite tests = [ None, [''], ['f','o','o',' ','b','a','r']] for t in tests: print('Testing: {}'.format(t)) print('Result: {}'.format(reverse_string_in_place(t))) return 0 def reverse_string_in_place(chars): ''' Reverses a string (input as a list of chars) in place ''' if chars is None: return None for i in range(len(chars) // 2): chars[i], chars[-i-1] = chars[-i-1], chars[i] return chars if __name__ == '__main__': main()
e8c8464d36e91c9a8d61db0531a2e73dcdee88b7
utilities/tests/test_simulation_utils.py
utilities/tests/test_simulation_utils.py
from utilities.simulation_utilities import check_inputs import pytest import numpy as np @pytest.mark.parametrize("input,expected", [ (None, np.ndarray([0])), ([0], np.array([0])), (1, np.array([1])), (range(5), np.array([0,1,2,3,4])) ]) def test_check_inputs(input, expected): assert np.allclose(check_inputs(input), expected)
Add a test for check_inputs.
Add a test for check_inputs.
Python
mit
jason-neal/companion_simulations,jason-neal/companion_simulations
Add a test for check_inputs.
from utilities.simulation_utilities import check_inputs import pytest import numpy as np @pytest.mark.parametrize("input,expected", [ (None, np.ndarray([0])), ([0], np.array([0])), (1, np.array([1])), (range(5), np.array([0,1,2,3,4])) ]) def test_check_inputs(input, expected): assert np.allclose(check_inputs(input), expected)
<commit_before><commit_msg>Add a test for check_inputs.<commit_after>
from utilities.simulation_utilities import check_inputs import pytest import numpy as np @pytest.mark.parametrize("input,expected", [ (None, np.ndarray([0])), ([0], np.array([0])), (1, np.array([1])), (range(5), np.array([0,1,2,3,4])) ]) def test_check_inputs(input, expected): assert np.allclose(check_inputs(input), expected)
Add a test for check_inputs.from utilities.simulation_utilities import check_inputs import pytest import numpy as np @pytest.mark.parametrize("input,expected", [ (None, np.ndarray([0])), ([0], np.array([0])), (1, np.array([1])), (range(5), np.array([0,1,2,3,4])) ]) def test_check_inputs(input, expected): assert np.allclose(check_inputs(input), expected)
<commit_before><commit_msg>Add a test for check_inputs.<commit_after>from utilities.simulation_utilities import check_inputs import pytest import numpy as np @pytest.mark.parametrize("input,expected", [ (None, np.ndarray([0])), ([0], np.array([0])), (1, np.array([1])), (range(5), np.array([0,1,2,3,4])) ]) def test_check_inputs(input, expected): assert np.allclose(check_inputs(input), expected)
ecef0ea7743f25326183d09622949682ce6feb3c
test/ts.py
test/ts.py
#!/usr/bin/env python ''' Module: test Desc: Test to see how quickly I can parse TS es packets Author: John O'Neil Email: oneil.john@gmail.com DATE: Thursday, October 20th 2016 ''' import os import sys import argparse PACKET_SIZE = 188 SYNC_BYTE = 'G' #generator def next_packet(filename): with open(filename, 'rb') as f: while True: packet = f.read(PACKET_SIZE) if packet: # first byte SHOULD be the sync byte # but if it isn't find one. if packet[0] != SYNC_BYTE: start_byte = 0 print packet[0] for i in range(start_byte, PACKET_SIZE): if packet[i] == SYNC_BYTE: start_byte = i break # didn't find a new start? FAIL if start_byte == 0: #print ":".join("{:02x}".format(ord(c)) for c in packet) raise Exception("failure to find sync byte in ts packet size.") continue remainder = f.read(PACKET_SIZE - start_byte) packet = packet[start_byte:] + remainder yield packet else: break def main(): parser = argparse.ArgumentParser(description='Remove ARIB formatted Closed Caption information from an MPEG TS file and format the results as a standard .ass subtitle file.') parser.add_argument('infile', help='Input filename (MPEG2 Transport Stream File)', type=str) args = parser.parse_args() infilename = args.infile if not os.path.exists(infilename): print 'Input filename :' + infilename + " does not exist." sys.exit(-1) total_filesize = os.path.getsize(infilename) read_size = 0 percent_read = 0 prev_percent_read = percent_read #CC data is not, in itself timestamped, so we've got to use packet info #to reconstruct the timing of the closed captions (i.e. how many seconds into #the file are they shown?) #show initial progress information sys.stdout.write("progress: %d%% \r" % (percent_read) ) sys.stdout.flush() for packet in next_packet(infilename): read_size += PACKET_SIZE percent_read =((read_size/float(total_filesize))* 100) new_percent_read = int(percent_read * 100) if new_percent_read != prev_percent_read: prev_percent_read = new_percent_read sys.stdout.write("progress: %.2f%% \r" % (percent_read) ) sys.stdout.flush() if __name__ == "__main__": main()
Test traverses a TS file quickly
Test traverses a TS file quickly
Python
apache-2.0
johnoneil/arib,johnoneil/arib
Test traverses a TS file quickly
#!/usr/bin/env python ''' Module: test Desc: Test to see how quickly I can parse TS es packets Author: John O'Neil Email: oneil.john@gmail.com DATE: Thursday, October 20th 2016 ''' import os import sys import argparse PACKET_SIZE = 188 SYNC_BYTE = 'G' #generator def next_packet(filename): with open(filename, 'rb') as f: while True: packet = f.read(PACKET_SIZE) if packet: # first byte SHOULD be the sync byte # but if it isn't find one. if packet[0] != SYNC_BYTE: start_byte = 0 print packet[0] for i in range(start_byte, PACKET_SIZE): if packet[i] == SYNC_BYTE: start_byte = i break # didn't find a new start? FAIL if start_byte == 0: #print ":".join("{:02x}".format(ord(c)) for c in packet) raise Exception("failure to find sync byte in ts packet size.") continue remainder = f.read(PACKET_SIZE - start_byte) packet = packet[start_byte:] + remainder yield packet else: break def main(): parser = argparse.ArgumentParser(description='Remove ARIB formatted Closed Caption information from an MPEG TS file and format the results as a standard .ass subtitle file.') parser.add_argument('infile', help='Input filename (MPEG2 Transport Stream File)', type=str) args = parser.parse_args() infilename = args.infile if not os.path.exists(infilename): print 'Input filename :' + infilename + " does not exist." sys.exit(-1) total_filesize = os.path.getsize(infilename) read_size = 0 percent_read = 0 prev_percent_read = percent_read #CC data is not, in itself timestamped, so we've got to use packet info #to reconstruct the timing of the closed captions (i.e. how many seconds into #the file are they shown?) #show initial progress information sys.stdout.write("progress: %d%% \r" % (percent_read) ) sys.stdout.flush() for packet in next_packet(infilename): read_size += PACKET_SIZE percent_read =((read_size/float(total_filesize))* 100) new_percent_read = int(percent_read * 100) if new_percent_read != prev_percent_read: prev_percent_read = new_percent_read sys.stdout.write("progress: %.2f%% \r" % (percent_read) ) sys.stdout.flush() if __name__ == "__main__": main()
<commit_before><commit_msg>Test traverses a TS file quickly<commit_after>
#!/usr/bin/env python ''' Module: test Desc: Test to see how quickly I can parse TS es packets Author: John O'Neil Email: oneil.john@gmail.com DATE: Thursday, October 20th 2016 ''' import os import sys import argparse PACKET_SIZE = 188 SYNC_BYTE = 'G' #generator def next_packet(filename): with open(filename, 'rb') as f: while True: packet = f.read(PACKET_SIZE) if packet: # first byte SHOULD be the sync byte # but if it isn't find one. if packet[0] != SYNC_BYTE: start_byte = 0 print packet[0] for i in range(start_byte, PACKET_SIZE): if packet[i] == SYNC_BYTE: start_byte = i break # didn't find a new start? FAIL if start_byte == 0: #print ":".join("{:02x}".format(ord(c)) for c in packet) raise Exception("failure to find sync byte in ts packet size.") continue remainder = f.read(PACKET_SIZE - start_byte) packet = packet[start_byte:] + remainder yield packet else: break def main(): parser = argparse.ArgumentParser(description='Remove ARIB formatted Closed Caption information from an MPEG TS file and format the results as a standard .ass subtitle file.') parser.add_argument('infile', help='Input filename (MPEG2 Transport Stream File)', type=str) args = parser.parse_args() infilename = args.infile if not os.path.exists(infilename): print 'Input filename :' + infilename + " does not exist." sys.exit(-1) total_filesize = os.path.getsize(infilename) read_size = 0 percent_read = 0 prev_percent_read = percent_read #CC data is not, in itself timestamped, so we've got to use packet info #to reconstruct the timing of the closed captions (i.e. how many seconds into #the file are they shown?) #show initial progress information sys.stdout.write("progress: %d%% \r" % (percent_read) ) sys.stdout.flush() for packet in next_packet(infilename): read_size += PACKET_SIZE percent_read =((read_size/float(total_filesize))* 100) new_percent_read = int(percent_read * 100) if new_percent_read != prev_percent_read: prev_percent_read = new_percent_read sys.stdout.write("progress: %.2f%% \r" % (percent_read) ) sys.stdout.flush() if __name__ == "__main__": main()
Test traverses a TS file quickly#!/usr/bin/env python ''' Module: test Desc: Test to see how quickly I can parse TS es packets Author: John O'Neil Email: oneil.john@gmail.com DATE: Thursday, October 20th 2016 ''' import os import sys import argparse PACKET_SIZE = 188 SYNC_BYTE = 'G' #generator def next_packet(filename): with open(filename, 'rb') as f: while True: packet = f.read(PACKET_SIZE) if packet: # first byte SHOULD be the sync byte # but if it isn't find one. if packet[0] != SYNC_BYTE: start_byte = 0 print packet[0] for i in range(start_byte, PACKET_SIZE): if packet[i] == SYNC_BYTE: start_byte = i break # didn't find a new start? FAIL if start_byte == 0: #print ":".join("{:02x}".format(ord(c)) for c in packet) raise Exception("failure to find sync byte in ts packet size.") continue remainder = f.read(PACKET_SIZE - start_byte) packet = packet[start_byte:] + remainder yield packet else: break def main(): parser = argparse.ArgumentParser(description='Remove ARIB formatted Closed Caption information from an MPEG TS file and format the results as a standard .ass subtitle file.') parser.add_argument('infile', help='Input filename (MPEG2 Transport Stream File)', type=str) args = parser.parse_args() infilename = args.infile if not os.path.exists(infilename): print 'Input filename :' + infilename + " does not exist." os.exit(-1) total_filesize = os.path.getsize(infilename) read_size = 0 percent_read = 0 prev_percent_read = percent_read #CC data is not, in itself timestamped, so we've got to use packet info #to reconstruct the timing of the closed captions (i.e. how many seconds into #the file are they shown?) #show initial progress information sys.stdout.write("progress: %d%% \r" % (percent_read) ) sys.stdout.flush() for packet in next_packet(infilename): read_size += PACKET_SIZE percent_read =((read_size/float(total_filesize))* 100) new_percent_read = int(percent_read * 100) if new_percent_read != prev_percent_read: prev_percent_read = new_percent_read sys.stdout.write("progress: %.2f%% \r" % (percent_read) ) sys.stdout.flush() if __name__ == "__main__": main()
<commit_before><commit_msg>Test traverses a TS file quickly<commit_after>#!/usr/bin/env python ''' Module: test Desc: Test to see how quickly I can parse TS es packets Author: John O'Neil Email: oneil.john@gmail.com DATE: Thursday, October 20th 2016 ''' import os import sys import argparse PACKET_SIZE = 188 SYNC_BYTE = 'G' #generator def next_packet(filename): with open(filename, 'rb') as f: while True: packet = f.read(PACKET_SIZE) if packet: # first byte SHOULD be the sync byte # but if it isn't find one. if packet[0] != SYNC_BYTE: start_byte = 0 print packet[0] for i in range(start_byte, PACKET_SIZE): if packet[i] == SYNC_BYTE: start_byte = i break # didn't find a new start? FAIL if start_byte == 0: #print ":".join("{:02x}".format(ord(c)) for c in packet) raise Exception("failure to find sync byte in ts packet size.") continue remainder = f.read(PACKET_SIZE - start_byte) packet = packet[start_byte:] + remainder yield packet else: break def main(): parser = argparse.ArgumentParser(description='Remove ARIB formatted Closed Caption information from an MPEG TS file and format the results as a standard .ass subtitle file.') parser.add_argument('infile', help='Input filename (MPEG2 Transport Stream File)', type=str) args = parser.parse_args() infilename = args.infile if not os.path.exists(infilename): print 'Input filename :' + infilename + " does not exist." os.exit(-1) total_filesize = os.path.getsize(infilename) read_size = 0 percent_read = 0 prev_percent_read = percent_read #CC data is not, in itself timestamped, so we've got to use packet info #to reconstruct the timing of the closed captions (i.e. how many seconds into #the file are they shown?) #show initial progress information sys.stdout.write("progress: %d%% \r" % (percent_read) ) sys.stdout.flush() for packet in next_packet(infilename): read_size += PACKET_SIZE percent_read =((read_size/float(total_filesize))* 100) new_percent_read = int(percent_read * 100) if new_percent_read != prev_percent_read: prev_percent_read = new_percent_read sys.stdout.write("progress: %.2f%% \r" % (percent_read) ) sys.stdout.flush() if __name__ == "__main__": main()
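For reference, MPEG-TS packets are 188 bytes long and begin with the sync byte 0x47 (ASCII 'G'); the generator above relies on Python 2 string semantics, where indexing a read buffer yields one-character strings. A minimal Python 3 sketch of the same resync logic, assuming the same 188-byte packets (the name next_packet_py3 is illustrative, not from the original repo):

PACKET_SIZE = 188
SYNC_BYTE = 0x47  # ASCII 'G'; indexing bytes yields ints in Python 3

def next_packet_py3(filename):
    with open(filename, 'rb') as f:
        while True:
            packet = f.read(PACKET_SIZE)
            if not packet:
                break
            if packet[0] != SYNC_BYTE:
                start_byte = 0
                for i in range(1, len(packet)):
                    if packet[i] == SYNC_BYTE:
                        start_byte = i
                        break
                if start_byte == 0:
                    raise Exception("failure to find sync byte in ts packet size.")
                # keep the aligned tail and top it up to a full 188 bytes
                packet = packet[start_byte:] + f.read(start_byte)
            yield packet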
b5c6e37bdcb88545d187ad1e3adbbbe1d466f874
py/knight-probability-in-chessboard.py
py/knight-probability-in-chessboard.py
from collections import Counter
from math import log, exp


class Solution(object):
    def knightProbability(self, N, K, r, c):
        """
        :type N: int
        :type K: int
        :type r: int
        :type c: int
        :rtype: float
        """
        p = Counter()
        p[r, c] += 1
        ds = [(-1, -2), (-2, -1), (1, -2), (2, -1), (1, 2), (2, 1), (-1, 2), (-2, 1)]
        for i in xrange(K):
            np = Counter()
            for (px, py), prev_p in p.iteritems():
                for dx, dy in ds:
                    nx, ny = px + dx, py + dy
                    if 0 <= nx < N and 0 <= ny < N:
                        np[nx, ny] += prev_p
            p = np
        s = sum(p.values())
        return 0 if s == 0 else exp(log(s) - K * log(8))
Add py solution for 688. Knight Probability in Chessboard
Add py solution for 688. Knight Probability in Chessboard 688. Knight Probability in Chessboard: https://leetcode.com/problems/knight-probability-in-chessboard/
Python
apache-2.0
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
Add py solution for 688. Knight Probability in Chessboard 688. Knight Probability in Chessboard: https://leetcode.com/problems/knight-probability-in-chessboard/
from collections import Counter
from math import log, exp


class Solution(object):
    def knightProbability(self, N, K, r, c):
        """
        :type N: int
        :type K: int
        :type r: int
        :type c: int
        :rtype: float
        """
        p = Counter()
        p[r, c] += 1
        ds = [(-1, -2), (-2, -1), (1, -2), (2, -1), (1, 2), (2, 1), (-1, 2), (-2, 1)]
        for i in xrange(K):
            np = Counter()
            for (px, py), prev_p in p.iteritems():
                for dx, dy in ds:
                    nx, ny = px + dx, py + dy
                    if 0 <= nx < N and 0 <= ny < N:
                        np[nx, ny] += prev_p
            p = np
        s = sum(p.values())
        return 0 if s == 0 else exp(log(s) - K * log(8))
<commit_before><commit_msg>Add py solution for 688. Knight Probability in Chessboard 688. Knight Probability in Chessboard: https://leetcode.com/problems/knight-probability-in-chessboard/<commit_after>
from collections import Counter
from math import log, exp


class Solution(object):
    def knightProbability(self, N, K, r, c):
        """
        :type N: int
        :type K: int
        :type r: int
        :type c: int
        :rtype: float
        """
        p = Counter()
        p[r, c] += 1
        ds = [(-1, -2), (-2, -1), (1, -2), (2, -1), (1, 2), (2, 1), (-1, 2), (-2, 1)]
        for i in xrange(K):
            np = Counter()
            for (px, py), prev_p in p.iteritems():
                for dx, dy in ds:
                    nx, ny = px + dx, py + dy
                    if 0 <= nx < N and 0 <= ny < N:
                        np[nx, ny] += prev_p
            p = np
        s = sum(p.values())
        return 0 if s == 0 else exp(log(s) - K * log(8))
Add py solution for 688. Knight Probability in Chessboard 688. Knight Probability in Chessboard: https://leetcode.com/problems/knight-probability-in-chessboard/from collections import Counter
from math import log, exp


class Solution(object):
    def knightProbability(self, N, K, r, c):
        """
        :type N: int
        :type K: int
        :type r: int
        :type c: int
        :rtype: float
        """
        p = Counter()
        p[r, c] += 1
        ds = [(-1, -2), (-2, -1), (1, -2), (2, -1), (1, 2), (2, 1), (-1, 2), (-2, 1)]
        for i in xrange(K):
            np = Counter()
            for (px, py), prev_p in p.iteritems():
                for dx, dy in ds:
                    nx, ny = px + dx, py + dy
                    if 0 <= nx < N and 0 <= ny < N:
                        np[nx, ny] += prev_p
            p = np
        s = sum(p.values())
        return 0 if s == 0 else exp(log(s) - K * log(8))
<commit_before><commit_msg>Add py solution for 688. Knight Probability in Chessboard 688. Knight Probability in Chessboard: https://leetcode.com/problems/knight-probability-in-chessboard/<commit_after>from collections import Counter
from math import log, exp


class Solution(object):
    def knightProbability(self, N, K, r, c):
        """
        :type N: int
        :type K: int
        :type r: int
        :type c: int
        :rtype: float
        """
        p = Counter()
        p[r, c] += 1
        ds = [(-1, -2), (-2, -1), (1, -2), (2, -1), (1, 2), (2, 1), (-1, 2), (-2, 1)]
        for i in xrange(K):
            np = Counter()
            for (px, py), prev_p in p.iteritems():
                for dx, dy in ds:
                    nx, ny = px + dx, py + dy
                    if 0 <= nx < N and 0 <= ny < N:
                        np[nx, ny] += prev_p
            p = np
        s = sum(p.values())
        return 0 if s == 0 else exp(log(s) - K * log(8))
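The solution above is a dynamic program: p counts, square by square, how many of the 8^K possible K-move sequences end on each square, and the division by 8^K is done in log space (exp(log(s) - K*log(8))) so large path counts do not overflow a float. Two hedged sanity checks under Python 2 (the board sizes here are illustrative):

# From the centre of a 3x3 board every knight move leaves the board,
# so staying on board for two moves is impossible.
assert Solution().knightProbability(3, 2, 1, 1) == 0
# With K = 0 the knight never moves and certainly stays on the board.
assert Solution().knightProbability(8, 0, 0, 0) == 1.0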
f80acf05f7d492f3716be961b88c4e82d332500c
dbaas/util/update_instances_with_offering.py
dbaas/util/update_instances_with_offering.py
# coding: utf-8


class UpdateInstances(object):

    @staticmethod
    def do():
        from dbaas_cloudstack.models import DatabaseInfraOffering
        from dbaas_cloudstack.models import PlanAttr
        from physical.models import Instance

        infra_offerings = DatabaseInfraOffering.objects.all()

        for infra_offering in infra_offerings:
            plan_attr = PlanAttr.objects.get(plan=infra_offering.databaseinfra.plan)
            strong_offering = infra_offering.offering
            weaker_offering = plan_attr.get_weaker_offering()

            for instance in infra_offering.databaseinfra.instances.all():
                if (instance.instance_type == Instance.MONGODB_ARBITER or
                        instance.instance_type == Instance.Sentinel):
                    instance.offering = weaker_offering
                else:
                    instance.offering = strong_offering

                instance.save()
Create class to update instances with offering
Create class to update instances with offering
Python
bsd-3-clause
globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service,globocom/database-as-a-service
Create class to update instances with offering
# coding: utf-8


class UpdateInstances(object):

    @staticmethod
    def do():
        from dbaas_cloudstack.models import DatabaseInfraOffering
        from dbaas_cloudstack.models import PlanAttr
        from physical.models import Instance

        infra_offerings = DatabaseInfraOffering.objects.all()

        for infra_offering in infra_offerings:
            plan_attr = PlanAttr.objects.get(plan=infra_offering.databaseinfra.plan)
            strong_offering = infra_offering.offering
            weaker_offering = plan_attr.get_weaker_offering()

            for instance in infra_offering.databaseinfra.instances.all():
                if (instance.instance_type == Instance.MONGODB_ARBITER or
                        instance.instance_type == Instance.Sentinel):
                    instance.offering = weaker_offering
                else:
                    instance.offering = strong_offering

                instance.save()
<commit_before><commit_msg>Create class to update instances with offering<commit_after>
# coding: utf-8


class UpdateInstances(object):

    @staticmethod
    def do():
        from dbaas_cloudstack.models import DatabaseInfraOffering
        from dbaas_cloudstack.models import PlanAttr
        from physical.models import Instance

        infra_offerings = DatabaseInfraOffering.objects.all()

        for infra_offering in infra_offerings:
            plan_attr = PlanAttr.objects.get(plan=infra_offering.databaseinfra.plan)
            strong_offering = infra_offering.offering
            weaker_offering = plan_attr.get_weaker_offering()

            for instance in infra_offering.databaseinfra.instances.all():
                if (instance.instance_type == Instance.MONGODB_ARBITER or
                        instance.instance_type == Instance.Sentinel):
                    instance.offering = weaker_offering
                else:
                    instance.offering = strong_offering

                instance.save()
Create class to update instances with offering# coding: utf-8


class UpdateInstances(object):

    @staticmethod
    def do():
        from dbaas_cloudstack.models import DatabaseInfraOffering
        from dbaas_cloudstack.models import PlanAttr
        from physical.models import Instance

        infra_offerings = DatabaseInfraOffering.objects.all()

        for infra_offering in infra_offerings:
            plan_attr = PlanAttr.objects.get(plan=infra_offering.databaseinfra.plan)
            strong_offering = infra_offering.offering
            weaker_offering = plan_attr.get_weaker_offering()

            for instance in infra_offering.databaseinfra.instances.all():
                if (instance.instance_type == Instance.MONGODB_ARBITER or
                        instance.instance_type == Instance.Sentinel):
                    instance.offering = weaker_offering
                else:
                    instance.offering = strong_offering

                instance.save()
<commit_before><commit_msg>Create class to update instances with offering<commit_after># coding: utf-8


class UpdateInstances(object):

    @staticmethod
    def do():
        from dbaas_cloudstack.models import DatabaseInfraOffering
        from dbaas_cloudstack.models import PlanAttr
        from physical.models import Instance

        infra_offerings = DatabaseInfraOffering.objects.all()

        for infra_offering in infra_offerings:
            plan_attr = PlanAttr.objects.get(plan=infra_offering.databaseinfra.plan)
            strong_offering = infra_offering.offering
            weaker_offering = plan_attr.get_weaker_offering()

            for instance in infra_offering.databaseinfra.instances.all():
                if (instance.instance_type == Instance.MONGODB_ARBITER or
                        instance.instance_type == Instance.Sentinel):
                    instance.offering = weaker_offering
                else:
                    instance.offering = strong_offering

                instance.save()
112e3516d7c77f23edfed704651e8c5e7f7d75e4
scripts/DEV/adm/heatmap.py
scripts/DEV/adm/heatmap.py
import numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from pyiem.plot import MapPlot
import sys

year = int(sys.argv[1])

x = []
y = []
for linenum, line in enumerate(open('visit_history_093013_st12.csv')):
    if linenum == 0:
        continue
    tokens = line.split(",")
    if int(tokens[5]) != year:
        continue
    try:
        y.append(float(tokens[17].strip()))
        x.append(float(tokens[18].strip()))
    except:
        continue

H2, xedges, yedges = np.histogram2d(y, x, bins=(50, 100), range=[[25, 50], [-130, -60]])

m = MapPlot(sector='conus',
            title='Heat Map of Location of Visitors, year=%s' % (year,),
            subtitle='from visit_history_093013_st12.csv', nologo=True)
x, y = np.meshgrid(yedges, xedges)

levels = [1, 2, 5, 7, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 90, 100, 200]
H3 = ma.array(H2)
H3.mask = np.where(H2 < 1, True, False)
cmap = cm.get_cmap('jet')
cmap.set_under('white')
cmap.set_over('black')
m.pcolormesh(x, y, H3, levels, cmap=cmap, units='count')
#m.drawcounties()
m.postprocess(filename='conus_heatmap_%s.png' % (year,))
Add example heat map script for data
Add example heat map script for data
Python
mit
akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem,akrherz/iem
Add example heat map script for data
import numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from pyiem.plot import MapPlot
import sys

year = int(sys.argv[1])

x = []
y = []
for linenum, line in enumerate(open('visit_history_093013_st12.csv')):
    if linenum == 0:
        continue
    tokens = line.split(",")
    if int(tokens[5]) != year:
        continue
    try:
        y.append(float(tokens[17].strip()))
        x.append(float(tokens[18].strip()))
    except:
        continue

H2, xedges, yedges = np.histogram2d(y, x, bins=(50, 100), range=[[25, 50], [-130, -60]])

m = MapPlot(sector='conus',
            title='Heat Map of Location of Visitors, year=%s' % (year,),
            subtitle='from visit_history_093013_st12.csv', nologo=True)
x, y = np.meshgrid(yedges, xedges)

levels = [1, 2, 5, 7, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 90, 100, 200]
H3 = ma.array(H2)
H3.mask = np.where(H2 < 1, True, False)
cmap = cm.get_cmap('jet')
cmap.set_under('white')
cmap.set_over('black')
m.pcolormesh(x, y, H3, levels, cmap=cmap, units='count')
#m.drawcounties()
m.postprocess(filename='conus_heatmap_%s.png' % (year,))
<commit_before><commit_msg>Add example heat map script for data<commit_after>
import numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from pyiem.plot import MapPlot
import sys

year = int(sys.argv[1])

x = []
y = []
for linenum, line in enumerate(open('visit_history_093013_st12.csv')):
    if linenum == 0:
        continue
    tokens = line.split(",")
    if int(tokens[5]) != year:
        continue
    try:
        y.append(float(tokens[17].strip()))
        x.append(float(tokens[18].strip()))
    except:
        continue

H2, xedges, yedges = np.histogram2d(y, x, bins=(50, 100), range=[[25, 50], [-130, -60]])

m = MapPlot(sector='conus',
            title='Heat Map of Location of Visitors, year=%s' % (year,),
            subtitle='from visit_history_093013_st12.csv', nologo=True)
x, y = np.meshgrid(yedges, xedges)

levels = [1, 2, 5, 7, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 90, 100, 200]
H3 = ma.array(H2)
H3.mask = np.where(H2 < 1, True, False)
cmap = cm.get_cmap('jet')
cmap.set_under('white')
cmap.set_over('black')
m.pcolormesh(x, y, H3, levels, cmap=cmap, units='count')
#m.drawcounties()
m.postprocess(filename='conus_heatmap_%s.png' % (year,))
Add example heat map script for dataimport numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from pyiem.plot import MapPlot
import sys

year = int(sys.argv[1])

x = []
y = []
for linenum, line in enumerate(open('visit_history_093013_st12.csv')):
    if linenum == 0:
        continue
    tokens = line.split(",")
    if int(tokens[5]) != year:
        continue
    try:
        y.append(float(tokens[17].strip()))
        x.append(float(tokens[18].strip()))
    except:
        continue

H2, xedges, yedges = np.histogram2d(y, x, bins=(50, 100), range=[[25, 50], [-130, -60]])

m = MapPlot(sector='conus',
            title='Heat Map of Location of Visitors, year=%s' % (year,),
            subtitle='from visit_history_093013_st12.csv', nologo=True)
x, y = np.meshgrid(yedges, xedges)

levels = [1, 2, 5, 7, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 90, 100, 200]
H3 = ma.array(H2)
H3.mask = np.where(H2 < 1, True, False)
cmap = cm.get_cmap('jet')
cmap.set_under('white')
cmap.set_over('black')
m.pcolormesh(x, y, H3, levels, cmap=cmap, units='count')
#m.drawcounties()
m.postprocess(filename='conus_heatmap_%s.png' % (year,))
<commit_before><commit_msg>Add example heat map script for data<commit_after>import numpy as np
import numpy.ma as ma
import matplotlib.pyplot as plt
import matplotlib.cm as cm
from pyiem.plot import MapPlot
import sys

year = int(sys.argv[1])

x = []
y = []
for linenum, line in enumerate(open('visit_history_093013_st12.csv')):
    if linenum == 0:
        continue
    tokens = line.split(",")
    if int(tokens[5]) != year:
        continue
    try:
        y.append(float(tokens[17].strip()))
        x.append(float(tokens[18].strip()))
    except:
        continue

H2, xedges, yedges = np.histogram2d(y, x, bins=(50, 100), range=[[25, 50], [-130, -60]])

m = MapPlot(sector='conus',
            title='Heat Map of Location of Visitors, year=%s' % (year,),
            subtitle='from visit_history_093013_st12.csv', nologo=True)
x, y = np.meshgrid(yedges, xedges)

levels = [1, 2, 5, 7, 10, 15, 20, 25, 30, 40, 50, 60, 70, 80, 90, 100, 200]
H3 = ma.array(H2)
H3.mask = np.where(H2 < 1, True, False)
cmap = cm.get_cmap('jet')
cmap.set_under('white')
cmap.set_over('black')
m.pcolormesh(x, y, H3, levels, cmap=cmap, units='count')
#m.drawcounties()
m.postprocess(filename='conus_heatmap_%s.png' % (year,))
62a860112db8c263d26eb9e4cdb67d23ce61848a
python/opencv/opencv_2/write_image.py
python/opencv/opencv_2/write_image.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)

"""
OpenCV - Write image: write an image given in arguments

Required: opencv library (Debian: aptitude install python-opencv)

See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#write-an-image
"""

from __future__ import print_function

import cv2 as cv
import argparse


def main():

    # Parse the program options (get the path of the image files to read and write)

    parser = argparse.ArgumentParser(description='An opencv snippet.')
    parser.add_argument("--infile", "-i", help="The picture file to read", required=True, metavar="FILE")
    parser.add_argument("--outfile", "-o", help="The picture file to write", required=True, metavar="FILE")
    args = parser.parse_args()

    infile_str = args.infile
    outfile_str = args.outfile

    # OpenCV

    # imread_flags is a flag which specifies the way image should be read:
    # - cv.IMREAD_COLOR loads a color image. Any transparency of image will be neglected. It is the default flag.
    # - cv.IMREAD_GRAYSCALE loads image in grayscale mode
    # - cv.IMREAD_UNCHANGED loads image as such including alpha channel
    imread_flags = cv.IMREAD_GRAYSCALE

    img_np_array = cv.imread(infile_str, imread_flags)  # Read the image

    cv.imwrite(outfile_str, img_np_array)  # Write the image


if __name__ == '__main__':
    main()
Add a snippet (Python OpenCV).
Add a snippet (Python OpenCV).
Python
mit
jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets,jeremiedecock/snippets
Add a snippet (Python OpenCV).
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)

"""
OpenCV - Write image: write an image given in arguments

Required: opencv library (Debian: aptitude install python-opencv)

See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#write-an-image
"""

from __future__ import print_function

import cv2 as cv
import argparse


def main():

    # Parse the program options (get the path of the image files to read and write)

    parser = argparse.ArgumentParser(description='An opencv snippet.')
    parser.add_argument("--infile", "-i", help="The picture file to read", required=True, metavar="FILE")
    parser.add_argument("--outfile", "-o", help="The picture file to write", required=True, metavar="FILE")
    args = parser.parse_args()

    infile_str = args.infile
    outfile_str = args.outfile

    # OpenCV

    # imread_flags is a flag which specifies the way image should be read:
    # - cv.IMREAD_COLOR loads a color image. Any transparency of image will be neglected. It is the default flag.
    # - cv.IMREAD_GRAYSCALE loads image in grayscale mode
    # - cv.IMREAD_UNCHANGED loads image as such including alpha channel
    imread_flags = cv.IMREAD_GRAYSCALE

    img_np_array = cv.imread(infile_str, imread_flags)  # Read the image

    cv.imwrite(outfile_str, img_np_array)  # Write the image


if __name__ == '__main__':
    main()
<commit_before><commit_msg>Add a snippet (Python OpenCV).<commit_after>
#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)

"""
OpenCV - Write image: write an image given in arguments

Required: opencv library (Debian: aptitude install python-opencv)

See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#write-an-image
"""

from __future__ import print_function

import cv2 as cv
import argparse


def main():

    # Parse the program options (get the path of the image files to read and write)

    parser = argparse.ArgumentParser(description='An opencv snippet.')
    parser.add_argument("--infile", "-i", help="The picture file to read", required=True, metavar="FILE")
    parser.add_argument("--outfile", "-o", help="The picture file to write", required=True, metavar="FILE")
    args = parser.parse_args()

    infile_str = args.infile
    outfile_str = args.outfile

    # OpenCV

    # imread_flags is a flag which specifies the way image should be read:
    # - cv.IMREAD_COLOR loads a color image. Any transparency of image will be neglected. It is the default flag.
    # - cv.IMREAD_GRAYSCALE loads image in grayscale mode
    # - cv.IMREAD_UNCHANGED loads image as such including alpha channel
    imread_flags = cv.IMREAD_GRAYSCALE

    img_np_array = cv.imread(infile_str, imread_flags)  # Read the image

    cv.imwrite(outfile_str, img_np_array)  # Write the image


if __name__ == '__main__':
    main()
Add a snippet (Python OpenCV).#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)

"""
OpenCV - Write image: write an image given in arguments

Required: opencv library (Debian: aptitude install python-opencv)

See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#write-an-image
"""

from __future__ import print_function

import cv2 as cv
import argparse


def main():

    # Parse the program options (get the path of the image files to read and write)

    parser = argparse.ArgumentParser(description='An opencv snippet.')
    parser.add_argument("--infile", "-i", help="The picture file to read", required=True, metavar="FILE")
    parser.add_argument("--outfile", "-o", help="The picture file to write", required=True, metavar="FILE")
    args = parser.parse_args()

    infile_str = args.infile
    outfile_str = args.outfile

    # OpenCV

    # imread_flags is a flag which specifies the way image should be read:
    # - cv.IMREAD_COLOR loads a color image. Any transparency of image will be neglected. It is the default flag.
    # - cv.IMREAD_GRAYSCALE loads image in grayscale mode
    # - cv.IMREAD_UNCHANGED loads image as such including alpha channel
    imread_flags = cv.IMREAD_GRAYSCALE

    img_np_array = cv.imread(infile_str, imread_flags)  # Read the image

    cv.imwrite(outfile_str, img_np_array)  # Write the image


if __name__ == '__main__':
    main()
<commit_before><commit_msg>Add a snippet (Python OpenCV).<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-

# Copyright (c) 2015 Jérémie DECOCK (http://www.jdhp.org)

"""
OpenCV - Write image: write an image given in arguments

Required: opencv library (Debian: aptitude install python-opencv)

See: https://opencv-python-tutroals.readthedocs.org/en/latest/py_tutorials/py_gui/py_image_display/py_image_display.html#write-an-image
"""

from __future__ import print_function

import cv2 as cv
import argparse


def main():

    # Parse the program options (get the path of the image files to read and write)

    parser = argparse.ArgumentParser(description='An opencv snippet.')
    parser.add_argument("--infile", "-i", help="The picture file to read", required=True, metavar="FILE")
    parser.add_argument("--outfile", "-o", help="The picture file to write", required=True, metavar="FILE")
    args = parser.parse_args()

    infile_str = args.infile
    outfile_str = args.outfile

    # OpenCV

    # imread_flags is a flag which specifies the way image should be read:
    # - cv.IMREAD_COLOR loads a color image. Any transparency of image will be neglected. It is the default flag.
    # - cv.IMREAD_GRAYSCALE loads image in grayscale mode
    # - cv.IMREAD_UNCHANGED loads image as such including alpha channel
    imread_flags = cv.IMREAD_GRAYSCALE

    img_np_array = cv.imread(infile_str, imread_flags)  # Read the image

    cv.imwrite(outfile_str, img_np_array)  # Write the image


if __name__ == '__main__':
    main()
dcf49377defe8ce3debadc8d0c09b683663f7b73
space/migrations/0011_privateapikey.py
space/migrations/0011_privateapikey.py
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
from django.conf import settings
import uuid


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('space', '0010_auto_20151129_2322'),
    ]

    operations = [
        migrations.CreateModel(
            name='PrivateAPIKey',
            fields=[
                ('key', models.UUIDField(primary_key=True, default=uuid.uuid4, verbose_name='Clef', editable=False, serialize=False)),
                ('name', models.CharField(max_length=250, verbose_name='Utilisée pour')),
                ('active', models.BooleanField(default=False)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, verbose_name='Utilisateur')),
            ],
            options={
                'verbose_name_plural': "Clefs d'accès à l'API privée",
                'verbose_name': "Clef d'accès à l'API privée",
            },
        ),
    ]
Add migration for the new PrivateAPIKey model (Bravo Nils !)
Add migration for the new PrivateAPIKey model (Bravo Nils !)
Python
agpl-3.0
UrLab/incubator,UrLab/incubator,UrLab/incubator,UrLab/incubator
Add migration for the new PrivateAPIKey model (Bravo Nils !)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
from django.conf import settings
import uuid


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('space', '0010_auto_20151129_2322'),
    ]

    operations = [
        migrations.CreateModel(
            name='PrivateAPIKey',
            fields=[
                ('key', models.UUIDField(primary_key=True, default=uuid.uuid4, verbose_name='Clef', editable=False, serialize=False)),
                ('name', models.CharField(max_length=250, verbose_name='Utilisée pour')),
                ('active', models.BooleanField(default=False)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, verbose_name='Utilisateur')),
            ],
            options={
                'verbose_name_plural': "Clefs d'accès à l'API privée",
                'verbose_name': "Clef d'accès à l'API privée",
            },
        ),
    ]
<commit_before><commit_msg>Add migration for the new PrivateAPIKey model (Bravo Nils !)<commit_after>
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
from django.conf import settings
import uuid


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('space', '0010_auto_20151129_2322'),
    ]

    operations = [
        migrations.CreateModel(
            name='PrivateAPIKey',
            fields=[
                ('key', models.UUIDField(primary_key=True, default=uuid.uuid4, verbose_name='Clef', editable=False, serialize=False)),
                ('name', models.CharField(max_length=250, verbose_name='Utilisée pour')),
                ('active', models.BooleanField(default=False)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, verbose_name='Utilisateur')),
            ],
            options={
                'verbose_name_plural': "Clefs d'accès à l'API privée",
                'verbose_name': "Clef d'accès à l'API privée",
            },
        ),
    ]
Add migration for the new PrivateAPIKey model (Bravo Nils !)# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
from django.conf import settings
import uuid


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('space', '0010_auto_20151129_2322'),
    ]

    operations = [
        migrations.CreateModel(
            name='PrivateAPIKey',
            fields=[
                ('key', models.UUIDField(primary_key=True, default=uuid.uuid4, verbose_name='Clef', editable=False, serialize=False)),
                ('name', models.CharField(max_length=250, verbose_name='Utilisée pour')),
                ('active', models.BooleanField(default=False)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, verbose_name='Utilisateur')),
            ],
            options={
                'verbose_name_plural': "Clefs d'accès à l'API privée",
                'verbose_name': "Clef d'accès à l'API privée",
            },
        ),
    ]
<commit_before><commit_msg>Add migration for the new PrivateAPIKey model (Bravo Nils !)<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import migrations, models
from django.conf import settings
import uuid


class Migration(migrations.Migration):

    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
        ('space', '0010_auto_20151129_2322'),
    ]

    operations = [
        migrations.CreateModel(
            name='PrivateAPIKey',
            fields=[
                ('key', models.UUIDField(primary_key=True, default=uuid.uuid4, verbose_name='Clef', editable=False, serialize=False)),
                ('name', models.CharField(max_length=250, verbose_name='Utilisée pour')),
                ('active', models.BooleanField(default=False)),
                ('user', models.ForeignKey(to=settings.AUTH_USER_MODEL, verbose_name='Utilisateur')),
            ],
            options={
                'verbose_name_plural': "Clefs d'accès à l'API privée",
                'verbose_name': "Clef d'accès à l'API privée",
            },
        ),
    ]
e78e56e38dfa7bfac79bc3a699ca76236d700e2a
tests/grammar_term-nonterm_test/TerminalAddWhenCreatingTest.py
tests/grammar_term-nonterm_test/TerminalAddWhenCreatingTest.py
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy

"""

from unittest import TestCase
from grammpy import Grammar
from grammpy.Terminal import Terminal


class TerminalAddWhenCreatingTest(TestCase):
    def test_addOneInArray(self):
        gr = Grammar(terminals=['A'])
        self.assertTrue(gr.have_term('A'))

    def test_addTwoInArray(self):
        gr = Grammar(terminals=['A', 0])
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term(0))
        self.assertTrue(gr.have_term([0, 'A']))

    def test_addOneSeparate(self):
        gr = Grammar(terminals='A')
        self.assertTrue(gr.have_term('A'))

    def test_addThreeInString(self):
        gr = Grammar(terminals='ABC')
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(('A', 'B', 'C')))
        self.assertFalse(gr.have_term('D'))

    def test_addThreeInTuple(self):
        gr = Grammar(terminals=('A', 'B', 'C'))
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'B', 'C']))
        self.assertFalse(gr.have_term('D'))

    def test_addThreeOneDelete(self):
        gr = Grammar(terminals=('A', 'B', 'C'))
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'B', 'C']))
        self.assertFalse(gr.have_term('D'))
        gr.remove_term('B')
        self.assertTrue(gr.have_term('A'))
        self.assertFalse(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'C']))
        self.assertFalse(gr.have_term('D'))
Add tests creation of grammar with terminals as parameters
Add tests creation of grammar with terminals as parameters
Python
mit
PatrikValkovic/grammpy
Add tests creation of grammar with terminals as parameters
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy

"""

from unittest import TestCase
from grammpy import Grammar
from grammpy.Terminal import Terminal


class TerminalAddWhenCreatingTest(TestCase):
    def test_addOneInArray(self):
        gr = Grammar(terminals=['A'])
        self.assertTrue(gr.have_term('A'))

    def test_addTwoInArray(self):
        gr = Grammar(terminals=['A', 0])
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term(0))
        self.assertTrue(gr.have_term([0, 'A']))

    def test_addOneSeparate(self):
        gr = Grammar(terminals='A')
        self.assertTrue(gr.have_term('A'))

    def test_addThreeInString(self):
        gr = Grammar(terminals='ABC')
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(('A', 'B', 'C')))
        self.assertFalse(gr.have_term('D'))

    def test_addThreeInTuple(self):
        gr = Grammar(terminals=('A', 'B', 'C'))
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'B', 'C']))
        self.assertFalse(gr.have_term('D'))

    def test_addThreeOneDelete(self):
        gr = Grammar(terminals=('A', 'B', 'C'))
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'B', 'C']))
        self.assertFalse(gr.have_term('D'))
        gr.remove_term('B')
        self.assertTrue(gr.have_term('A'))
        self.assertFalse(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'C']))
        self.assertFalse(gr.have_term('D'))
<commit_before><commit_msg>Add tests creation of grammar with terminals as parameters<commit_after>
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy

"""

from unittest import TestCase
from grammpy import Grammar
from grammpy.Terminal import Terminal


class TerminalAddWhenCreatingTest(TestCase):
    def test_addOneInArray(self):
        gr = Grammar(terminals=['A'])
        self.assertTrue(gr.have_term('A'))

    def test_addTwoInArray(self):
        gr = Grammar(terminals=['A', 0])
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term(0))
        self.assertTrue(gr.have_term([0, 'A']))

    def test_addOneSeparate(self):
        gr = Grammar(terminals='A')
        self.assertTrue(gr.have_term('A'))

    def test_addThreeInString(self):
        gr = Grammar(terminals='ABC')
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(('A', 'B', 'C')))
        self.assertFalse(gr.have_term('D'))

    def test_addThreeInTuple(self):
        gr = Grammar(terminals=('A', 'B', 'C'))
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'B', 'C']))
        self.assertFalse(gr.have_term('D'))

    def test_addThreeOneDelete(self):
        gr = Grammar(terminals=('A', 'B', 'C'))
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'B', 'C']))
        self.assertFalse(gr.have_term('D'))
        gr.remove_term('B')
        self.assertTrue(gr.have_term('A'))
        self.assertFalse(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'C']))
        self.assertFalse(gr.have_term('D'))
Add tests creation of grammar with terminals as parameters#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy

"""

from unittest import TestCase
from grammpy import Grammar
from grammpy.Terminal import Terminal


class TerminalAddWhenCreatingTest(TestCase):
    def test_addOneInArray(self):
        gr = Grammar(terminals=['A'])
        self.assertTrue(gr.have_term('A'))

    def test_addTwoInArray(self):
        gr = Grammar(terminals=['A', 0])
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term(0))
        self.assertTrue(gr.have_term([0, 'A']))

    def test_addOneSeparate(self):
        gr = Grammar(terminals='A')
        self.assertTrue(gr.have_term('A'))

    def test_addThreeInString(self):
        gr = Grammar(terminals='ABC')
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(('A', 'B', 'C')))
        self.assertFalse(gr.have_term('D'))

    def test_addThreeInTuple(self):
        gr = Grammar(terminals=('A', 'B', 'C'))
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'B', 'C']))
        self.assertFalse(gr.have_term('D'))

    def test_addThreeOneDelete(self):
        gr = Grammar(terminals=('A', 'B', 'C'))
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'B', 'C']))
        self.assertFalse(gr.have_term('D'))
        gr.remove_term('B')
        self.assertTrue(gr.have_term('A'))
        self.assertFalse(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'C']))
        self.assertFalse(gr.have_term('D'))
<commit_before><commit_msg>Add tests creation of grammar with terminals as parameters<commit_after>#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 23.06.2017 16:39
:Licence GNUv3
Part of grammpy

"""

from unittest import TestCase
from grammpy import Grammar
from grammpy.Terminal import Terminal


class TerminalAddWhenCreatingTest(TestCase):
    def test_addOneInArray(self):
        gr = Grammar(terminals=['A'])
        self.assertTrue(gr.have_term('A'))

    def test_addTwoInArray(self):
        gr = Grammar(terminals=['A', 0])
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term(0))
        self.assertTrue(gr.have_term([0, 'A']))

    def test_addOneSeparate(self):
        gr = Grammar(terminals='A')
        self.assertTrue(gr.have_term('A'))

    def test_addThreeInString(self):
        gr = Grammar(terminals='ABC')
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(('A', 'B', 'C')))
        self.assertFalse(gr.have_term('D'))

    def test_addThreeInTuple(self):
        gr = Grammar(terminals=('A', 'B', 'C'))
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'B', 'C']))
        self.assertFalse(gr.have_term('D'))

    def test_addThreeOneDelete(self):
        gr = Grammar(terminals=('A', 'B', 'C'))
        self.assertTrue(gr.have_term('A'))
        self.assertTrue(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'B', 'C']))
        self.assertFalse(gr.have_term('D'))
        gr.remove_term('B')
        self.assertTrue(gr.have_term('A'))
        self.assertFalse(gr.have_term('B'))
        self.assertTrue(gr.have_term('C'))
        self.assertTrue(gr.have_term(['A', 'C']))
        self.assertFalse(gr.have_term('D'))
629553ec992c59500ef64b04b8fc9fb0500bcaee
wqflask/tests/wqflask/test_user_session.py
wqflask/tests/wqflask/test_user_session.py
"""Test cases for some methods in user_session.py""" import unittest from wqflask.user_session import verify_cookie class TestUserSession(unittest.TestCase): def test_verify_cookie(self): """ Test cookie verification """ self.assertEqual( "3f4c1dbf-5b56-4260-87d6-f35445bda37e", verify_cookie(("3f4c1dbf-5b56-4260-87d6-" "f35445bda37e:af4fcf5eace9e7c864ce")))
Add tests for cookie verification
Add tests for cookie verification
Python
agpl-3.0
pjotrp/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,genenetwork/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,pjotrp/genenetwork2,zsloan/genenetwork2,zsloan/genenetwork2,genenetwork/genenetwork2,pjotrp/genenetwork2,pjotrp/genenetwork2
Add tests for cookie verification
"""Test cases for some methods in user_session.py""" import unittest from wqflask.user_session import verify_cookie class TestUserSession(unittest.TestCase): def test_verify_cookie(self): """ Test cookie verification """ self.assertEqual( "3f4c1dbf-5b56-4260-87d6-f35445bda37e", verify_cookie(("3f4c1dbf-5b56-4260-87d6-" "f35445bda37e:af4fcf5eace9e7c864ce")))
<commit_before><commit_msg>Add tests for cookie verification<commit_after>
"""Test cases for some methods in user_session.py""" import unittest from wqflask.user_session import verify_cookie class TestUserSession(unittest.TestCase): def test_verify_cookie(self): """ Test cookie verification """ self.assertEqual( "3f4c1dbf-5b56-4260-87d6-f35445bda37e", verify_cookie(("3f4c1dbf-5b56-4260-87d6-" "f35445bda37e:af4fcf5eace9e7c864ce")))
Add tests for cookie verification"""Test cases for some methods in user_session.py""" import unittest from wqflask.user_session import verify_cookie class TestUserSession(unittest.TestCase): def test_verify_cookie(self): """ Test cookie verification """ self.assertEqual( "3f4c1dbf-5b56-4260-87d6-f35445bda37e", verify_cookie(("3f4c1dbf-5b56-4260-87d6-" "f35445bda37e:af4fcf5eace9e7c864ce")))
<commit_before><commit_msg>Add tests for cookie verification<commit_after>"""Test cases for some methods in user_session.py""" import unittest from wqflask.user_session import verify_cookie class TestUserSession(unittest.TestCase): def test_verify_cookie(self): """ Test cookie verification """ self.assertEqual( "3f4c1dbf-5b56-4260-87d6-f35445bda37e", verify_cookie(("3f4c1dbf-5b56-4260-87d6-" "f35445bda37e:af4fcf5eace9e7c864ce")))
a4f86e11bbdcf2f478fd9cfb8df33a150f8a086e
poolstatapi-example.py
poolstatapi-example.py
import requests
import hmac
import hashlib
import base64

PUBLIC_KEY = '##'
PRIVATE_KEY = '##'
URL = 'https://www.poolstat.net.au/restapi/v1/ladders'

digest = hmac.new(PRIVATE_KEY, URL, digestmod=hashlib.sha256).hexdigest()
# signature = base64.b64encode(digest).decode()
print digest
#print signature

headers = {
    'X-Public': PUBLIC_KEY,
    'X-Hash': digest
}


def get_response():
    params = {"year": "2017"}
    return requests.get(URL, headers=headers)


res = get_response()
print res
Add poolstat api example to project root.
Add poolstat api example to project root.
Python
mit
benjiboi214/mmpl-wagtail,benjiboi214/mmpl-wagtail,benjiboi214/mmpl-wagtail
Add poolstat api example to project root.
import requests
import hmac
import hashlib
import base64

PUBLIC_KEY = '##'
PRIVATE_KEY = '##'
URL = 'https://www.poolstat.net.au/restapi/v1/ladders'

digest = hmac.new(PRIVATE_KEY, URL, digestmod=hashlib.sha256).hexdigest()
# signature = base64.b64encode(digest).decode()
print digest
#print signature

headers = {
    'X-Public': PUBLIC_KEY,
    'X-Hash': digest
}


def get_response():
    params = {"year": "2017"}
    return requests.get(URL, headers=headers)


res = get_response()
print res
<commit_before><commit_msg>Add poolstat api example to project root.<commit_after>
import requests
import hmac
import hashlib
import base64

PUBLIC_KEY = '##'
PRIVATE_KEY = '##'
URL = 'https://www.poolstat.net.au/restapi/v1/ladders'

digest = hmac.new(PRIVATE_KEY, URL, digestmod=hashlib.sha256).hexdigest()
# signature = base64.b64encode(digest).decode()
print digest
#print signature

headers = {
    'X-Public': PUBLIC_KEY,
    'X-Hash': digest
}


def get_response():
    params = {"year": "2017"}
    return requests.get(URL, headers=headers)


res = get_response()
print res
Add poolstat api example to project root.import requests
import hmac
import hashlib
import base64

PUBLIC_KEY = '##'
PRIVATE_KEY = '##'
URL = 'https://www.poolstat.net.au/restapi/v1/ladders'

digest = hmac.new(PRIVATE_KEY, URL, digestmod=hashlib.sha256).hexdigest()
# signature = base64.b64encode(digest).decode()
print digest
#print signature

headers = {
    'X-Public': PUBLIC_KEY,
    'X-Hash': digest
}


def get_response():
    params = {"year": "2017"}
    return requests.get(URL, headers=headers)


res = get_response()
print res
<commit_before><commit_msg>Add poolstat api example to project root.<commit_after>import requests
import hmac
import hashlib
import base64

PUBLIC_KEY = '##'
PRIVATE_KEY = '##'
URL = 'https://www.poolstat.net.au/restapi/v1/ladders'

digest = hmac.new(PRIVATE_KEY, URL, digestmod=hashlib.sha256).hexdigest()
# signature = base64.b64encode(digest).decode()
print digest
#print signature

headers = {
    'X-Public': PUBLIC_KEY,
    'X-Hash': digest
}


def get_response():
    params = {"year": "2017"}
    return requests.get(URL, headers=headers)


res = get_response()
print res
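One caveat with the example above: it is Python 2 only, since hmac.new() in Python 3 requires bytes, and the signing scheme (an X-Public header plus an HMAC-SHA256 of the URL in X-Hash) is inferred from this snippet rather than from published API documentation. A hedged Python 3 rewrite of the signing step, which also actually sends the year parameter the original defined but never used:

import hashlib
import hmac

import requests

PUBLIC_KEY = '##'   # placeholder credentials, as in the example above
PRIVATE_KEY = '##'
URL = 'https://www.poolstat.net.au/restapi/v1/ladders'

# hmac.new() needs bytes in Python 3, so encode both key and message.
digest = hmac.new(PRIVATE_KEY.encode('utf-8'),
                  URL.encode('utf-8'),
                  digestmod=hashlib.sha256).hexdigest()
headers = {'X-Public': PUBLIC_KEY, 'X-Hash': digest}
response = requests.get(URL, headers=headers, params={'year': '2017'})
print(response.status_code)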
55ccaad47c2e3a1432b012218c72bca28fe06d73
greetings.py
greetings.py
from fnexchange.core.plugins import AbstractPlugin


class GreetingsPlugin(AbstractPlugin):
    """
    GreetingsPlugin provides an interface to generate greetings
    in different languages for given users (provided their names and locales).
    At this time, only the following locales are supported: "en-us", "hi-in"

    Request payload schema:
    payload = [
        {"name": "John", "locale": "en-us"},
        ...
        {"name": "Emma", "locale": "hi-in"},
    ]

    Response payload schema:
    payload = [
        {"name": "John", "locale": "en-us", "greeting": "Hello, John"},
        ...
        {"name": "Emma", "locale": "hi-in", "greeting": "Namaste, Emma"},
    ]
    """
    DEFAULT_LOCALE = "en-us"

    hello_map = {
        'en-us': "Hello, {name}! My name is {greeter}",
        'hi-in': "Namaste, {name}! My name is {greeter}",
    }

    bye_map = {
        'en-us': "Goodbye, {name}!",
        'hi-in': "Phir Milenge, {name}!",
    }

    def __greet(self, greeting_map, element):
        name = element["name"]
        locale = element["locale"]
        try:
            greeting = greeting_map[locale].format(name=name, greeter=self.config.greeter)
        except KeyError:
            greeting = "Greetings!"
        return dict(name=name, locale=locale, greeting=greeting)

    def __hello(self, element):
        return self.__greet(self.hello_map, element)

    def __bye(self, element):
        return self.__greet(self.bye_map, element)

    def say_hello(self, payload):
        return map(self.__hello, payload)

    def say_bye(self, payload):
        return map(self.__bye, payload)
Create a GreetingsPlugin as a sample plugin
Create a GreetingsPlugin as a sample plugin
Python
apache-2.0
dnif/fnExchange-sample-plugin
Create a GreetingsPlugin as a sample plugin
from fnexchange.core.plugins import AbstractPlugin


class GreetingsPlugin(AbstractPlugin):
    """
    GreetingsPlugin provides an interface to generate greetings
    in different languages for given users (provided their names and locales).
    At this time, only the following locales are supported: "en-us", "hi-in"

    Request payload schema:
    payload = [
        {"name": "John", "locale": "en-us"},
        ...
        {"name": "Emma", "locale": "hi-in"},
    ]

    Response payload schema:
    payload = [
        {"name": "John", "locale": "en-us", "greeting": "Hello, John"},
        ...
        {"name": "Emma", "locale": "hi-in", "greeting": "Namaste, Emma"},
    ]
    """
    DEFAULT_LOCALE = "en-us"

    hello_map = {
        'en-us': "Hello, {name}! My name is {greeter}",
        'hi-in': "Namaste, {name}! My name is {greeter}",
    }

    bye_map = {
        'en-us': "Goodbye, {name}!",
        'hi-in': "Phir Milenge, {name}!",
    }

    def __greet(self, greeting_map, element):
        name = element["name"]
        locale = element["locale"]
        try:
            greeting = greeting_map[locale].format(name=name, greeter=self.config.greeter)
        except KeyError:
            greeting = "Greetings!"
        return dict(name=name, locale=locale, greeting=greeting)

    def __hello(self, element):
        return self.__greet(self.hello_map, element)

    def __bye(self, element):
        return self.__greet(self.bye_map, element)

    def say_hello(self, payload):
        return map(self.__hello, payload)

    def say_bye(self, payload):
        return map(self.__bye, payload)
<commit_before><commit_msg>Create a GreetingsPlugin as a sample plugin<commit_after>
from fnexchange.core.plugins import AbstractPlugin


class GreetingsPlugin(AbstractPlugin):
    """
    GreetingsPlugin provides an interface to generate greetings
    in different languages for given users (provided their names and locales).
    At this time, only the following locales are supported: "en-us", "hi-in"

    Request payload schema:
    payload = [
        {"name": "John", "locale": "en-us"},
        ...
        {"name": "Emma", "locale": "hi-in"},
    ]

    Response payload schema:
    payload = [
        {"name": "John", "locale": "en-us", "greeting": "Hello, John"},
        ...
        {"name": "Emma", "locale": "hi-in", "greeting": "Namaste, Emma"},
    ]
    """
    DEFAULT_LOCALE = "en-us"

    hello_map = {
        'en-us': "Hello, {name}! My name is {greeter}",
        'hi-in': "Namaste, {name}! My name is {greeter}",
    }

    bye_map = {
        'en-us': "Goodbye, {name}!",
        'hi-in': "Phir Milenge, {name}!",
    }

    def __greet(self, greeting_map, element):
        name = element["name"]
        locale = element["locale"]
        try:
            greeting = greeting_map[locale].format(name=name, greeter=self.config.greeter)
        except KeyError:
            greeting = "Greetings!"
        return dict(name=name, locale=locale, greeting=greeting)

    def __hello(self, element):
        return self.__greet(self.hello_map, element)

    def __bye(self, element):
        return self.__greet(self.bye_map, element)

    def say_hello(self, payload):
        return map(self.__hello, payload)

    def say_bye(self, payload):
        return map(self.__bye, payload)
Create a GreetingsPlugin as a sample pluginfrom fnexchange.core.plugins import AbstractPlugin


class GreetingsPlugin(AbstractPlugin):
    """
    GreetingsPlugin provides an interface to generate greetings
    in different languages for given users (provided their names and locales).
    At this time, only the following locales are supported: "en-us", "hi-in"

    Request payload schema:
    payload = [
        {"name": "John", "locale": "en-us"},
        ...
        {"name": "Emma", "locale": "hi-in"},
    ]

    Response payload schema:
    payload = [
        {"name": "John", "locale": "en-us", "greeting": "Hello, John"},
        ...
        {"name": "Emma", "locale": "hi-in", "greeting": "Namaste, Emma"},
    ]
    """
    DEFAULT_LOCALE = "en-us"

    hello_map = {
        'en-us': "Hello, {name}! My name is {greeter}",
        'hi-in': "Namaste, {name}! My name is {greeter}",
    }

    bye_map = {
        'en-us': "Goodbye, {name}!",
        'hi-in': "Phir Milenge, {name}!",
    }

    def __greet(self, greeting_map, element):
        name = element["name"]
        locale = element["locale"]
        try:
            greeting = greeting_map[locale].format(name=name, greeter=self.config.greeter)
        except KeyError:
            greeting = "Greetings!"
        return dict(name=name, locale=locale, greeting=greeting)

    def __hello(self, element):
        return self.__greet(self.hello_map, element)

    def __bye(self, element):
        return self.__greet(self.bye_map, element)

    def say_hello(self, payload):
        return map(self.__hello, payload)

    def say_bye(self, payload):
        return map(self.__bye, payload)
<commit_before><commit_msg>Create a GreetingsPlugin as a sample plugin<commit_after>from fnexchange.core.plugins import AbstractPlugin


class GreetingsPlugin(AbstractPlugin):
    """
    GreetingsPlugin provides an interface to generate greetings
    in different languages for given users (provided their names and locales).
    At this time, only the following locales are supported: "en-us", "hi-in"

    Request payload schema:
    payload = [
        {"name": "John", "locale": "en-us"},
        ...
        {"name": "Emma", "locale": "hi-in"},
    ]

    Response payload schema:
    payload = [
        {"name": "John", "locale": "en-us", "greeting": "Hello, John"},
        ...
        {"name": "Emma", "locale": "hi-in", "greeting": "Namaste, Emma"},
    ]
    """
    DEFAULT_LOCALE = "en-us"

    hello_map = {
        'en-us': "Hello, {name}! My name is {greeter}",
        'hi-in': "Namaste, {name}! My name is {greeter}",
    }

    bye_map = {
        'en-us': "Goodbye, {name}!",
        'hi-in': "Phir Milenge, {name}!",
    }

    def __greet(self, greeting_map, element):
        name = element["name"]
        locale = element["locale"]
        try:
            greeting = greeting_map[locale].format(name=name, greeter=self.config.greeter)
        except KeyError:
            greeting = "Greetings!"
        return dict(name=name, locale=locale, greeting=greeting)

    def __hello(self, element):
        return self.__greet(self.hello_map, element)

    def __bye(self, element):
        return self.__greet(self.bye_map, element)

    def say_hello(self, payload):
        return map(self.__hello, payload)

    def say_bye(self, payload):
        return map(self.__bye, payload)
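A minimal usage sketch for the plugin above, under Python 2 where map() returns a list. The constructor call and the config object with a greeter attribute are assumptions about the fnExchange plugin machinery, not shown in this file:

plugin = GreetingsPlugin(config)  # hypothetical: a config object with config.greeter == "Ana"
greetings = plugin.say_hello([
    {"name": "John", "locale": "en-us"},
    {"name": "Emma", "locale": "hi-in"},
])
# greetings == [
#     {"name": "John", "locale": "en-us", "greeting": "Hello, John! My name is Ana"},
#     {"name": "Emma", "locale": "hi-in", "greeting": "Namaste, Emma! My name is Ana"},
# ]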
be8b7c27b25b60540c2e53504ce42543724577df
napalm_logs/utils/__init__.py
napalm_logs/utils/__init__.py
# -*- coding: utf-8 -*-
'''
napalm-logs utilities
'''
from __future__ import absolute_import
from __future__ import unicode_literals

# Import python stdlib
import ssl
import socket
import logging

# Import third party libs
import umsgpack
import nacl.secret
import nacl.signing
import nacl.encoding
from nacl.exceptions import CryptoError
from nacl.exceptions import BadSignatureError

# Import napalm-logs pkgs
import napalm_logs.config as defaults
from napalm_logs.exceptions import CryptoException
from napalm_logs.exceptions import BadSignatureException

log = logging.getLogger(__name__)


def authenticate(certificate,
                 address=defaults.AUTH_ADDRESS,
                 port=defaults.AUTH_PORT):
    '''
    Authenticate the client and return the private
    and signature keys.

    Establish a connection through a secured socket,
    then do the handshake using the napalm-logs
    auth algorithm.
    '''
    if ':' in address:
        skt_ver = socket.AF_INET6
    else:
        skt_ver = socket.AF_INET
    skt = socket.socket(skt_ver, socket.SOCK_STREAM)
    ssl_sock = ssl.wrap_socket(skt,
                               ca_certs=certificate,
                               cert_reqs=ssl.CERT_REQUIRED)
    ssl_sock.connect((address, port))
    # Explicit INIT
    ssl_sock.write(defaults.MAGIC_REQ)
    # Receive the private key
    private_key = ssl_sock.read()
    # Send back explicit ACK
    ssl_sock.write(defaults.MAGIC_ACK)
    # Read the hex of the verification key
    verify_key_hex = ssl_sock.read()
    # Send back explicit ACK
    ssl_sock.write(defaults.MAGIC_ACK)
    # Close the socket
    ssl_sock.close()
    private_key_obj = nacl.secret.SecretBox(private_key)
    verify_key_obj = nacl.signing.VerifyKey(verify_key_hex,
                                            encoder=nacl.encoding.HexEncoder)
    return private_key_obj, verify_key_obj


def decrypt(binary, verify_key_obj, private_key_obj):
    '''
    Decrypt and unpack the original OpenConfig object,
    serialized using MessagePack.
    Raise BadSignatureException when the signature
    was forged or corrupted.
    '''
    try:
        encrypted = verify_key_obj.verify(binary)
    except BadSignatureError:
        log.error('Signature was forged or corrupt', exc_info=True)
        raise BadSignatureException('Signature was forged or corrupt')
    try:
        packed = private_key_obj.decrypt(encrypted)
    except CryptoError:
        log.error('Unable to decrypt', exc_info=True)
        raise CryptoException('Unable to decrypt')
    return umsgpack.unpackb(packed)
Add utilities for the clients
Add utilities for the clients
Python
apache-2.0
napalm-automation/napalm-logs,napalm-automation/napalm-logs
Add utilities for the clients
# -*- coding: utf-8 -*-
'''
napalm-logs utilities
'''
from __future__ import absolute_import
from __future__ import unicode_literals

# Import python stdlib
import ssl
import socket
import logging

# Import third party libs
import umsgpack
import nacl.secret
import nacl.signing
import nacl.encoding
from nacl.exceptions import CryptoError
from nacl.exceptions import BadSignatureError

# Import napalm-logs pkgs
import napalm_logs.config as defaults
from napalm_logs.exceptions import CryptoException
from napalm_logs.exceptions import BadSignatureException

log = logging.getLogger(__name__)


def authenticate(certificate,
                 address=defaults.AUTH_ADDRESS,
                 port=defaults.AUTH_PORT):
    '''
    Authenticate the client and return the private
    and signature keys.

    Establish a connection through a secured socket,
    then do the handshake using the napalm-logs
    auth algorithm.
    '''
    if ':' in address:
        skt_ver = socket.AF_INET6
    else:
        skt_ver = socket.AF_INET
    skt = socket.socket(skt_ver, socket.SOCK_STREAM)
    ssl_sock = ssl.wrap_socket(skt,
                               ca_certs=certificate,
                               cert_reqs=ssl.CERT_REQUIRED)
    ssl_sock.connect((address, port))
    # Explicit INIT
    ssl_sock.write(defaults.MAGIC_REQ)
    # Receive the private key
    private_key = ssl_sock.read()
    # Send back explicit ACK
    ssl_sock.write(defaults.MAGIC_ACK)
    # Read the hex of the verification key
    verify_key_hex = ssl_sock.read()
    # Send back explicit ACK
    ssl_sock.write(defaults.MAGIC_ACK)
    # Close the socket
    ssl_sock.close()
    private_key_obj = nacl.secret.SecretBox(private_key)
    verify_key_obj = nacl.signing.VerifyKey(verify_key_hex,
                                            encoder=nacl.encoding.HexEncoder)
    return private_key_obj, verify_key_obj


def decrypt(binary, verify_key_obj, private_key_obj):
    '''
    Decrypt and unpack the original OpenConfig object,
    serialized using MessagePack.
    Raise BadSignatureException when the signature
    was forged or corrupted.
    '''
    try:
        encrypted = verify_key_obj.verify(binary)
    except BadSignatureError:
        log.error('Signature was forged or corrupt', exc_info=True)
        raise BadSignatureException('Signature was forged or corrupt')
    try:
        packed = private_key_obj.decrypt(encrypted)
    except CryptoError:
        log.error('Unable to decrypt', exc_info=True)
        raise CryptoException('Unable to decrypt')
    return umsgpack.unpackb(packed)
<commit_before><commit_msg>Add utilities for the clients<commit_after>
# -*- coding: utf-8 -*- ''' napalm-logs utilities ''' from __future__ import absolute_import from __future__ import unicode_literals # Import pythond stdlib import ssl import socket # Import python stdlib import umsgpack import nacl.secret import nacl.signing import nacl.encoding from nacl.exceptions import CryptoError from nacl.exceptions import BadSignatureError # Import napalm-logs pkgs import napalm_logs.config as defaults from napalm_logs.exceptions import CryptoException from napalm_logs.exceptions import BadSignatureException def authenticate(certificate, address=defaults.AUTH_ADDRESS, port=defaults.AUTH_PORT): ''' Authenticate the client and return the private and signature keys. Establish a connection through a secured socket, then do the handshake using the napalm-logs auth algorithm. ''' if ':' in address: skt_ver = socket.AF_INET6 else: skt_ver = socket.AF_INET skt = socket.socket(skt_ver, socket.SOCK_STREAM) ssl_skt = ssl.wrap_socket(skt, ca_certs=certificate, cert_reqs=ssl.CERT_REQUIRED) ssl_sock.connect((address, port)) # Explicit INIT ssl_sock.write(defaults.MAGIC_REQ) # Receive the private key private_key = ssl_sock.read() # Send back explicit ACK ssl_sock.write(defaults.MAGIC_ACK) # Read the hex of the verification key verify_key_hex = ssl_sock.read() # Send back explicit ACK ssl_sock.write(defaults.MAGIC_ACK) # Close the socket ssl_sock.close() private_key_obj = nacl.secret.SecretBox(private_key) verify_key_obj = nacl.signing.VerifyKey(verify_key_hex, encoder=nacl.encoding.HexEncoder) return private_key_obj, verify_key_obj def decrypt(binary, verify_key_obj, private_key_obj): ''' Decrypt and unpack the original OpenConfig object, serialized using MessagePack. Raise BadSignatureException when the signature was forged or corrupted. ''' try: encrypted = verify_key_obj.verify(binary) except BadSignatureError as bserr: log.error('Signature was forged or corrupt', exc_info=True) raise BadSignatureException('Signature was forged or corrupt') try: packed = private_key_obj.decrypt(encrypted) except CryptoError as cerr: log.error('Unable to decrypt', exc_info=True) raise CryptoException('Unable to decrypt') return umsgpack.unpackb(packed)
Add utilities for the clients# -*- coding: utf-8 -*-
'''
napalm-logs utilities
'''
from __future__ import absolute_import
from __future__ import unicode_literals

# Import python stdlib
import ssl
import socket
import logging

# Import third party libs
import umsgpack
import nacl.secret
import nacl.signing
import nacl.encoding
from nacl.exceptions import CryptoError
from nacl.exceptions import BadSignatureError

# Import napalm-logs pkgs
import napalm_logs.config as defaults
from napalm_logs.exceptions import CryptoException
from napalm_logs.exceptions import BadSignatureException

log = logging.getLogger(__name__)


def authenticate(certificate, address=defaults.AUTH_ADDRESS, port=defaults.AUTH_PORT):
    '''
    Authenticate the client and return the private and signature keys.

    Establish a connection through a secured socket, then do the handshake
    using the napalm-logs auth algorithm.
    '''
    if ':' in address:
        skt_ver = socket.AF_INET6
    else:
        skt_ver = socket.AF_INET
    skt = socket.socket(skt_ver, socket.SOCK_STREAM)
    ssl_skt = ssl.wrap_socket(skt, ca_certs=certificate, cert_reqs=ssl.CERT_REQUIRED)
    ssl_skt.connect((address, port))
    # Explicit INIT
    ssl_skt.write(defaults.MAGIC_REQ)
    # Receive the private key
    private_key = ssl_skt.read()
    # Send back explicit ACK
    ssl_skt.write(defaults.MAGIC_ACK)
    # Read the hex of the verification key
    verify_key_hex = ssl_skt.read()
    # Send back explicit ACK
    ssl_skt.write(defaults.MAGIC_ACK)
    # Close the socket
    ssl_skt.close()
    private_key_obj = nacl.secret.SecretBox(private_key)
    verify_key_obj = nacl.signing.VerifyKey(verify_key_hex, encoder=nacl.encoding.HexEncoder)
    return private_key_obj, verify_key_obj


def decrypt(binary, verify_key_obj, private_key_obj):
    '''
    Decrypt and unpack the original OpenConfig object, serialized using
    MessagePack. Raise BadSignatureException when the signature was forged
    or corrupted.
    '''
    try:
        encrypted = verify_key_obj.verify(binary)
    except BadSignatureError as bserr:
        log.error('Signature was forged or corrupt', exc_info=True)
        raise BadSignatureException('Signature was forged or corrupt')
    try:
        packed = private_key_obj.decrypt(encrypted)
    except CryptoError as cerr:
        log.error('Unable to decrypt', exc_info=True)
        raise CryptoException('Unable to decrypt')
    return umsgpack.unpackb(packed)
<commit_before><commit_msg>Add utilities for the clients<commit_after># -*- coding: utf-8 -*-
'''
napalm-logs utilities
'''
from __future__ import absolute_import
from __future__ import unicode_literals

# Import python stdlib
import ssl
import socket
import logging

# Import third party libs
import umsgpack
import nacl.secret
import nacl.signing
import nacl.encoding
from nacl.exceptions import CryptoError
from nacl.exceptions import BadSignatureError

# Import napalm-logs pkgs
import napalm_logs.config as defaults
from napalm_logs.exceptions import CryptoException
from napalm_logs.exceptions import BadSignatureException

log = logging.getLogger(__name__)


def authenticate(certificate, address=defaults.AUTH_ADDRESS, port=defaults.AUTH_PORT):
    '''
    Authenticate the client and return the private and signature keys.

    Establish a connection through a secured socket, then do the handshake
    using the napalm-logs auth algorithm.
    '''
    if ':' in address:
        skt_ver = socket.AF_INET6
    else:
        skt_ver = socket.AF_INET
    skt = socket.socket(skt_ver, socket.SOCK_STREAM)
    ssl_skt = ssl.wrap_socket(skt, ca_certs=certificate, cert_reqs=ssl.CERT_REQUIRED)
    ssl_skt.connect((address, port))
    # Explicit INIT
    ssl_skt.write(defaults.MAGIC_REQ)
    # Receive the private key
    private_key = ssl_skt.read()
    # Send back explicit ACK
    ssl_skt.write(defaults.MAGIC_ACK)
    # Read the hex of the verification key
    verify_key_hex = ssl_skt.read()
    # Send back explicit ACK
    ssl_skt.write(defaults.MAGIC_ACK)
    # Close the socket
    ssl_skt.close()
    private_key_obj = nacl.secret.SecretBox(private_key)
    verify_key_obj = nacl.signing.VerifyKey(verify_key_hex, encoder=nacl.encoding.HexEncoder)
    return private_key_obj, verify_key_obj


def decrypt(binary, verify_key_obj, private_key_obj):
    '''
    Decrypt and unpack the original OpenConfig object, serialized using
    MessagePack. Raise BadSignatureException when the signature was forged
    or corrupted.
    '''
    try:
        encrypted = verify_key_obj.verify(binary)
    except BadSignatureError as bserr:
        log.error('Signature was forged or corrupt', exc_info=True)
        raise BadSignatureException('Signature was forged or corrupt')
    try:
        packed = private_key_obj.decrypt(encrypted)
    except CryptoError as cerr:
        log.error('Unable to decrypt', exc_info=True)
        raise CryptoException('Unable to decrypt')
    return umsgpack.unpackb(packed)
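For orientation, a minimal client-side sketch of how the two helpers above chain together. The certificate path, host, and port here are illustrative assumptions, and the encrypted payload is a stand-in for whatever the napalm-logs publisher actually sends over its transport:

# Hedged usage sketch -- not part of the committed file.
from napalm_logs.utils import authenticate, decrypt

private_key, verify_key = authenticate('/etc/napalm/logs/ca.crt',
                                       address='127.0.0.1',
                                       port=49018)
binary = b''  # placeholder: the signed + encrypted blob read off the transport
message = decrypt(binary, verify_key, private_key)
print(message)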
986ee0ffae416cf1dc833f581a89d718dc2ff2fe
bird/evaluate.py
bird/evaluate.py
from models.cuberun import CubeRun import numpy as np import utils import loader nb_classes = 19 input_shape = (257, 624, 1) (cols, rows, chs) = input_shape image_shape = (cols, rows) batch_size=32 def evaluate(model, data_filepath, file2labels_filepath): model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['binary_accuracy', 'fbeta_score']) (X_test, Y_test, filenames) = loader.load_all_data(data_filepath, file2labels_filepath, nb_classes=nb_classes, image_shape=image_shape) print("Predicting ...") Y = model.predict(X_test, batch_size=batch_size, verbose=1) Y = np.round(Y) for (y, gt) in zip(Y, Y_test): print("predicted: ", binary_to_id(y), "\t ground truth: ", binary_to_id(gt)) #print("Evaluating ...") #scores = model.evaluate(X_test, Y_test, batch_size=batch_size, verbose=1) #print("%s: %.2f%%" % (model.metrics_names[0], scores[0])) #print("%s: %.2f%%" % (model.metrics_names[1], scores[1])) #print("%s: %.2f%%" % (model.metrics_names[2], scores[2])) def binary_to_id(Y): i = 0 r = [] for y in Y: if y == 1: r.append(i) i = i+1 return r
Add a first draft of the evaluation module.
Add a first draft of the evaluation module.
Python
mit
johnmartinsson/bird-species-classification,johnmartinsson/bird-species-classification
Add a first draft of the evaluation module.
from models.cuberun import CubeRun import numpy as np import utils import loader nb_classes = 19 input_shape = (257, 624, 1) (cols, rows, chs) = input_shape image_shape = (cols, rows) batch_size=32 def evaluate(model, data_filepath, file2labels_filepath): model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['binary_accuracy', 'fbeta_score']) (X_test, Y_test, filenames) = loader.load_all_data(data_filepath, file2labels_filepath, nb_classes=nb_classes, image_shape=image_shape) print("Predicting ...") Y = model.predict(X_test, batch_size=batch_size, verbose=1) Y = np.round(Y) for (y, gt) in zip(Y, Y_test): print("predicted: ", binary_to_id(y), "\t ground truth: ", binary_to_id(gt)) #print("Evaluating ...") #scores = model.evaluate(X_test, Y_test, batch_size=batch_size, verbose=1) #print("%s: %.2f%%" % (model.metrics_names[0], scores[0])) #print("%s: %.2f%%" % (model.metrics_names[1], scores[1])) #print("%s: %.2f%%" % (model.metrics_names[2], scores[2])) def binary_to_id(Y): i = 0 r = [] for y in Y: if y == 1: r.append(i) i = i+1 return r
<commit_before><commit_msg>Add a first draft of the evaluation module.<commit_after>
from models.cuberun import CubeRun import numpy as np import utils import loader nb_classes = 19 input_shape = (257, 624, 1) (cols, rows, chs) = input_shape image_shape = (cols, rows) batch_size=32 def evaluate(model, data_filepath, file2labels_filepath): model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['binary_accuracy', 'fbeta_score']) (X_test, Y_test, filenames) = loader.load_all_data(data_filepath, file2labels_filepath, nb_classes=nb_classes, image_shape=image_shape) print("Predicting ...") Y = model.predict(X_test, batch_size=batch_size, verbose=1) Y = np.round(Y) for (y, gt) in zip(Y, Y_test): print("predicted: ", binary_to_id(y), "\t ground truth: ", binary_to_id(gt)) #print("Evaluating ...") #scores = model.evaluate(X_test, Y_test, batch_size=batch_size, verbose=1) #print("%s: %.2f%%" % (model.metrics_names[0], scores[0])) #print("%s: %.2f%%" % (model.metrics_names[1], scores[1])) #print("%s: %.2f%%" % (model.metrics_names[2], scores[2])) def binary_to_id(Y): i = 0 r = [] for y in Y: if y == 1: r.append(i) i = i+1 return r
Add a first draft of the evaluation module.from models.cuberun import CubeRun import numpy as np import utils import loader nb_classes = 19 input_shape = (257, 624, 1) (cols, rows, chs) = input_shape image_shape = (cols, rows) batch_size=32 def evaluate(model, data_filepath, file2labels_filepath): model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['binary_accuracy', 'fbeta_score']) (X_test, Y_test, filenames) = loader.load_all_data(data_filepath, file2labels_filepath, nb_classes=nb_classes, image_shape=image_shape) print("Predicting ...") Y = model.predict(X_test, batch_size=batch_size, verbose=1) Y = np.round(Y) for (y, gt) in zip(Y, Y_test): print("predicted: ", binary_to_id(y), "\t ground truth: ", binary_to_id(gt)) #print("Evaluating ...") #scores = model.evaluate(X_test, Y_test, batch_size=batch_size, verbose=1) #print("%s: %.2f%%" % (model.metrics_names[0], scores[0])) #print("%s: %.2f%%" % (model.metrics_names[1], scores[1])) #print("%s: %.2f%%" % (model.metrics_names[2], scores[2])) def binary_to_id(Y): i = 0 r = [] for y in Y: if y == 1: r.append(i) i = i+1 return r
<commit_before><commit_msg>Add a first draft of the evaluation module.<commit_after>from models.cuberun import CubeRun import numpy as np import utils import loader nb_classes = 19 input_shape = (257, 624, 1) (cols, rows, chs) = input_shape image_shape = (cols, rows) batch_size=32 def evaluate(model, data_filepath, file2labels_filepath): model.compile(loss='binary_crossentropy', optimizer='adam', metrics=['binary_accuracy', 'fbeta_score']) (X_test, Y_test, filenames) = loader.load_all_data(data_filepath, file2labels_filepath, nb_classes=nb_classes, image_shape=image_shape) print("Predicting ...") Y = model.predict(X_test, batch_size=batch_size, verbose=1) Y = np.round(Y) for (y, gt) in zip(Y, Y_test): print("predicted: ", binary_to_id(y), "\t ground truth: ", binary_to_id(gt)) #print("Evaluating ...") #scores = model.evaluate(X_test, Y_test, batch_size=batch_size, verbose=1) #print("%s: %.2f%%" % (model.metrics_names[0], scores[0])) #print("%s: %.2f%%" % (model.metrics_names[1], scores[1])) #print("%s: %.2f%%" % (model.metrics_names[2], scores[2])) def binary_to_id(Y): i = 0 r = [] for y in Y: if y == 1: r.append(i) i = i+1 return r
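One detail worth making concrete: binary_to_id simply collects the indices of the hot entries in a rounded multi-label prediction vector, mapping it straight to class IDs. A quick illustrative check, assuming the module is importable as bird.evaluate (the sample vector is made up):

from bird.evaluate import binary_to_id

assert binary_to_id([0, 1, 1, 0, 1]) == [1, 2, 4]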
4b0221ca503be9450919e4ed4e6a75ce92cd2d63
csdms/dakota/variables/continuous_design.py
csdms/dakota/variables/continuous_design.py
"""Implementation of a Dakota continous design variable.""" from .base import VariableBase classname = 'ContinuousDesign' class ContinuousDesign(VariableBase): """Define attributes for Dakota continous design variables.""" def __init__(self, variables=('x1', 'x2'), initial_point=None, lower_bounds=None, upper_bounds=None, scale_types=None, scales=None, **kwargs): VariableBase.__init__(self, **kwargs) self.variables = variables self._initial_point = initial_point self._lower_bounds = lower_bounds self._upper_bounds = upper_bounds if initial_point is None and lower_bounds is None and upper_bounds is None: self._initial_point = (0.0, 0.0) @property def initial_point(self): """Start points used by study variables.""" return self._initial_point @initial_point.setter def initial_point(self, value): """Set start points used by study variables. Parameters ---------- value : list or tuple of numbers The new initial points. """ if not isinstance(value, (tuple, list)): raise TypeError("Initial points must be a tuple or a list") self._initial_point = value @property def lower_bounds(self): """Minimum values of study variables.""" return self._lower_bounds @lower_bounds.setter def lower_bounds(self, value): """Set minimum values of study variables. Parameters ---------- value : list or tuple of numbers The minimum values. """ if not isinstance(value, (tuple, list)): raise TypeError("Lower bounds must be a tuple or a list") self._lower_bounds = value @property def upper_bounds(self): """Maximum values of study variables.""" return self._upper_bounds @upper_bounds.setter def upper_bounds(self, value): """Set maximum values of study variables. Parameters ---------- value : list or tuple of numbers The maximum values. """ if not isinstance(value, (tuple, list)): raise TypeError("Upper bounds must be a tuple or a list") self._upper_bounds = value def variables_block(self): """Define the variables block for continous design variables.""" s = 'variables\n' s += ' {0} = {1}'.format(self.variable_type, len(self.variables)) if self.initial_point is not None: s += '\n' \ + ' initial_point =' for pt in self.initial_point: s += ' {}'.format(pt) if self.lower_bounds is not None: s += '\n' \ + ' lower_bounds =' for b in self.lower_bounds: s += ' {}'.format(b) if self.upper_bounds is not None: s += '\n' \ + ' upper_bounds =' for b in self.upper_bounds: s += ' {}'.format(b) s += '\n' \ + ' descriptors =' for vd in self.variables: s += ' {!r}'.format(vd) s += '\n\n' return(s)
Create new module for continuous design variables
Create new module for continuous design variables
Python
mit
csdms/dakota,csdms/dakota
Create new module for continuous design variables
"""Implementation of a Dakota continous design variable.""" from .base import VariableBase classname = 'ContinuousDesign' class ContinuousDesign(VariableBase): """Define attributes for Dakota continous design variables.""" def __init__(self, variables=('x1', 'x2'), initial_point=None, lower_bounds=None, upper_bounds=None, scale_types=None, scales=None, **kwargs): VariableBase.__init__(self, **kwargs) self.variables = variables self._initial_point = initial_point self._lower_bounds = lower_bounds self._upper_bounds = upper_bounds if initial_point is None and lower_bounds is None and upper_bounds is None: self._initial_point = (0.0, 0.0) @property def initial_point(self): """Start points used by study variables.""" return self._initial_point @initial_point.setter def initial_point(self, value): """Set start points used by study variables. Parameters ---------- value : list or tuple of numbers The new initial points. """ if not isinstance(value, (tuple, list)): raise TypeError("Initial points must be a tuple or a list") self._initial_point = value @property def lower_bounds(self): """Minimum values of study variables.""" return self._lower_bounds @lower_bounds.setter def lower_bounds(self, value): """Set minimum values of study variables. Parameters ---------- value : list or tuple of numbers The minimum values. """ if not isinstance(value, (tuple, list)): raise TypeError("Lower bounds must be a tuple or a list") self._lower_bounds = value @property def upper_bounds(self): """Maximum values of study variables.""" return self._upper_bounds @upper_bounds.setter def upper_bounds(self, value): """Set maximum values of study variables. Parameters ---------- value : list or tuple of numbers The maximum values. """ if not isinstance(value, (tuple, list)): raise TypeError("Upper bounds must be a tuple or a list") self._upper_bounds = value def variables_block(self): """Define the variables block for continous design variables.""" s = 'variables\n' s += ' {0} = {1}'.format(self.variable_type, len(self.variables)) if self.initial_point is not None: s += '\n' \ + ' initial_point =' for pt in self.initial_point: s += ' {}'.format(pt) if self.lower_bounds is not None: s += '\n' \ + ' lower_bounds =' for b in self.lower_bounds: s += ' {}'.format(b) if self.upper_bounds is not None: s += '\n' \ + ' upper_bounds =' for b in self.upper_bounds: s += ' {}'.format(b) s += '\n' \ + ' descriptors =' for vd in self.variables: s += ' {!r}'.format(vd) s += '\n\n' return(s)
<commit_before><commit_msg>Create new module for continuous design variables<commit_after>
"""Implementation of a Dakota continous design variable.""" from .base import VariableBase classname = 'ContinuousDesign' class ContinuousDesign(VariableBase): """Define attributes for Dakota continous design variables.""" def __init__(self, variables=('x1', 'x2'), initial_point=None, lower_bounds=None, upper_bounds=None, scale_types=None, scales=None, **kwargs): VariableBase.__init__(self, **kwargs) self.variables = variables self._initial_point = initial_point self._lower_bounds = lower_bounds self._upper_bounds = upper_bounds if initial_point is None and lower_bounds is None and upper_bounds is None: self._initial_point = (0.0, 0.0) @property def initial_point(self): """Start points used by study variables.""" return self._initial_point @initial_point.setter def initial_point(self, value): """Set start points used by study variables. Parameters ---------- value : list or tuple of numbers The new initial points. """ if not isinstance(value, (tuple, list)): raise TypeError("Initial points must be a tuple or a list") self._initial_point = value @property def lower_bounds(self): """Minimum values of study variables.""" return self._lower_bounds @lower_bounds.setter def lower_bounds(self, value): """Set minimum values of study variables. Parameters ---------- value : list or tuple of numbers The minimum values. """ if not isinstance(value, (tuple, list)): raise TypeError("Lower bounds must be a tuple or a list") self._lower_bounds = value @property def upper_bounds(self): """Maximum values of study variables.""" return self._upper_bounds @upper_bounds.setter def upper_bounds(self, value): """Set maximum values of study variables. Parameters ---------- value : list or tuple of numbers The maximum values. """ if not isinstance(value, (tuple, list)): raise TypeError("Upper bounds must be a tuple or a list") self._upper_bounds = value def variables_block(self): """Define the variables block for continous design variables.""" s = 'variables\n' s += ' {0} = {1}'.format(self.variable_type, len(self.variables)) if self.initial_point is not None: s += '\n' \ + ' initial_point =' for pt in self.initial_point: s += ' {}'.format(pt) if self.lower_bounds is not None: s += '\n' \ + ' lower_bounds =' for b in self.lower_bounds: s += ' {}'.format(b) if self.upper_bounds is not None: s += '\n' \ + ' upper_bounds =' for b in self.upper_bounds: s += ' {}'.format(b) s += '\n' \ + ' descriptors =' for vd in self.variables: s += ' {!r}'.format(vd) s += '\n\n' return(s)
Create new module for continuous design variables"""Implementation of a Dakota continuous design variable."""

from .base import VariableBase


classname = 'ContinuousDesign'


class ContinuousDesign(VariableBase):

    """Define attributes for Dakota continuous design variables."""

    def __init__(self,
                 variables=('x1', 'x2'),
                 initial_point=None,
                 lower_bounds=None,
                 upper_bounds=None,
                 scale_types=None,
                 scales=None,
                 **kwargs):
        VariableBase.__init__(self, **kwargs)
        self.variables = variables
        self._initial_point = initial_point
        self._lower_bounds = lower_bounds
        self._upper_bounds = upper_bounds
        if initial_point is None and lower_bounds is None and upper_bounds is None:
            self._initial_point = (0.0, 0.0)

    @property
    def initial_point(self):
        """Start points used by study variables."""
        return self._initial_point

    @initial_point.setter
    def initial_point(self, value):
        """Set start points used by study variables.

        Parameters
        ----------
        value : list or tuple of numbers
          The new initial points.

        """
        if not isinstance(value, (tuple, list)):
            raise TypeError("Initial points must be a tuple or a list")
        self._initial_point = value

    @property
    def lower_bounds(self):
        """Minimum values of study variables."""
        return self._lower_bounds

    @lower_bounds.setter
    def lower_bounds(self, value):
        """Set minimum values of study variables.

        Parameters
        ----------
        value : list or tuple of numbers
          The minimum values.

        """
        if not isinstance(value, (tuple, list)):
            raise TypeError("Lower bounds must be a tuple or a list")
        self._lower_bounds = value

    @property
    def upper_bounds(self):
        """Maximum values of study variables."""
        return self._upper_bounds

    @upper_bounds.setter
    def upper_bounds(self, value):
        """Set maximum values of study variables.

        Parameters
        ----------
        value : list or tuple of numbers
          The maximum values.

        """
        if not isinstance(value, (tuple, list)):
            raise TypeError("Upper bounds must be a tuple or a list")
        self._upper_bounds = value

    def variables_block(self):
        """Define the variables block for continuous design variables."""
        s = 'variables\n'
        s += ' {0} = {1}'.format(self.variable_type, len(self.variables))
        if self.initial_point is not None:
            s += '\n' \
                 + ' initial_point ='
            for pt in self.initial_point:
                s += ' {}'.format(pt)
        if self.lower_bounds is not None:
            s += '\n' \
                 + ' lower_bounds ='
            for b in self.lower_bounds:
                s += ' {}'.format(b)
        if self.upper_bounds is not None:
            s += '\n' \
                 + ' upper_bounds ='
            for b in self.upper_bounds:
                s += ' {}'.format(b)
        s += '\n' \
             + ' descriptors ='
        for vd in self.variables:
            s += ' {!r}'.format(vd)
        s += '\n\n'
        return s
<commit_before><commit_msg>Create new module for continuous design variables<commit_after>"""Implementation of a Dakota continuous design variable."""

from .base import VariableBase


classname = 'ContinuousDesign'


class ContinuousDesign(VariableBase):

    """Define attributes for Dakota continuous design variables."""

    def __init__(self,
                 variables=('x1', 'x2'),
                 initial_point=None,
                 lower_bounds=None,
                 upper_bounds=None,
                 scale_types=None,
                 scales=None,
                 **kwargs):
        VariableBase.__init__(self, **kwargs)
        self.variables = variables
        self._initial_point = initial_point
        self._lower_bounds = lower_bounds
        self._upper_bounds = upper_bounds
        if initial_point is None and lower_bounds is None and upper_bounds is None:
            self._initial_point = (0.0, 0.0)

    @property
    def initial_point(self):
        """Start points used by study variables."""
        return self._initial_point

    @initial_point.setter
    def initial_point(self, value):
        """Set start points used by study variables.

        Parameters
        ----------
        value : list or tuple of numbers
          The new initial points.

        """
        if not isinstance(value, (tuple, list)):
            raise TypeError("Initial points must be a tuple or a list")
        self._initial_point = value

    @property
    def lower_bounds(self):
        """Minimum values of study variables."""
        return self._lower_bounds

    @lower_bounds.setter
    def lower_bounds(self, value):
        """Set minimum values of study variables.

        Parameters
        ----------
        value : list or tuple of numbers
          The minimum values.

        """
        if not isinstance(value, (tuple, list)):
            raise TypeError("Lower bounds must be a tuple or a list")
        self._lower_bounds = value

    @property
    def upper_bounds(self):
        """Maximum values of study variables."""
        return self._upper_bounds

    @upper_bounds.setter
    def upper_bounds(self, value):
        """Set maximum values of study variables.

        Parameters
        ----------
        value : list or tuple of numbers
          The maximum values.

        """
        if not isinstance(value, (tuple, list)):
            raise TypeError("Upper bounds must be a tuple or a list")
        self._upper_bounds = value

    def variables_block(self):
        """Define the variables block for continuous design variables."""
        s = 'variables\n'
        s += ' {0} = {1}'.format(self.variable_type, len(self.variables))
        if self.initial_point is not None:
            s += '\n' \
                 + ' initial_point ='
            for pt in self.initial_point:
                s += ' {}'.format(pt)
        if self.lower_bounds is not None:
            s += '\n' \
                 + ' lower_bounds ='
            for b in self.lower_bounds:
                s += ' {}'.format(b)
        if self.upper_bounds is not None:
            s += '\n' \
                 + ' upper_bounds ='
            for b in self.upper_bounds:
                s += ' {}'.format(b)
        s += '\n' \
             + ' descriptors ='
        for vd in self.variables:
            s += ' {!r}'.format(vd)
        s += '\n\n'
        return s
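A sketch of how the class above might be driven. It assumes VariableBase can be constructed from keyword arguments alone and sets a variable_type attribute; both are assumptions about the base class, which lives outside this file:

# Hedged usage sketch; constructor details of VariableBase are assumed.
from csdms.dakota.variables.continuous_design import ContinuousDesign

v = ContinuousDesign(variables=('x1', 'x2'),
                     lower_bounds=(-2.0, -2.0),
                     upper_bounds=(2.0, 2.0))
print(v.variables_block())  # emits the Dakota "variables" input block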
74388ceaacb7eedf98eb03f1263ea2ec6db596e1
python_scripts/media_export_through_api.py
python_scripts/media_export_through_api.py
# -*- coding: utf-8 -*-
import psycopg2
import psycopg2.extras
import requests
import json

import mc_database
import mediacloud


def get_download_from_api(mc_api_url, api_key, downloads_id):
    r = requests.get(mc_api_url + '/api/v2/downloads/single/' + str(downloads_id),
                     params={'key': api_key})
    download = r.json()[0]
    return download


def add_feed_download_with_api(mc_api_url, api_key, download, raw_content):
    r = requests.put(mc_api_url + '/api/v2/crawler/add_feed_download',
                     params={'key': api_key},
                     data=json.dumps({'download': download, 'raw_content': raw_content}),
                     headers={'Accept': 'application/json'})
    return r

local_key = '2a4cebc31101a2d3d5e60456c23ae877c2d49944068f237e1134e2c75191a2af'
local_key = '1161251f5de4f381a198eea4dc20350fd992f5eef7cb2fdc284c245ff3d4f3ca'

source_media_cloud_api_url = 'http://localhost:8000'
dest_media_cloud_api_url = 'http://localhost:3000'

source_api_key = 'e07cf98dd0d457351354ee520635c226acd238ecf15ec9e853346e185343bf7b'
dest_api_key = local_key

db_label = "AWS backup crawler"

conn = mc_database.connect_to_database(db_label)

cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

cursor.execute("SELECT * from downloads where type='feed' and state in ( 'success', 'feed_error') order by downloads_id limit 10")

feed_downloads = cursor.fetchall()

for feed_download in feed_downloads:
    download = get_download_from_api(source_media_cloud_api_url, source_api_key, feed_download['downloads_id'])

    #print download
    #break

    raw_content = download['raw_content']
    del download['raw_content']

    if download['state'] == 'feed_error':
        download['state'] = 'success'

    add_feed_download_with_api(dest_media_cloud_api_url, dest_api_key, download, raw_content)
Add initial script to export feed downloads.
Add initial script to export feed downloads.
Python
agpl-3.0
AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,AchyuthIIIT/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud,berkmancenter/mediacloud
Add initial script to export feed downloads.
# -*- coding: utf-8 -*-
import psycopg2
import psycopg2.extras
import requests
import json

import mc_database
import mediacloud


def get_download_from_api(mc_api_url, api_key, downloads_id):
    r = requests.get(mc_api_url + '/api/v2/downloads/single/' + str(downloads_id),
                     params={'key': api_key})
    download = r.json()[0]
    return download


def add_feed_download_with_api(mc_api_url, api_key, download, raw_content):
    r = requests.put(mc_api_url + '/api/v2/crawler/add_feed_download',
                     params={'key': api_key},
                     data=json.dumps({'download': download, 'raw_content': raw_content}),
                     headers={'Accept': 'application/json'})
    return r

local_key = '2a4cebc31101a2d3d5e60456c23ae877c2d49944068f237e1134e2c75191a2af'
local_key = '1161251f5de4f381a198eea4dc20350fd992f5eef7cb2fdc284c245ff3d4f3ca'

source_media_cloud_api_url = 'http://localhost:8000'
dest_media_cloud_api_url = 'http://localhost:3000'

source_api_key = 'e07cf98dd0d457351354ee520635c226acd238ecf15ec9e853346e185343bf7b'
dest_api_key = local_key

db_label = "AWS backup crawler"

conn = mc_database.connect_to_database(db_label)

cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

cursor.execute("SELECT * from downloads where type='feed' and state in ( 'success', 'feed_error') order by downloads_id limit 10")

feed_downloads = cursor.fetchall()

for feed_download in feed_downloads:
    download = get_download_from_api(source_media_cloud_api_url, source_api_key, feed_download['downloads_id'])

    #print download
    #break

    raw_content = download['raw_content']
    del download['raw_content']

    if download['state'] == 'feed_error':
        download['state'] = 'success'

    add_feed_download_with_api(dest_media_cloud_api_url, dest_api_key, download, raw_content)
<commit_before><commit_msg>Add initial script to export feed downloads.<commit_after>
# -*- coding: utf-8 -*-
import psycopg2
import psycopg2.extras
import requests
import json

import mc_database
import mediacloud


def get_download_from_api(mc_api_url, api_key, downloads_id):
    r = requests.get(mc_api_url + '/api/v2/downloads/single/' + str(downloads_id),
                     params={'key': api_key})
    download = r.json()[0]
    return download


def add_feed_download_with_api(mc_api_url, api_key, download, raw_content):
    r = requests.put(mc_api_url + '/api/v2/crawler/add_feed_download',
                     params={'key': api_key},
                     data=json.dumps({'download': download, 'raw_content': raw_content}),
                     headers={'Accept': 'application/json'})
    return r

local_key = '2a4cebc31101a2d3d5e60456c23ae877c2d49944068f237e1134e2c75191a2af'
local_key = '1161251f5de4f381a198eea4dc20350fd992f5eef7cb2fdc284c245ff3d4f3ca'

source_media_cloud_api_url = 'http://localhost:8000'
dest_media_cloud_api_url = 'http://localhost:3000'

source_api_key = 'e07cf98dd0d457351354ee520635c226acd238ecf15ec9e853346e185343bf7b'
dest_api_key = local_key

db_label = "AWS backup crawler"

conn = mc_database.connect_to_database(db_label)

cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

cursor.execute("SELECT * from downloads where type='feed' and state in ( 'success', 'feed_error') order by downloads_id limit 10")

feed_downloads = cursor.fetchall()

for feed_download in feed_downloads:
    download = get_download_from_api(source_media_cloud_api_url, source_api_key, feed_download['downloads_id'])

    #print download
    #break

    raw_content = download['raw_content']
    del download['raw_content']

    if download['state'] == 'feed_error':
        download['state'] = 'success'

    add_feed_download_with_api(dest_media_cloud_api_url, dest_api_key, download, raw_content)
Add initial script to export feed downloads.# -*- coding: utf-8 -*-
import psycopg2
import psycopg2.extras
import requests
import json

import mc_database
import mediacloud


def get_download_from_api(mc_api_url, api_key, downloads_id):
    r = requests.get(mc_api_url + '/api/v2/downloads/single/' + str(downloads_id),
                     params={'key': api_key})
    download = r.json()[0]
    return download


def add_feed_download_with_api(mc_api_url, api_key, download, raw_content):
    r = requests.put(mc_api_url + '/api/v2/crawler/add_feed_download',
                     params={'key': api_key},
                     data=json.dumps({'download': download, 'raw_content': raw_content}),
                     headers={'Accept': 'application/json'})
    return r

local_key = '2a4cebc31101a2d3d5e60456c23ae877c2d49944068f237e1134e2c75191a2af'
local_key = '1161251f5de4f381a198eea4dc20350fd992f5eef7cb2fdc284c245ff3d4f3ca'

source_media_cloud_api_url = 'http://localhost:8000'
dest_media_cloud_api_url = 'http://localhost:3000'

source_api_key = 'e07cf98dd0d457351354ee520635c226acd238ecf15ec9e853346e185343bf7b'
dest_api_key = local_key

db_label = "AWS backup crawler"

conn = mc_database.connect_to_database(db_label)

cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

cursor.execute("SELECT * from downloads where type='feed' and state in ( 'success', 'feed_error') order by downloads_id limit 10")

feed_downloads = cursor.fetchall()

for feed_download in feed_downloads:
    download = get_download_from_api(source_media_cloud_api_url, source_api_key, feed_download['downloads_id'])

    #print download
    #break

    raw_content = download['raw_content']
    del download['raw_content']

    if download['state'] == 'feed_error':
        download['state'] = 'success'

    add_feed_download_with_api(dest_media_cloud_api_url, dest_api_key, download, raw_content)
<commit_before><commit_msg>Add initial script to export feed downloads.<commit_after># -*- coding: utf-8 -*-
import psycopg2
import psycopg2.extras
import requests
import json

import mc_database
import mediacloud


def get_download_from_api(mc_api_url, api_key, downloads_id):
    r = requests.get(mc_api_url + '/api/v2/downloads/single/' + str(downloads_id),
                     params={'key': api_key})
    download = r.json()[0]
    return download


def add_feed_download_with_api(mc_api_url, api_key, download, raw_content):
    r = requests.put(mc_api_url + '/api/v2/crawler/add_feed_download',
                     params={'key': api_key},
                     data=json.dumps({'download': download, 'raw_content': raw_content}),
                     headers={'Accept': 'application/json'})
    return r

local_key = '2a4cebc31101a2d3d5e60456c23ae877c2d49944068f237e1134e2c75191a2af'
local_key = '1161251f5de4f381a198eea4dc20350fd992f5eef7cb2fdc284c245ff3d4f3ca'

source_media_cloud_api_url = 'http://localhost:8000'
dest_media_cloud_api_url = 'http://localhost:3000'

source_api_key = 'e07cf98dd0d457351354ee520635c226acd238ecf15ec9e853346e185343bf7b'
dest_api_key = local_key

db_label = "AWS backup crawler"

conn = mc_database.connect_to_database(db_label)

cursor = conn.cursor(cursor_factory=psycopg2.extras.DictCursor)

cursor.execute("SELECT * from downloads where type='feed' and state in ( 'success', 'feed_error') order by downloads_id limit 10")

feed_downloads = cursor.fetchall()

for feed_download in feed_downloads:
    download = get_download_from_api(source_media_cloud_api_url, source_api_key, feed_download['downloads_id'])

    #print download
    #break

    raw_content = download['raw_content']
    del download['raw_content']

    if download['state'] == 'feed_error':
        download['state'] = 'success'

    add_feed_download_with_api(dest_media_cloud_api_url, dest_api_key, download, raw_content)
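The wire format the script relies on is small: add_feed_download_with_api PUTs a two-key JSON object. A standalone look at just that serialization, with made-up field values:

import json

download = {'downloads_id': 12345, 'type': 'feed', 'state': 'success'}  # hypothetical row
payload = json.dumps({'download': download, 'raw_content': '<rss></rss>'})
print(payload)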
91396ed246166f610e9cfc4519862f061af4e6b2
cat/admin.py
cat/admin.py
from django.contrib import admin

from models import MuseumObject, FunctionalCategory


class MOAdmin(admin.ModelAdmin):
    fields = ('registration_number', 'country', 'description', 'comment')
    list_display = ('registration_number', 'country', 'description', 'comment')
    list_filter = ('country', 'functional_category')
    search_fields = ['description', 'comment']

admin.site.register(MuseumObject, MOAdmin)
admin.site.register(FunctionalCategory)
Enable Django Admin for some of our data
Enable Django Admin for some of our data
Python
bsd-3-clause
uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam,uq-eresearch/uqam
Enable Django Admin for some of our data
from django.contrib import admin

from models import MuseumObject, FunctionalCategory


class MOAdmin(admin.ModelAdmin):
    fields = ('registration_number', 'country', 'description', 'comment')
    list_display = ('registration_number', 'country', 'description', 'comment')
    list_filter = ('country', 'functional_category')
    search_fields = ['description', 'comment']

admin.site.register(MuseumObject, MOAdmin)
admin.site.register(FunctionalCategory)
<commit_before><commit_msg>Enable Django Admin for some of our data<commit_after>
from django.contrib import admin

from models import MuseumObject, FunctionalCategory


class MOAdmin(admin.ModelAdmin):
    fields = ('registration_number', 'country', 'description', 'comment')
    list_display = ('registration_number', 'country', 'description', 'comment')
    list_filter = ('country', 'functional_category')
    search_fields = ['description', 'comment']

admin.site.register(MuseumObject, MOAdmin)
admin.site.register(FunctionalCategory)
Enable Django Admin for some of our datafrom django.contrib import admin

from models import MuseumObject, FunctionalCategory


class MOAdmin(admin.ModelAdmin):
    fields = ('registration_number', 'country', 'description', 'comment')
    list_display = ('registration_number', 'country', 'description', 'comment')
    list_filter = ('country', 'functional_category')
    search_fields = ['description', 'comment']

admin.site.register(MuseumObject, MOAdmin)
admin.site.register(FunctionalCategory)
<commit_before><commit_msg>Enable Django Admin for some of our data<commit_after>from django.contrib import admin

from models import MuseumObject, FunctionalCategory


class MOAdmin(admin.ModelAdmin):
    fields = ('registration_number', 'country', 'description', 'comment')
    list_display = ('registration_number', 'country', 'description', 'comment')
    list_filter = ('country', 'functional_category')
    search_fields = ['description', 'comment']

admin.site.register(MuseumObject, MOAdmin)
admin.site.register(FunctionalCategory)
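Since Django 1.7 the same registration can be written with the admin.register decorator; an equivalent sketch of the MOAdmin hookup (behaviour is unchanged either way):

from django.contrib import admin
from models import MuseumObject


@admin.register(MuseumObject)
class MOAdmin(admin.ModelAdmin):
    list_display = ('registration_number', 'country', 'description', 'comment')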
edc982bdfaece6aaf23b3e7f9c967de800eacbd6
txircd/modules/extra/snotice_links.py
txircd/modules/extra/snotice_links.py
from twisted.plugin import IPlugin from txircd.modbase import IModuleData, ModuleData from zope.interface import implements class SnoLinks(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeLinks" def actions(self): return [ ("serverconnect", 1, self.announceConnect), ("serverquit", 1, self.announceQuit), ("servernoticetype", 1, self.checkSnoType) ] def announceConnect(self, server): message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name) self.ircd.runActionStandard("sendservernotice", "links", message) def announceQuit(self, server, reason): message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason) self.ircd.runActionStandard("sendservernotice", "links", message) def checkSnoType(self, user, typename): if typename == "links": return True return False snoLinks = SnoLinks()
Implement links server notice type
Implement links server notice type
Python
bsd-3-clause
Heufneutje/txircd
Implement links server notice type
from twisted.plugin import IPlugin from txircd.modbase import IModuleData, ModuleData from zope.interface import implements class SnoLinks(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeLinks" def actions(self): return [ ("serverconnect", 1, self.announceConnect), ("serverquit", 1, self.announceQuit), ("servernoticetype", 1, self.checkSnoType) ] def announceConnect(self, server): message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name) self.ircd.runActionStandard("sendservernotice", "links", message) def announceQuit(self, server, reason): message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason) self.ircd.runActionStandard("sendservernotice", "links", message) def checkSnoType(self, user, typename): if typename == "links": return True return False snoLinks = SnoLinks()
<commit_before><commit_msg>Implement links server notice type<commit_after>
from twisted.plugin import IPlugin from txircd.modbase import IModuleData, ModuleData from zope.interface import implements class SnoLinks(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeLinks" def actions(self): return [ ("serverconnect", 1, self.announceConnect), ("serverquit", 1, self.announceQuit), ("servernoticetype", 1, self.checkSnoType) ] def announceConnect(self, server): message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name) self.ircd.runActionStandard("sendservernotice", "links", message) def announceQuit(self, server, reason): message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason) self.ircd.runActionStandard("sendservernotice", "links", message) def checkSnoType(self, user, typename): if typename == "links": return True return False snoLinks = SnoLinks()
Implement links server notice typefrom twisted.plugin import IPlugin from txircd.modbase import IModuleData, ModuleData from zope.interface import implements class SnoLinks(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeLinks" def actions(self): return [ ("serverconnect", 1, self.announceConnect), ("serverquit", 1, self.announceQuit), ("servernoticetype", 1, self.checkSnoType) ] def announceConnect(self, server): message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name) self.ircd.runActionStandard("sendservernotice", "links", message) def announceQuit(self, server, reason): message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason) self.ircd.runActionStandard("sendservernotice", "links", message) def checkSnoType(self, user, typename): if typename == "links": return True return False snoLinks = SnoLinks()
<commit_before><commit_msg>Implement links server notice type<commit_after>from twisted.plugin import IPlugin from txircd.modbase import IModuleData, ModuleData from zope.interface import implements class SnoLinks(ModuleData): implements(IPlugin, IModuleData) name = "ServerNoticeLinks" def actions(self): return [ ("serverconnect", 1, self.announceConnect), ("serverquit", 1, self.announceQuit), ("servernoticetype", 1, self.checkSnoType) ] def announceConnect(self, server): message = "Server {} ({}) connected (to {})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name) self.ircd.runActionStandard("sendservernotice", "links", message) def announceQuit(self, server, reason): message = "Server {} ({}) disconnected (from {}) ({})".format(server.name, server.serverID, self.ircd.name if server.nextClosest == self.ircd.serverID else self.ircd.servers[server.nextClosest].name, reason) self.ircd.runActionStandard("sendservernotice", "links", message) def checkSnoType(self, user, typename): if typename == "links": return True return False snoLinks = SnoLinks()
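The announcements above are plain str.format templates, so the rendered notices are easy to preview with dummy values:

# Stand-in values; real names come from the server objects at runtime.
name, sid, closest = 'hub.example.org', '1AB', 'services.example.org'
print("Server {} ({}) connected (to {})".format(name, sid, closest))
# -> Server hub.example.org (1AB) connected (to services.example.org)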
c65f59c6a6048807d29e5ce123447afd006ce05f
users/migrations/0007_username_length.py
users/migrations/0007_username_length.py
# -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2016-11-02 11:53 from __future__ import unicode_literals import django.contrib.auth.validators from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('users', '0006_auto_20160508_1407'), ] operations = [ migrations.AlterField( model_name='user', name='username', field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username'), ), ]
Add missing migration for username length
Add missing migration for username length The username length changed from 30 to 150 in commit f2a0c964 which updated Django to 1.10, but there was no migration created. Add the missing migration.
Python
mit
mikkokeskinen/tunnistamo,mikkokeskinen/tunnistamo
Add missing migration for username length The username length changed from 30 to 150 in commit f2a0c964 which updated Django to 1.10, but there was no migration created. Add the missing migration.
# -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2016-11-02 11:53 from __future__ import unicode_literals import django.contrib.auth.validators from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('users', '0006_auto_20160508_1407'), ] operations = [ migrations.AlterField( model_name='user', name='username', field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username'), ), ]
<commit_before><commit_msg>Add missing migration for username length The username length changed from 30 to 150 in commit f2a0c964 which updated Django to 1.10, but there was no migration created. Add the missing migration.<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2016-11-02 11:53 from __future__ import unicode_literals import django.contrib.auth.validators from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('users', '0006_auto_20160508_1407'), ] operations = [ migrations.AlterField( model_name='user', name='username', field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username'), ), ]
Add missing migration for username length The username length changed from 30 to 150 in commit f2a0c964 which updated Django to 1.10, but there was no migration created. Add the missing migration.# -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2016-11-02 11:53 from __future__ import unicode_literals import django.contrib.auth.validators from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('users', '0006_auto_20160508_1407'), ] operations = [ migrations.AlterField( model_name='user', name='username', field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username'), ), ]
<commit_before><commit_msg>Add missing migration for username length The username length changed from 30 to 150 in commit f2a0c964 which updated Django to 1.10, but there was no migration created. Add the missing migration.<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.10.2 on 2016-11-02 11:53 from __future__ import unicode_literals import django.contrib.auth.validators from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('users', '0006_auto_20160508_1407'), ] operations = [ migrations.AlterField( model_name='user', name='username', field=models.CharField(error_messages={'unique': 'A user with that username already exists.'}, help_text='Required. 150 characters or fewer. Letters, digits and @/./+/-/_ only.', max_length=150, unique=True, validators=[django.contrib.auth.validators.UnicodeUsernameValidator()], verbose_name='username'), ), ]
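After applying the migration, the widened column is easy to verify from a Django shell; this check is illustrative and assumes the project's custom user model is active:

from django.contrib.auth import get_user_model

User = get_user_model()
assert User._meta.get_field('username').max_length == 150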
1ef84c24c60cf802aeb4bf6084f9b7fc7696f79a
scripts/album_times.py
scripts/album_times.py
#!/usr/bin/env python3

"""Radio scheduling program.

Usage:
  album_times.py [--host=HOST] PORT

Options:
  --host=HOST  Hostname of MPD [default: localhost]
  -h --help    Show this text

Prints out the last scheduling time of every album.
"""

from datetime import datetime
from docopt import docopt
from mpd import MPDClient


def album_sticker_get(client, album, sticker):
    """Gets a sticker associated with an album."""

    # I am pretty sure that MPD only implements stickers for songs, so
    # the sticker gets attached to the first song in the album.
    tracks = client.find("album", album)
    if len(tracks) == 0:
        return
    return client.sticker_get("song", tracks[0]["file"], "album_" + sticker)


def album_sticker_set(client, album, sticker, val):
    """Sets a sticker associated with an album."""

    # I am pretty sure that MPD only implements stickers for songs, so
    # the sticker gets attached to the first song in the album.
    tracks = client.find("album", album)
    if len(tracks) == 0:
        return
    return client.sticker_set("song", tracks[0]["file"], "album_" + sticker, val)


def list_albums(client):
    """Lists albums sorted by last play timestamp."""

    # Get all albums
    albums = client.list("album")
    all_albums = list(filter(lambda a: a not in ["", "Lainchan Radio Transitions"], albums))

    # Group albums by when they were last scheduled
    albums_by_last_scheduled = {}
    for album in all_albums:
        # Get the last scheduled time, defaulting to 0
        try:
            last_scheduled = int(album_sticker_get(client, album, "last_scheduled"))
        except Exception:
            last_scheduled = 0

        # Put the album into the appropriate bucket
        if last_scheduled in albums_by_last_scheduled:
            albums_by_last_scheduled[last_scheduled].append(album)
        else:
            albums_by_last_scheduled[last_scheduled] = [album]

    # Print each bucket of albums once, oldest scheduling time first
    for last_scheduled in sorted(albums_by_last_scheduled):
        dt = datetime.utcfromtimestamp(last_scheduled)
        albums = albums_by_last_scheduled[last_scheduled]
        print("{}: {}".format(dt.strftime('%Y-%m-%d %H:%M:%S'), albums))


if __name__ == "__main__":
    args = docopt(__doc__)

    try:
        args["PORT"] = int(args["PORT"])
    except ValueError:
        print("PORT must be an integer")
        exit(1)

    try:
        client = MPDClient()
        client.connect(args["--host"], args["PORT"])
    except Exception:
        print("could not connect to MPD")
        exit(2)

    list_albums(client)
Add a script to print the scheduling times of albums
Add a script to print the scheduling times of albums
Python
mit
barrucadu/lainonlife,barrucadu/lainonlife,barrucadu/lainonlife,barrucadu/lainonlife
Add a script to print the scheduling times of albums
#!/usr/bin/env python3

"""Radio scheduling program.

Usage:
  album_times.py [--host=HOST] PORT

Options:
  --host=HOST  Hostname of MPD [default: localhost]
  -h --help    Show this text

Prints out the last scheduling time of every album.
"""

from datetime import datetime
from docopt import docopt
from mpd import MPDClient


def album_sticker_get(client, album, sticker):
    """Gets a sticker associated with an album."""

    # I am pretty sure that MPD only implements stickers for songs, so
    # the sticker gets attached to the first song in the album.
    tracks = client.find("album", album)
    if len(tracks) == 0:
        return
    return client.sticker_get("song", tracks[0]["file"], "album_" + sticker)


def album_sticker_set(client, album, sticker, val):
    """Sets a sticker associated with an album."""

    # I am pretty sure that MPD only implements stickers for songs, so
    # the sticker gets attached to the first song in the album.
    tracks = client.find("album", album)
    if len(tracks) == 0:
        return
    return client.sticker_set("song", tracks[0]["file"], "album_" + sticker, val)


def list_albums(client):
    """Lists albums sorted by last play timestamp."""

    # Get all albums
    albums = client.list("album")
    all_albums = list(filter(lambda a: a not in ["", "Lainchan Radio Transitions"], albums))

    # Group albums by when they were last scheduled
    albums_by_last_scheduled = {}
    for album in all_albums:
        # Get the last scheduled time, defaulting to 0
        try:
            last_scheduled = int(album_sticker_get(client, album, "last_scheduled"))
        except Exception:
            last_scheduled = 0

        # Put the album into the appropriate bucket
        if last_scheduled in albums_by_last_scheduled:
            albums_by_last_scheduled[last_scheduled].append(album)
        else:
            albums_by_last_scheduled[last_scheduled] = [album]

    # Print each bucket of albums once, oldest scheduling time first
    for last_scheduled in sorted(albums_by_last_scheduled):
        dt = datetime.utcfromtimestamp(last_scheduled)
        albums = albums_by_last_scheduled[last_scheduled]
        print("{}: {}".format(dt.strftime('%Y-%m-%d %H:%M:%S'), albums))


if __name__ == "__main__":
    args = docopt(__doc__)

    try:
        args["PORT"] = int(args["PORT"])
    except ValueError:
        print("PORT must be an integer")
        exit(1)

    try:
        client = MPDClient()
        client.connect(args["--host"], args["PORT"])
    except Exception:
        print("could not connect to MPD")
        exit(2)

    list_albums(client)
<commit_before><commit_msg>Add a script to print the scheduling times of albums<commit_after>
#!/usr/bin/env python3

"""Radio scheduling program.

Usage:
  album_times.py [--host=HOST] PORT

Options:
  --host=HOST  Hostname of MPD [default: localhost]
  -h --help    Show this text

Prints out the last scheduling time of every album.
"""

from datetime import datetime
from docopt import docopt
from mpd import MPDClient


def album_sticker_get(client, album, sticker):
    """Gets a sticker associated with an album."""

    # I am pretty sure that MPD only implements stickers for songs, so
    # the sticker gets attached to the first song in the album.
    tracks = client.find("album", album)
    if len(tracks) == 0:
        return
    return client.sticker_get("song", tracks[0]["file"], "album_" + sticker)


def album_sticker_set(client, album, sticker, val):
    """Sets a sticker associated with an album."""

    # I am pretty sure that MPD only implements stickers for songs, so
    # the sticker gets attached to the first song in the album.
    tracks = client.find("album", album)
    if len(tracks) == 0:
        return
    return client.sticker_set("song", tracks[0]["file"], "album_" + sticker, val)


def list_albums(client):
    """Lists albums sorted by last play timestamp."""

    # Get all albums
    albums = client.list("album")
    all_albums = list(filter(lambda a: a not in ["", "Lainchan Radio Transitions"], albums))

    # Group albums by when they were last scheduled
    albums_by_last_scheduled = {}
    for album in all_albums:
        # Get the last scheduled time, defaulting to 0
        try:
            last_scheduled = int(album_sticker_get(client, album, "last_scheduled"))
        except Exception:
            last_scheduled = 0

        # Put the album into the appropriate bucket
        if last_scheduled in albums_by_last_scheduled:
            albums_by_last_scheduled[last_scheduled].append(album)
        else:
            albums_by_last_scheduled[last_scheduled] = [album]

    # Print each bucket of albums once, oldest scheduling time first
    for last_scheduled in sorted(albums_by_last_scheduled):
        dt = datetime.utcfromtimestamp(last_scheduled)
        albums = albums_by_last_scheduled[last_scheduled]
        print("{}: {}".format(dt.strftime('%Y-%m-%d %H:%M:%S'), albums))


if __name__ == "__main__":
    args = docopt(__doc__)

    try:
        args["PORT"] = int(args["PORT"])
    except ValueError:
        print("PORT must be an integer")
        exit(1)

    try:
        client = MPDClient()
        client.connect(args["--host"], args["PORT"])
    except Exception:
        print("could not connect to MPD")
        exit(2)

    list_albums(client)
Add a script to print the scheduling times of albums#!/usr/bin/env python3

"""Radio scheduling program.

Usage:
  album_times.py [--host=HOST] PORT

Options:
  --host=HOST  Hostname of MPD [default: localhost]
  -h --help    Show this text

Prints out the last scheduling time of every album.
"""

from datetime import datetime
from docopt import docopt
from mpd import MPDClient


def album_sticker_get(client, album, sticker):
    """Gets a sticker associated with an album."""

    # I am pretty sure that MPD only implements stickers for songs, so
    # the sticker gets attached to the first song in the album.
    tracks = client.find("album", album)
    if len(tracks) == 0:
        return
    return client.sticker_get("song", tracks[0]["file"], "album_" + sticker)


def album_sticker_set(client, album, sticker, val):
    """Sets a sticker associated with an album."""

    # I am pretty sure that MPD only implements stickers for songs, so
    # the sticker gets attached to the first song in the album.
    tracks = client.find("album", album)
    if len(tracks) == 0:
        return
    return client.sticker_set("song", tracks[0]["file"], "album_" + sticker, val)


def list_albums(client):
    """Lists albums sorted by last play timestamp."""

    # Get all albums
    albums = client.list("album")
    all_albums = list(filter(lambda a: a not in ["", "Lainchan Radio Transitions"], albums))

    # Group albums by when they were last scheduled
    albums_by_last_scheduled = {}
    for album in all_albums:
        # Get the last scheduled time, defaulting to 0
        try:
            last_scheduled = int(album_sticker_get(client, album, "last_scheduled"))
        except Exception:
            last_scheduled = 0

        # Put the album into the appropriate bucket
        if last_scheduled in albums_by_last_scheduled:
            albums_by_last_scheduled[last_scheduled].append(album)
        else:
            albums_by_last_scheduled[last_scheduled] = [album]

    # Print each bucket of albums once, oldest scheduling time first
    for last_scheduled in sorted(albums_by_last_scheduled):
        dt = datetime.utcfromtimestamp(last_scheduled)
        albums = albums_by_last_scheduled[last_scheduled]
        print("{}: {}".format(dt.strftime('%Y-%m-%d %H:%M:%S'), albums))


if __name__ == "__main__":
    args = docopt(__doc__)

    try:
        args["PORT"] = int(args["PORT"])
    except ValueError:
        print("PORT must be an integer")
        exit(1)

    try:
        client = MPDClient()
        client.connect(args["--host"], args["PORT"])
    except Exception:
        print("could not connect to MPD")
        exit(2)

    list_albums(client)
<commit_before><commit_msg>Add a script to print the scheduling times of albums<commit_after>#!/usr/bin/env python3

"""Radio scheduling program.

Usage:
  album_times.py [--host=HOST] PORT

Options:
  --host=HOST  Hostname of MPD [default: localhost]
  -h --help    Show this text

Prints out the last scheduling time of every album.
"""

from datetime import datetime
from docopt import docopt
from mpd import MPDClient


def album_sticker_get(client, album, sticker):
    """Gets a sticker associated with an album."""

    # I am pretty sure that MPD only implements stickers for songs, so
    # the sticker gets attached to the first song in the album.
    tracks = client.find("album", album)
    if len(tracks) == 0:
        return
    return client.sticker_get("song", tracks[0]["file"], "album_" + sticker)


def album_sticker_set(client, album, sticker, val):
    """Sets a sticker associated with an album."""

    # I am pretty sure that MPD only implements stickers for songs, so
    # the sticker gets attached to the first song in the album.
    tracks = client.find("album", album)
    if len(tracks) == 0:
        return
    return client.sticker_set("song", tracks[0]["file"], "album_" + sticker, val)


def list_albums(client):
    """Lists albums sorted by last play timestamp."""

    # Get all albums
    albums = client.list("album")
    all_albums = list(filter(lambda a: a not in ["", "Lainchan Radio Transitions"], albums))

    # Group albums by when they were last scheduled
    albums_by_last_scheduled = {}
    for album in all_albums:
        # Get the last scheduled time, defaulting to 0
        try:
            last_scheduled = int(album_sticker_get(client, album, "last_scheduled"))
        except Exception:
            last_scheduled = 0

        # Put the album into the appropriate bucket
        if last_scheduled in albums_by_last_scheduled:
            albums_by_last_scheduled[last_scheduled].append(album)
        else:
            albums_by_last_scheduled[last_scheduled] = [album]

    # Print each bucket of albums once, oldest scheduling time first
    for last_scheduled in sorted(albums_by_last_scheduled):
        dt = datetime.utcfromtimestamp(last_scheduled)
        albums = albums_by_last_scheduled[last_scheduled]
        print("{}: {}".format(dt.strftime('%Y-%m-%d %H:%M:%S'), albums))


if __name__ == "__main__":
    args = docopt(__doc__)

    try:
        args["PORT"] = int(args["PORT"])
    except ValueError:
        print("PORT must be an integer")
        exit(1)

    try:
        client = MPDClient()
        client.connect(args["--host"], args["PORT"])
    except Exception:
        print("could not connect to MPD")
        exit(2)

    list_albums(client)
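One behaviour worth spelling out: albums with no last_scheduled sticker fall back to timestamp 0, so they sort first and render as the Unix epoch:

from datetime import datetime

last_scheduled = 0  # default for albums that have never been scheduled
print(datetime.utcfromtimestamp(last_scheduled).strftime('%Y-%m-%d %H:%M:%S'))
# -> 1970-01-01 00:00:00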
bda42a4630e8b9e720443b6785ff2e3435bfdfa6
pybaseball/team_results.py
pybaseball/team_results.py
import pandas as pd
import requests
import datetime
from bs4 import BeautifulSoup

# TODO: raise error if year > current year or < first year of a team's existence
# TODO: team validation. return error if team does not exist.
# TODO: sanitize team inputs (force to all caps)

def get_soup(season, team):
    # get most recent year's schedule if year not specified
    if(season is None):
        season = datetime.datetime.today().strftime("%Y")
    url = "http://www.baseball-reference.com/teams/{}/{}-schedule-scores.shtml".format(team, season)
    s=requests.get(url).content
    return BeautifulSoup(s, "html.parser")

def get_table(soup):
    table = soup.find_all('table')[0]
    data = []
    headings = [th.get_text() for th in table.find("tr").find_all("th")]
    headings = headings[1:] # the "gm#" heading doesn't have a <td> element
    headings[3] = "Home_Away"
    data.append(headings)
    table_body = table.find('tbody')
    rows = table_body.find_all('tr')
    for row_index in range(len(rows)-1): #last row is a description of column meanings
        row = rows[row_index]
        try:
            cols = row.find_all('td')
            #links = row.find_all('a')
            if cols[3].text == "":
                cols[3].string = 'Home' # this element only has an entry if it's an away game
            if cols[12].text == "":
                cols[12].string = "None" # tie games won't have a pitcher win or loss
            if cols[13].text == "":
                cols[13].string = "None"
            if cols[14].text == "":
                cols[14].string = "None" # games w/o saves have blank td entry
            if cols[8].text == "":
                cols[8].string = "9" # entry is blank if no extra innings
            cols = [ele.text.strip() for ele in cols]
            data.append([ele for ele in cols if ele])
        except:
            # two cases will break the above: games that haven't happened yet, and BR's redundant mid-table headers
            # if future games, grab the scheduling info. Otherwise do nothing.
            if len(cols)>1:
                cols = [ele.text.strip() for ele in cols][0:5]
                data.append([ele for ele in cols if ele])
    #convert to pandas dataframe. make first row the table's column names and reindex.
    data = pd.DataFrame(data)
    data = data.rename(columns=data.iloc[0])
    data = data.reindex(data.index.drop(0))
    return data

def schedule_and_record(season=None, team=None):
    # retrieve html from baseball reference
    soup = get_soup(season, team)
    table = get_table(soup)
    return table
Add code for getting team schedule and game outcomes
Add code for getting team schedule and game outcomes
Python
mit
jldbc/pybaseball
Add code for getting team schedule and game outcomes
import pandas as pd
import requests
import datetime
from bs4 import BeautifulSoup

# TODO: raise error if year > current year or < first year of a team's existence
# TODO: team validation. return error if team does not exist.
# TODO: sanitize team inputs (force to all caps)

def get_soup(season, team):
    # get most recent year's schedule if year not specified
    if(season is None):
        season = datetime.datetime.today().strftime("%Y")
    url = "http://www.baseball-reference.com/teams/{}/{}-schedule-scores.shtml".format(team, season)
    s=requests.get(url).content
    return BeautifulSoup(s, "html.parser")

def get_table(soup):
    table = soup.find_all('table')[0]
    data = []
    headings = [th.get_text() for th in table.find("tr").find_all("th")]
    headings = headings[1:] # the "gm#" heading doesn't have a <td> element
    headings[3] = "Home_Away"
    data.append(headings)
    table_body = table.find('tbody')
    rows = table_body.find_all('tr')
    for row_index in range(len(rows)-1): #last row is a description of column meanings
        row = rows[row_index]
        try:
            cols = row.find_all('td')
            #links = row.find_all('a')
            if cols[3].text == "":
                cols[3].string = 'Home' # this element only has an entry if it's an away game
            if cols[12].text == "":
                cols[12].string = "None" # tie games won't have a pitcher win or loss
            if cols[13].text == "":
                cols[13].string = "None"
            if cols[14].text == "":
                cols[14].string = "None" # games w/o saves have blank td entry
            if cols[8].text == "":
                cols[8].string = "9" # entry is blank if no extra innings
            cols = [ele.text.strip() for ele in cols]
            data.append([ele for ele in cols if ele])
        except:
            # two cases will break the above: games that haven't happened yet, and BR's redundant mid-table headers
            # if future games, grab the scheduling info. Otherwise do nothing.
            if len(cols)>1:
                cols = [ele.text.strip() for ele in cols][0:5]
                data.append([ele for ele in cols if ele])
    #convert to pandas dataframe. make first row the table's column names and reindex.
    data = pd.DataFrame(data)
    data = data.rename(columns=data.iloc[0])
    data = data.reindex(data.index.drop(0))
    return data

def schedule_and_record(season=None, team=None):
    # retrieve html from baseball reference
    soup = get_soup(season, team)
    table = get_table(soup)
    return table
<commit_before><commit_msg>Add code for getting team schedule and game outcomes<commit_after>
import pandas as pd
import requests
import datetime
from bs4 import BeautifulSoup

# TODO: raise error if year > current year or < first year of a team's existence
# TODO: team validation. return error if team does not exist.
# TODO: sanitize team inputs (force to all caps)

def get_soup(season, team):
    # get most recent year's schedule if year not specified
    if(season is None):
        season = datetime.datetime.today().strftime("%Y")
    url = "http://www.baseball-reference.com/teams/{}/{}-schedule-scores.shtml".format(team, season)
    s=requests.get(url).content
    return BeautifulSoup(s, "html.parser")

def get_table(soup):
    table = soup.find_all('table')[0]
    data = []
    headings = [th.get_text() for th in table.find("tr").find_all("th")]
    headings = headings[1:] # the "gm#" heading doesn't have a <td> element
    headings[3] = "Home_Away"
    data.append(headings)
    table_body = table.find('tbody')
    rows = table_body.find_all('tr')
    for row_index in range(len(rows)-1): #last row is a description of column meanings
        row = rows[row_index]
        try:
            cols = row.find_all('td')
            #links = row.find_all('a')
            if cols[3].text == "":
                cols[3].string = 'Home' # this element only has an entry if it's an away game
            if cols[12].text == "":
                cols[12].string = "None" # tie games won't have a pitcher win or loss
            if cols[13].text == "":
                cols[13].string = "None"
            if cols[14].text == "":
                cols[14].string = "None" # games w/o saves have blank td entry
            if cols[8].text == "":
                cols[8].string = "9" # entry is blank if no extra innings
            cols = [ele.text.strip() for ele in cols]
            data.append([ele for ele in cols if ele])
        except:
            # two cases will break the above: games that haven't happened yet, and BR's redundant mid-table headers
            # if future games, grab the scheduling info. Otherwise do nothing.
            if len(cols)>1:
                cols = [ele.text.strip() for ele in cols][0:5]
                data.append([ele for ele in cols if ele])
    #convert to pandas dataframe. make first row the table's column names and reindex.
    data = pd.DataFrame(data)
    data = data.rename(columns=data.iloc[0])
    data = data.reindex(data.index.drop(0))
    return data

def schedule_and_record(season=None, team=None):
    # retrieve html from baseball reference
    soup = get_soup(season, team)
    table = get_table(soup)
    return table
Add code for getting team schedule and game outcomesimport pandas as pd
import requests
import datetime
from bs4 import BeautifulSoup

# TODO: raise error if year > current year or < first year of a team's existence
# TODO: team validation. return error if team does not exist.
# TODO: sanitize team inputs (force to all caps)

def get_soup(season, team):
    # get most recent year's schedule if year not specified
    if(season is None):
        season = datetime.datetime.today().strftime("%Y")
    url = "http://www.baseball-reference.com/teams/{}/{}-schedule-scores.shtml".format(team, season)
    s=requests.get(url).content
    return BeautifulSoup(s, "html.parser")

def get_table(soup):
    table = soup.find_all('table')[0]
    data = []
    headings = [th.get_text() for th in table.find("tr").find_all("th")]
    headings = headings[1:] # the "gm#" heading doesn't have a <td> element
    headings[3] = "Home_Away"
    data.append(headings)
    table_body = table.find('tbody')
    rows = table_body.find_all('tr')
    for row_index in range(len(rows)-1): #last row is a description of column meanings
        row = rows[row_index]
        try:
            cols = row.find_all('td')
            #links = row.find_all('a')
            if cols[3].text == "":
                cols[3].string = 'Home' # this element only has an entry if it's an away game
            if cols[12].text == "":
                cols[12].string = "None" # tie games won't have a pitcher win or loss
            if cols[13].text == "":
                cols[13].string = "None"
            if cols[14].text == "":
                cols[14].string = "None" # games w/o saves have blank td entry
            if cols[8].text == "":
                cols[8].string = "9" # entry is blank if no extra innings
            cols = [ele.text.strip() for ele in cols]
            data.append([ele for ele in cols if ele])
        except:
            # two cases will break the above: games that haven't happened yet, and BR's redundant mid-table headers
            # if future games, grab the scheduling info. Otherwise do nothing.
            if len(cols)>1:
                cols = [ele.text.strip() for ele in cols][0:5]
                data.append([ele for ele in cols if ele])
    #convert to pandas dataframe. make first row the table's column names and reindex.
    data = pd.DataFrame(data)
    data = data.rename(columns=data.iloc[0])
    data = data.reindex(data.index.drop(0))
    return data

def schedule_and_record(season=None, team=None):
    # retrieve html from baseball reference
    soup = get_soup(season, team)
    table = get_table(soup)
    return table
<commit_before><commit_msg>Add code for getting team schedule and game outcomes<commit_after>import pandas as pd
import requests
import datetime
from bs4 import BeautifulSoup

# TODO: raise error if year > current year or < first year of a team's existence
# TODO: team validation. return error if team does not exist.
# TODO: sanitize team inputs (force to all caps)

def get_soup(season, team):
    # get most recent year's schedule if year not specified
    if(season is None):
        season = datetime.datetime.today().strftime("%Y")
    url = "http://www.baseball-reference.com/teams/{}/{}-schedule-scores.shtml".format(team, season)
    s=requests.get(url).content
    return BeautifulSoup(s, "html.parser")

def get_table(soup):
    table = soup.find_all('table')[0]
    data = []
    headings = [th.get_text() for th in table.find("tr").find_all("th")]
    headings = headings[1:] # the "gm#" heading doesn't have a <td> element
    headings[3] = "Home_Away"
    data.append(headings)
    table_body = table.find('tbody')
    rows = table_body.find_all('tr')
    for row_index in range(len(rows)-1): #last row is a description of column meanings
        row = rows[row_index]
        try:
            cols = row.find_all('td')
            #links = row.find_all('a')
            if cols[3].text == "":
                cols[3].string = 'Home' # this element only has an entry if it's an away game
            if cols[12].text == "":
                cols[12].string = "None" # tie games won't have a pitcher win or loss
            if cols[13].text == "":
                cols[13].string = "None"
            if cols[14].text == "":
                cols[14].string = "None" # games w/o saves have blank td entry
            if cols[8].text == "":
                cols[8].string = "9" # entry is blank if no extra innings
            cols = [ele.text.strip() for ele in cols]
            data.append([ele for ele in cols if ele])
        except:
            # two cases will break the above: games that haven't happened yet, and BR's redundant mid-table headers
            # if future games, grab the scheduling info. Otherwise do nothing.
            if len(cols)>1:
                cols = [ele.text.strip() for ele in cols][0:5]
                data.append([ele for ele in cols if ele])
    #convert to pandas dataframe. make first row the table's column names and reindex.
    data = pd.DataFrame(data)
    data = data.rename(columns=data.iloc[0])
    data = data.reindex(data.index.drop(0))
    return data

def schedule_and_record(season=None, team=None):
    # retrieve html from baseball reference
    soup = get_soup(season, team)
    table = get_table(soup)
    return table
30d0ca9fa2c76463569362eb0f640dbbe0079068
buildGame.py
buildGame.py
#!/usr/bin/env python import fnmatch import os from subprocess import call rootPath = 'ludumdare26' pattern = '*.coffee' for root, dirs, files in os.walk(rootPath): for filename in fnmatch.filter(files, pattern): print( os.path.join(root, filename)) call( [ 'coffee', '-c', os.path.join(root, filename) ] )
Add builder to compile coffeescript to javascript files
Add builder to compile coffeescript to javascript files
Python
apache-2.0
ZzCalvinzZ/ludumdare26,ZzCalvinzZ/ludumdare26
Add builder to compile coffeescript to javascript files
#!/usr/bin/env python import fnmatch import os from subprocess import call rootPath = 'ludumdare26' pattern = '*.coffee' for root, dirs, files in os.walk(rootPath): for filename in fnmatch.filter(files, pattern): print( os.path.join(root, filename)) call( [ 'coffee', '-c', os.path.join(root, filename) ] )
<commit_before><commit_msg>Add builder to compile coffeescript to javascript files<commit_after>
#!/usr/bin/env python import fnmatch import os from subprocess import call rootPath = 'ludumdare26' pattern = '*.coffee' for root, dirs, files in os.walk(rootPath): for filename in fnmatch.filter(files, pattern): print( os.path.join(root, filename)) call( [ 'coffee', '-c', os.path.join(root, filename) ] )
Add builder to compile coffeescript to javascript files#!/usr/bin/env python import fnmatch import os from subprocess import call rootPath = 'ludumdare26' pattern = '*.coffee' for root, dirs, files in os.walk(rootPath): for filename in fnmatch.filter(files, pattern): print( os.path.join(root, filename)) call( [ 'coffee', '-c', os.path.join(root, filename) ] )
<commit_before><commit_msg>Add builder to compile coffeescript to javascript files<commit_after>#!/usr/bin/env python import fnmatch import os from subprocess import call rootPath = 'ludumdare26' pattern = '*.coffee' for root, dirs, files in os.walk(rootPath): for filename in fnmatch.filter(files, pattern): print( os.path.join(root, filename)) call( [ 'coffee', '-c', os.path.join(root, filename) ] )
df03481fd9b52e17bc637dacefd15bead4f07f23
project/creditor/management/commands/update_membershipfees.py
project/creditor/management/commands/update_membershipfees.py
# -*- coding: utf-8 -*- import datetime import dateutil.parser from creditor.models import RecurringTransaction, TransactionTag from creditor.tests.fixtures.recurring import MembershipfeeFactory from django.core.management.base import BaseCommand, CommandError from members.models import Member class Command(BaseCommand): help = 'Update membership fee RecurringTransactions' def add_arguments(self, parser): parser.add_argument('oldamount', type=int) parser.add_argument('cutoffdate', type=str) parser.add_argument('newamount', type=int) def handle(self, *args, **options): cutoff_dt = dateutil.parser.parse(options['cutoffdate']) end_dt = cutoff_dt - datetime.timedelta(minutes=1) tgt_tag = TransactionTag.objects.get(label='Membership fee', tmatch='1') for rt in RecurringTransaction.objects.filter( rtype=RecurringTransaction.YEARLY, tag=tgt_tag, end=None, start__lt=cutoff_dt, amount=options['oldamount'] ): rt.end = end_dt rt.save() newrt = MembershipfeeFactory.create(amount=options['newamount'], start=cutoff_dt, end=None, owner=rt.owner) if options['verbosity'] > 0: print("Generated RecurringTransaction %s" % newrt)
Add initial version of membership fee updater
Add initial version of membership fee updater
Python
mit
HelsinkiHacklab/asylum,HelsinkiHacklab/asylum,HelsinkiHacklab/asylum,HelsinkiHacklab/asylum
Add initial version of membership fee updater
# -*- coding: utf-8 -*- import datetime import dateutil.parser from creditor.models import RecurringTransaction, TransactionTag from creditor.tests.fixtures.recurring import MembershipfeeFactory from django.core.management.base import BaseCommand, CommandError from members.models import Member class Command(BaseCommand): help = 'Update membership fee RecurringTransactions' def add_arguments(self, parser): parser.add_argument('oldamount', type=int) parser.add_argument('cutoffdate', type=str) parser.add_argument('newamount', type=int) def handle(self, *args, **options): cutoff_dt = dateutil.parser.parse(options['cutoffdate']) end_dt = cutoff_dt - datetime.timedelta(minutes=1) tgt_tag = TransactionTag.objects.get(label='Membership fee', tmatch='1') for rt in RecurringTransaction.objects.filter( rtype=RecurringTransaction.YEARLY, tag=tgt_tag, end=None, start__lt=cutoff_dt, amount=options['oldamount'] ): rt.end = end_dt rt.save() newrt = MembershipfeeFactory.create(amount=options['newamount'], start=cutoff_dt, end=None, owner=rt.owner) if options['verbosity'] > 0: print("Generated RecurringTransaction %s" % newrt)
<commit_before><commit_msg>Add initial version of membership fee updater<commit_after>
# -*- coding: utf-8 -*- import datetime import dateutil.parser from creditor.models import RecurringTransaction, TransactionTag from creditor.tests.fixtures.recurring import MembershipfeeFactory from django.core.management.base import BaseCommand, CommandError from members.models import Member class Command(BaseCommand): help = 'Update membership fee RecurringTransactions' def add_arguments(self, parser): parser.add_argument('oldamount', type=int) parser.add_argument('cutoffdate', type=str) parser.add_argument('newamount', type=int) def handle(self, *args, **options): cutoff_dt = dateutil.parser.parse(options['cutoffdate']) end_dt = cutoff_dt - datetime.timedelta(minutes=1) tgt_tag = TransactionTag.objects.get(label='Membership fee', tmatch='1') for rt in RecurringTransaction.objects.filter( rtype=RecurringTransaction.YEARLY, tag=tgt_tag, end=None, start__lt=cutoff_dt, amount=options['oldamount'] ): rt.end = end_dt rt.save() newrt = MembershipfeeFactory.create(amount=options['newamount'], start=cutoff_dt, end=None, owner=rt.owner) if options['verbosity'] > 0: print("Generated RecurringTransaction %s" % newrt)
Add initial version of membership fee updater# -*- coding: utf-8 -*- import datetime import dateutil.parser from creditor.models import RecurringTransaction, TransactionTag from creditor.tests.fixtures.recurring import MembershipfeeFactory from django.core.management.base import BaseCommand, CommandError from members.models import Member class Command(BaseCommand): help = 'Update membership fee RecurringTransactions' def add_arguments(self, parser): parser.add_argument('oldamount', type=int) parser.add_argument('cutoffdate', type=str) parser.add_argument('newamount', type=int) def handle(self, *args, **options): cutoff_dt = dateutil.parser.parse(options['cutoffdate']) end_dt = cutoff_dt - datetime.timedelta(minutes=1) tgt_tag = TransactionTag.objects.get(label='Membership fee', tmatch='1') for rt in RecurringTransaction.objects.filter( rtype=RecurringTransaction.YEARLY, tag=tgt_tag, end=None, start__lt=cutoff_dt, amount=options['oldamount'] ): rt.end = end_dt rt.save() newrt = MembershipfeeFactory.create(amount=options['newamount'], start=cutoff_dt, end=None, owner=rt.owner) if options['verbosity'] > 0: print("Generated RecurringTransaction %s" % newrt)
<commit_before><commit_msg>Add initial version of membership fee updater<commit_after># -*- coding: utf-8 -*- import datetime import dateutil.parser from creditor.models import RecurringTransaction, TransactionTag from creditor.tests.fixtures.recurring import MembershipfeeFactory from django.core.management.base import BaseCommand, CommandError from members.models import Member class Command(BaseCommand): help = 'Update membership fee RecurringTransactions' def add_arguments(self, parser): parser.add_argument('oldamount', type=int) parser.add_argument('cutoffdate', type=str) parser.add_argument('newamount', type=int) def handle(self, *args, **options): cutoff_dt = dateutil.parser.parse(options['cutoffdate']) end_dt = cutoff_dt - datetime.timedelta(minutes=1) tgt_tag = TransactionTag.objects.get(label='Membership fee', tmatch='1') for rt in RecurringTransaction.objects.filter( rtype=RecurringTransaction.YEARLY, tag=tgt_tag, end=None, start__lt=cutoff_dt, amount=options['oldamount'] ): rt.end = end_dt rt.save() newrt = MembershipfeeFactory.create(amount=options['newamount'], start=cutoff_dt, end=None, owner=rt.owner) if options['verbosity'] > 0: print("Generated RecurringTransaction %s" % newrt)
4e8ff1f7e524d8dc843816c714e86d11e21a8562
uploadtoCDB.py
uploadtoCDB.py
#uploadtoCDB.py #Written By: Alejandro Morejon Cortina (Apr 2016) #usage:python uploadtoCDB.py <username> <filecontainingkey.txt> <upload.csv> import sys import requests import csv import json import time #sys.argv[1] is the your cartodb user name #sys.argv[2] is the text file containing your api key #sys.argv[3] is the csv file to be uploaded if __name__ == "__main__": with open(sys.argv[2],'r') as fi: key = fi.readline().strip('\n') cdbusername = sys.argv[1] #import url to cartodb account importurl = "https://" + cdbusername +".cartodb.com/api/v1/imports/?api_key="+key f = open(sys.argv[3],"rb") #request to upload file to cartodb r = requests.post(importurl,files={'file': f}) print r.text f.close() response = json.loads(r.text) checkimporturl = "https://"+ cdbusername +".cartodb.com/api/v1/imports/" status = requests.get(checkimporturl + response["item_queue_id"] + "?api_key=" + key) #wait for upload to finish while not json.loads(status.text)["state"] in ["complete","failure"]: status = requests.get(checkimporturl + response["item_queue_id"] + "?api_key=" + key) time.sleep(1)
Add script to upload files to carto db
Add script to upload files to carto db
Python
mit
alejandro-mc/BDM-DDD,alejandro-mc/BDM-DDD
Add script to upload files to carto db
#uploadtoCDB.py #Written By: Alejandro Morejon Cortina (Apr 2016) #usage:python uploadtoCDB.py <username> <filecontainingkey.txt> <upload.csv> import sys import requests import csv import json import time #sys.argv[1] is the your cartodb user name #sys.argv[2] is the text file containing your api key #sys.argv[3] is the csv file to be uploaded if __name__ == "__main__": with open(sys.argv[2],'r') as fi: key = fi.readline().strip('\n') cdbusername = sys.argv[1] #import url to cartodb account importurl = "https://" + cdbusername +".cartodb.com/api/v1/imports/?api_key="+key f = open(sys.argv[3],"rb") #request to upload file to cartodb r = requests.post(importurl,files={'file': f}) print r.text f.close() response = json.loads(r.text) checkimporturl = "https://"+ cdbusername +".cartodb.com/api/v1/imports/" status = requests.get(checkimporturl + response["item_queue_id"] + "?api_key=" + key) #wait for upload to finish while not json.loads(status.text)["state"] in ["complete","failure"]: status = requests.get(checkimporturl + response["item_queue_id"] + "?api_key=" + key) time.sleep(1)
<commit_before><commit_msg>Add script to upload files to carto db<commit_after>
#uploadtoCDB.py #Written By: Alejandro Morejon Cortina (Apr 2016) #usage:python uploadtoCDB.py <username> <filecontainingkey.txt> <upload.csv> import sys import requests import csv import json import time #sys.argv[1] is the your cartodb user name #sys.argv[2] is the text file containing your api key #sys.argv[3] is the csv file to be uploaded if __name__ == "__main__": with open(sys.argv[2],'r') as fi: key = fi.readline().strip('\n') cdbusername = sys.argv[1] #import url to cartodb account importurl = "https://" + cdbusername +".cartodb.com/api/v1/imports/?api_key="+key f = open(sys.argv[3],"rb") #request to upload file to cartodb r = requests.post(importurl,files={'file': f}) print r.text f.close() response = json.loads(r.text) checkimporturl = "https://"+ cdbusername +".cartodb.com/api/v1/imports/" status = requests.get(checkimporturl + response["item_queue_id"] + "?api_key=" + key) #wait for upload to finish while not json.loads(status.text)["state"] in ["complete","failure"]: status = requests.get(checkimporturl + response["item_queue_id"] + "?api_key=" + key) time.sleep(1)
Add script to upload files to carto db#uploadtoCDB.py #Written By: Alejandro Morejon Cortina (Apr 2016) #usage:python uploadtoCDB.py <username> <filecontainingkey.txt> <upload.csv> import sys import requests import csv import json import time #sys.argv[1] is the your cartodb user name #sys.argv[2] is the text file containing your api key #sys.argv[3] is the csv file to be uploaded if __name__ == "__main__": with open(sys.argv[2],'r') as fi: key = fi.readline().strip('\n') cdbusername = sys.argv[1] #import url to cartodb account importurl = "https://" + cdbusername +".cartodb.com/api/v1/imports/?api_key="+key f = open(sys.argv[3],"rb") #request to upload file to cartodb r = requests.post(importurl,files={'file': f}) print r.text f.close() response = json.loads(r.text) checkimporturl = "https://"+ cdbusername +".cartodb.com/api/v1/imports/" status = requests.get(checkimporturl + response["item_queue_id"] + "?api_key=" + key) #wait for upload to finish while not json.loads(status.text)["state"] in ["complete","failure"]: status = requests.get(checkimporturl + response["item_queue_id"] + "?api_key=" + key) time.sleep(1)
<commit_before><commit_msg>Add script to upload files to carto db<commit_after>#uploadtoCDB.py #Written By: Alejandro Morejon Cortina (Apr 2016) #usage:python uploadtoCDB.py <username> <filecontainingkey.txt> <upload.csv> import sys import requests import csv import json import time #sys.argv[1] is the your cartodb user name #sys.argv[2] is the text file containing your api key #sys.argv[3] is the csv file to be uploaded if __name__ == "__main__": with open(sys.argv[2],'r') as fi: key = fi.readline().strip('\n') cdbusername = sys.argv[1] #import url to cartodb account importurl = "https://" + cdbusername +".cartodb.com/api/v1/imports/?api_key="+key f = open(sys.argv[3],"rb") #request to upload file to cartodb r = requests.post(importurl,files={'file': f}) print r.text f.close() response = json.loads(r.text) checkimporturl = "https://"+ cdbusername +".cartodb.com/api/v1/imports/" status = requests.get(checkimporturl + response["item_queue_id"] + "?api_key=" + key) #wait for upload to finish while not json.loads(status.text)["state"] in ["complete","failure"]: status = requests.get(checkimporturl + response["item_queue_id"] + "?api_key=" + key) time.sleep(1)
8955c00cdf3715b0f6403e9d049c0e221f77f7ac
client.py
client.py
#!/usr/bin/env python
import chess
import chess.pgn
import requests
import random

game = chess.pgn.Game()
node = game
board = game.board()

while not board.is_game_over(claim_draw=True):
    move = random.choice(list(board.legal_moves))
    node = node.add_variation(move)
    board.push(move)

game.headers["Result"] = board.result(claim_draw=True)

print(game)
print()

res = requests.put("http://localhost:9000/", data=str(game))
print(res)
print(res.text)
Add helper script for testing
Add helper script for testing
Python
agpl-3.0
niklasf/lila-openingexplorer,niklasf/lila-openingexplorer
Add helper script for testing
#!/usr/bin/env python
import chess
import chess.pgn
import requests
import random

game = chess.pgn.Game()
node = game
board = game.board()

while not board.is_game_over(claim_draw=True):
    move = random.choice(list(board.legal_moves))
    node = node.add_variation(move)
    board.push(move)

game.headers["Result"] = board.result(claim_draw=True)

print(game)
print()

res = requests.put("http://localhost:9000/", data=str(game))
print(res)
print(res.text)
<commit_before><commit_msg>Add helper script for testing<commit_after>
#!/usr/bin/env python
import chess
import chess.pgn
import requests
import random

game = chess.pgn.Game()
node = game
board = game.board()

while not board.is_game_over(claim_draw=True):
    move = random.choice(list(board.legal_moves))
    node = node.add_variation(move)
    board.push(move)

game.headers["Result"] = board.result(claim_draw=True)

print(game)
print()

res = requests.put("http://localhost:9000/", data=str(game))
print(res)
print(res.text)
Add helper script for testing#!/usr/bin/env python
import chess
import chess.pgn
import requests
import random

game = chess.pgn.Game()
node = game
board = game.board()

while not board.is_game_over(claim_draw=True):
    move = random.choice(list(board.legal_moves))
    node = node.add_variation(move)
    board.push(move)

game.headers["Result"] = board.result(claim_draw=True)

print(game)
print()

res = requests.put("http://localhost:9000/", data=str(game))
print(res)
print(res.text)
<commit_before><commit_msg>Add helper script for testing<commit_after>#!/usr/bin/env python
import chess
import chess.pgn
import requests
import random

game = chess.pgn.Game()
node = game
board = game.board()

while not board.is_game_over(claim_draw=True):
    move = random.choice(list(board.legal_moves))
    node = node.add_variation(move)
    board.push(move)

game.headers["Result"] = board.result(claim_draw=True)

print(game)
print()

res = requests.put("http://localhost:9000/", data=str(game))
print(res)
print(res.text)
80a1cc839abc23a80b511c99e6a6c03b044eaf35
ext_download.py
ext_download.py
import argparse import glob import json from kvd_utils import download_json, get_session ext_url_template = 'https://vaalit.yle.fi/content/kv2017/{version}/electorates/{electorate}/municipalities/{municipality}/pollingDistricts/{district}/partyAndCandidateResults.json' def download_ext_data(version): sess = get_session() for muni_fn in glob.glob('data/{version}/*.json'.format(version=version)): with open(muni_fn) as infp: muni = json.load(infp) name = muni['calculationStatus']['name']['fi'] perc = float(muni['calculationStatus']['calculationStatusPercent']) if perc < 100: print('%s: %.2f%% less than 100%% percent, skipping' % (name, perc)) continue for district in muni['pollingDistricts']: url = ext_url_template.format( version=version, electorate=muni['calculationStatus']['edid'], municipality=muni['calculationStatus']['muid'], district=district['pdid'], ) output_file = 'ext_data/{version}/{name}/{district}.json'.format( version=version, name=name, district=district['name']['fi'].replace(' ', '_'), ) download_json(sess, url, output_file) if __name__ == '__main__': ap = argparse.ArgumentParser() ap.add_argument('version', type=int) args = ap.parse_args() download_ext_data(version=args.version)
Add downloader for per-polling-station data
Add downloader for per-polling-station data
Python
mit
akx/yle-kuntavaalit-2017-data
Add downloader for per-polling-station data
import argparse import glob import json from kvd_utils import download_json, get_session ext_url_template = 'https://vaalit.yle.fi/content/kv2017/{version}/electorates/{electorate}/municipalities/{municipality}/pollingDistricts/{district}/partyAndCandidateResults.json' def download_ext_data(version): sess = get_session() for muni_fn in glob.glob('data/{version}/*.json'.format(version=version)): with open(muni_fn) as infp: muni = json.load(infp) name = muni['calculationStatus']['name']['fi'] perc = float(muni['calculationStatus']['calculationStatusPercent']) if perc < 100: print('%s: %.2f%% less than 100%% percent, skipping' % (name, perc)) continue for district in muni['pollingDistricts']: url = ext_url_template.format( version=version, electorate=muni['calculationStatus']['edid'], municipality=muni['calculationStatus']['muid'], district=district['pdid'], ) output_file = 'ext_data/{version}/{name}/{district}.json'.format( version=version, name=name, district=district['name']['fi'].replace(' ', '_'), ) download_json(sess, url, output_file) if __name__ == '__main__': ap = argparse.ArgumentParser() ap.add_argument('version', type=int) args = ap.parse_args() download_ext_data(version=args.version)
<commit_before><commit_msg>Add downloader for per-polling-station data<commit_after>
import argparse import glob import json from kvd_utils import download_json, get_session ext_url_template = 'https://vaalit.yle.fi/content/kv2017/{version}/electorates/{electorate}/municipalities/{municipality}/pollingDistricts/{district}/partyAndCandidateResults.json' def download_ext_data(version): sess = get_session() for muni_fn in glob.glob('data/{version}/*.json'.format(version=version)): with open(muni_fn) as infp: muni = json.load(infp) name = muni['calculationStatus']['name']['fi'] perc = float(muni['calculationStatus']['calculationStatusPercent']) if perc < 100: print('%s: %.2f%% less than 100%% percent, skipping' % (name, perc)) continue for district in muni['pollingDistricts']: url = ext_url_template.format( version=version, electorate=muni['calculationStatus']['edid'], municipality=muni['calculationStatus']['muid'], district=district['pdid'], ) output_file = 'ext_data/{version}/{name}/{district}.json'.format( version=version, name=name, district=district['name']['fi'].replace(' ', '_'), ) download_json(sess, url, output_file) if __name__ == '__main__': ap = argparse.ArgumentParser() ap.add_argument('version', type=int) args = ap.parse_args() download_ext_data(version=args.version)
Add downloader for per-polling-station dataimport argparse import glob import json from kvd_utils import download_json, get_session ext_url_template = 'https://vaalit.yle.fi/content/kv2017/{version}/electorates/{electorate}/municipalities/{municipality}/pollingDistricts/{district}/partyAndCandidateResults.json' def download_ext_data(version): sess = get_session() for muni_fn in glob.glob('data/{version}/*.json'.format(version=version)): with open(muni_fn) as infp: muni = json.load(infp) name = muni['calculationStatus']['name']['fi'] perc = float(muni['calculationStatus']['calculationStatusPercent']) if perc < 100: print('%s: %.2f%% less than 100%% percent, skipping' % (name, perc)) continue for district in muni['pollingDistricts']: url = ext_url_template.format( version=version, electorate=muni['calculationStatus']['edid'], municipality=muni['calculationStatus']['muid'], district=district['pdid'], ) output_file = 'ext_data/{version}/{name}/{district}.json'.format( version=version, name=name, district=district['name']['fi'].replace(' ', '_'), ) download_json(sess, url, output_file) if __name__ == '__main__': ap = argparse.ArgumentParser() ap.add_argument('version', type=int) args = ap.parse_args() download_ext_data(version=args.version)
<commit_before><commit_msg>Add downloader for per-polling-station data<commit_after>import argparse import glob import json from kvd_utils import download_json, get_session ext_url_template = 'https://vaalit.yle.fi/content/kv2017/{version}/electorates/{electorate}/municipalities/{municipality}/pollingDistricts/{district}/partyAndCandidateResults.json' def download_ext_data(version): sess = get_session() for muni_fn in glob.glob('data/{version}/*.json'.format(version=version)): with open(muni_fn) as infp: muni = json.load(infp) name = muni['calculationStatus']['name']['fi'] perc = float(muni['calculationStatus']['calculationStatusPercent']) if perc < 100: print('%s: %.2f%% less than 100%% percent, skipping' % (name, perc)) continue for district in muni['pollingDistricts']: url = ext_url_template.format( version=version, electorate=muni['calculationStatus']['edid'], municipality=muni['calculationStatus']['muid'], district=district['pdid'], ) output_file = 'ext_data/{version}/{name}/{district}.json'.format( version=version, name=name, district=district['name']['fi'].replace(' ', '_'), ) download_json(sess, url, output_file) if __name__ == '__main__': ap = argparse.ArgumentParser() ap.add_argument('version', type=int) args = ap.parse_args() download_ext_data(version=args.version)
574b069363f74de35b75b6b28ca66976e6af45bb
corehq/apps/smsforms/management/commands/migrate_sms_sessions_to_sql.py
corehq/apps/smsforms/management/commands/migrate_sms_sessions_to_sql.py
import logging from django.core.management.base import BaseCommand from corehq.apps.smsforms.models import XFormsSession, sync_sql_session_from_couch_session, SQLXFormsSession from dimagi.utils.couch.database import iter_docs class Command(BaseCommand): args = "" help = "" def handle(self, *args, **options): db = XFormsSession.get_db() session_ids = [row['id'] for row in db.view("smsforms/sessions_by_touchforms_id")] errors = [] for session_doc in iter_docs(db, session_ids): try: couch_session = XFormsSession.wrap(session_doc) sync_sql_session_from_couch_session(couch_session) except Exception as e: logging.exception('problem migrating session {}: {}'.format(session_doc['_id'], e)) errors.append(session_doc['_id']) print 'migrated {} couch sessions. there are now {} in sql'.format( len(session_ids) - len(errors), SQLXFormsSession.objects.count() ) if errors: print 'errors: {}'.format(', '.join(errors))
import logging from django.core.management.base import BaseCommand from corehq.apps.smsforms.models import XFormsSession, sync_sql_session_from_couch_session, SQLXFormsSession from dimagi.utils.couch.database import iter_docs class Command(BaseCommand): args = "" help = "" def handle(self, *args, **options): db = XFormsSession.get_db() session_ids = [row['id'] for row in db.view("smsforms/sessions_by_touchforms_id")] errors = [] for session_doc in iter_docs(db, session_ids): try: # Handle the old touchforms session id convention where it was # always an int session_id = session_doc.get("session_id", None) if isinstance(session_id, int): session_doc["session_id"] = str(session_id) couch_session = XFormsSession.wrap(session_doc) sync_sql_session_from_couch_session(couch_session) except Exception as e: logging.exception('problem migrating session {}: {}'.format(session_doc['_id'], e)) errors.append(session_doc['_id']) print 'migrated {} couch sessions. there are now {} in sql'.format( len(session_ids) - len(errors), SQLXFormsSession.objects.count() ) if errors: print 'errors: {}'.format(', '.join(errors))
Fix situation where session id is an int
Fix situation where session id is an int
Python
bsd-3-clause
dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq
import logging from django.core.management.base import BaseCommand from corehq.apps.smsforms.models import XFormsSession, sync_sql_session_from_couch_session, SQLXFormsSession from dimagi.utils.couch.database import iter_docs class Command(BaseCommand): args = "" help = "" def handle(self, *args, **options): db = XFormsSession.get_db() session_ids = [row['id'] for row in db.view("smsforms/sessions_by_touchforms_id")] errors = [] for session_doc in iter_docs(db, session_ids): try: couch_session = XFormsSession.wrap(session_doc) sync_sql_session_from_couch_session(couch_session) except Exception as e: logging.exception('problem migrating session {}: {}'.format(session_doc['_id'], e)) errors.append(session_doc['_id']) print 'migrated {} couch sessions. there are now {} in sql'.format( len(session_ids) - len(errors), SQLXFormsSession.objects.count() ) if errors: print 'errors: {}'.format(', '.join(errors)) Fix situation where session id is an int
import logging from django.core.management.base import BaseCommand from corehq.apps.smsforms.models import XFormsSession, sync_sql_session_from_couch_session, SQLXFormsSession from dimagi.utils.couch.database import iter_docs class Command(BaseCommand): args = "" help = "" def handle(self, *args, **options): db = XFormsSession.get_db() session_ids = [row['id'] for row in db.view("smsforms/sessions_by_touchforms_id")] errors = [] for session_doc in iter_docs(db, session_ids): try: # Handle the old touchforms session id convention where it was # always an int session_id = session_doc.get("session_id", None) if isinstance(session_id, int): session_doc["session_id"] = str(session_id) couch_session = XFormsSession.wrap(session_doc) sync_sql_session_from_couch_session(couch_session) except Exception as e: logging.exception('problem migrating session {}: {}'.format(session_doc['_id'], e)) errors.append(session_doc['_id']) print 'migrated {} couch sessions. there are now {} in sql'.format( len(session_ids) - len(errors), SQLXFormsSession.objects.count() ) if errors: print 'errors: {}'.format(', '.join(errors))
<commit_before>import logging from django.core.management.base import BaseCommand from corehq.apps.smsforms.models import XFormsSession, sync_sql_session_from_couch_session, SQLXFormsSession from dimagi.utils.couch.database import iter_docs class Command(BaseCommand): args = "" help = "" def handle(self, *args, **options): db = XFormsSession.get_db() session_ids = [row['id'] for row in db.view("smsforms/sessions_by_touchforms_id")] errors = [] for session_doc in iter_docs(db, session_ids): try: couch_session = XFormsSession.wrap(session_doc) sync_sql_session_from_couch_session(couch_session) except Exception as e: logging.exception('problem migrating session {}: {}'.format(session_doc['_id'], e)) errors.append(session_doc['_id']) print 'migrated {} couch sessions. there are now {} in sql'.format( len(session_ids) - len(errors), SQLXFormsSession.objects.count() ) if errors: print 'errors: {}'.format(', '.join(errors)) <commit_msg>Fix situation where session id is an int<commit_after>
import logging from django.core.management.base import BaseCommand from corehq.apps.smsforms.models import XFormsSession, sync_sql_session_from_couch_session, SQLXFormsSession from dimagi.utils.couch.database import iter_docs class Command(BaseCommand): args = "" help = "" def handle(self, *args, **options): db = XFormsSession.get_db() session_ids = [row['id'] for row in db.view("smsforms/sessions_by_touchforms_id")] errors = [] for session_doc in iter_docs(db, session_ids): try: # Handle the old touchforms session id convention where it was # always an int session_id = session_doc.get("session_id", None) if isinstance(session_id, int): session_doc["session_id"] = str(session_id) couch_session = XFormsSession.wrap(session_doc) sync_sql_session_from_couch_session(couch_session) except Exception as e: logging.exception('problem migrating session {}: {}'.format(session_doc['_id'], e)) errors.append(session_doc['_id']) print 'migrated {} couch sessions. there are now {} in sql'.format( len(session_ids) - len(errors), SQLXFormsSession.objects.count() ) if errors: print 'errors: {}'.format(', '.join(errors))
import logging from django.core.management.base import BaseCommand from corehq.apps.smsforms.models import XFormsSession, sync_sql_session_from_couch_session, SQLXFormsSession from dimagi.utils.couch.database import iter_docs class Command(BaseCommand): args = "" help = "" def handle(self, *args, **options): db = XFormsSession.get_db() session_ids = [row['id'] for row in db.view("smsforms/sessions_by_touchforms_id")] errors = [] for session_doc in iter_docs(db, session_ids): try: couch_session = XFormsSession.wrap(session_doc) sync_sql_session_from_couch_session(couch_session) except Exception as e: logging.exception('problem migrating session {}: {}'.format(session_doc['_id'], e)) errors.append(session_doc['_id']) print 'migrated {} couch sessions. there are now {} in sql'.format( len(session_ids) - len(errors), SQLXFormsSession.objects.count() ) if errors: print 'errors: {}'.format(', '.join(errors)) Fix situation where session id is an intimport logging from django.core.management.base import BaseCommand from corehq.apps.smsforms.models import XFormsSession, sync_sql_session_from_couch_session, SQLXFormsSession from dimagi.utils.couch.database import iter_docs class Command(BaseCommand): args = "" help = "" def handle(self, *args, **options): db = XFormsSession.get_db() session_ids = [row['id'] for row in db.view("smsforms/sessions_by_touchforms_id")] errors = [] for session_doc in iter_docs(db, session_ids): try: # Handle the old touchforms session id convention where it was # always an int session_id = session_doc.get("session_id", None) if isinstance(session_id, int): session_doc["session_id"] = str(session_id) couch_session = XFormsSession.wrap(session_doc) sync_sql_session_from_couch_session(couch_session) except Exception as e: logging.exception('problem migrating session {}: {}'.format(session_doc['_id'], e)) errors.append(session_doc['_id']) print 'migrated {} couch sessions. there are now {} in sql'.format( len(session_ids) - len(errors), SQLXFormsSession.objects.count() ) if errors: print 'errors: {}'.format(', '.join(errors))
<commit_before>import logging from django.core.management.base import BaseCommand from corehq.apps.smsforms.models import XFormsSession, sync_sql_session_from_couch_session, SQLXFormsSession from dimagi.utils.couch.database import iter_docs class Command(BaseCommand): args = "" help = "" def handle(self, *args, **options): db = XFormsSession.get_db() session_ids = [row['id'] for row in db.view("smsforms/sessions_by_touchforms_id")] errors = [] for session_doc in iter_docs(db, session_ids): try: couch_session = XFormsSession.wrap(session_doc) sync_sql_session_from_couch_session(couch_session) except Exception as e: logging.exception('problem migrating session {}: {}'.format(session_doc['_id'], e)) errors.append(session_doc['_id']) print 'migrated {} couch sessions. there are now {} in sql'.format( len(session_ids) - len(errors), SQLXFormsSession.objects.count() ) if errors: print 'errors: {}'.format(', '.join(errors)) <commit_msg>Fix situation where session id is an int<commit_after>import logging from django.core.management.base import BaseCommand from corehq.apps.smsforms.models import XFormsSession, sync_sql_session_from_couch_session, SQLXFormsSession from dimagi.utils.couch.database import iter_docs class Command(BaseCommand): args = "" help = "" def handle(self, *args, **options): db = XFormsSession.get_db() session_ids = [row['id'] for row in db.view("smsforms/sessions_by_touchforms_id")] errors = [] for session_doc in iter_docs(db, session_ids): try: # Handle the old touchforms session id convention where it was # always an int session_id = session_doc.get("session_id", None) if isinstance(session_id, int): session_doc["session_id"] = str(session_id) couch_session = XFormsSession.wrap(session_doc) sync_sql_session_from_couch_session(couch_session) except Exception as e: logging.exception('problem migrating session {}: {}'.format(session_doc['_id'], e)) errors.append(session_doc['_id']) print 'migrated {} couch sessions. there are now {} in sql'.format( len(session_ids) - len(errors), SQLXFormsSession.objects.count() ) if errors: print 'errors: {}'.format(', '.join(errors))
23f626ddaabfa799da48ee35c29db05f95f8a732
polling_stations/apps/data_collection/management/commands/import_rct.py
polling_stations/apps/data_collection/management/commands/import_rct.py
""" Import Rhondda Cynon Taf note: this script takes quite a long time to run """ from time import sleep from django.contrib.gis.geos import Point from data_collection.management.commands import BaseAddressCsvImporter from data_finder.helpers import geocode from data_collection.google_geocoding_api_wrapper import ( GoogleGeocodingApiWrapper, PostcodeNotFoundException ) class Command(BaseAddressCsvImporter): """ Imports the Polling Station data from Rhondda Cynon Taf """ council_id = 'W06000016' addresses_name = 'PROPERTYLISTINGFORDEMOCRACYCLUB.csv' stations_name = 'POLLINGSTATIONS8MARCH2016.csv' def station_record_to_dict(self, record): # format address address = "\n".join([ record.address1, record.address2, record.address3, record.address4, record.address5 ]) while "\n\n" in address: address = address.replace("\n\n", "\n") # remove trailing "\n" if present if address[-1:] == '\n': address = address[:-1] # attempt to attach postcode if missing postcode = record.postcode if not postcode: gwrapper = GoogleGeocodingApiWrapper(address, self.council_id, 'UTA') try: postcode = gwrapper.address_to_postcode() except PostcodeNotFoundException: postcode = '' """ No grid references were supplied, so attempt to derive a grid ref from postcode if we have that """ sleep(1.3) # ensure we don't hit mapit's usage limit if postcode: try: gridref = geocode(postcode) location = Point(gridref['wgs84_lon'], gridref['wgs84_lat'], srid=4326) except KeyError: location = None else: location = None return { 'internal_council_id': record.polling_district, 'postcode' : postcode, 'address' : address, 'location' : location } def address_record_to_dict(self, record): # format address address = ", ".join([ record.address1, record.address2, record.address3, record.address4, record.address5, record.address6, ]) while ", , " in address: address = address.replace(", , ", ", ") # remove trailing ", " if present if address[-2:] == ', ': address = address[:-2] return { 'address' : address, 'postcode' : record.postcode, 'polling_station_id': record.district }
Add import script for Rhondda Cynon Taff
Add import script for Rhondda Cynon Taff
Python
bsd-3-clause
andylolz/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,chris48s/UK-Polling-Stations,DemocracyClub/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations,andylolz/UK-Polling-Stations,chris48s/UK-Polling-Stations
Add import script for Rhondda Cynon Taff
""" Import Rhondda Cynon Taf note: this script takes quite a long time to run """ from time import sleep from django.contrib.gis.geos import Point from data_collection.management.commands import BaseAddressCsvImporter from data_finder.helpers import geocode from data_collection.google_geocoding_api_wrapper import ( GoogleGeocodingApiWrapper, PostcodeNotFoundException ) class Command(BaseAddressCsvImporter): """ Imports the Polling Station data from Rhondda Cynon Taf """ council_id = 'W06000016' addresses_name = 'PROPERTYLISTINGFORDEMOCRACYCLUB.csv' stations_name = 'POLLINGSTATIONS8MARCH2016.csv' def station_record_to_dict(self, record): # format address address = "\n".join([ record.address1, record.address2, record.address3, record.address4, record.address5 ]) while "\n\n" in address: address = address.replace("\n\n", "\n") # remove trailing "\n" if present if address[-1:] == '\n': address = address[:-1] # attempt to attach postcode if missing postcode = record.postcode if not postcode: gwrapper = GoogleGeocodingApiWrapper(address, self.council_id, 'UTA') try: postcode = gwrapper.address_to_postcode() except PostcodeNotFoundException: postcode = '' """ No grid references were supplied, so attempt to derive a grid ref from postcode if we have that """ sleep(1.3) # ensure we don't hit mapit's usage limit if postcode: try: gridref = geocode(postcode) location = Point(gridref['wgs84_lon'], gridref['wgs84_lat'], srid=4326) except KeyError: location = None else: location = None return { 'internal_council_id': record.polling_district, 'postcode' : postcode, 'address' : address, 'location' : location } def address_record_to_dict(self, record): # format address address = ", ".join([ record.address1, record.address2, record.address3, record.address4, record.address5, record.address6, ]) while ", , " in address: address = address.replace(", , ", ", ") # remove trailing ", " if present if address[-2:] == ', ': address = address[:-2] return { 'address' : address, 'postcode' : record.postcode, 'polling_station_id': record.district }
<commit_before><commit_msg>Add import script for Rhondda Cynon Taff<commit_after>
""" Import Rhondda Cynon Taf note: this script takes quite a long time to run """ from time import sleep from django.contrib.gis.geos import Point from data_collection.management.commands import BaseAddressCsvImporter from data_finder.helpers import geocode from data_collection.google_geocoding_api_wrapper import ( GoogleGeocodingApiWrapper, PostcodeNotFoundException ) class Command(BaseAddressCsvImporter): """ Imports the Polling Station data from Rhondda Cynon Taf """ council_id = 'W06000016' addresses_name = 'PROPERTYLISTINGFORDEMOCRACYCLUB.csv' stations_name = 'POLLINGSTATIONS8MARCH2016.csv' def station_record_to_dict(self, record): # format address address = "\n".join([ record.address1, record.address2, record.address3, record.address4, record.address5 ]) while "\n\n" in address: address = address.replace("\n\n", "\n") # remove trailing "\n" if present if address[-1:] == '\n': address = address[:-1] # attempt to attach postcode if missing postcode = record.postcode if not postcode: gwrapper = GoogleGeocodingApiWrapper(address, self.council_id, 'UTA') try: postcode = gwrapper.address_to_postcode() except PostcodeNotFoundException: postcode = '' """ No grid references were supplied, so attempt to derive a grid ref from postcode if we have that """ sleep(1.3) # ensure we don't hit mapit's usage limit if postcode: try: gridref = geocode(postcode) location = Point(gridref['wgs84_lon'], gridref['wgs84_lat'], srid=4326) except KeyError: location = None else: location = None return { 'internal_council_id': record.polling_district, 'postcode' : postcode, 'address' : address, 'location' : location } def address_record_to_dict(self, record): # format address address = ", ".join([ record.address1, record.address2, record.address3, record.address4, record.address5, record.address6, ]) while ", , " in address: address = address.replace(", , ", ", ") # remove trailing ", " if present if address[-2:] == ', ': address = address[:-2] return { 'address' : address, 'postcode' : record.postcode, 'polling_station_id': record.district }
Add import script for Rhondda Cynon Taff""" Import Rhondda Cynon Taf note: this script takes quite a long time to run """ from time import sleep from django.contrib.gis.geos import Point from data_collection.management.commands import BaseAddressCsvImporter from data_finder.helpers import geocode from data_collection.google_geocoding_api_wrapper import ( GoogleGeocodingApiWrapper, PostcodeNotFoundException ) class Command(BaseAddressCsvImporter): """ Imports the Polling Station data from Rhondda Cynon Taf """ council_id = 'W06000016' addresses_name = 'PROPERTYLISTINGFORDEMOCRACYCLUB.csv' stations_name = 'POLLINGSTATIONS8MARCH2016.csv' def station_record_to_dict(self, record): # format address address = "\n".join([ record.address1, record.address2, record.address3, record.address4, record.address5 ]) while "\n\n" in address: address = address.replace("\n\n", "\n") # remove trailing "\n" if present if address[-1:] == '\n': address = address[:-1] # attempt to attach postcode if missing postcode = record.postcode if not postcode: gwrapper = GoogleGeocodingApiWrapper(address, self.council_id, 'UTA') try: postcode = gwrapper.address_to_postcode() except PostcodeNotFoundException: postcode = '' """ No grid references were supplied, so attempt to derive a grid ref from postcode if we have that """ sleep(1.3) # ensure we don't hit mapit's usage limit if postcode: try: gridref = geocode(postcode) location = Point(gridref['wgs84_lon'], gridref['wgs84_lat'], srid=4326) except KeyError: location = None else: location = None return { 'internal_council_id': record.polling_district, 'postcode' : postcode, 'address' : address, 'location' : location } def address_record_to_dict(self, record): # format address address = ", ".join([ record.address1, record.address2, record.address3, record.address4, record.address5, record.address6, ]) while ", , " in address: address = address.replace(", , ", ", ") # remove trailing ", " if present if address[-2:] == ', ': address = address[:-2] return { 'address' : address, 'postcode' : record.postcode, 'polling_station_id': record.district }
<commit_before><commit_msg>Add import script for Rhondda Cynon Taff<commit_after>""" Import Rhondda Cynon Taf note: this script takes quite a long time to run """ from time import sleep from django.contrib.gis.geos import Point from data_collection.management.commands import BaseAddressCsvImporter from data_finder.helpers import geocode from data_collection.google_geocoding_api_wrapper import ( GoogleGeocodingApiWrapper, PostcodeNotFoundException ) class Command(BaseAddressCsvImporter): """ Imports the Polling Station data from Rhondda Cynon Taf """ council_id = 'W06000016' addresses_name = 'PROPERTYLISTINGFORDEMOCRACYCLUB.csv' stations_name = 'POLLINGSTATIONS8MARCH2016.csv' def station_record_to_dict(self, record): # format address address = "\n".join([ record.address1, record.address2, record.address3, record.address4, record.address5 ]) while "\n\n" in address: address = address.replace("\n\n", "\n") # remove trailing "\n" if present if address[-1:] == '\n': address = address[:-1] # attempt to attach postcode if missing postcode = record.postcode if not postcode: gwrapper = GoogleGeocodingApiWrapper(address, self.council_id, 'UTA') try: postcode = gwrapper.address_to_postcode() except PostcodeNotFoundException: postcode = '' """ No grid references were supplied, so attempt to derive a grid ref from postcode if we have that """ sleep(1.3) # ensure we don't hit mapit's usage limit if postcode: try: gridref = geocode(postcode) location = Point(gridref['wgs84_lon'], gridref['wgs84_lat'], srid=4326) except KeyError: location = None else: location = None return { 'internal_council_id': record.polling_district, 'postcode' : postcode, 'address' : address, 'location' : location } def address_record_to_dict(self, record): # format address address = ", ".join([ record.address1, record.address2, record.address3, record.address4, record.address5, record.address6, ]) while ", , " in address: address = address.replace(", , ", ", ") # remove trailing ", " if present if address[-2:] == ', ': address = address[:-2] return { 'address' : address, 'postcode' : record.postcode, 'polling_station_id': record.district }
6a47701ea874e657475542809ac0f9320063cb9b
releasezip.py
releasezip.py
import os
import sys
import zipfile

def zipdir(path, ziph):
    # ziph is zipfile handle
    for root, dirs, files in os.walk(path):
        for file in files:
            ziph.write(os.path.join(root, file))

def main(version):
    release = zipfile.ZipFile('release-{}.zip'.format(version), 'w')
    zipdir('python', release)
    zipdir('idc', release)
    zipdir('notebook', release)
    release.write('build/release/ida_ipython.p64', 'plugins/ida_ipython.p64')
    release.write('build/release/ida_ipython.plw', 'plugins/ida_ipython.plw')
    release.write('README.md')
    release.close()

if __name__ == "__main__":
    if len(sys.argv) > 1:
        main(sys.argv[1])
    else:
        print("No release name provided")
Add script to help create release zip
Add script to help create release zip
Python
mit
james91b/ida_ipython,james91b/ida_ipython,tmr232/ida_ipython,james91b/ida_ipython
Add script to help create release zip
import os
import sys
import zipfile

def zipdir(path, ziph):
    # ziph is zipfile handle
    for root, dirs, files in os.walk(path):
        for file in files:
            ziph.write(os.path.join(root, file))

def main(version):
    release = zipfile.ZipFile('release-{}.zip'.format(version), 'w')
    zipdir('python', release)
    zipdir('idc', release)
    zipdir('notebook', release)
    release.write('build/release/ida_ipython.p64', 'plugins/ida_ipython.p64')
    release.write('build/release/ida_ipython.plw', 'plugins/ida_ipython.plw')
    release.write('README.md')
    release.close()

if __name__ == "__main__":
    if len(sys.argv) > 1:
        main(sys.argv[1])
    else:
        print("No release name provided")
<commit_before><commit_msg>Add script to help create release zip<commit_after>
import os
import sys
import zipfile

def zipdir(path, ziph):
    # ziph is zipfile handle
    for root, dirs, files in os.walk(path):
        for file in files:
            ziph.write(os.path.join(root, file))

def main(version):
    release = zipfile.ZipFile('release-{}.zip'.format(version), 'w')
    zipdir('python', release)
    zipdir('idc', release)
    zipdir('notebook', release)
    release.write('build/release/ida_ipython.p64', 'plugins/ida_ipython.p64')
    release.write('build/release/ida_ipython.plw', 'plugins/ida_ipython.plw')
    release.write('README.md')
    release.close()

if __name__ == "__main__":
    if len(sys.argv) > 1:
        main(sys.argv[1])
    else:
        print("No release name provided")
Add script to help create release zipimport os
import sys
import zipfile

def zipdir(path, ziph):
    # ziph is zipfile handle
    for root, dirs, files in os.walk(path):
        for file in files:
            ziph.write(os.path.join(root, file))

def main(version):
    release = zipfile.ZipFile('release-{}.zip'.format(version), 'w')
    zipdir('python', release)
    zipdir('idc', release)
    zipdir('notebook', release)
    release.write('build/release/ida_ipython.p64', 'plugins/ida_ipython.p64')
    release.write('build/release/ida_ipython.plw', 'plugins/ida_ipython.plw')
    release.write('README.md')
    release.close()

if __name__ == "__main__":
    if len(sys.argv) > 1:
        main(sys.argv[1])
    else:
        print("No release name provided")
<commit_before><commit_msg>Add script to help create release zip<commit_after>import os
import sys
import zipfile

def zipdir(path, ziph):
    # ziph is zipfile handle
    for root, dirs, files in os.walk(path):
        for file in files:
            ziph.write(os.path.join(root, file))

def main(version):
    release = zipfile.ZipFile('release-{}.zip'.format(version), 'w')
    zipdir('python', release)
    zipdir('idc', release)
    zipdir('notebook', release)
    release.write('build/release/ida_ipython.p64', 'plugins/ida_ipython.p64')
    release.write('build/release/ida_ipython.plw', 'plugins/ida_ipython.plw')
    release.write('README.md')
    release.close()

if __name__ == "__main__":
    if len(sys.argv) > 1:
        main(sys.argv[1])
    else:
        print("No release name provided")
2dfc4fcc61c0f9d00860168d44da5e03db8e61eb
photobox/cheesefolder.py
photobox/cheesefolder.py
import random


class Cheesefolder():

    def __init__(self, folder):
        self.directory = folder

    def getrandomphoto(self):
        files = self.directory.getfiles_fullpath()
        filecount = len(files)
        randomindex = random.randint(0, filecount - 1)
        return files[randomindex]
Add a class to get a random photo of a folder
Add a class to get a random photo of a folder
Python
mit
MarkusAmshove/Photobox
Add a class to get a random photo of a folder
import random


class Cheesefolder():

    def __init__(self, folder):
        self.directory = folder

    def getrandomphoto(self):
        files = self.directory.getfiles_fullpath()
        filecount = len(files)
        randomindex = random.randint(0, filecount - 1)
        return files[randomindex]
<commit_before><commit_msg>Add a class to get a random photo of a folder<commit_after>
import random


class Cheesefolder():

    def __init__(self, folder):
        self.directory = folder

    def getrandomphoto(self):
        files = self.directory.getfiles_fullpath()
        filecount = len(files)
        randomindex = random.randint(0, filecount - 1)
        return files[randomindex]
Add a class to get a random photo of a folderimport random


class Cheesefolder():

    def __init__(self, folder):
        self.directory = folder

    def getrandomphoto(self):
        files = self.directory.getfiles_fullpath()
        filecount = len(files)
        randomindex = random.randint(0, filecount - 1)
        return files[randomindex]
<commit_before><commit_msg>Add a class to get a random photo of a folder<commit_after>import random


class Cheesefolder():

    def __init__(self, folder):
        self.directory = folder

    def getrandomphoto(self):
        files = self.directory.getfiles_fullpath()
        filecount = len(files)
        randomindex = random.randint(0, filecount - 1)
        return files[randomindex]
0bef4682c6a81464fd6e72fddd6f0b5957f4d566
tools/data/calculate_paired_bbox_mean_std.py
tools/data/calculate_paired_bbox_mean_std.py
#!/usr/bin/env python import argparse import scipy.io as sio import sys import os.path as osp import numpy as np import cPickle this_dir = osp.dirname(__file__) sys.path.insert(0, osp.join(this_dir, '../../external/py-faster-rcnn/lib')) from fast_rcnn.bbox_transform import bbox_transform if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('paired_gt_file') parser.add_argument('save_mean_file') parser.add_argument('save_std_file') args = parser.parse_args() deltas = [] gts = sio.loadmat(args.paired_gt_file)['gt'] for gt1, gt2 in gts: if len(gt1) == 0: continue deltas.append(bbox_transform(gt1, gt2)) delta = np.vstack(deltas) mean = np.mean(delta, axis=0) std = np.std(delta, axis=0) with open(args.save_mean_file, 'wb') as f: cPickle.dump(mean, f, cPickle.HIGHEST_PROTOCOL) with open(args.save_std_file, 'wb') as f: cPickle.dump(std, f, cPickle.HIGHEST_PROTOCOL)
Add a script to calculate paired roi bbox mean and std.
Add a script to calculate paired roi bbox mean and std.
Python
mit
myfavouritekk/TPN
Add a script to calculate paired roi bbox mean and std.
#!/usr/bin/env python import argparse import scipy.io as sio import sys import os.path as osp import numpy as np import cPickle this_dir = osp.dirname(__file__) sys.path.insert(0, osp.join(this_dir, '../../external/py-faster-rcnn/lib')) from fast_rcnn.bbox_transform import bbox_transform if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('paired_gt_file') parser.add_argument('save_mean_file') parser.add_argument('save_std_file') args = parser.parse_args() deltas = [] gts = sio.loadmat(args.paired_gt_file)['gt'] for gt1, gt2 in gts: if len(gt1) == 0: continue deltas.append(bbox_transform(gt1, gt2)) delta = np.vstack(deltas) mean = np.mean(delta, axis=0) std = np.std(delta, axis=0) with open(args.save_mean_file, 'wb') as f: cPickle.dump(mean, f, cPickle.HIGHEST_PROTOCOL) with open(args.save_std_file, 'wb') as f: cPickle.dump(std, f, cPickle.HIGHEST_PROTOCOL)
<commit_before><commit_msg>Add a script to calculate paired roi bbox mean and std.<commit_after>
#!/usr/bin/env python import argparse import scipy.io as sio import sys import os.path as osp import numpy as np import cPickle this_dir = osp.dirname(__file__) sys.path.insert(0, osp.join(this_dir, '../../external/py-faster-rcnn/lib')) from fast_rcnn.bbox_transform import bbox_transform if __name__ == '__main__': parser = argparse.ArgumentParser() parser.add_argument('paired_gt_file') parser.add_argument('save_mean_file') parser.add_argument('save_std_file') args = parser.parse_args() deltas = [] gts = sio.loadmat(args.paired_gt_file)['gt'] for gt1, gt2 in gts: if len(gt1) == 0: continue deltas.append(bbox_transform(gt1, gt2)) delta = np.vstack(deltas) mean = np.mean(delta, axis=0) std = np.std(delta, axis=0) with open(args.save_mean_file, 'wb') as f: cPickle.dump(mean, f, cPickle.HIGHEST_PROTOCOL) with open(args.save_std_file, 'wb') as f: cPickle.dump(std, f, cPickle.HIGHEST_PROTOCOL)
Add a script to calculate paired roi bbox mean and std.#!/usr/bin/env python
import argparse
import scipy.io as sio
import sys
import os.path as osp
import numpy as np
import cPickle

this_dir = osp.dirname(__file__)
sys.path.insert(0, osp.join(this_dir, '../../external/py-faster-rcnn/lib'))
from fast_rcnn.bbox_transform import bbox_transform

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('paired_gt_file')
    parser.add_argument('save_mean_file')
    parser.add_argument('save_std_file')
    args = parser.parse_args()

    deltas = []
    gts = sio.loadmat(args.paired_gt_file)['gt']
    for gt1, gt2 in gts:
        if len(gt1) == 0:
            continue
        deltas.append(bbox_transform(gt1, gt2))
    delta = np.vstack(deltas)
    mean = np.mean(delta, axis=0)
    std = np.std(delta, axis=0)

    with open(args.save_mean_file, 'wb') as f:
        cPickle.dump(mean, f, cPickle.HIGHEST_PROTOCOL)
    with open(args.save_std_file, 'wb') as f:
        cPickle.dump(std, f, cPickle.HIGHEST_PROTOCOL)
<commit_before><commit_msg>Add a script to calculate paired roi bbox mean and std.<commit_after>#!/usr/bin/env python
import argparse
import scipy.io as sio
import sys
import os.path as osp
import numpy as np
import cPickle

this_dir = osp.dirname(__file__)
sys.path.insert(0, osp.join(this_dir, '../../external/py-faster-rcnn/lib'))
from fast_rcnn.bbox_transform import bbox_transform

if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('paired_gt_file')
    parser.add_argument('save_mean_file')
    parser.add_argument('save_std_file')
    args = parser.parse_args()

    deltas = []
    gts = sio.loadmat(args.paired_gt_file)['gt']
    for gt1, gt2 in gts:
        if len(gt1) == 0:
            continue
        deltas.append(bbox_transform(gt1, gt2))
    delta = np.vstack(deltas)
    mean = np.mean(delta, axis=0)
    std = np.std(delta, axis=0)

    with open(args.save_mean_file, 'wb') as f:
        cPickle.dump(mean, f, cPickle.HIGHEST_PROTOCOL)
    with open(args.save_std_file, 'wb') as f:
        cPickle.dump(std, f, cPickle.HIGHEST_PROTOCOL)
c6951374eba137614744928c18fa4d34de5c5d89
src/algorithms/sorting/dataset_generator.py
src/algorithms/sorting/dataset_generator.py
# Description: Script to Generate Data Sets For Sorting Algorithms

import random
import logging

# Global Configuration
TOTAL_ROWS = 10
SORTED = False  # Overrides REVERSE_SORTED and RANDOM_NUMBERS
REVERSE_SORTED = False  # Overrides RANDOM_NUMBERS
RANDOM_NUMBERS = True  # Least Precedence
WRITE_TO_FILE = False

def configure_logging(write_to_file_enabled):
    """Configure Logging Based on Global Configurations."""
    # Set logging level from DEBUG, INFO, WARNING, ERROR, CRITICAL
    level = logging.DEBUG

    # Do not print debug messages when writing to file is enabled.
    if (write_to_file_enabled):
        level = logging.INFO

    # Configure Log Level
    logging.basicConfig(level=level)

def generate_numbers():
    """Generate a list of numbers based on Global Configurations"""
    if SORTED:
        numbers = range(1, TOTAL_ROWS + 1)
    elif REVERSE_SORTED:
        numbers = range(TOTAL_ROWS, 0, -1)
    elif RANDOM_NUMBERS:
        numbers = range(1, TOTAL_ROWS + 1)
        random.shuffle(numbers)
        random.shuffle(numbers)
        random.shuffle(numbers)

    logging.debug(numbers)
    return numbers

def write_to_file(numbers, filename):
    """Write to file based on Global Configurations."""
    if WRITE_TO_FILE:
        logging.info('Writing data to file: {0}'.format(filename))
        with open(filename, 'w') as file_handle:
            for item in numbers:
                file_handle.write(str(item) + '\n')

def main():
    """Main function."""
    configure_logging(WRITE_TO_FILE)

    # Generate numbers based on configurations
    numbers = generate_numbers()

    # Write numbers to a file
    # Filename examples: dataset_10_reverse_sorted.txt, dataset_100_sorted.txt, dataset_1000_random.txt etc.
    filename = "dataset/dataset_{0}_{1}.txt".format(TOTAL_ROWS, 'sorted' if SORTED else 'reverse_sorted' if REVERSE_SORTED else 'random')
    write_to_file(numbers, filename)

# Call Main
main()
Add a script to generate data sets for testing sorting algorithms.
Add a script to generate data sets for testing sorting algorithms.
Python
mit
vikash-india/ProgrammingProblems,vikash-india/ProgrammingProblems
Add a script to generate data sets for testing sorting algorithms.
# Description: Script to Generate Data Sets For Sorting Algorithms

import random
import logging

# Global Configuration
TOTAL_ROWS = 10
SORTED = False  # Overrides REVERSE_SORTED and RANDOM_NUMBERS
REVERSE_SORTED = False  # Overrides RANDOM_NUMBERS
RANDOM_NUMBERS = True  # Least Precedence
WRITE_TO_FILE = False

def configure_logging(write_to_file_enabled):
    """Configure Logging Based on Global Configurations."""
    # Set logging level from DEBUG, INFO, WARNING, ERROR, CRITICAL
    level = logging.DEBUG

    # Do not print debug messages when writing to file is enabled.
    if (write_to_file_enabled):
        level = logging.INFO

    # Configure Log Level
    logging.basicConfig(level=level)

def generate_numbers():
    """Generate a list of numbers based on Global Configurations"""
    if SORTED:
        numbers = range(1, TOTAL_ROWS + 1)
    elif REVERSE_SORTED:
        numbers = range(TOTAL_ROWS, 0, -1)
    elif RANDOM_NUMBERS:
        numbers = range(1, TOTAL_ROWS + 1)
        random.shuffle(numbers)
        random.shuffle(numbers)
        random.shuffle(numbers)

    logging.debug(numbers)
    return numbers

def write_to_file(numbers, filename):
    """Write to file based on Global Configurations."""
    if WRITE_TO_FILE:
        logging.info('Writing data to file: {0}'.format(filename))
        with open(filename, 'w') as file_handle:
            for item in numbers:
                file_handle.write(str(item) + '\n')

def main():
    """Main function."""
    configure_logging(WRITE_TO_FILE)

    # Generate numbers based on configurations
    numbers = generate_numbers()

    # Write numbers to a file
    # Filename examples: dataset_10_reverse_sorted.txt, dataset_100_sorted.txt, dataset_1000_random.txt etc.
    filename = "dataset/dataset_{0}_{1}.txt".format(TOTAL_ROWS, 'sorted' if SORTED else 'reverse_sorted' if REVERSE_SORTED else 'random')
    write_to_file(numbers, filename)

# Call Main
main()
<commit_before><commit_msg>Add a script to generate data sets for testing sorting algorithms.<commit_after>
# Description: Script to Generate Data Sets For Sorting Algorithms

import random
import logging

# Global Configuration
TOTAL_ROWS = 10
SORTED = False  # Overrides REVERSE_SORTED and RANDOM_NUMBERS
REVERSE_SORTED = False  # Overrides RANDOM_NUMBERS
RANDOM_NUMBERS = True  # Least Precedence
WRITE_TO_FILE = False

def configure_logging(write_to_file_enabled):
    """Configure Logging Based on Global Configurations."""
    # Set logging level from DEBUG, INFO, WARNING, ERROR, CRITICAL
    level = logging.DEBUG

    # Do not print debug messages when writing to file is enabled.
    if (write_to_file_enabled):
        level = logging.INFO

    # Configure Log Level
    logging.basicConfig(level=level)

def generate_numbers():
    """Generate a list of numbers based on Global Configurations"""
    if SORTED:
        numbers = range(1, TOTAL_ROWS + 1)
    elif REVERSE_SORTED:
        numbers = range(TOTAL_ROWS, 0, -1)
    elif RANDOM_NUMBERS:
        numbers = range(1, TOTAL_ROWS + 1)
        random.shuffle(numbers)
        random.shuffle(numbers)
        random.shuffle(numbers)

    logging.debug(numbers)
    return numbers

def write_to_file(numbers, filename):
    """Write to file based on Global Configurations."""
    if WRITE_TO_FILE:
        logging.info('Writing data to file: {0}'.format(filename))
        with open(filename, 'w') as file_handle:
            for item in numbers:
                file_handle.write(str(item) + '\n')

def main():
    """Main function."""
    configure_logging(WRITE_TO_FILE)

    # Generate numbers based on configurations
    numbers = generate_numbers()

    # Write numbers to a file
    # Filename examples: dataset_10_reverse_sorted.txt, dataset_100_sorted.txt, dataset_1000_random.txt etc.
    filename = "dataset/dataset_{0}_{1}.txt".format(TOTAL_ROWS, 'sorted' if SORTED else 'reverse_sorted' if REVERSE_SORTED else 'random')
    write_to_file(numbers, filename)

# Call Main
main()
Add a script to generate data sets for testing sorting algorithms.# Description: Script to Generate Data Sets For Sorting Algorithms

import random
import logging

# Global Configuration
TOTAL_ROWS = 10
SORTED = False  # Overrides REVERSE_SORTED and RANDOM_NUMBERS
REVERSE_SORTED = False  # Overrides RANDOM_NUMBERS
RANDOM_NUMBERS = True  # Least Precedence
WRITE_TO_FILE = False

def configure_logging(write_to_file_enabled):
    """Configure Logging Based on Global Configurations."""
    # Set logging level from DEBUG, INFO, WARNING, ERROR, CRITICAL
    level = logging.DEBUG

    # Do not print debug messages when writing to file is enabled.
    if (write_to_file_enabled):
        level = logging.INFO

    # Configure Log Level
    logging.basicConfig(level=level)

def generate_numbers():
    """Generate a list of numbers based on Global Configurations"""
    if SORTED:
        numbers = range(1, TOTAL_ROWS + 1)
    elif REVERSE_SORTED:
        numbers = range(TOTAL_ROWS, 0, -1)
    elif RANDOM_NUMBERS:
        numbers = range(1, TOTAL_ROWS + 1)
        random.shuffle(numbers)
        random.shuffle(numbers)
        random.shuffle(numbers)

    logging.debug(numbers)
    return numbers

def write_to_file(numbers, filename):
    """Write to file based on Global Configurations."""
    if WRITE_TO_FILE:
        logging.info('Writing data to file: {0}'.format(filename))
        with open(filename, 'w') as file_handle:
            for item in numbers:
                file_handle.write(str(item) + '\n')

def main():
    """Main function."""
    configure_logging(WRITE_TO_FILE)

    # Generate numbers based on configurations
    numbers = generate_numbers()

    # Write numbers to a file
    # Filename examples: dataset_10_reverse_sorted.txt, dataset_100_sorted.txt, dataset_1000_random.txt etc.
    filename = "dataset/dataset_{0}_{1}.txt".format(TOTAL_ROWS, 'sorted' if SORTED else 'reverse_sorted' if REVERSE_SORTED else 'random')
    write_to_file(numbers, filename)

# Call Main
main()
<commit_before><commit_msg>Add a script to generate data sets for testing sorting algorithms.<commit_after># Description: Script to Generate Data Sets For Sorting Algorithms

import random
import logging

# Global Configuration
TOTAL_ROWS = 10
SORTED = False  # Overrides REVERSE_SORTED and RANDOM_NUMBERS
REVERSE_SORTED = False  # Overrides RANDOM_NUMBERS
RANDOM_NUMBERS = True  # Least Precedence
WRITE_TO_FILE = False

def configure_logging(write_to_file_enabled):
    """Configure Logging Based on Global Configurations."""
    # Set logging level from DEBUG, INFO, WARNING, ERROR, CRITICAL
    level = logging.DEBUG

    # Do not print debug messages when writing to file is enabled.
    if (write_to_file_enabled):
        level = logging.INFO

    # Configure Log Level
    logging.basicConfig(level=level)

def generate_numbers():
    """Generate a list of numbers based on Global Configurations"""
    if SORTED:
        numbers = range(1, TOTAL_ROWS + 1)
    elif REVERSE_SORTED:
        numbers = range(TOTAL_ROWS, 0, -1)
    elif RANDOM_NUMBERS:
        numbers = range(1, TOTAL_ROWS + 1)
        random.shuffle(numbers)
        random.shuffle(numbers)
        random.shuffle(numbers)

    logging.debug(numbers)
    return numbers

def write_to_file(numbers, filename):
    """Write to file based on Global Configurations."""
    if WRITE_TO_FILE:
        logging.info('Writing data to file: {0}'.format(filename))
        with open(filename, 'w') as file_handle:
            for item in numbers:
                file_handle.write(str(item) + '\n')

def main():
    """Main function."""
    configure_logging(WRITE_TO_FILE)

    # Generate numbers based on configurations
    numbers = generate_numbers()

    # Write numbers to a file
    # Filename examples: dataset_10_reverse_sorted.txt, dataset_100_sorted.txt, dataset_1000_random.txt etc.
    filename = "dataset/dataset_{0}_{1}.txt".format(TOTAL_ROWS, 'sorted' if SORTED else 'reverse_sorted' if REVERSE_SORTED else 'random')
    write_to_file(numbers, filename)

# Call Main
main()
45917087377adef01a4d4ce829013a7958a3afe5
test/pathtools_test.py
test/pathtools_test.py
import os import pytest import hetio.readwrite from hetio.pathtools import paths_between, DWPC directory = os.path.dirname(os.path.abspath(__file__)) def test_disease_gene_example_dwpc(): """ Test the DWPC computation from https://doi.org/10.1371/journal.pcbi.1004259.g002 """ path = os.path.join(directory, 'data', 'disease-gene-example-graph.json') graph = hetio.readwrite.read_graph(path) metagraph = graph.metagraph # Define traversal metapath = metagraph.metapath_from_abbrev('GiGaD') source_id = 'Gene', 'IRF1' target_id = 'Disease', 'Multiple Sclerosis' # Extract paths paths = paths_between(graph, source_id, target_id, metapath) assert len(paths) == 3 # Test degree-weighted path count dwpc = DWPC(paths, damping_exponent=0.5) assert dwpc == pytest.approx(0.25 + 0.25 + 32**-0.5)
Add PC / DWPC test
Add PC / DWPC test
Python
cc0-1.0
dhimmel/hetio
Add PC / DWPC test
import os import pytest import hetio.readwrite from hetio.pathtools import paths_between, DWPC directory = os.path.dirname(os.path.abspath(__file__)) def test_disease_gene_example_dwpc(): """ Test the DWPC computation from https://doi.org/10.1371/journal.pcbi.1004259.g002 """ path = os.path.join(directory, 'data', 'disease-gene-example-graph.json') graph = hetio.readwrite.read_graph(path) metagraph = graph.metagraph # Define traversal metapath = metagraph.metapath_from_abbrev('GiGaD') source_id = 'Gene', 'IRF1' target_id = 'Disease', 'Multiple Sclerosis' # Extract paths paths = paths_between(graph, source_id, target_id, metapath) assert len(paths) == 3 # Test degree-weighted path count dwpc = DWPC(paths, damping_exponent=0.5) assert dwpc == pytest.approx(0.25 + 0.25 + 32**-0.5)
<commit_before><commit_msg>Add PC / DWPC test<commit_after>
import os import pytest import hetio.readwrite from hetio.pathtools import paths_between, DWPC directory = os.path.dirname(os.path.abspath(__file__)) def test_disease_gene_example_dwpc(): """ Test the DWPC computation from https://doi.org/10.1371/journal.pcbi.1004259.g002 """ path = os.path.join(directory, 'data', 'disease-gene-example-graph.json') graph = hetio.readwrite.read_graph(path) metagraph = graph.metagraph # Define traversal metapath = metagraph.metapath_from_abbrev('GiGaD') source_id = 'Gene', 'IRF1' target_id = 'Disease', 'Multiple Sclerosis' # Extract paths paths = paths_between(graph, source_id, target_id, metapath) assert len(paths) == 3 # Test degree-weighted path count dwpc = DWPC(paths, damping_exponent=0.5) assert dwpc == pytest.approx(0.25 + 0.25 + 32**-0.5)
Add PC / DWPC testimport os import pytest import hetio.readwrite from hetio.pathtools import paths_between, DWPC directory = os.path.dirname(os.path.abspath(__file__)) def test_disease_gene_example_dwpc(): """ Test the DWPC computation from https://doi.org/10.1371/journal.pcbi.1004259.g002 """ path = os.path.join(directory, 'data', 'disease-gene-example-graph.json') graph = hetio.readwrite.read_graph(path) metagraph = graph.metagraph # Define traversal metapath = metagraph.metapath_from_abbrev('GiGaD') source_id = 'Gene', 'IRF1' target_id = 'Disease', 'Multiple Sclerosis' # Extract paths paths = paths_between(graph, source_id, target_id, metapath) assert len(paths) == 3 # Test degree-weighted path count dwpc = DWPC(paths, damping_exponent=0.5) assert dwpc == pytest.approx(0.25 + 0.25 + 32**-0.5)
<commit_before><commit_msg>Add PC / DWPC test<commit_after>import os import pytest import hetio.readwrite from hetio.pathtools import paths_between, DWPC directory = os.path.dirname(os.path.abspath(__file__)) def test_disease_gene_example_dwpc(): """ Test the DWPC computation from https://doi.org/10.1371/journal.pcbi.1004259.g002 """ path = os.path.join(directory, 'data', 'disease-gene-example-graph.json') graph = hetio.readwrite.read_graph(path) metagraph = graph.metagraph # Define traversal metapath = metagraph.metapath_from_abbrev('GiGaD') source_id = 'Gene', 'IRF1' target_id = 'Disease', 'Multiple Sclerosis' # Extract paths paths = paths_between(graph, source_id, target_id, metapath) assert len(paths) == 3 # Test degree-weighted path count dwpc = DWPC(paths, damping_exponent=0.5) assert dwpc == pytest.approx(0.25 + 0.25 + 32**-0.5)
a8bded67a92632fd6d2d8791dd245dc82c773c8d
integration/basic_server.py
integration/basic_server.py
""" This is a basic test which allows you to setup a server and listen to it. For example, running: python integration/basic_server.py localhost 8040 Sets up a server. Running curl against it generates the following reponse: curl 'http://localhost:8040/' <html><body><h1>ok</h1><br/>from 28330 And in server output will print out the entire string (Lorem ipsum dolor etc.) """ import os import sys import string import argparse import gevent.pywsgi from tectonic.prefork import Master if __name__ == '__main__': a = argparse.ArgumentParser() a.add_argument('address') a.add_argument('port', type=int) a.add_argument('--logpath', default='log') a.add_argument('--pidfile', default='pidfile') a.add_argument('--daemonize', '-d', default=False, action='store_true') def wsgi(environ, start_response): start_response('200 OK', [('Content-Type', 'text/html')]) pid = os.getpid() spid = str(pid) sys.stderr.write('''\ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus eleifend a metus quis sollicitudin. Aenean nec dolor iaculis, rhoncus turpis sit amet, interdum quam. Nunc rhoncus magna a leo interdum luctus. Vestibulum nec sapien diam. Aliquam rutrum venenatis mattis. Etiam eget adipiscing risus. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Fusce nibh nulla, lacinia quis dignissim vel, condimentum at odio. Nunc et diam mauris. Fusce sit amet odio sagittis, convallis urna a, blandit urna. Phasellus mattis ligula sed tincidunt pellentesque. Nullam tempor convallis dapibus. Duis vitae vulputate sem, nec eleifend orci. Donec vel metus fringilla, ultricies nunc at, ultrices quam. Donec placerat nisi quis fringilla facilisis. Fusce eget erat ut magna consectetur elementum. Aenean non vulputate nulla. Aliquam eu dui nibh. Vivamus mollis suscipit neque, quis aliquam ipsum auctor non. Nulla cursus turpis turpis, nec euismod urna placerat at. Nunc id sapien nibh. Vestibulum condimentum luctus placerat. Donec vitae posuere arcu.''' + '\n') return ['<html><body><h1>ok</h1><br/>from ' + spid] args = a.parse_args() Master(server_class=gevent.pywsgi.WSGIServer, socket_factory=gevent.socket.socket, sleep=gevent.sleep, wsgi=wsgi, address=(args.address, args.port), logpath=args.logpath, pidfile=args.pidfile).run(args.daemonize)
Add an integration folder for tests that are beyond the unittest scope
Add an integration folder for tests that are beyond the unittest scope
Python
bsd-3-clause
markrwilliams/tectonic
Add an integration folder for tests that are beyond the unittest scope
""" This is a basic test which allows you to setup a server and listen to it. For example, running: python integration/basic_server.py localhost 8040 Sets up a server. Running curl against it generates the following reponse: curl 'http://localhost:8040/' <html><body><h1>ok</h1><br/>from 28330 And in server output will print out the entire string (Lorem ipsum dolor etc.) """ import os import sys import string import argparse import gevent.pywsgi from tectonic.prefork import Master if __name__ == '__main__': a = argparse.ArgumentParser() a.add_argument('address') a.add_argument('port', type=int) a.add_argument('--logpath', default='log') a.add_argument('--pidfile', default='pidfile') a.add_argument('--daemonize', '-d', default=False, action='store_true') def wsgi(environ, start_response): start_response('200 OK', [('Content-Type', 'text/html')]) pid = os.getpid() spid = str(pid) sys.stderr.write('''\ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus eleifend a metus quis sollicitudin. Aenean nec dolor iaculis, rhoncus turpis sit amet, interdum quam. Nunc rhoncus magna a leo interdum luctus. Vestibulum nec sapien diam. Aliquam rutrum venenatis mattis. Etiam eget adipiscing risus. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Fusce nibh nulla, lacinia quis dignissim vel, condimentum at odio. Nunc et diam mauris. Fusce sit amet odio sagittis, convallis urna a, blandit urna. Phasellus mattis ligula sed tincidunt pellentesque. Nullam tempor convallis dapibus. Duis vitae vulputate sem, nec eleifend orci. Donec vel metus fringilla, ultricies nunc at, ultrices quam. Donec placerat nisi quis fringilla facilisis. Fusce eget erat ut magna consectetur elementum. Aenean non vulputate nulla. Aliquam eu dui nibh. Vivamus mollis suscipit neque, quis aliquam ipsum auctor non. Nulla cursus turpis turpis, nec euismod urna placerat at. Nunc id sapien nibh. Vestibulum condimentum luctus placerat. Donec vitae posuere arcu.''' + '\n') return ['<html><body><h1>ok</h1><br/>from ' + spid] args = a.parse_args() Master(server_class=gevent.pywsgi.WSGIServer, socket_factory=gevent.socket.socket, sleep=gevent.sleep, wsgi=wsgi, address=(args.address, args.port), logpath=args.logpath, pidfile=args.pidfile).run(args.daemonize)
<commit_before><commit_msg>Add an integration folder for tests that are beyond the unittest scope<commit_after>
""" This is a basic test which allows you to setup a server and listen to it. For example, running: python integration/basic_server.py localhost 8040 Sets up a server. Running curl against it generates the following reponse: curl 'http://localhost:8040/' <html><body><h1>ok</h1><br/>from 28330 And in server output will print out the entire string (Lorem ipsum dolor etc.) """ import os import sys import string import argparse import gevent.pywsgi from tectonic.prefork import Master if __name__ == '__main__': a = argparse.ArgumentParser() a.add_argument('address') a.add_argument('port', type=int) a.add_argument('--logpath', default='log') a.add_argument('--pidfile', default='pidfile') a.add_argument('--daemonize', '-d', default=False, action='store_true') def wsgi(environ, start_response): start_response('200 OK', [('Content-Type', 'text/html')]) pid = os.getpid() spid = str(pid) sys.stderr.write('''\ Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus eleifend a metus quis sollicitudin. Aenean nec dolor iaculis, rhoncus turpis sit amet, interdum quam. Nunc rhoncus magna a leo interdum luctus. Vestibulum nec sapien diam. Aliquam rutrum venenatis mattis. Etiam eget adipiscing risus. Vestibulum ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae; Fusce nibh nulla, lacinia quis dignissim vel, condimentum at odio. Nunc et diam mauris. Fusce sit amet odio sagittis, convallis urna a, blandit urna. Phasellus mattis ligula sed tincidunt pellentesque. Nullam tempor convallis dapibus. Duis vitae vulputate sem, nec eleifend orci. Donec vel metus fringilla, ultricies nunc at, ultrices quam. Donec placerat nisi quis fringilla facilisis. Fusce eget erat ut magna consectetur elementum. Aenean non vulputate nulla. Aliquam eu dui nibh. Vivamus mollis suscipit neque, quis aliquam ipsum auctor non. Nulla cursus turpis turpis, nec euismod urna placerat at. Nunc id sapien nibh. Vestibulum condimentum luctus placerat. Donec vitae posuere arcu.''' + '\n') return ['<html><body><h1>ok</h1><br/>from ' + spid] args = a.parse_args() Master(server_class=gevent.pywsgi.WSGIServer, socket_factory=gevent.socket.socket, sleep=gevent.sleep, wsgi=wsgi, address=(args.address, args.port), logpath=args.logpath, pidfile=args.pidfile).run(args.daemonize)
Add an integration folder for tests that are beyond the unittest scope"""
This is a basic test which allows you to set up a server and listen to it.

For example, running:

python integration/basic_server.py localhost 8040

Sets up a server. Running curl against it generates the following response:

curl 'http://localhost:8040/'
<html><body><h1>ok</h1><br/>from 28330

And the server output will print out the entire string (Lorem ipsum dolor etc.)
"""
import os
import sys
import string
import argparse

import gevent.pywsgi

from tectonic.prefork import Master

if __name__ == '__main__':
    a = argparse.ArgumentParser()
    a.add_argument('address')
    a.add_argument('port', type=int)
    a.add_argument('--logpath', default='log')
    a.add_argument('--pidfile', default='pidfile')
    a.add_argument('--daemonize', '-d', default=False, action='store_true')

    def wsgi(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/html')])
        pid = os.getpid()
        spid = str(pid)
        sys.stderr.write('''\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus eleifend
a metus quis sollicitudin. Aenean nec dolor iaculis, rhoncus turpis sit amet,
interdum quam. Nunc rhoncus magna a leo interdum luctus. Vestibulum nec sapien
diam. Aliquam rutrum venenatis mattis. Etiam eget adipiscing risus. Vestibulum
ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae;
Fusce nibh nulla, lacinia quis dignissim vel, condimentum at odio. Nunc et diam
mauris. Fusce sit amet odio sagittis, convallis urna a, blandit urna. Phasellus
mattis ligula sed tincidunt pellentesque. Nullam tempor convallis dapibus. Duis
vitae vulputate sem, nec eleifend orci. Donec vel metus fringilla, ultricies
nunc at, ultrices quam. Donec placerat nisi quis fringilla facilisis. Fusce
eget erat ut magna consectetur elementum. Aenean non vulputate nulla. Aliquam
eu dui nibh. Vivamus mollis suscipit neque, quis aliquam ipsum auctor non.
Nulla cursus turpis turpis, nec euismod urna placerat at. Nunc id sapien nibh.
Vestibulum condimentum luctus placerat. Donec vitae posuere arcu.''' + '\n')
        return ['<html><body><h1>ok</h1><br/>from ' + spid]

    args = a.parse_args()

    Master(server_class=gevent.pywsgi.WSGIServer,
           socket_factory=gevent.socket.socket,
           sleep=gevent.sleep,
           wsgi=wsgi,
           address=(args.address, args.port),
           logpath=args.logpath,
           pidfile=args.pidfile).run(args.daemonize)
<commit_before><commit_msg>Add an integration folder for tests that are beyond the unittest scope<commit_after>"""
This is a basic test which allows you to set up a server and listen to it.

For example, running:

python integration/basic_server.py localhost 8040

Sets up a server. Running curl against it generates the following response:

curl 'http://localhost:8040/'
<html><body><h1>ok</h1><br/>from 28330

And the server output will print out the entire string (Lorem ipsum dolor etc.)
"""
import os
import sys
import string
import argparse

import gevent.pywsgi

from tectonic.prefork import Master

if __name__ == '__main__':
    a = argparse.ArgumentParser()
    a.add_argument('address')
    a.add_argument('port', type=int)
    a.add_argument('--logpath', default='log')
    a.add_argument('--pidfile', default='pidfile')
    a.add_argument('--daemonize', '-d', default=False, action='store_true')

    def wsgi(environ, start_response):
        start_response('200 OK', [('Content-Type', 'text/html')])
        pid = os.getpid()
        spid = str(pid)
        sys.stderr.write('''\
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Phasellus eleifend
a metus quis sollicitudin. Aenean nec dolor iaculis, rhoncus turpis sit amet,
interdum quam. Nunc rhoncus magna a leo interdum luctus. Vestibulum nec sapien
diam. Aliquam rutrum venenatis mattis. Etiam eget adipiscing risus. Vestibulum
ante ipsum primis in faucibus orci luctus et ultrices posuere cubilia Curae;
Fusce nibh nulla, lacinia quis dignissim vel, condimentum at odio. Nunc et diam
mauris. Fusce sit amet odio sagittis, convallis urna a, blandit urna. Phasellus
mattis ligula sed tincidunt pellentesque. Nullam tempor convallis dapibus. Duis
vitae vulputate sem, nec eleifend orci. Donec vel metus fringilla, ultricies
nunc at, ultrices quam. Donec placerat nisi quis fringilla facilisis. Fusce
eget erat ut magna consectetur elementum. Aenean non vulputate nulla. Aliquam
eu dui nibh. Vivamus mollis suscipit neque, quis aliquam ipsum auctor non.
Nulla cursus turpis turpis, nec euismod urna placerat at. Nunc id sapien nibh.
Vestibulum condimentum luctus placerat. Donec vitae posuere arcu.''' + '\n')
        return ['<html><body><h1>ok</h1><br/>from ' + spid]

    args = a.parse_args()

    Master(server_class=gevent.pywsgi.WSGIServer,
           socket_factory=gevent.socket.socket,
           sleep=gevent.sleep,
           wsgi=wsgi,
           address=(args.address, args.port),
           logpath=args.logpath,
           pidfile=args.pidfile).run(args.daemonize)
c34eb62d19c4216aa54199a083a06c2c45318cea
feedthefox/devices/migrations/0006_auto_20151110_1355.py
feedthefox/devices/migrations/0006_auto_20151110_1355.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import feedthefox.devices.models class Migration(migrations.Migration): dependencies = [ ('devices', '0005_auto_20151105_1048'), ] operations = [ migrations.AlterField( model_name='deviceinfo', name='imei', field=models.CharField(default='', blank=True, max_length=17, validators=[feedthefox.devices.models.validate_imei]), ), ]
Add missing migration for IMEI db validation.
Add missing migration for IMEI db validation.
Python
mpl-2.0
akatsoulas/feedthefox,akatsoulas/feedthefox,mozilla/feedthefox,mozilla/feedthefox,akatsoulas/feedthefox,mozilla/feedthefox,akatsoulas/feedthefox,mozilla/feedthefox
Add missing migration for IMEI db validation.
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import feedthefox.devices.models class Migration(migrations.Migration): dependencies = [ ('devices', '0005_auto_20151105_1048'), ] operations = [ migrations.AlterField( model_name='deviceinfo', name='imei', field=models.CharField(default='', blank=True, max_length=17, validators=[feedthefox.devices.models.validate_imei]), ), ]
<commit_before><commit_msg>Add missing migration for IMEI db validation.<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import feedthefox.devices.models class Migration(migrations.Migration): dependencies = [ ('devices', '0005_auto_20151105_1048'), ] operations = [ migrations.AlterField( model_name='deviceinfo', name='imei', field=models.CharField(default='', blank=True, max_length=17, validators=[feedthefox.devices.models.validate_imei]), ), ]
Add missing migration for IMEI db validation.# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import feedthefox.devices.models class Migration(migrations.Migration): dependencies = [ ('devices', '0005_auto_20151105_1048'), ] operations = [ migrations.AlterField( model_name='deviceinfo', name='imei', field=models.CharField(default='', blank=True, max_length=17, validators=[feedthefox.devices.models.validate_imei]), ), ]
<commit_before><commit_msg>Add missing migration for IMEI db validation.<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models import feedthefox.devices.models class Migration(migrations.Migration): dependencies = [ ('devices', '0005_auto_20151105_1048'), ] operations = [ migrations.AlterField( model_name='deviceinfo', name='imei', field=models.CharField(default='', blank=True, max_length=17, validators=[feedthefox.devices.models.validate_imei]), ), ]
b3b7e2fcbff5cd0ec2d2b4457b7a46d1846d55a8
glue_vispy_viewers/common/vispy_viewer.py
glue_vispy_viewers/common/vispy_viewer.py
from __future__ import absolute_import, division, print_function

import sys

from vispy import scene

from glue.external.qt import QtGui, get_qapp


class VispyWidget(QtGui.QWidget):

    def __init__(self, parent=None):

        super(VispyWidget, self).__init__(parent=parent)

        # Prepare Vispy canvas
        self.canvas = scene.SceneCanvas(keys='interactive', show=False)

        # Set up a viewbox
        self.view = self.canvas.central_widget.add_view()
        self.view.parent = self.canvas.scene

        # Set whether we are emulating a 3D texture. This needs to be enabled
        # as a workaround on Windows otherwise VisPy crashes.
        self.emulate_texture = (sys.platform == 'win32' and
                                sys.version_info[0] < 3)

        # Add a 3D axis to keep us oriented
        self.axis = scene.visuals.XYZAxis(parent=self.view.scene)

        # Create a turntable camera. For now, this is the only camera type
        # we support, but if we support more in future, we should implement
        # that here
        self.view.camera = scene.cameras.TurntableCamera(parent=self.view.scene,
                                                         fov=90)

        # Add the native canvas widget to this widget
        layout = QtGui.QVBoxLayout()
        layout.addWidget(self.canvas.native)
        self.setLayout(layout)

    def _update_stretch(self):
        pass

    def _update_attributes(self):
        pass

    def _update_limits(self):
        pass

    def _reset_view(self):
        self.view.camera.reset()

if __name__ == "__main__":
    from viewer_options import VispyOptionsWidget
    app = get_qapp()
    w = VispyWidget()
    d = VispyOptionsWidget(vispy_widget=w)
    d.show()
    w.show()
    app.exec_()
    app.quit()
Implement a generic Vispy widget
Implement a generic Vispy widget
Python
bsd-2-clause
PennyQ/glue-3d-viewer,PennyQ/astro-vispy,glue-viz/glue-vispy-viewers,glue-viz/glue-3d-viewer,astrofrog/glue-vispy-viewers,astrofrog/glue-3d-viewer
Implement a generic Vispy widget
from __future__ import absolute_import, division, print_function

import sys

from vispy import scene

from glue.external.qt import QtGui, get_qapp


class VispyWidget(QtGui.QWidget):

    def __init__(self, parent=None):

        super(VispyWidget, self).__init__(parent=parent)

        # Prepare Vispy canvas
        self.canvas = scene.SceneCanvas(keys='interactive', show=False)

        # Set up a viewbox
        self.view = self.canvas.central_widget.add_view()
        self.view.parent = self.canvas.scene

        # Set whether we are emulating a 3D texture. This needs to be enabled
        # as a workaround on Windows otherwise VisPy crashes.
        self.emulate_texture = (sys.platform == 'win32' and
                                sys.version_info[0] < 3)

        # Add a 3D axis to keep us oriented
        self.axis = scene.visuals.XYZAxis(parent=self.view.scene)

        # Create a turntable camera. For now, this is the only camera type
        # we support, but if we support more in future, we should implement
        # that here
        self.view.camera = scene.cameras.TurntableCamera(parent=self.view.scene,
                                                         fov=90)

        # Add the native canvas widget to this widget
        layout = QtGui.QVBoxLayout()
        layout.addWidget(self.canvas.native)
        self.setLayout(layout)

    def _update_stretch(self):
        pass

    def _update_attributes(self):
        pass

    def _update_limits(self):
        pass

    def _reset_view(self):
        self.view.camera.reset()

if __name__ == "__main__":
    from viewer_options import VispyOptionsWidget
    app = get_qapp()
    w = VispyWidget()
    d = VispyOptionsWidget(vispy_widget=w)
    d.show()
    w.show()
    app.exec_()
    app.quit()
<commit_before><commit_msg>Implement a generic Vispy widget<commit_after>
from __future__ import absolute_import, division, print_function

import sys

from vispy import scene

from glue.external.qt import QtGui, get_qapp


class VispyWidget(QtGui.QWidget):

    def __init__(self, parent=None):

        super(VispyWidget, self).__init__(parent=parent)

        # Prepare Vispy canvas
        self.canvas = scene.SceneCanvas(keys='interactive', show=False)

        # Set up a viewbox
        self.view = self.canvas.central_widget.add_view()
        self.view.parent = self.canvas.scene

        # Set whether we are emulating a 3D texture. This needs to be enabled
        # as a workaround on Windows otherwise VisPy crashes.
        self.emulate_texture = (sys.platform == 'win32' and
                                sys.version_info[0] < 3)

        # Add a 3D axis to keep us oriented
        self.axis = scene.visuals.XYZAxis(parent=self.view.scene)

        # Create a turntable camera. For now, this is the only camera type
        # we support, but if we support more in future, we should implement
        # that here
        self.view.camera = scene.cameras.TurntableCamera(parent=self.view.scene,
                                                         fov=90)

        # Add the native canvas widget to this widget
        layout = QtGui.QVBoxLayout()
        layout.addWidget(self.canvas.native)
        self.setLayout(layout)

    def _update_stretch(self):
        pass

    def _update_attributes(self):
        pass

    def _update_limits(self):
        pass

    def _reset_view(self):
        self.view.camera.reset()

if __name__ == "__main__":
    from viewer_options import VispyOptionsWidget
    app = get_qapp()
    w = VispyWidget()
    d = VispyOptionsWidget(vispy_widget=w)
    d.show()
    w.show()
    app.exec_()
    app.quit()
Implement a generic Vispy widgetfrom __future__ import absolute_import, division, print_function

import sys

from vispy import scene

from glue.external.qt import QtGui, get_qapp


class VispyWidget(QtGui.QWidget):

    def __init__(self, parent=None):

        super(VispyWidget, self).__init__(parent=parent)

        # Prepare Vispy canvas
        self.canvas = scene.SceneCanvas(keys='interactive', show=False)

        # Set up a viewbox
        self.view = self.canvas.central_widget.add_view()
        self.view.parent = self.canvas.scene

        # Set whether we are emulating a 3D texture. This needs to be enabled
        # as a workaround on Windows otherwise VisPy crashes.
        self.emulate_texture = (sys.platform == 'win32' and
                                sys.version_info[0] < 3)

        # Add a 3D axis to keep us oriented
        self.axis = scene.visuals.XYZAxis(parent=self.view.scene)

        # Create a turntable camera. For now, this is the only camera type
        # we support, but if we support more in future, we should implement
        # that here
        self.view.camera = scene.cameras.TurntableCamera(parent=self.view.scene,
                                                         fov=90)

        # Add the native canvas widget to this widget
        layout = QtGui.QVBoxLayout()
        layout.addWidget(self.canvas.native)
        self.setLayout(layout)

    def _update_stretch(self):
        pass

    def _update_attributes(self):
        pass

    def _update_limits(self):
        pass

    def _reset_view(self):
        self.view.camera.reset()

if __name__ == "__main__":
    from viewer_options import VispyOptionsWidget
    app = get_qapp()
    w = VispyWidget()
    d = VispyOptionsWidget(vispy_widget=w)
    d.show()
    w.show()
    app.exec_()
    app.quit()
<commit_before><commit_msg>Implement a generic Vispy widget<commit_after>from __future__ import absolute_import, division, print_function

import sys

from vispy import scene

from glue.external.qt import QtGui, get_qapp


class VispyWidget(QtGui.QWidget):

    def __init__(self, parent=None):

        super(VispyWidget, self).__init__(parent=parent)

        # Prepare Vispy canvas
        self.canvas = scene.SceneCanvas(keys='interactive', show=False)

        # Set up a viewbox
        self.view = self.canvas.central_widget.add_view()
        self.view.parent = self.canvas.scene

        # Set whether we are emulating a 3D texture. This needs to be enabled
        # as a workaround on Windows otherwise VisPy crashes.
        self.emulate_texture = (sys.platform == 'win32' and
                                sys.version_info[0] < 3)

        # Add a 3D axis to keep us oriented
        self.axis = scene.visuals.XYZAxis(parent=self.view.scene)

        # Create a turntable camera. For now, this is the only camera type
        # we support, but if we support more in future, we should implement
        # that here
        self.view.camera = scene.cameras.TurntableCamera(parent=self.view.scene,
                                                         fov=90)

        # Add the native canvas widget to this widget
        layout = QtGui.QVBoxLayout()
        layout.addWidget(self.canvas.native)
        self.setLayout(layout)

    def _update_stretch(self):
        pass

    def _update_attributes(self):
        pass

    def _update_limits(self):
        pass

    def _reset_view(self):
        self.view.camera.reset()

if __name__ == "__main__":
    from viewer_options import VispyOptionsWidget
    app = get_qapp()
    w = VispyWidget()
    d = VispyOptionsWidget(vispy_widget=w)
    d.show()
    w.show()
    app.exec_()
    app.quit()
8994f69f23271aa93d83e81032542f17b38423fd
.ipython/profile_default/ipython_config.py
.ipython/profile_default/ipython_config.py
""" IPython configuration with custom prompt using gruvbox colors. - https://github.com/reillysiemens/ipython-style-gruvbox Thanks to @petobens for their excellent dotfiles. - https://github.com/petobens/dotfiles """ from typing import List, Optional, Tuple import IPython.terminal.prompts as prompts from prompt_toolkit.application import get_app from prompt_toolkit.key_binding.vi_state import InputMode from pygments.token import _TokenType, Token from gruvbox import Color, GruvboxStyle config = get_config() # type: ignore # noqa: E0602 class Prompt(prompts.Prompts): """Custom IPython prompt.""" _before: str = "❰" _after: str = "❱ ⬢ " _continuation: str = "… " def in_prompt_tokens(self) -> List[Tuple[_TokenType, str]]: """Return in prompt.""" if get_app().vi_state.input_mode == InputMode.INSERT: prompt_token = prompts.Token.InsertPrompt num_token = prompts.Token.InsertPromptNum else: prompt_token = prompts.Token.NavPrompt num_token = prompts.Token.NavPromptNum return [ (prompt_token, self._before), (num_token, str(self.shell.execution_count)), (prompt_token, self._after), ] def continuation_prompt_tokens( self, width: Optional[int] = None ) -> List[Tuple[_TokenType, str]]: """Return continuation prompt.""" if width is None: width = self._width() if get_app().vi_state.input_mode == InputMode.INSERT: token = prompts.Token.InsertPrompt else: token = prompts.Token.NavPrompt return [(token, " " * (width - 2) + self._continuation)] def out_prompt_tokens(self) -> List[Tuple[_TokenType, str]]: """Return out prompt.""" return [] config.TerminalIPythonApp.display_banner = False config.TerminalInteractiveShell.confirm_exit = False config.TerminalInteractiveShell.editing_mode = "vi" config.TerminalInteractiveShell.true_color = True config.TerminalInteractiveShell.prompts_class = Prompt config.TerminalInteractiveShell.highlighting_style = GruvboxStyle config.TerminalInteractiveShell.highlighting_style_overrides = { Token.InsertPrompt: Color.neutral_blue, Token.NavPrompt: Color.neutral_purple, Token.InsertPromptNum: f"{Color.neutral_purple} bold", Token.NavPromptNum: f"{Color.neutral_blue} bold", }
Add custom IPython configuration ✨
Add custom IPython configuration ✨
Python
isc
reillysiemens/dotfiles,reillysiemens/dotfiles
Add custom IPython configuration ✨
""" IPython configuration with custom prompt using gruvbox colors. - https://github.com/reillysiemens/ipython-style-gruvbox Thanks to @petobens for their excellent dotfiles. - https://github.com/petobens/dotfiles """ from typing import List, Optional, Tuple import IPython.terminal.prompts as prompts from prompt_toolkit.application import get_app from prompt_toolkit.key_binding.vi_state import InputMode from pygments.token import _TokenType, Token from gruvbox import Color, GruvboxStyle config = get_config() # type: ignore # noqa: E0602 class Prompt(prompts.Prompts): """Custom IPython prompt.""" _before: str = "❰" _after: str = "❱ ⬢ " _continuation: str = "… " def in_prompt_tokens(self) -> List[Tuple[_TokenType, str]]: """Return in prompt.""" if get_app().vi_state.input_mode == InputMode.INSERT: prompt_token = prompts.Token.InsertPrompt num_token = prompts.Token.InsertPromptNum else: prompt_token = prompts.Token.NavPrompt num_token = prompts.Token.NavPromptNum return [ (prompt_token, self._before), (num_token, str(self.shell.execution_count)), (prompt_token, self._after), ] def continuation_prompt_tokens( self, width: Optional[int] = None ) -> List[Tuple[_TokenType, str]]: """Return continuation prompt.""" if width is None: width = self._width() if get_app().vi_state.input_mode == InputMode.INSERT: token = prompts.Token.InsertPrompt else: token = prompts.Token.NavPrompt return [(token, " " * (width - 2) + self._continuation)] def out_prompt_tokens(self) -> List[Tuple[_TokenType, str]]: """Return out prompt.""" return [] config.TerminalIPythonApp.display_banner = False config.TerminalInteractiveShell.confirm_exit = False config.TerminalInteractiveShell.editing_mode = "vi" config.TerminalInteractiveShell.true_color = True config.TerminalInteractiveShell.prompts_class = Prompt config.TerminalInteractiveShell.highlighting_style = GruvboxStyle config.TerminalInteractiveShell.highlighting_style_overrides = { Token.InsertPrompt: Color.neutral_blue, Token.NavPrompt: Color.neutral_purple, Token.InsertPromptNum: f"{Color.neutral_purple} bold", Token.NavPromptNum: f"{Color.neutral_blue} bold", }
<commit_before><commit_msg>Add custom IPython configuration ✨<commit_after>
""" IPython configuration with custom prompt using gruvbox colors. - https://github.com/reillysiemens/ipython-style-gruvbox Thanks to @petobens for their excellent dotfiles. - https://github.com/petobens/dotfiles """ from typing import List, Optional, Tuple import IPython.terminal.prompts as prompts from prompt_toolkit.application import get_app from prompt_toolkit.key_binding.vi_state import InputMode from pygments.token import _TokenType, Token from gruvbox import Color, GruvboxStyle config = get_config() # type: ignore # noqa: E0602 class Prompt(prompts.Prompts): """Custom IPython prompt.""" _before: str = "❰" _after: str = "❱ ⬢ " _continuation: str = "… " def in_prompt_tokens(self) -> List[Tuple[_TokenType, str]]: """Return in prompt.""" if get_app().vi_state.input_mode == InputMode.INSERT: prompt_token = prompts.Token.InsertPrompt num_token = prompts.Token.InsertPromptNum else: prompt_token = prompts.Token.NavPrompt num_token = prompts.Token.NavPromptNum return [ (prompt_token, self._before), (num_token, str(self.shell.execution_count)), (prompt_token, self._after), ] def continuation_prompt_tokens( self, width: Optional[int] = None ) -> List[Tuple[_TokenType, str]]: """Return continuation prompt.""" if width is None: width = self._width() if get_app().vi_state.input_mode == InputMode.INSERT: token = prompts.Token.InsertPrompt else: token = prompts.Token.NavPrompt return [(token, " " * (width - 2) + self._continuation)] def out_prompt_tokens(self) -> List[Tuple[_TokenType, str]]: """Return out prompt.""" return [] config.TerminalIPythonApp.display_banner = False config.TerminalInteractiveShell.confirm_exit = False config.TerminalInteractiveShell.editing_mode = "vi" config.TerminalInteractiveShell.true_color = True config.TerminalInteractiveShell.prompts_class = Prompt config.TerminalInteractiveShell.highlighting_style = GruvboxStyle config.TerminalInteractiveShell.highlighting_style_overrides = { Token.InsertPrompt: Color.neutral_blue, Token.NavPrompt: Color.neutral_purple, Token.InsertPromptNum: f"{Color.neutral_purple} bold", Token.NavPromptNum: f"{Color.neutral_blue} bold", }
Add custom IPython configuration ✨""" IPython configuration with custom prompt using gruvbox colors. - https://github.com/reillysiemens/ipython-style-gruvbox Thanks to @petobens for their excellent dotfiles. - https://github.com/petobens/dotfiles """ from typing import List, Optional, Tuple import IPython.terminal.prompts as prompts from prompt_toolkit.application import get_app from prompt_toolkit.key_binding.vi_state import InputMode from pygments.token import _TokenType, Token from gruvbox import Color, GruvboxStyle config = get_config() # type: ignore # noqa: E0602 class Prompt(prompts.Prompts): """Custom IPython prompt.""" _before: str = "❰" _after: str = "❱ ⬢ " _continuation: str = "… " def in_prompt_tokens(self) -> List[Tuple[_TokenType, str]]: """Return in prompt.""" if get_app().vi_state.input_mode == InputMode.INSERT: prompt_token = prompts.Token.InsertPrompt num_token = prompts.Token.InsertPromptNum else: prompt_token = prompts.Token.NavPrompt num_token = prompts.Token.NavPromptNum return [ (prompt_token, self._before), (num_token, str(self.shell.execution_count)), (prompt_token, self._after), ] def continuation_prompt_tokens( self, width: Optional[int] = None ) -> List[Tuple[_TokenType, str]]: """Return continuation prompt.""" if width is None: width = self._width() if get_app().vi_state.input_mode == InputMode.INSERT: token = prompts.Token.InsertPrompt else: token = prompts.Token.NavPrompt return [(token, " " * (width - 2) + self._continuation)] def out_prompt_tokens(self) -> List[Tuple[_TokenType, str]]: """Return out prompt.""" return [] config.TerminalIPythonApp.display_banner = False config.TerminalInteractiveShell.confirm_exit = False config.TerminalInteractiveShell.editing_mode = "vi" config.TerminalInteractiveShell.true_color = True config.TerminalInteractiveShell.prompts_class = Prompt config.TerminalInteractiveShell.highlighting_style = GruvboxStyle config.TerminalInteractiveShell.highlighting_style_overrides = { Token.InsertPrompt: Color.neutral_blue, Token.NavPrompt: Color.neutral_purple, Token.InsertPromptNum: f"{Color.neutral_purple} bold", Token.NavPromptNum: f"{Color.neutral_blue} bold", }
<commit_before><commit_msg>Add custom IPython configuration ✨<commit_after>""" IPython configuration with custom prompt using gruvbox colors. - https://github.com/reillysiemens/ipython-style-gruvbox Thanks to @petobens for their excellent dotfiles. - https://github.com/petobens/dotfiles """ from typing import List, Optional, Tuple import IPython.terminal.prompts as prompts from prompt_toolkit.application import get_app from prompt_toolkit.key_binding.vi_state import InputMode from pygments.token import _TokenType, Token from gruvbox import Color, GruvboxStyle config = get_config() # type: ignore # noqa: E0602 class Prompt(prompts.Prompts): """Custom IPython prompt.""" _before: str = "❰" _after: str = "❱ ⬢ " _continuation: str = "… " def in_prompt_tokens(self) -> List[Tuple[_TokenType, str]]: """Return in prompt.""" if get_app().vi_state.input_mode == InputMode.INSERT: prompt_token = prompts.Token.InsertPrompt num_token = prompts.Token.InsertPromptNum else: prompt_token = prompts.Token.NavPrompt num_token = prompts.Token.NavPromptNum return [ (prompt_token, self._before), (num_token, str(self.shell.execution_count)), (prompt_token, self._after), ] def continuation_prompt_tokens( self, width: Optional[int] = None ) -> List[Tuple[_TokenType, str]]: """Return continuation prompt.""" if width is None: width = self._width() if get_app().vi_state.input_mode == InputMode.INSERT: token = prompts.Token.InsertPrompt else: token = prompts.Token.NavPrompt return [(token, " " * (width - 2) + self._continuation)] def out_prompt_tokens(self) -> List[Tuple[_TokenType, str]]: """Return out prompt.""" return [] config.TerminalIPythonApp.display_banner = False config.TerminalInteractiveShell.confirm_exit = False config.TerminalInteractiveShell.editing_mode = "vi" config.TerminalInteractiveShell.true_color = True config.TerminalInteractiveShell.prompts_class = Prompt config.TerminalInteractiveShell.highlighting_style = GruvboxStyle config.TerminalInteractiveShell.highlighting_style_overrides = { Token.InsertPrompt: Color.neutral_blue, Token.NavPrompt: Color.neutral_purple, Token.InsertPromptNum: f"{Color.neutral_purple} bold", Token.NavPromptNum: f"{Color.neutral_blue} bold", }
1c7f6a6c44af9c2de372fb2c07469da29bc11764
tests/test_encoding.py
tests/test_encoding.py
from diana.encoding import encode, decode from nose.tools import eq_ DECODE_TESTS = [ ('', (), ()), ('b', (0x00,), (0,)), ('BB', (0x12, 0xfe), (0x12, 0xfe)), ('bb', (0x12, 0xfe), (0x12, -2)), ('s', (0x12, 0x34), (0x3412,)), ('s', (0xff, 0xff), (-1,)), ('S', (0xff, 0xff), (0xffff,)), ('i', (0x12, 0x34, 0x56, 0x78), (0x78563412,)), ('I', (0xff, 0xff, 0xff, 0xff), (0xffffffff,)), ('i', (0xff, 0xff, 0xff, 0xff), (-1,)), ('f', (0x00, 0x00, 0x80, 0x3f), (1.0,)), ('u', (0x05, 0x00, 0x00, 0x00, 0x62, 0x00, 0x65, 0x00, 0x65, 0x00, 0x73, 0x00, 0x00, 0x00), ('bees',)), ('[B]', (0x12, 0x34, 0x56, 0x78), ([(0x12,), (0x34,), (0x56,), (0x78,)],)), ('[BB]', (0x12, 0x34, 0x56, 0x78), ([(0x12, 0x34), (0x56, 0x78)],)), ('B[BB]B', (0x12, 0x34, 0x56, 0x78), (0x12, [(0x34, 0x56)], 0x78)), ('B[]', (0x12,), (0x12, [])) ] def test_encode(): def code(fmt, coded, uncoded): data = bytes(coded) output = encode(fmt, uncoded) eq_(output, data) for fmt, coded, uncoded in DECODE_TESTS: yield code, fmt, coded, uncoded def test_decode(): def code(fmt, coded, uncoded): data = bytes(coded) output = decode(fmt, data) eq_(output, uncoded) for fmt, coded, uncoded in DECODE_TESTS: yield code, fmt, coded, uncoded
Add tests for encoding subsystem
Add tests for encoding subsystem
Python
mit
prophile/libdiana
Add tests for encoding subsystem
from diana.encoding import encode, decode from nose.tools import eq_ DECODE_TESTS = [ ('', (), ()), ('b', (0x00,), (0,)), ('BB', (0x12, 0xfe), (0x12, 0xfe)), ('bb', (0x12, 0xfe), (0x12, -2)), ('s', (0x12, 0x34), (0x3412,)), ('s', (0xff, 0xff), (-1,)), ('S', (0xff, 0xff), (0xffff,)), ('i', (0x12, 0x34, 0x56, 0x78), (0x78563412,)), ('I', (0xff, 0xff, 0xff, 0xff), (0xffffffff,)), ('i', (0xff, 0xff, 0xff, 0xff), (-1,)), ('f', (0x00, 0x00, 0x80, 0x3f), (1.0,)), ('u', (0x05, 0x00, 0x00, 0x00, 0x62, 0x00, 0x65, 0x00, 0x65, 0x00, 0x73, 0x00, 0x00, 0x00), ('bees',)), ('[B]', (0x12, 0x34, 0x56, 0x78), ([(0x12,), (0x34,), (0x56,), (0x78,)],)), ('[BB]', (0x12, 0x34, 0x56, 0x78), ([(0x12, 0x34), (0x56, 0x78)],)), ('B[BB]B', (0x12, 0x34, 0x56, 0x78), (0x12, [(0x34, 0x56)], 0x78)), ('B[]', (0x12,), (0x12, [])) ] def test_encode(): def code(fmt, coded, uncoded): data = bytes(coded) output = encode(fmt, uncoded) eq_(output, data) for fmt, coded, uncoded in DECODE_TESTS: yield code, fmt, coded, uncoded def test_decode(): def code(fmt, coded, uncoded): data = bytes(coded) output = decode(fmt, data) eq_(output, uncoded) for fmt, coded, uncoded in DECODE_TESTS: yield code, fmt, coded, uncoded
<commit_before><commit_msg>Add tests for encoding subsystem<commit_after>
from diana.encoding import encode, decode from nose.tools import eq_ DECODE_TESTS = [ ('', (), ()), ('b', (0x00,), (0,)), ('BB', (0x12, 0xfe), (0x12, 0xfe)), ('bb', (0x12, 0xfe), (0x12, -2)), ('s', (0x12, 0x34), (0x3412,)), ('s', (0xff, 0xff), (-1,)), ('S', (0xff, 0xff), (0xffff,)), ('i', (0x12, 0x34, 0x56, 0x78), (0x78563412,)), ('I', (0xff, 0xff, 0xff, 0xff), (0xffffffff,)), ('i', (0xff, 0xff, 0xff, 0xff), (-1,)), ('f', (0x00, 0x00, 0x80, 0x3f), (1.0,)), ('u', (0x05, 0x00, 0x00, 0x00, 0x62, 0x00, 0x65, 0x00, 0x65, 0x00, 0x73, 0x00, 0x00, 0x00), ('bees',)), ('[B]', (0x12, 0x34, 0x56, 0x78), ([(0x12,), (0x34,), (0x56,), (0x78,)],)), ('[BB]', (0x12, 0x34, 0x56, 0x78), ([(0x12, 0x34), (0x56, 0x78)],)), ('B[BB]B', (0x12, 0x34, 0x56, 0x78), (0x12, [(0x34, 0x56)], 0x78)), ('B[]', (0x12,), (0x12, [])) ] def test_encode(): def code(fmt, coded, uncoded): data = bytes(coded) output = encode(fmt, uncoded) eq_(output, data) for fmt, coded, uncoded in DECODE_TESTS: yield code, fmt, coded, uncoded def test_decode(): def code(fmt, coded, uncoded): data = bytes(coded) output = decode(fmt, data) eq_(output, uncoded) for fmt, coded, uncoded in DECODE_TESTS: yield code, fmt, coded, uncoded
Add tests for encoding subsystemfrom diana.encoding import encode, decode from nose.tools import eq_ DECODE_TESTS = [ ('', (), ()), ('b', (0x00,), (0,)), ('BB', (0x12, 0xfe), (0x12, 0xfe)), ('bb', (0x12, 0xfe), (0x12, -2)), ('s', (0x12, 0x34), (0x3412,)), ('s', (0xff, 0xff), (-1,)), ('S', (0xff, 0xff), (0xffff,)), ('i', (0x12, 0x34, 0x56, 0x78), (0x78563412,)), ('I', (0xff, 0xff, 0xff, 0xff), (0xffffffff,)), ('i', (0xff, 0xff, 0xff, 0xff), (-1,)), ('f', (0x00, 0x00, 0x80, 0x3f), (1.0,)), ('u', (0x05, 0x00, 0x00, 0x00, 0x62, 0x00, 0x65, 0x00, 0x65, 0x00, 0x73, 0x00, 0x00, 0x00), ('bees',)), ('[B]', (0x12, 0x34, 0x56, 0x78), ([(0x12,), (0x34,), (0x56,), (0x78,)],)), ('[BB]', (0x12, 0x34, 0x56, 0x78), ([(0x12, 0x34), (0x56, 0x78)],)), ('B[BB]B', (0x12, 0x34, 0x56, 0x78), (0x12, [(0x34, 0x56)], 0x78)), ('B[]', (0x12,), (0x12, [])) ] def test_encode(): def code(fmt, coded, uncoded): data = bytes(coded) output = encode(fmt, uncoded) eq_(output, data) for fmt, coded, uncoded in DECODE_TESTS: yield code, fmt, coded, uncoded def test_decode(): def code(fmt, coded, uncoded): data = bytes(coded) output = decode(fmt, data) eq_(output, uncoded) for fmt, coded, uncoded in DECODE_TESTS: yield code, fmt, coded, uncoded
<commit_before><commit_msg>Add tests for encoding subsystem<commit_after>from diana.encoding import encode, decode from nose.tools import eq_ DECODE_TESTS = [ ('', (), ()), ('b', (0x00,), (0,)), ('BB', (0x12, 0xfe), (0x12, 0xfe)), ('bb', (0x12, 0xfe), (0x12, -2)), ('s', (0x12, 0x34), (0x3412,)), ('s', (0xff, 0xff), (-1,)), ('S', (0xff, 0xff), (0xffff,)), ('i', (0x12, 0x34, 0x56, 0x78), (0x78563412,)), ('I', (0xff, 0xff, 0xff, 0xff), (0xffffffff,)), ('i', (0xff, 0xff, 0xff, 0xff), (-1,)), ('f', (0x00, 0x00, 0x80, 0x3f), (1.0,)), ('u', (0x05, 0x00, 0x00, 0x00, 0x62, 0x00, 0x65, 0x00, 0x65, 0x00, 0x73, 0x00, 0x00, 0x00), ('bees',)), ('[B]', (0x12, 0x34, 0x56, 0x78), ([(0x12,), (0x34,), (0x56,), (0x78,)],)), ('[BB]', (0x12, 0x34, 0x56, 0x78), ([(0x12, 0x34), (0x56, 0x78)],)), ('B[BB]B', (0x12, 0x34, 0x56, 0x78), (0x12, [(0x34, 0x56)], 0x78)), ('B[]', (0x12,), (0x12, [])) ] def test_encode(): def code(fmt, coded, uncoded): data = bytes(coded) output = encode(fmt, uncoded) eq_(output, data) for fmt, coded, uncoded in DECODE_TESTS: yield code, fmt, coded, uncoded def test_decode(): def code(fmt, coded, uncoded): data = bytes(coded) output = decode(fmt, data) eq_(output, uncoded) for fmt, coded, uncoded in DECODE_TESTS: yield code, fmt, coded, uncoded
93a1ff67e62d0508744420cab8263a8cc893b119
test/list_backup_counts.py
test/list_backup_counts.py
import urbackup_api server = urbackup_api.urbackup_server("http://127.0.0.1:55414/x", "admin", "foo") clients = server.get_status() for client in clients: file_backups = server.get_clientbackups(client["id"]) incr_file = 0 full_file = 0 for file_backup in file_backups: if file_backup["incremental"]>0: full_file+=1 else: incr_file+=1 incr_image = 0 full_image = 0 image_backups = server.get_clientimagebackups(client["id"]) for image_backup in image_backups: if image_backup["letter"]=="SYSVOL" or image_backup["letter"]=="ESP": continue if image_backup["incremental"]>0: full_image+=1 else: incr_image+=1 print("Client {clientname} has {incr_file} incr file backups, {full_file} " "full file backups, {incr_image} incr image backups and " "{full_image} full image backups".format( incr_file=incr_file, clientname=client["name"], full_file=full_file, incr_image=incr_image, full_image=full_image) )
Add example listing backup counts
Add example listing backup counts
Python
apache-2.0
uroni/urbackup-server-python-web-api-wrapper
Add example listing backup counts
import urbackup_api server = urbackup_api.urbackup_server("http://127.0.0.1:55414/x", "admin", "foo") clients = server.get_status() for client in clients: file_backups = server.get_clientbackups(client["id"]) incr_file = 0 full_file = 0 for file_backup in file_backups: if file_backup["incremental"]>0: full_file+=1 else: incr_file+=1 incr_image = 0 full_image = 0 image_backups = server.get_clientimagebackups(client["id"]) for image_backup in image_backups: if image_backup["letter"]=="SYSVOL" or image_backup["letter"]=="ESP": continue if image_backup["incremental"]>0: full_image+=1 else: incr_image+=1 print("Client {clientname} has {incr_file} incr file backups, {full_file} " "full file backups, {incr_image} incr image backups and " "{full_image} full image backups".format( incr_file=incr_file, clientname=client["name"], full_file=full_file, incr_image=incr_image, full_image=full_image) )
<commit_before><commit_msg>Add example listing backup counts<commit_after>
import urbackup_api server = urbackup_api.urbackup_server("http://127.0.0.1:55414/x", "admin", "foo") clients = server.get_status() for client in clients: file_backups = server.get_clientbackups(client["id"]) incr_file = 0 full_file = 0 for file_backup in file_backups: if file_backup["incremental"]>0: full_file+=1 else: incr_file+=1 incr_image = 0 full_image = 0 image_backups = server.get_clientimagebackups(client["id"]) for image_backup in image_backups: if image_backup["letter"]=="SYSVOL" or image_backup["letter"]=="ESP": continue if image_backup["incremental"]>0: full_image+=1 else: incr_image+=1 print("Client {clientname} has {incr_file} incr file backups, {full_file} " "full file backups, {incr_image} incr image backups and " "{full_image} full image backups".format( incr_file=incr_file, clientname=client["name"], full_file=full_file, incr_image=incr_image, full_image=full_image) )
Add example listing backup countsimport urbackup_api server = urbackup_api.urbackup_server("http://127.0.0.1:55414/x", "admin", "foo") clients = server.get_status() for client in clients: file_backups = server.get_clientbackups(client["id"]) incr_file = 0 full_file = 0 for file_backup in file_backups: if file_backup["incremental"]>0: full_file+=1 else: incr_file+=1 incr_image = 0 full_image = 0 image_backups = server.get_clientimagebackups(client["id"]) for image_backup in image_backups: if image_backup["letter"]=="SYSVOL" or image_backup["letter"]=="ESP": continue if image_backup["incremental"]>0: full_image+=1 else: incr_image+=1 print("Client {clientname} has {incr_file} incr file backups, {full_file} " "full file backups, {incr_image} incr image backups and " "{full_image} full image backups".format( incr_file=incr_file, clientname=client["name"], full_file=full_file, incr_image=incr_image, full_image=full_image) )
<commit_before><commit_msg>Add example listing backup counts<commit_after>import urbackup_api server = urbackup_api.urbackup_server("http://127.0.0.1:55414/x", "admin", "foo") clients = server.get_status() for client in clients: file_backups = server.get_clientbackups(client["id"]) incr_file = 0 full_file = 0 for file_backup in file_backups: if file_backup["incremental"]>0: full_file+=1 else: incr_file+=1 incr_image = 0 full_image = 0 image_backups = server.get_clientimagebackups(client["id"]) for image_backup in image_backups: if image_backup["letter"]=="SYSVOL" or image_backup["letter"]=="ESP": continue if image_backup["incremental"]>0: full_image+=1 else: incr_image+=1 print("Client {clientname} has {incr_file} incr file backups, {full_file} " "full file backups, {incr_image} incr image backups and " "{full_image} full image backups".format( incr_file=incr_file, clientname=client["name"], full_file=full_file, incr_image=incr_image, full_image=full_image) )
16f0ec2d0e5c33126ddb01604213c6a14115e605
test/test_pocket_parser.py
test/test_pocket_parser.py
import unittest import utils import os import sys import re import subprocess TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) sys.path.append(TOPDIR) import pocket_parser class Tests(unittest.TestCase): def test_get_cnc(self): """Test get_cnc() function""" res = pocket_parser.get_cnc(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'), None) self.assertEqual(len(res), 8) self.assertEqual(res[('ILE', 9, 'A')], 0.0) if __name__ == '__main__': unittest.main()
Add basic test of pocket_parser.
Add basic test of pocket_parser.
Python
lgpl-2.1
salilab/cryptosite,salilab/cryptosite,salilab/cryptosite
Add basic test of pocket_parser.
import unittest import utils import os import sys import re import subprocess TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) sys.path.append(TOPDIR) import pocket_parser class Tests(unittest.TestCase): def test_get_cnc(self): """Test get_cnc() function""" res = pocket_parser.get_cnc(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'), None) self.assertEqual(len(res), 8) self.assertEqual(res[('ILE', 9, 'A')], 0.0) if __name__ == '__main__': unittest.main()
<commit_before><commit_msg>Add basic test of pocket_parser.<commit_after>
import unittest import utils import os import sys import re import subprocess TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) sys.path.append(TOPDIR) import pocket_parser class Tests(unittest.TestCase): def test_get_cnc(self): """Test get_cnc() function""" res = pocket_parser.get_cnc(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'), None) self.assertEqual(len(res), 8) self.assertEqual(res[('ILE', 9, 'A')], 0.0) if __name__ == '__main__': unittest.main()
Add basic test of pocket_parser.import unittest import utils import os import sys import re import subprocess TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) sys.path.append(TOPDIR) import pocket_parser class Tests(unittest.TestCase): def test_get_cnc(self): """Test get_cnc() function""" res = pocket_parser.get_cnc(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'), None) self.assertEqual(len(res), 8) self.assertEqual(res[('ILE', 9, 'A')], 0.0) if __name__ == '__main__': unittest.main()
<commit_before><commit_msg>Add basic test of pocket_parser.<commit_after>import unittest import utils import os import sys import re import subprocess TOPDIR = os.path.abspath(os.path.join(os.path.dirname(__file__), '..')) sys.path.append(TOPDIR) import pocket_parser class Tests(unittest.TestCase): def test_get_cnc(self): """Test get_cnc() function""" res = pocket_parser.get_cnc(os.path.join(TOPDIR, 'test', 'input', 'test.pdb'), None) self.assertEqual(len(res), 8) self.assertEqual(res[('ILE', 9, 'A')], 0.0) if __name__ == '__main__': unittest.main()
7021806e9e510286424dae696c2f4eee0a70b630
src/forms.py
src/forms.py
#-*- coding: utf-8 -*- from __future__ import unicode_literals import crispy_forms.helper class DefaultFormHelper(crispy_forms.helper.FormHelper): def __init__(self, form=None): super(DefaultFormHelper, self).__init__(form=form) self.form_class = "form-horizontal" self.html5_required = True self.help_text_inline = True
Define default crispy form helper
Define default crispy form helper
Python
mit
nigma/djutil
Define default crispy form helper
#-*- coding: utf-8 -*- from __future__ import unicode_literals import crispy_forms.helper class DefaultFormHelper(crispy_forms.helper.FormHelper): def __init__(self, form=None): super(DefaultFormHelper, self).__init__(form=form) self.form_class = "form-horizontal" self.html5_required = True self.help_text_inline = True
<commit_before><commit_msg>Define default crispy form helper<commit_after>
#-*- coding: utf-8 -*- from __future__ import unicode_literals import crispy_forms.helper class DefaultFormHelper(crispy_forms.helper.FormHelper): def __init__(self, form=None): super(DefaultFormHelper, self).__init__(form=form) self.form_class = "form-horizontal" self.html5_required = True self.help_text_inline = True
Define default crispy form helper#-*- coding: utf-8 -*- from __future__ import unicode_literals import crispy_forms.helper class DefaultFormHelper(crispy_forms.helper.FormHelper): def __init__(self, form=None): super(DefaultFormHelper, self).__init__(form=form) self.form_class = "form-horizontal" self.html5_required = True self.help_text_inline = True
<commit_before><commit_msg>Define default crispy form helper<commit_after>#-*- coding: utf-8 -*- from __future__ import unicode_literals import crispy_forms.helper class DefaultFormHelper(crispy_forms.helper.FormHelper): def __init__(self, form=None): super(DefaultFormHelper, self).__init__(form=form) self.form_class = "form-horizontal" self.html5_required = True self.help_text_inline = True
8919cf7171d7a659c0b90c41f2520029bab1423e
scripts/run_travis.py
scripts/run_travis.py
#!/usr/bin/env python3 import argparse import sys import pprint import shlex import yaml from pathlib import Path def gen_test_script(ty, job, output): output.write('#!/usr/bin/env bash\n\n') output.write('set -ex\n') # extract environment variables e_str = ty['env'][job] for v_assign in shlex.split(e_str): output.write(v_assign + '\n') output.write('\n') # extract script lines for l in ty['script']: output.write(l + '\n') def main(): parser = argparse.ArgumentParser(description='Run travis jobs locally') parser.add_argument('--yml', '-y', metavar='travis.yml', type=Path, default=Path('.travis.yml'), help='.travis.yml file') parser.add_argument('--job', '-j', metavar='JOB', type=int, default=0) parser.add_argument('--output', '-o', metavar='OUTPUT.sh', type=argparse.FileType('w'), default=sys.stdout) parser.add_argument('--verbose', '-v', action='store_true') args = parser.parse_args() ymlf = args.yml with open(ymlf, 'r') as f: yml = f.read() ty = yaml.load(yml) if args.verbose: pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr) pp.pprint(ty) gen_test_script(ty, args.job, args.output) if __name__ == '__main__': sys.exit(main())
Add tool to extract build process from .travis.yml
Add tool to extract build process from .travis.yml Usage: ./scripts/run_travis.py -j JOB_ID -o run.sh bash run.sh | ts "%F %H:%M:%.S"
Python
mpl-2.0
advancedtelematic/sota_client_cpp,advancedtelematic/aktualizr,advancedtelematic/aktualizr,advancedtelematic/aktualizr,advancedtelematic/aktualizr,advancedtelematic/sota_client_cpp
Add tool to extract build process from .travis.yml Usage: ./scripts/run_travis.py -j JOB_ID -o run.sh bash run.sh | ts "%F %H:%M:%.S"
#!/usr/bin/env python3 import argparse import sys import pprint import shlex import yaml from pathlib import Path def gen_test_script(ty, job, output): output.write('#!/usr/bin/env bash\n\n') output.write('set -ex\n') # extract environment variables e_str = ty['env'][job] for v_assign in shlex.split(e_str): output.write(v_assign + '\n') output.write('\n') # extract script lines for l in ty['script']: output.write(l + '\n') def main(): parser = argparse.ArgumentParser(description='Run travis jobs locally') parser.add_argument('--yml', '-y', metavar='travis.yml', type=Path, default=Path('.travis.yml'), help='.travis.yml file') parser.add_argument('--job', '-j', metavar='JOB', type=int, default=0) parser.add_argument('--output', '-o', metavar='OUTPUT.sh', type=argparse.FileType('w'), default=sys.stdout) parser.add_argument('--verbose', '-v', action='store_true') args = parser.parse_args() ymlf = args.yml with open(ymlf, 'r') as f: yml = f.read() ty = yaml.load(yml) if args.verbose: pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr) pp.pprint(ty) gen_test_script(ty, args.job, args.output) if __name__ == '__main__': sys.exit(main())
<commit_before><commit_msg>Add tool to extract build process from .travis.yml Usage: ./scripts/run_travis.py -j JOB_ID -o run.sh bash run.sh | ts "%F %H:%M:%.S"<commit_after>
#!/usr/bin/env python3 import argparse import sys import pprint import shlex import yaml from pathlib import Path def gen_test_script(ty, job, output): output.write('#!/usr/bin/env bash\n\n') output.write('set -ex\n') # extract environment variables e_str = ty['env'][job] for v_assign in shlex.split(e_str): output.write(v_assign + '\n') output.write('\n') # extract script lines for l in ty['script']: output.write(l + '\n') def main(): parser = argparse.ArgumentParser(description='Run travis jobs locally') parser.add_argument('--yml', '-y', metavar='travis.yml', type=Path, default=Path('.travis.yml'), help='.travis.yml file') parser.add_argument('--job', '-j', metavar='JOB', type=int, default=0) parser.add_argument('--output', '-o', metavar='OUTPUT.sh', type=argparse.FileType('w'), default=sys.stdout) parser.add_argument('--verbose', '-v', action='store_true') args = parser.parse_args() ymlf = args.yml with open(ymlf, 'r') as f: yml = f.read() ty = yaml.load(yml) if args.verbose: pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr) pp.pprint(ty) gen_test_script(ty, args.job, args.output) if __name__ == '__main__': sys.exit(main())
Add tool to extract build process from .travis.yml Usage: ./scripts/run_travis.py -j JOB_ID -o run.sh bash run.sh | ts "%F %H:%M:%.S"#!/usr/bin/env python3 import argparse import sys import pprint import shlex import yaml from pathlib import Path def gen_test_script(ty, job, output): output.write('#!/usr/bin/env bash\n\n') output.write('set -ex\n') # extract environment variables e_str = ty['env'][job] for v_assign in shlex.split(e_str): output.write(v_assign + '\n') output.write('\n') # extract script lines for l in ty['script']: output.write(l + '\n') def main(): parser = argparse.ArgumentParser(description='Run travis jobs locally') parser.add_argument('--yml', '-y', metavar='travis.yml', type=Path, default=Path('.travis.yml'), help='.travis.yml file') parser.add_argument('--job', '-j', metavar='JOB', type=int, default=0) parser.add_argument('--output', '-o', metavar='OUTPUT.sh', type=argparse.FileType('w'), default=sys.stdout) parser.add_argument('--verbose', '-v', action='store_true') args = parser.parse_args() ymlf = args.yml with open(ymlf, 'r') as f: yml = f.read() ty = yaml.load(yml) if args.verbose: pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr) pp.pprint(ty) gen_test_script(ty, args.job, args.output) if __name__ == '__main__': sys.exit(main())
<commit_before><commit_msg>Add tool to extract build process from .travis.yml Usage: ./scripts/run_travis.py -j JOB_ID -o run.sh bash run.sh | ts "%F %H:%M:%.S"<commit_after>#!/usr/bin/env python3 import argparse import sys import pprint import shlex import yaml from pathlib import Path def gen_test_script(ty, job, output): output.write('#!/usr/bin/env bash\n\n') output.write('set -ex\n') # extract environment variables e_str = ty['env'][job] for v_assign in shlex.split(e_str): output.write(v_assign + '\n') output.write('\n') # extract script lines for l in ty['script']: output.write(l + '\n') def main(): parser = argparse.ArgumentParser(description='Run travis jobs locally') parser.add_argument('--yml', '-y', metavar='travis.yml', type=Path, default=Path('.travis.yml'), help='.travis.yml file') parser.add_argument('--job', '-j', metavar='JOB', type=int, default=0) parser.add_argument('--output', '-o', metavar='OUTPUT.sh', type=argparse.FileType('w'), default=sys.stdout) parser.add_argument('--verbose', '-v', action='store_true') args = parser.parse_args() ymlf = args.yml with open(ymlf, 'r') as f: yml = f.read() ty = yaml.load(yml) if args.verbose: pp = pprint.PrettyPrinter(indent=4, stream=sys.stderr) pp.pprint(ty) gen_test_script(ty, args.job, args.output) if __name__ == '__main__': sys.exit(main())
d186c80feb7dee875a1a7debfd115e100dc3fca1
send_studentvoices.py
send_studentvoices.py
import os if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "studentportal.settings") from django.core.urlresolvers import reverse from django.conf import settings from post_office import mail from studentvoice.models import Voice for voice in Voice.objects.filter(was_sent=False, parent__isnull=True, is_published=True, response__isnull=True, score__gte=settings.STUDENTVOICE_THRESHOLD): url = reverse('studentvoice:show', args=(voice.pk,)) email_context = {'voice': voice, 'url': url} print "Handling voice #%d..." % voice.pk # Send notification to the voice recipient print "Preparing recipient email to %s..." % voice.recipient.email if voice.recipient.secondary_email: secondary_email = [voice.recipient.secondary_email] print "Adding secondary_email, as CC." else: secondary_email = None mail.send([voice.recipient.email], cc=secondary_email, template="studentvoice_threshold_recipient", context=email_context) # Send notification to the voice submitter print "Preparing submitter email to %s..." % voice.submitter.email mail.send([voice.submitter.email], template="studentvoice_threshold_submitter", context=email_context) # Send notification to the those who voted in favor of the voice for vote in voice.vote_set.filter(is_counted=True, vote_type='U'): print "Preparing voter email to %s..." % vote.submitter.email email_context['vote'] = vote mail.send([vote.submitter.email], template="studentvoice_threshold_voter", context=email_context) voice.was_sent = True voice.is_editable = False voice.save()
Add a cronjob script for sending studentvoice notifications.
Add a cronjob script for sending studentvoice notifications.
Python
agpl-3.0
enjaz/enjaz,osamak/student-portal,osamak/student-portal,osamak/student-portal,enjaz/enjaz,enjaz/enjaz,enjaz/enjaz,osamak/student-portal,enjaz/enjaz,osamak/student-portal
Add a cronjob script for sending studentvoice notifications.
import os if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "studentportal.settings") from django.core.urlresolvers import reverse from django.conf import settings from post_office import mail from studentvoice.models import Voice for voice in Voice.objects.filter(was_sent=False, parent__isnull=True, is_published=True, response__isnull=True, score__gte=settings.STUDENTVOICE_THRESHOLD): url = reverse('studentvoice:show', args=(voice.pk,)) email_context = {'voice': voice, 'url': url} print "Handling voice #%d..." % voice.pk # Send notification to the voice recipient print "Preparing recipient email to %s..." % voice.recipient.email if voice.recipient.secondary_email: secondary_email = [voice.recipient.secondary_email] print "Adding secondary_email, as CC." else: secondary_email = None mail.send([voice.recipient.email], cc=secondary_email, template="studentvoice_threshold_recipient", context=email_context) # Send notification to the voice submitter print "Preparing submitter email to %s..." % voice.submitter.email mail.send([voice.submitter.email], template="studentvoice_threshold_submitter", context=email_context) # Send notification to the those who voted in favor of the voice for vote in voice.vote_set.filter(is_counted=True, vote_type='U'): print "Preparing voter email to %s..." % vote.submitter.email email_context['vote'] = vote mail.send([vote.submitter.email], template="studentvoice_threshold_voter", context=email_context) voice.was_sent = True voice.is_editable = False voice.save()
<commit_before><commit_msg>Add a cronjob script for sending studentvoice notifications.<commit_after>
import os if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "studentportal.settings") from django.core.urlresolvers import reverse from django.conf import settings from post_office import mail from studentvoice.models import Voice for voice in Voice.objects.filter(was_sent=False, parent__isnull=True, is_published=True, response__isnull=True, score__gte=settings.STUDENTVOICE_THRESHOLD): url = reverse('studentvoice:show', args=(voice.pk,)) email_context = {'voice': voice, 'url': url} print "Handling voice #%d..." % voice.pk # Send notification to the voice recipient print "Preparing recipient email to %s..." % voice.recipient.email if voice.recipient.secondary_email: secondary_email = [voice.recipient.secondary_email] print "Adding secondary_email, as CC." else: secondary_email = None mail.send([voice.recipient.email], cc=secondary_email, template="studentvoice_threshold_recipient", context=email_context) # Send notification to the voice submitter print "Preparing submitter email to %s..." % voice.submitter.email mail.send([voice.submitter.email], template="studentvoice_threshold_submitter", context=email_context) # Send notification to the those who voted in favor of the voice for vote in voice.vote_set.filter(is_counted=True, vote_type='U'): print "Preparing voter email to %s..." % vote.submitter.email email_context['vote'] = vote mail.send([vote.submitter.email], template="studentvoice_threshold_voter", context=email_context) voice.was_sent = True voice.is_editable = False voice.save()
Add a cronjob script for sending studentvoice notifications.import os if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "studentportal.settings") from django.core.urlresolvers import reverse from django.conf import settings from post_office import mail from studentvoice.models import Voice for voice in Voice.objects.filter(was_sent=False, parent__isnull=True, is_published=True, response__isnull=True, score__gte=settings.STUDENTVOICE_THRESHOLD): url = reverse('studentvoice:show', args=(voice.pk,)) email_context = {'voice': voice, 'url': url} print "Handling voice #%d..." % voice.pk # Send notification to the voice recipient print "Preparing recipient email to %s..." % voice.recipient.email if voice.recipient.secondary_email: secondary_email = [voice.recipient.secondary_email] print "Adding secondary_email, as CC." else: secondary_email = None mail.send([voice.recipient.email], cc=secondary_email, template="studentvoice_threshold_recipient", context=email_context) # Send notification to the voice submitter print "Preparing submitter email to %s..." % voice.submitter.email mail.send([voice.submitter.email], template="studentvoice_threshold_submitter", context=email_context) # Send notification to the those who voted in favor of the voice for vote in voice.vote_set.filter(is_counted=True, vote_type='U'): print "Preparing voter email to %s..." % vote.submitter.email email_context['vote'] = vote mail.send([vote.submitter.email], template="studentvoice_threshold_voter", context=email_context) voice.was_sent = True voice.is_editable = False voice.save()
<commit_before><commit_msg>Add a cronjob script for sending studentvoice notifications.<commit_after>import os if __name__ == "__main__": os.environ.setdefault("DJANGO_SETTINGS_MODULE", "studentportal.settings") from django.core.urlresolvers import reverse from django.conf import settings from post_office import mail from studentvoice.models import Voice for voice in Voice.objects.filter(was_sent=False, parent__isnull=True, is_published=True, response__isnull=True, score__gte=settings.STUDENTVOICE_THRESHOLD): url = reverse('studentvoice:show', args=(voice.pk,)) email_context = {'voice': voice, 'url': url} print "Handling voice #%d..." % voice.pk # Send notification to the voice recipient print "Preparing recipient email to %s..." % voice.recipient.email if voice.recipient.secondary_email: secondary_email = [voice.recipient.secondary_email] print "Adding secondary_email, as CC." else: secondary_email = None mail.send([voice.recipient.email], cc=secondary_email, template="studentvoice_threshold_recipient", context=email_context) # Send notification to the voice submitter print "Preparing submitter email to %s..." % voice.submitter.email mail.send([voice.submitter.email], template="studentvoice_threshold_submitter", context=email_context) # Send notification to the those who voted in favor of the voice for vote in voice.vote_set.filter(is_counted=True, vote_type='U'): print "Preparing voter email to %s..." % vote.submitter.email email_context['vote'] = vote mail.send([vote.submitter.email], template="studentvoice_threshold_voter", context=email_context) voice.was_sent = True voice.is_editable = False voice.save()
eb9ba88177ce23ef259b1731f02c38d0ccaa8318
run_build.py
run_build.py
#!/usr/bin/python3 import re import os import string import sys import subprocess import auto_merge def compile_dogecoin(): path = os.getcwd() subprocess.check_output([path + os.path.sep + 'autogen.sh']) subprocess.check_output([path + os.path.sep + 'configure']) subprocess.check_output(['make', 'clean'], stderr=subprocess.STDOUT) subprocess.check_output(['make'], stderr=subprocess.STDOUT) subprocess.check_output(['make', 'check'], stderr=subprocess.STDOUT) return True config = auto_merge.load_configuration('config.yml') if not 'dogecoin_repo' in config: print('Missing "dogecoin_repo" configuration.') sys.exit(1) if not config['dogecoin_repo']['path']: print('Missing "dogecoin_repo" configuration.') sys.exit(1) cwd = os.getcwd() os.chdir(config['dogecoin_repo']['path']) os.chdir('..') # Go up to the directory above the Git repository build_success = compile_dogecoin() os.chdir(cwd)
Add new script to build Dogecoin
Add new script to build Dogecoin
Python
mit
rnicoll/robodoge
Add new script to build Dogecoin
#!/usr/bin/python3 import re import os import string import sys import subprocess import auto_merge def compile_dogecoin(): path = os.getcwd() subprocess.check_output([path + os.path.sep + 'autogen.sh']) subprocess.check_output([path + os.path.sep + 'configure']) subprocess.check_output(['make', 'clean'], stderr=subprocess.STDOUT) subprocess.check_output(['make'], stderr=subprocess.STDOUT) subprocess.check_output(['make', 'check'], stderr=subprocess.STDOUT) return True config = auto_merge.load_configuration('config.yml') if not 'dogecoin_repo' in config: print('Missing "dogecoin_repo" configuration.') sys.exit(1) if not config['dogecoin_repo']['path']: print('Missing "dogecoin_repo" configuration.') sys.exit(1) cwd = os.getcwd() os.chdir(config['dogecoin_repo']['path']) os.chdir('..') # Go up to the directory above the Git repository build_success = compile_dogecoin() os.chdir(cwd)
<commit_before><commit_msg>Add new script to build Dogecoin<commit_after>
#!/usr/bin/python3 import re import os import string import sys import subprocess import auto_merge def compile_dogecoin(): path = os.getcwd() subprocess.check_output([path + os.path.sep + 'autogen.sh']) subprocess.check_output([path + os.path.sep + 'configure']) subprocess.check_output(['make', 'clean'], stderr=subprocess.STDOUT) subprocess.check_output(['make'], stderr=subprocess.STDOUT) subprocess.check_output(['make', 'check'], stderr=subprocess.STDOUT) return True config = auto_merge.load_configuration('config.yml') if not 'dogecoin_repo' in config: print('Missing "dogecoin_repo" configuration.') sys.exit(1) if not config['dogecoin_repo']['path']: print('Missing "dogecoin_repo" configuration.') sys.exit(1) cwd = os.getcwd() os.chdir(config['dogecoin_repo']['path']) os.chdir('..') # Go up to the directory above the Git repository build_success = compile_dogecoin() os.chdir(cwd)
Add new script to build Dogecoin#!/usr/bin/python3 import re import os import string import sys import subprocess import auto_merge def compile_dogecoin(): path = os.getcwd() subprocess.check_output([path + os.path.sep + 'autogen.sh']) subprocess.check_output([path + os.path.sep + 'configure']) subprocess.check_output(['make', 'clean'], stderr=subprocess.STDOUT) subprocess.check_output(['make'], stderr=subprocess.STDOUT) subprocess.check_output(['make', 'check'], stderr=subprocess.STDOUT) return True config = auto_merge.load_configuration('config.yml') if not 'dogecoin_repo' in config: print('Missing "dogecoin_repo" configuration.') sys.exit(1) if not config['dogecoin_repo']['path']: print('Missing "dogecoin_repo" configuration.') sys.exit(1) cwd = os.getcwd() os.chdir(config['dogecoin_repo']['path']) os.chdir('..') # Go up to the directory above the Git repository build_success = compile_dogecoin() os.chdir(cwd)
<commit_before><commit_msg>Add new script to build Dogecoin<commit_after>#!/usr/bin/python3 import re import os import string import sys import subprocess import auto_merge def compile_dogecoin(): path = os.getcwd() subprocess.check_output([path + os.path.sep + 'autogen.sh']) subprocess.check_output([path + os.path.sep + 'configure']) subprocess.check_output(['make', 'clean'], stderr=subprocess.STDOUT) subprocess.check_output(['make'], stderr=subprocess.STDOUT) subprocess.check_output(['make', 'check'], stderr=subprocess.STDOUT) return True config = auto_merge.load_configuration('config.yml') if not 'dogecoin_repo' in config: print('Missing "dogecoin_repo" configuration.') sys.exit(1) if not config['dogecoin_repo']['path']: print('Missing "dogecoin_repo" configuration.') sys.exit(1) cwd = os.getcwd() os.chdir(config['dogecoin_repo']['path']) os.chdir('..') # Go up to the directory above the Git repository build_success = compile_dogecoin() os.chdir(cwd)
46eaacef6240a72089bda049214640c50ec353ec
backend/globaleaks/tests/handlers/test_robots.py
backend/globaleaks/tests/handlers/test_robots.py
# -*- coding: utf-8 -*- import json from twisted.internet.defer import inlineCallbacks from globaleaks.handlers import robots from globaleaks.models import config from globaleaks.rest import requests from globaleaks.settings import GLSettings from globaleaks.tests import helpers class TestRobotstxtHandlerHandler(helpers.TestHandler): _handler = robots.RobotstxtHandler @inlineCallbacks def test_get_with_indexing_disabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = False yield handler.get() self.assertEqual(self.responses[0], "User-agent: *\n") self.assertEqual(self.responses[1], "Disallow: /") @inlineCallbacks def test_get_with_indexing_enabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = True yield handler.get() self.assertEqual(self.responses[0], "User-agent: *\n") self.assertEqual(self.responses[1], "Allow: /") class TestSitemapHandlerHandler(helpers.TestHandler): _handler = robots.SitemapHandler @inlineCallbacks def test_get_with_indexing_disabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = False yield handler.get() self.assertEqual(handler.get_status(), 404) @inlineCallbacks def test_get_with_indexing_enabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = True yield handler.get() self.assertEqual(handler.get_status(), 200) class TestAhmiaDescriptionHandler(helpers.TestHandler): _handler = robots.AhmiaDescriptionHandler @inlineCallbacks def test_get_ahmia_disabled(self): handler = self.request() GLSettings.memory_copy.ahmia = False yield handler.get() self.assertEqual(handler.get_status(), 404) @inlineCallbacks def test_get_ahmia_enabled(self): handler = self.request() GLSettings.memory_copy.ahmia = True yield handler.get() self._handler.validate_message(json.dumps(self.responses[0]), requests.AhmiaDesc)
Add tests for robots APIs
Add tests for robots APIs
Python
agpl-3.0
vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks,vodkina/GlobaLeaks
Add tests for robots APIs
# -*- coding: utf-8 -*- import json from twisted.internet.defer import inlineCallbacks from globaleaks.handlers import robots from globaleaks.models import config from globaleaks.rest import requests from globaleaks.settings import GLSettings from globaleaks.tests import helpers class TestRobotstxtHandlerHandler(helpers.TestHandler): _handler = robots.RobotstxtHandler @inlineCallbacks def test_get_with_indexing_disabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = False yield handler.get() self.assertEqual(self.responses[0], "User-agent: *\n") self.assertEqual(self.responses[1], "Disallow: /") @inlineCallbacks def test_get_with_indexing_enabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = True yield handler.get() self.assertEqual(self.responses[0], "User-agent: *\n") self.assertEqual(self.responses[1], "Allow: /") class TestSitemapHandlerHandler(helpers.TestHandler): _handler = robots.SitemapHandler @inlineCallbacks def test_get_with_indexing_disabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = False yield handler.get() self.assertEqual(handler.get_status(), 404) @inlineCallbacks def test_get_with_indexing_enabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = True yield handler.get() self.assertEqual(handler.get_status(), 200) class TestAhmiaDescriptionHandler(helpers.TestHandler): _handler = robots.AhmiaDescriptionHandler @inlineCallbacks def test_get_ahmia_disabled(self): handler = self.request() GLSettings.memory_copy.ahmia = False yield handler.get() self.assertEqual(handler.get_status(), 404) @inlineCallbacks def test_get_ahmia_enabled(self): handler = self.request() GLSettings.memory_copy.ahmia = True yield handler.get() self._handler.validate_message(json.dumps(self.responses[0]), requests.AhmiaDesc)
<commit_before><commit_msg>Add tests for robots APIs<commit_after>
# -*- coding: utf-8 -*- import json from twisted.internet.defer import inlineCallbacks from globaleaks.handlers import robots from globaleaks.models import config from globaleaks.rest import requests from globaleaks.settings import GLSettings from globaleaks.tests import helpers class TestRobotstxtHandlerHandler(helpers.TestHandler): _handler = robots.RobotstxtHandler @inlineCallbacks def test_get_with_indexing_disabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = False yield handler.get() self.assertEqual(self.responses[0], "User-agent: *\n") self.assertEqual(self.responses[1], "Disallow: /") @inlineCallbacks def test_get_with_indexing_enabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = True yield handler.get() self.assertEqual(self.responses[0], "User-agent: *\n") self.assertEqual(self.responses[1], "Allow: /") class TestSitemapHandlerHandler(helpers.TestHandler): _handler = robots.SitemapHandler @inlineCallbacks def test_get_with_indexing_disabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = False yield handler.get() self.assertEqual(handler.get_status(), 404) @inlineCallbacks def test_get_with_indexing_enabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = True yield handler.get() self.assertEqual(handler.get_status(), 200) class TestAhmiaDescriptionHandler(helpers.TestHandler): _handler = robots.AhmiaDescriptionHandler @inlineCallbacks def test_get_ahmia_disabled(self): handler = self.request() GLSettings.memory_copy.ahmia = False yield handler.get() self.assertEqual(handler.get_status(), 404) @inlineCallbacks def test_get_ahmia_enabled(self): handler = self.request() GLSettings.memory_copy.ahmia = True yield handler.get() self._handler.validate_message(json.dumps(self.responses[0]), requests.AhmiaDesc)
Add tests for robots APIs# -*- coding: utf-8 -*- import json from twisted.internet.defer import inlineCallbacks from globaleaks.handlers import robots from globaleaks.models import config from globaleaks.rest import requests from globaleaks.settings import GLSettings from globaleaks.tests import helpers class TestRobotstxtHandlerHandler(helpers.TestHandler): _handler = robots.RobotstxtHandler @inlineCallbacks def test_get_with_indexing_disabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = False yield handler.get() self.assertEqual(self.responses[0], "User-agent: *\n") self.assertEqual(self.responses[1], "Disallow: /") @inlineCallbacks def test_get_with_indexing_enabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = True yield handler.get() self.assertEqual(self.responses[0], "User-agent: *\n") self.assertEqual(self.responses[1], "Allow: /") class TestSitemapHandlerHandler(helpers.TestHandler): _handler = robots.SitemapHandler @inlineCallbacks def test_get_with_indexing_disabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = False yield handler.get() self.assertEqual(handler.get_status(), 404) @inlineCallbacks def test_get_with_indexing_enabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = True yield handler.get() self.assertEqual(handler.get_status(), 200) class TestAhmiaDescriptionHandler(helpers.TestHandler): _handler = robots.AhmiaDescriptionHandler @inlineCallbacks def test_get_ahmia_disabled(self): handler = self.request() GLSettings.memory_copy.ahmia = False yield handler.get() self.assertEqual(handler.get_status(), 404) @inlineCallbacks def test_get_ahmia_enabled(self): handler = self.request() GLSettings.memory_copy.ahmia = True yield handler.get() self._handler.validate_message(json.dumps(self.responses[0]), requests.AhmiaDesc)
<commit_before><commit_msg>Add tests for robots APIs<commit_after># -*- coding: utf-8 -*- import json from twisted.internet.defer import inlineCallbacks from globaleaks.handlers import robots from globaleaks.models import config from globaleaks.rest import requests from globaleaks.settings import GLSettings from globaleaks.tests import helpers class TestRobotstxtHandlerHandler(helpers.TestHandler): _handler = robots.RobotstxtHandler @inlineCallbacks def test_get_with_indexing_disabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = False yield handler.get() self.assertEqual(self.responses[0], "User-agent: *\n") self.assertEqual(self.responses[1], "Disallow: /") @inlineCallbacks def test_get_with_indexing_enabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = True yield handler.get() self.assertEqual(self.responses[0], "User-agent: *\n") self.assertEqual(self.responses[1], "Allow: /") class TestSitemapHandlerHandler(helpers.TestHandler): _handler = robots.SitemapHandler @inlineCallbacks def test_get_with_indexing_disabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = False yield handler.get() self.assertEqual(handler.get_status(), 404) @inlineCallbacks def test_get_with_indexing_enabled(self): handler = self.request() GLSettings.memory_copy.allow_indexing = True yield handler.get() self.assertEqual(handler.get_status(), 200) class TestAhmiaDescriptionHandler(helpers.TestHandler): _handler = robots.AhmiaDescriptionHandler @inlineCallbacks def test_get_ahmia_disabled(self): handler = self.request() GLSettings.memory_copy.ahmia = False yield handler.get() self.assertEqual(handler.get_status(), 404) @inlineCallbacks def test_get_ahmia_enabled(self): handler = self.request() GLSettings.memory_copy.ahmia = True yield handler.get() self._handler.validate_message(json.dumps(self.responses[0]), requests.AhmiaDesc)
3e3e5bb92b1d9e0e1981a6deba41152826c3fce0
scripts/popvsdistinct.py
scripts/popvsdistinct.py
""" Plot and calculate county population size to number of distinct hashtags. """ import matplotlib.pyplot as plt import seaborn import pandas import twitterproj import scipy.stats import numpy as np def populations(): # Grab demographic info data = {} df = pandas.read_csv('../census/county/PEP_2013_PEPANNRES_with_ann.csv') for county in df.values[1:]: # Skip column headers fips = county[1] data[fips] = int(county[-1]) return data def distinct_ht(): # Grab tweet info data = {} db = twitterproj.connect() for county in twitterproj.hashtag_counts__counties(db, bot_filtered=True): fips = county['geoid'] #data[fips]['tweeted_hashtags'] = sum(county['counts'].values()) data[fips] = len(county['counts']) return data def main(): pops = populations() dhts = distinct_ht() Y = np.array(dhts.values()) X = np.array([pops[idx] for idx in dhts.keys()]) r, p = scipy.stats.pearsonr(X, Y) print r , p plt.scatter(X, Y, s=1, marker='o') plt.savefig('popdistht.pdf') plt.title(r'$\rho = {}'.format(r)) if __name__ == '__main__': main()
Add script to plot population vs distinct hashtags.
Add script to plot population vs distinct hashtags.
Python
unlicense
chebee7i/twitter,chebee7i/twitter,chebee7i/twitter
Add script to plot population vs distinct hashtags.
""" Plot and calculate county population size to number of distinct hashtags. """ import matplotlib.pyplot as plt import seaborn import pandas import twitterproj import scipy.stats import numpy as np def populations(): # Grab demographic info data = {} df = pandas.read_csv('../census/county/PEP_2013_PEPANNRES_with_ann.csv') for county in df.values[1:]: # Skip column headers fips = county[1] data[fips] = int(county[-1]) return data def distinct_ht(): # Grab tweet info data = {} db = twitterproj.connect() for county in twitterproj.hashtag_counts__counties(db, bot_filtered=True): fips = county['geoid'] #data[fips]['tweeted_hashtags'] = sum(county['counts'].values()) data[fips] = len(county['counts']) return data def main(): pops = populations() dhts = distinct_ht() Y = np.array(dhts.values()) X = np.array([pops[idx] for idx in dhts.keys()]) r, p = scipy.stats.pearsonr(X, Y) print r , p plt.scatter(X, Y, s=1, marker='o') plt.savefig('popdistht.pdf') plt.title(r'$\rho = {}'.format(r)) if __name__ == '__main__': main()
<commit_before><commit_msg>Add script to plot population vs distinct hashtags.<commit_after>
""" Plot and calculate county population size to number of distinct hashtags. """ import matplotlib.pyplot as plt import seaborn import pandas import twitterproj import scipy.stats import numpy as np def populations(): # Grab demographic info data = {} df = pandas.read_csv('../census/county/PEP_2013_PEPANNRES_with_ann.csv') for county in df.values[1:]: # Skip column headers fips = county[1] data[fips] = int(county[-1]) return data def distinct_ht(): # Grab tweet info data = {} db = twitterproj.connect() for county in twitterproj.hashtag_counts__counties(db, bot_filtered=True): fips = county['geoid'] #data[fips]['tweeted_hashtags'] = sum(county['counts'].values()) data[fips] = len(county['counts']) return data def main(): pops = populations() dhts = distinct_ht() Y = np.array(dhts.values()) X = np.array([pops[idx] for idx in dhts.keys()]) r, p = scipy.stats.pearsonr(X, Y) print r , p plt.scatter(X, Y, s=1, marker='o') plt.savefig('popdistht.pdf') plt.title(r'$\rho = {}'.format(r)) if __name__ == '__main__': main()
Add script to plot population vs distinct hashtags.""" Plot and calculate county population size to number of distinct hashtags. """ import matplotlib.pyplot as plt import seaborn import pandas import twitterproj import scipy.stats import numpy as np def populations(): # Grab demographic info data = {} df = pandas.read_csv('../census/county/PEP_2013_PEPANNRES_with_ann.csv') for county in df.values[1:]: # Skip column headers fips = county[1] data[fips] = int(county[-1]) return data def distinct_ht(): # Grab tweet info data = {} db = twitterproj.connect() for county in twitterproj.hashtag_counts__counties(db, bot_filtered=True): fips = county['geoid'] #data[fips]['tweeted_hashtags'] = sum(county['counts'].values()) data[fips] = len(county['counts']) return data def main(): pops = populations() dhts = distinct_ht() Y = np.array(dhts.values()) X = np.array([pops[idx] for idx in dhts.keys()]) r, p = scipy.stats.pearsonr(X, Y) print r , p plt.scatter(X, Y, s=1, marker='o') plt.savefig('popdistht.pdf') plt.title(r'$\rho = {}'.format(r)) if __name__ == '__main__': main()
<commit_before><commit_msg>Add script to plot population vs distinct hashtags.<commit_after>""" Plot and calculate county population size to number of distinct hashtags. """ import matplotlib.pyplot as plt import seaborn import pandas import twitterproj import scipy.stats import numpy as np def populations(): # Grab demographic info data = {} df = pandas.read_csv('../census/county/PEP_2013_PEPANNRES_with_ann.csv') for county in df.values[1:]: # Skip column headers fips = county[1] data[fips] = int(county[-1]) return data def distinct_ht(): # Grab tweet info data = {} db = twitterproj.connect() for county in twitterproj.hashtag_counts__counties(db, bot_filtered=True): fips = county['geoid'] #data[fips]['tweeted_hashtags'] = sum(county['counts'].values()) data[fips] = len(county['counts']) return data def main(): pops = populations() dhts = distinct_ht() Y = np.array(dhts.values()) X = np.array([pops[idx] for idx in dhts.keys()]) r, p = scipy.stats.pearsonr(X, Y) print r , p plt.scatter(X, Y, s=1, marker='o') plt.savefig('popdistht.pdf') plt.title(r'$\rho = {}'.format(r)) if __name__ == '__main__': main()
465a604547e1438e650c8b4142816e2330363767
tests/cpydiff/types_list_store_noniter.py
tests/cpydiff/types_list_store_noniter.py
""" categories: Types,list description: List slice-store with non-iterable on RHS is not implemented cause: RHS is restricted to be a tuple or list workaround: Use ``list(<iter>)`` on RHS to convert the iterable to a list """ l = [10, 20] l[0:1] = range(4) print(l)
Add a test for storing iterable to a list slice.
tests/cpydiff: Add a test for storing iterable to a list slice.
Python
mit
alex-robbins/micropython,adafruit/circuitpython,PappaPeppar/micropython,adafruit/micropython,SHA2017-badge/micropython-esp32,swegener/micropython,Timmenem/micropython,henriknelson/micropython,adafruit/micropython,TDAbboud/micropython,SHA2017-badge/micropython-esp32,deshipu/micropython,deshipu/micropython,deshipu/micropython,PappaPeppar/micropython,PappaPeppar/micropython,PappaPeppar/micropython,adafruit/micropython,hiway/micropython,micropython/micropython-esp32,toolmacher/micropython,cwyark/micropython,AriZuu/micropython,oopy/micropython,adafruit/micropython,AriZuu/micropython,henriknelson/micropython,tobbad/micropython,puuu/micropython,MrSurly/micropython-esp32,pfalcon/micropython,trezor/micropython,AriZuu/micropython,swegener/micropython,Timmenem/micropython,TDAbboud/micropython,pozetroninc/micropython,MrSurly/micropython,chrisdearman/micropython,deshipu/micropython,tobbad/micropython,toolmacher/micropython,puuu/micropython,HenrikSolver/micropython,TDAbboud/micropython,swegener/micropython,TDAbboud/micropython,pozetroninc/micropython,MrSurly/micropython-esp32,alex-robbins/micropython,swegener/micropython,Timmenem/micropython,TDAbboud/micropython,deshipu/micropython,trezor/micropython,MrSurly/micropython-esp32,infinnovation/micropython,cwyark/micropython,trezor/micropython,micropython/micropython-esp32,bvernoux/micropython,Peetz0r/micropython-esp32,pfalcon/micropython,hiway/micropython,infinnovation/micropython,henriknelson/micropython,SHA2017-badge/micropython-esp32,dmazzella/micropython,bvernoux/micropython,chrisdearman/micropython,hiway/micropython,lowRISC/micropython,bvernoux/micropython,adafruit/micropython,selste/micropython,Peetz0r/micropython-esp32,lowRISC/micropython,chrisdearman/micropython,henriknelson/micropython,ryannathans/micropython,adafruit/circuitpython,pramasoul/micropython,HenrikSolver/micropython,dmazzella/micropython,Peetz0r/micropython-esp32,PappaPeppar/micropython,pfalcon/micropython,pramasoul/micropython,ryannathans/micropython,oopy/micropython,bvernoux/micropython,Timmenem/micropython,cwyark/micropython,pozetroninc/micropython,Peetz0r/micropython-esp32,kerneltask/micropython,SHA2017-badge/micropython-esp32,tralamazza/micropython,puuu/micropython,ryannathans/micropython,toolmacher/micropython,alex-robbins/micropython,HenrikSolver/micropython,Timmenem/micropython,tobbad/micropython,MrSurly/micropython,ryannathans/micropython,blazewicz/micropython,HenrikSolver/micropython,oopy/micropython,pozetroninc/micropython,swegener/micropython,toolmacher/micropython
tests/cpydiff: Add a test for storing iterable to a list slice.
""" categories: Types,list description: List slice-store with non-iterable on RHS is not implemented cause: RHS is restricted to be a tuple or list workaround: Use ``list(<iter>)`` on RHS to convert the iterable to a list """ l = [10, 20] l[0:1] = range(4) print(l)
<commit_before><commit_msg>tests/cpydiff: Add a test for storing iterable to a list slice.<commit_after>
""" categories: Types,list description: List slice-store with non-iterable on RHS is not implemented cause: RHS is restricted to be a tuple or list workaround: Use ``list(<iter>)`` on RHS to convert the iterable to a list """ l = [10, 20] l[0:1] = range(4) print(l)
tests/cpydiff: Add a test for storing iterable to a list slice.""" categories: Types,list description: List slice-store with non-iterable on RHS is not implemented cause: RHS is restricted to be a tuple or list workaround: Use ``list(<iter>)`` on RHS to convert the iterable to a list """ l = [10, 20] l[0:1] = range(4) print(l)
<commit_before><commit_msg>tests/cpydiff: Add a test for storing iterable to a list slice.<commit_after>""" categories: Types,list description: List slice-store with non-iterable on RHS is not implemented cause: RHS is restricted to be a tuple or list workaround: Use ``list(<iter>)`` on RHS to convert the iterable to a list """ l = [10, 20] l[0:1] = range(4) print(l)
d6d4f175330638f35d4eb0512ef14f82eab74f50
show-adverts.py
show-adverts.py
# -*- coding: utf-8 -*- import os, sys print(sys.version_info) import marshal import select import socket import time def _unpack(message): return marshal.loads(message) def _pack(message): return marshal.dumps(message) PORT = 9999 MESSAGE_SIZE = 256 # # Set the socket up to broadcast datagrams over UDP # s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) s.bind(("192.168.31.2", PORT)) # # Add the raw UDP socket to a ZeroMQ socket poller so we can check whether # it's received anything as part of the beacon's main event loop. # print("Listening...") while True: rlist, wlist, xlist = select.select([s], [], [], 1) if s in rlist: message, source = s.recvfrom(MESSAGE_SIZE) print("Message: %r, Source: %r" % (message, source)) service_name, service_address = _unpack(message) print("%s: Found %s at %s" % (time.asctime(), service_name, service_address))
Add a debug tool to show advertising broadcasts
Add a debug tool to show advertising broadcasts
Python
mit
tjguk/networkzero,tjguk/networkzero,tjguk/networkzero
Add a debug tool to show advertising broadcasts
# -*- coding: utf-8 -*- import os, sys print(sys.version_info) import marshal import select import socket import time def _unpack(message): return marshal.loads(message) def _pack(message): return marshal.dumps(message) PORT = 9999 MESSAGE_SIZE = 256 # # Set the socket up to broadcast datagrams over UDP # s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) s.bind(("192.168.31.2", PORT)) # # Add the raw UDP socket to a ZeroMQ socket poller so we can check whether # it's received anything as part of the beacon's main event loop. # print("Listening...") while True: rlist, wlist, xlist = select.select([s], [], [], 1) if s in rlist: message, source = s.recvfrom(MESSAGE_SIZE) print("Message: %r, Source: %r" % (message, source)) service_name, service_address = _unpack(message) print("%s: Found %s at %s" % (time.asctime(), service_name, service_address))
<commit_before><commit_msg>Add a debug tool to show advertising broadcasts<commit_after>
# -*- coding: utf-8 -*- import os, sys print(sys.version_info) import marshal import select import socket import time def _unpack(message): return marshal.loads(message) def _pack(message): return marshal.dumps(message) PORT = 9999 MESSAGE_SIZE = 256 # # Set the socket up to broadcast datagrams over UDP # s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) s.bind(("192.168.31.2", PORT)) # # Add the raw UDP socket to a ZeroMQ socket poller so we can check whether # it's received anything as part of the beacon's main event loop. # print("Listening...") while True: rlist, wlist, xlist = select.select([s], [], [], 1) if s in rlist: message, source = s.recvfrom(MESSAGE_SIZE) print("Message: %r, Source: %r" % (message, source)) service_name, service_address = _unpack(message) print("%s: Found %s at %s" % (time.asctime(), service_name, service_address))
Add a debug tool to show advertising broadcasts# -*- coding: utf-8 -*- import os, sys print(sys.version_info) import marshal import select import socket import time def _unpack(message): return marshal.loads(message) def _pack(message): return marshal.dumps(message) PORT = 9999 MESSAGE_SIZE = 256 # # Set the socket up to broadcast datagrams over UDP # s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) s.bind(("192.168.31.2", PORT)) # # Add the raw UDP socket to a ZeroMQ socket poller so we can check whether # it's received anything as part of the beacon's main event loop. # print("Listening...") while True: rlist, wlist, xlist = select.select([s], [], [], 1) if s in rlist: message, source = s.recvfrom(MESSAGE_SIZE) print("Message: %r, Source: %r" % (message, source)) service_name, service_address = _unpack(message) print("%s: Found %s at %s" % (time.asctime(), service_name, service_address))
<commit_before><commit_msg>Add a debug tool to show advertising broadcasts<commit_after># -*- coding: utf-8 -*- import os, sys print(sys.version_info) import marshal import select import socket import time def _unpack(message): return marshal.loads(message) def _pack(message): return marshal.dumps(message) PORT = 9999 MESSAGE_SIZE = 256 # # Set the socket up to broadcast datagrams over UDP # s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP) s.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1) s.bind(("192.168.31.2", PORT)) # # Add the raw UDP socket to a ZeroMQ socket poller so we can check whether # it's received anything as part of the beacon's main event loop. # print("Listening...") while True: rlist, wlist, xlist = select.select([s], [], [], 1) if s in rlist: message, source = s.recvfrom(MESSAGE_SIZE) print("Message: %r, Source: %r" % (message, source)) service_name, service_address = _unpack(message) print("%s: Found %s at %s" % (time.asctime(), service_name, service_address))
4a70f9ed2f19cba08208fa9f2a3cafe38ee283b6
corehq/apps/userreports/tests/test_columns.py
corehq/apps/userreports/tests/test_columns.py
from django.test import SimpleTestCase from jsonobject.exceptions import BadValueError from corehq.apps.userreports.reports.specs import ReportColumn class TestReportColumn(SimpleTestCase): def testBadAggregation(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple_", "field": "doc_id", "type": "field", }) def testGoodFormat(self): for format in [ 'default', 'percent_of_total', ]: self.assertEquals(ReportColumn, type( ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": format, "type": "field", }) )) def testBadFormat(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": "default_", "type": "field", })
from django.test import SimpleTestCase from jsonobject.exceptions import BadValueError from sqlagg import SumWhen from corehq.apps.userreports.sql import _expand_column from corehq.apps.userreports.reports.specs import ReportColumn class TestReportColumn(SimpleTestCase): def testBadAggregation(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple_", "field": "doc_id", "type": "field", }) def testGoodFormat(self): for format in [ 'default', 'percent_of_total', ]: self.assertEquals(ReportColumn, type( ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": format, "type": "field", }) )) def testBadFormat(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": "default_", "type": "field", }) class TestExpandReportColumn(SimpleTestCase): def test_expansion(self): column = ReportColumn( type="field", field="lab_result", display="Lab Result", format="default", aggregation="expand", description="foo" ) cols = _expand_column(column, ["positive", "negative"]) self.assertEqual(len(cols), 2) self.assertEqual(type(cols[0].view), SumWhen) self.assertEqual(cols[1].view.whens, {'negative':1})
Add simple test for column expansion
Add simple test for column expansion
Python
bsd-3-clause
qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq
from django.test import SimpleTestCase from jsonobject.exceptions import BadValueError from corehq.apps.userreports.reports.specs import ReportColumn class TestReportColumn(SimpleTestCase): def testBadAggregation(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple_", "field": "doc_id", "type": "field", }) def testGoodFormat(self): for format in [ 'default', 'percent_of_total', ]: self.assertEquals(ReportColumn, type( ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": format, "type": "field", }) )) def testBadFormat(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": "default_", "type": "field", }) Add simple test for column expansion
from django.test import SimpleTestCase from jsonobject.exceptions import BadValueError from sqlagg import SumWhen from corehq.apps.userreports.sql import _expand_column from corehq.apps.userreports.reports.specs import ReportColumn class TestReportColumn(SimpleTestCase): def testBadAggregation(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple_", "field": "doc_id", "type": "field", }) def testGoodFormat(self): for format in [ 'default', 'percent_of_total', ]: self.assertEquals(ReportColumn, type( ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": format, "type": "field", }) )) def testBadFormat(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": "default_", "type": "field", }) class TestExpandReportColumn(SimpleTestCase): def test_expansion(self): column = ReportColumn( type="field", field="lab_result", display="Lab Result", format="default", aggregation="expand", description="foo" ) cols = _expand_column(column, ["positive", "negative"]) self.assertEqual(len(cols), 2) self.assertEqual(type(cols[0].view), SumWhen) self.assertEqual(cols[1].view.whens, {'negative':1})
<commit_before>from django.test import SimpleTestCase from jsonobject.exceptions import BadValueError from corehq.apps.userreports.reports.specs import ReportColumn class TestReportColumn(SimpleTestCase): def testBadAggregation(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple_", "field": "doc_id", "type": "field", }) def testGoodFormat(self): for format in [ 'default', 'percent_of_total', ]: self.assertEquals(ReportColumn, type( ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": format, "type": "field", }) )) def testBadFormat(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": "default_", "type": "field", }) <commit_msg>Add simple test for column expansion<commit_after>
from django.test import SimpleTestCase from jsonobject.exceptions import BadValueError from sqlagg import SumWhen from corehq.apps.userreports.sql import _expand_column from corehq.apps.userreports.reports.specs import ReportColumn class TestReportColumn(SimpleTestCase): def testBadAggregation(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple_", "field": "doc_id", "type": "field", }) def testGoodFormat(self): for format in [ 'default', 'percent_of_total', ]: self.assertEquals(ReportColumn, type( ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": format, "type": "field", }) )) def testBadFormat(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": "default_", "type": "field", }) class TestExpandReportColumn(SimpleTestCase): def test_expansion(self): column = ReportColumn( type="field", field="lab_result", display="Lab Result", format="default", aggregation="expand", description="foo" ) cols = _expand_column(column, ["positive", "negative"]) self.assertEqual(len(cols), 2) self.assertEqual(type(cols[0].view), SumWhen) self.assertEqual(cols[1].view.whens, {'negative':1})
from django.test import SimpleTestCase from jsonobject.exceptions import BadValueError from corehq.apps.userreports.reports.specs import ReportColumn class TestReportColumn(SimpleTestCase): def testBadAggregation(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple_", "field": "doc_id", "type": "field", }) def testGoodFormat(self): for format in [ 'default', 'percent_of_total', ]: self.assertEquals(ReportColumn, type( ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": format, "type": "field", }) )) def testBadFormat(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": "default_", "type": "field", }) Add simple test for column expansionfrom django.test import SimpleTestCase from jsonobject.exceptions import BadValueError from sqlagg import SumWhen from corehq.apps.userreports.sql import _expand_column from corehq.apps.userreports.reports.specs import ReportColumn class TestReportColumn(SimpleTestCase): def testBadAggregation(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple_", "field": "doc_id", "type": "field", }) def testGoodFormat(self): for format in [ 'default', 'percent_of_total', ]: self.assertEquals(ReportColumn, type( ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": format, "type": "field", }) )) def testBadFormat(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": "default_", "type": "field", }) class TestExpandReportColumn(SimpleTestCase): def test_expansion(self): column = ReportColumn( type="field", field="lab_result", display="Lab Result", format="default", aggregation="expand", description="foo" ) cols = _expand_column(column, ["positive", "negative"]) self.assertEqual(len(cols), 2) self.assertEqual(type(cols[0].view), SumWhen) self.assertEqual(cols[1].view.whens, {'negative':1})
<commit_before>from django.test import SimpleTestCase from jsonobject.exceptions import BadValueError from corehq.apps.userreports.reports.specs import ReportColumn class TestReportColumn(SimpleTestCase): def testBadAggregation(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple_", "field": "doc_id", "type": "field", }) def testGoodFormat(self): for format in [ 'default', 'percent_of_total', ]: self.assertEquals(ReportColumn, type( ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": format, "type": "field", }) )) def testBadFormat(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": "default_", "type": "field", }) <commit_msg>Add simple test for column expansion<commit_after>from django.test import SimpleTestCase from jsonobject.exceptions import BadValueError from sqlagg import SumWhen from corehq.apps.userreports.sql import _expand_column from corehq.apps.userreports.reports.specs import ReportColumn class TestReportColumn(SimpleTestCase): def testBadAggregation(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple_", "field": "doc_id", "type": "field", }) def testGoodFormat(self): for format in [ 'default', 'percent_of_total', ]: self.assertEquals(ReportColumn, type( ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": format, "type": "field", }) )) def testBadFormat(self): with self.assertRaises(BadValueError): ReportColumn.wrap({ "aggregation": "simple", "field": "doc_id", "format": "default_", "type": "field", }) class TestExpandReportColumn(SimpleTestCase): def test_expansion(self): column = ReportColumn( type="field", field="lab_result", display="Lab Result", format="default", aggregation="expand", description="foo" ) cols = _expand_column(column, ["positive", "negative"]) self.assertEqual(len(cols), 2) self.assertEqual(type(cols[0].view), SumWhen) self.assertEqual(cols[1].view.whens, {'negative':1})
643456f6f1bb9f264dbe6d3ad48a84af4e4dd91c
temba/flows/migrations/0087_fix_open_ended_ruleset_with_timeout.py
temba/flows/migrations/0087_fix_open_ended_ruleset_with_timeout.py
# -*- coding: utf-8 -*- # Generated by Django 1.10.5 on 2017-01-26 08:42 from __future__ import unicode_literals import json from django.db import migrations def fix_ruleset_categories_open_ended(RuleSet): rulesets = list(RuleSet.objects.all()) if not rulesets: return affected_flows = [] for ruleset in rulesets: base_lang = ruleset.flow.base_language rules_json = json.loads(ruleset.rules) if len(rules_json) == 2 and rules_json[1]['test']['type'] == 'timeout': rules_json[0]['category'][base_lang] = 'All Responses' ruleset.rules = json.dumps(rules_json) ruleset.save() if ruleset.flow.pk not in affected_flows: affected_flows.append(ruleset.flow.pk) print("Adjusted ruleset %d from flow %d" % (ruleset.id, ruleset.flow.id)) print("Update oped ended categories with timeout on %d flows" % len(affected_flows)) def apply_as_migration(apps, schema_editor): RuleSet = apps.get_model('flows', 'RuleSet') fix_ruleset_categories_open_ended(RuleSet) def apply_manual(): from temba.flows.models import RuleSet fix_ruleset_categories_open_ended(RuleSet) class Migration(migrations.Migration): dependencies = [ ('flows', '0086_is_squashed'), ] operations = [ migrations.RunPython(apply_as_migration) ]
Add data migrations for rulesets
Add data migrations for rulesets
Python
agpl-3.0
pulilab/rapidpro,pulilab/rapidpro,pulilab/rapidpro,tsotetsi/textily-web,tsotetsi/textily-web,tsotetsi/textily-web,tsotetsi/textily-web,tsotetsi/textily-web,pulilab/rapidpro,pulilab/rapidpro
Add data migrations for rulesets
# -*- coding: utf-8 -*- # Generated by Django 1.10.5 on 2017-01-26 08:42 from __future__ import unicode_literals import json from django.db import migrations def fix_ruleset_categories_open_ended(RuleSet): rulesets = list(RuleSet.objects.all()) if not rulesets: return affected_flows = [] for ruleset in rulesets: base_lang = ruleset.flow.base_language rules_json = json.loads(ruleset.rules) if len(rules_json) == 2 and rules_json[1]['test']['type'] == 'timeout': rules_json[0]['category'][base_lang] = 'All Responses' ruleset.rules = json.dumps(rules_json) ruleset.save() if ruleset.flow.pk not in affected_flows: affected_flows.append(ruleset.flow.pk) print("Adjusted ruleset %d from flow %d" % (ruleset.id, ruleset.flow.id)) print("Update oped ended categories with timeout on %d flows" % len(affected_flows)) def apply_as_migration(apps, schema_editor): RuleSet = apps.get_model('flows', 'RuleSet') fix_ruleset_categories_open_ended(RuleSet) def apply_manual(): from temba.flows.models import RuleSet fix_ruleset_categories_open_ended(RuleSet) class Migration(migrations.Migration): dependencies = [ ('flows', '0086_is_squashed'), ] operations = [ migrations.RunPython(apply_as_migration) ]
<commit_before><commit_msg>Add data migrations for rulesets<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.10.5 on 2017-01-26 08:42 from __future__ import unicode_literals import json from django.db import migrations def fix_ruleset_categories_open_ended(RuleSet): rulesets = list(RuleSet.objects.all()) if not rulesets: return affected_flows = [] for ruleset in rulesets: base_lang = ruleset.flow.base_language rules_json = json.loads(ruleset.rules) if len(rules_json) == 2 and rules_json[1]['test']['type'] == 'timeout': rules_json[0]['category'][base_lang] = 'All Responses' ruleset.rules = json.dumps(rules_json) ruleset.save() if ruleset.flow.pk not in affected_flows: affected_flows.append(ruleset.flow.pk) print("Adjusted ruleset %d from flow %d" % (ruleset.id, ruleset.flow.id)) print("Update oped ended categories with timeout on %d flows" % len(affected_flows)) def apply_as_migration(apps, schema_editor): RuleSet = apps.get_model('flows', 'RuleSet') fix_ruleset_categories_open_ended(RuleSet) def apply_manual(): from temba.flows.models import RuleSet fix_ruleset_categories_open_ended(RuleSet) class Migration(migrations.Migration): dependencies = [ ('flows', '0086_is_squashed'), ] operations = [ migrations.RunPython(apply_as_migration) ]
Add data migrations for rulesets# -*- coding: utf-8 -*- # Generated by Django 1.10.5 on 2017-01-26 08:42 from __future__ import unicode_literals import json from django.db import migrations def fix_ruleset_categories_open_ended(RuleSet): rulesets = list(RuleSet.objects.all()) if not rulesets: return affected_flows = [] for ruleset in rulesets: base_lang = ruleset.flow.base_language rules_json = json.loads(ruleset.rules) if len(rules_json) == 2 and rules_json[1]['test']['type'] == 'timeout': rules_json[0]['category'][base_lang] = 'All Responses' ruleset.rules = json.dumps(rules_json) ruleset.save() if ruleset.flow.pk not in affected_flows: affected_flows.append(ruleset.flow.pk) print("Adjusted ruleset %d from flow %d" % (ruleset.id, ruleset.flow.id)) print("Update oped ended categories with timeout on %d flows" % len(affected_flows)) def apply_as_migration(apps, schema_editor): RuleSet = apps.get_model('flows', 'RuleSet') fix_ruleset_categories_open_ended(RuleSet) def apply_manual(): from temba.flows.models import RuleSet fix_ruleset_categories_open_ended(RuleSet) class Migration(migrations.Migration): dependencies = [ ('flows', '0086_is_squashed'), ] operations = [ migrations.RunPython(apply_as_migration) ]
<commit_before><commit_msg>Add data migrations for rulesets<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.10.5 on 2017-01-26 08:42 from __future__ import unicode_literals import json from django.db import migrations def fix_ruleset_categories_open_ended(RuleSet): rulesets = list(RuleSet.objects.all()) if not rulesets: return affected_flows = [] for ruleset in rulesets: base_lang = ruleset.flow.base_language rules_json = json.loads(ruleset.rules) if len(rules_json) == 2 and rules_json[1]['test']['type'] == 'timeout': rules_json[0]['category'][base_lang] = 'All Responses' ruleset.rules = json.dumps(rules_json) ruleset.save() if ruleset.flow.pk not in affected_flows: affected_flows.append(ruleset.flow.pk) print("Adjusted ruleset %d from flow %d" % (ruleset.id, ruleset.flow.id)) print("Update oped ended categories with timeout on %d flows" % len(affected_flows)) def apply_as_migration(apps, schema_editor): RuleSet = apps.get_model('flows', 'RuleSet') fix_ruleset_categories_open_ended(RuleSet) def apply_manual(): from temba.flows.models import RuleSet fix_ruleset_categories_open_ended(RuleSet) class Migration(migrations.Migration): dependencies = [ ('flows', '0086_is_squashed'), ] operations = [ migrations.RunPython(apply_as_migration) ]
36fb0255a4037a9fe7b6d61868f8666325fea944
tests/blueprints/user_message/test_address_formatting.py
tests/blueprints/user_message/test_address_formatting.py
""" :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.database import db from byceps.services.email.models import EmailConfig from byceps.services.user_message import service as user_message_service from testfixtures.brand import create_brand as _create_brand from testfixtures.user import create_user as _create_user from tests.helpers import app_context def test_recipient_formatting(application, params): screen_name, email_address, expected = params brand = create_brand() user = create_user(screen_name, email_address=email_address) message = user_message_service.create_message(user.id, user.id, '', '', brand.id) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alice', 'alice@example.com', 'Alice <alice@example.com>'), ('Bob', 'bob@example.com', 'Bob <bob@example.com>'), ]) def params(request): yield request.param @pytest.fixture def application(): with app_context(): set_up_database() yield tear_down_database() # helpers def set_up_database(): db.reflect() db.drop_all() db.create_all() def tear_down_database(): db.session.remove() db.drop_all() def create_brand(): brand = _create_brand() db.session.add(brand) db.session.commit() sender_address = '{}@example.com'.format(brand.id) email_config = EmailConfig(brand.id, sender_address) db.session.add(email_config) db.session.commit() return brand def create_user(*args, **kwargs): user = _create_user(*args, **kwargs) db.session.add(user) db.session.commit() return user
Test recipient address formatting in user message e-mails
Test recipient address formatting in user message e-mails
Python
bsd-3-clause
m-ober/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps
Test recipient address formatting in user message e-mails
""" :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.database import db from byceps.services.email.models import EmailConfig from byceps.services.user_message import service as user_message_service from testfixtures.brand import create_brand as _create_brand from testfixtures.user import create_user as _create_user from tests.helpers import app_context def test_recipient_formatting(application, params): screen_name, email_address, expected = params brand = create_brand() user = create_user(screen_name, email_address=email_address) message = user_message_service.create_message(user.id, user.id, '', '', brand.id) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alice', 'alice@example.com', 'Alice <alice@example.com>'), ('Bob', 'bob@example.com', 'Bob <bob@example.com>'), ]) def params(request): yield request.param @pytest.fixture def application(): with app_context(): set_up_database() yield tear_down_database() # helpers def set_up_database(): db.reflect() db.drop_all() db.create_all() def tear_down_database(): db.session.remove() db.drop_all() def create_brand(): brand = _create_brand() db.session.add(brand) db.session.commit() sender_address = '{}@example.com'.format(brand.id) email_config = EmailConfig(brand.id, sender_address) db.session.add(email_config) db.session.commit() return brand def create_user(*args, **kwargs): user = _create_user(*args, **kwargs) db.session.add(user) db.session.commit() return user
<commit_before><commit_msg>Test recipient address formatting in user message e-mails<commit_after>
""" :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.database import db from byceps.services.email.models import EmailConfig from byceps.services.user_message import service as user_message_service from testfixtures.brand import create_brand as _create_brand from testfixtures.user import create_user as _create_user from tests.helpers import app_context def test_recipient_formatting(application, params): screen_name, email_address, expected = params brand = create_brand() user = create_user(screen_name, email_address=email_address) message = user_message_service.create_message(user.id, user.id, '', '', brand.id) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alice', 'alice@example.com', 'Alice <alice@example.com>'), ('Bob', 'bob@example.com', 'Bob <bob@example.com>'), ]) def params(request): yield request.param @pytest.fixture def application(): with app_context(): set_up_database() yield tear_down_database() # helpers def set_up_database(): db.reflect() db.drop_all() db.create_all() def tear_down_database(): db.session.remove() db.drop_all() def create_brand(): brand = _create_brand() db.session.add(brand) db.session.commit() sender_address = '{}@example.com'.format(brand.id) email_config = EmailConfig(brand.id, sender_address) db.session.add(email_config) db.session.commit() return brand def create_user(*args, **kwargs): user = _create_user(*args, **kwargs) db.session.add(user) db.session.commit() return user
Test recipient address formatting in user message e-mails""" :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.database import db from byceps.services.email.models import EmailConfig from byceps.services.user_message import service as user_message_service from testfixtures.brand import create_brand as _create_brand from testfixtures.user import create_user as _create_user from tests.helpers import app_context def test_recipient_formatting(application, params): screen_name, email_address, expected = params brand = create_brand() user = create_user(screen_name, email_address=email_address) message = user_message_service.create_message(user.id, user.id, '', '', brand.id) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alice', 'alice@example.com', 'Alice <alice@example.com>'), ('Bob', 'bob@example.com', 'Bob <bob@example.com>'), ]) def params(request): yield request.param @pytest.fixture def application(): with app_context(): set_up_database() yield tear_down_database() # helpers def set_up_database(): db.reflect() db.drop_all() db.create_all() def tear_down_database(): db.session.remove() db.drop_all() def create_brand(): brand = _create_brand() db.session.add(brand) db.session.commit() sender_address = '{}@example.com'.format(brand.id) email_config = EmailConfig(brand.id, sender_address) db.session.add(email_config) db.session.commit() return brand def create_user(*args, **kwargs): user = _create_user(*args, **kwargs) db.session.add(user) db.session.commit() return user
<commit_before><commit_msg>Test recipient address formatting in user message e-mails<commit_after>""" :Copyright: 2006-2018 Jochen Kupperschmidt :License: Modified BSD, see LICENSE for details. """ from unittest.mock import patch import pytest from byceps.database import db from byceps.services.email.models import EmailConfig from byceps.services.user_message import service as user_message_service from testfixtures.brand import create_brand as _create_brand from testfixtures.user import create_user as _create_user from tests.helpers import app_context def test_recipient_formatting(application, params): screen_name, email_address, expected = params brand = create_brand() user = create_user(screen_name, email_address=email_address) message = user_message_service.create_message(user.id, user.id, '', '', brand.id) assert message.recipients == [expected] @pytest.fixture(params=[ ('Alice', 'alice@example.com', 'Alice <alice@example.com>'), ('Bob', 'bob@example.com', 'Bob <bob@example.com>'), ]) def params(request): yield request.param @pytest.fixture def application(): with app_context(): set_up_database() yield tear_down_database() # helpers def set_up_database(): db.reflect() db.drop_all() db.create_all() def tear_down_database(): db.session.remove() db.drop_all() def create_brand(): brand = _create_brand() db.session.add(brand) db.session.commit() sender_address = '{}@example.com'.format(brand.id) email_config = EmailConfig(brand.id, sender_address) db.session.add(email_config) db.session.commit() return brand def create_user(*args, **kwargs): user = _create_user(*args, **kwargs) db.session.add(user) db.session.commit() return user
f290dd020b2cb3e586c8de6c4e8e3c1bc80f3583
evaluation/packages/colours.py
evaluation/packages/colours.py
"""@package Colours This module provides the colourmaps used in globOpt to display primitives according to their gid """ import packages.primitive as primitive import packages.orderedSet as orderedSet class Colours(object): def __init__(self): self.colListMedium = ['#F15A60', '#7AC36A', '#5A9BD4', '#FAA75B', '#9E67AB', '#CE7058', '#D77FB4', '#F1ADCB', '#B2A377'] self.colListDark = ['#F15A60', '#7AC367', '#5A9B15', '#FAA75B', '#9E67AB', '#CE7058', '#D77FB4', '#F1ADCB', '#B2A377'] """ Compute the colourmap associating one colour per group id. Also output the masks associating the node idx for each group id (can be used directly as filter in networkX display funtions) """ def getDIDColourMap(self, primArray): ids = orderedSet.OrderedSet() gfilter = {} for p in primArray: ids.add(p.did) if p.did not in gfilter: gfilter[p.did] = [] gfilter[p.did].append(p.uid) cmap = {} nbCol = len(self.colListMedium) for idx, did in enumerate(ids): print idx, idx%nbCol, self.colListMedium[idx%nbCol] cmap[did] = self.colListMedium[idx%nbCol] return cmap, gfilter
Add new class to compute the colourmap and the node filters accordingly to did
Add new class to compute the colourmap and the node filters accordingly to did
Python
apache-2.0
amonszpart/globOpt,amonszpart/globOpt,amonszpart/globOpt,amonszpart/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt,NUAAXXY/globOpt,amonszpart/globOpt,NUAAXXY/globOpt,amonszpart/globOpt,NUAAXXY/globOpt
Add new class to compute the colourmap and the node filters accordingly to did
"""@package Colours This module provides the colourmaps used in globOpt to display primitives according to their gid """ import packages.primitive as primitive import packages.orderedSet as orderedSet class Colours(object): def __init__(self): self.colListMedium = ['#F15A60', '#7AC36A', '#5A9BD4', '#FAA75B', '#9E67AB', '#CE7058', '#D77FB4', '#F1ADCB', '#B2A377'] self.colListDark = ['#F15A60', '#7AC367', '#5A9B15', '#FAA75B', '#9E67AB', '#CE7058', '#D77FB4', '#F1ADCB', '#B2A377'] """ Compute the colourmap associating one colour per group id. Also output the masks associating the node idx for each group id (can be used directly as filter in networkX display funtions) """ def getDIDColourMap(self, primArray): ids = orderedSet.OrderedSet() gfilter = {} for p in primArray: ids.add(p.did) if p.did not in gfilter: gfilter[p.did] = [] gfilter[p.did].append(p.uid) cmap = {} nbCol = len(self.colListMedium) for idx, did in enumerate(ids): print idx, idx%nbCol, self.colListMedium[idx%nbCol] cmap[did] = self.colListMedium[idx%nbCol] return cmap, gfilter
<commit_before><commit_msg>Add new class to compute the colourmap and the node filters accordingly to did<commit_after>
"""@package Colours This module provides the colourmaps used in globOpt to display primitives according to their gid """ import packages.primitive as primitive import packages.orderedSet as orderedSet class Colours(object): def __init__(self): self.colListMedium = ['#F15A60', '#7AC36A', '#5A9BD4', '#FAA75B', '#9E67AB', '#CE7058', '#D77FB4', '#F1ADCB', '#B2A377'] self.colListDark = ['#F15A60', '#7AC367', '#5A9B15', '#FAA75B', '#9E67AB', '#CE7058', '#D77FB4', '#F1ADCB', '#B2A377'] """ Compute the colourmap associating one colour per group id. Also output the masks associating the node idx for each group id (can be used directly as filter in networkX display funtions) """ def getDIDColourMap(self, primArray): ids = orderedSet.OrderedSet() gfilter = {} for p in primArray: ids.add(p.did) if p.did not in gfilter: gfilter[p.did] = [] gfilter[p.did].append(p.uid) cmap = {} nbCol = len(self.colListMedium) for idx, did in enumerate(ids): print idx, idx%nbCol, self.colListMedium[idx%nbCol] cmap[did] = self.colListMedium[idx%nbCol] return cmap, gfilter
Add new class to compute the colourmap and the node filters accordingly to did"""@package Colours This module provides the colourmaps used in globOpt to display primitives according to their gid """ import packages.primitive as primitive import packages.orderedSet as orderedSet class Colours(object): def __init__(self): self.colListMedium = ['#F15A60', '#7AC36A', '#5A9BD4', '#FAA75B', '#9E67AB', '#CE7058', '#D77FB4', '#F1ADCB', '#B2A377'] self.colListDark = ['#F15A60', '#7AC367', '#5A9B15', '#FAA75B', '#9E67AB', '#CE7058', '#D77FB4', '#F1ADCB', '#B2A377'] """ Compute the colourmap associating one colour per group id. Also output the masks associating the node idx for each group id (can be used directly as filter in networkX display funtions) """ def getDIDColourMap(self, primArray): ids = orderedSet.OrderedSet() gfilter = {} for p in primArray: ids.add(p.did) if p.did not in gfilter: gfilter[p.did] = [] gfilter[p.did].append(p.uid) cmap = {} nbCol = len(self.colListMedium) for idx, did in enumerate(ids): print idx, idx%nbCol, self.colListMedium[idx%nbCol] cmap[did] = self.colListMedium[idx%nbCol] return cmap, gfilter
<commit_before><commit_msg>Add new class to compute the colourmap and the node filters accordingly to did<commit_after>"""@package Colours This module provides the colourmaps used in globOpt to display primitives according to their gid """ import packages.primitive as primitive import packages.orderedSet as orderedSet class Colours(object): def __init__(self): self.colListMedium = ['#F15A60', '#7AC36A', '#5A9BD4', '#FAA75B', '#9E67AB', '#CE7058', '#D77FB4', '#F1ADCB', '#B2A377'] self.colListDark = ['#F15A60', '#7AC367', '#5A9B15', '#FAA75B', '#9E67AB', '#CE7058', '#D77FB4', '#F1ADCB', '#B2A377'] """ Compute the colourmap associating one colour per group id. Also output the masks associating the node idx for each group id (can be used directly as filter in networkX display funtions) """ def getDIDColourMap(self, primArray): ids = orderedSet.OrderedSet() gfilter = {} for p in primArray: ids.add(p.did) if p.did not in gfilter: gfilter[p.did] = [] gfilter[p.did].append(p.uid) cmap = {} nbCol = len(self.colListMedium) for idx, did in enumerate(ids): print idx, idx%nbCol, self.colListMedium[idx%nbCol] cmap[did] = self.colListMedium[idx%nbCol] return cmap, gfilter
833d114bd1bc396dc7c6b0434782f9e326319e88
readAptinaRAW.py
readAptinaRAW.py
import os import numpy import matplotlib.pyplot as plt Directory = '/scratch/tmp/DevWareX/MT9M001/DSL949A-NIR/' Folder = '1394629994_MT9M001_DSL949A-NIR_0.0_0.0f_040ms_090mm_to150mm' File = 'MT9M001_1280x1024_DSL949A-NIR_0.0_0.0f_040ms_090mm_to150mm_090mm.raw' Size = [int(File.split('_')[1].split('x')[1]), int(File.split('_')[1].split('x')[0])] # fromfile FileToLoad = os.path.join(Directory, Folder, File) FromFile = numpy.fromfile(FileToLoad, dtype=numpy.uint16).reshape(Size) #~ FromFile -= numpy.mean(FromFile) MemMap = numpy.memmap(FileToLoad, dtype=numpy.uint16, shape=(Size[0],Size[1])) #~ MemMap -= numpy.mean(MemMap) plt.figure(File) plt.subplot(121) plt.imshow(FromFile,cmap='gray') plt.title('numpy.fromfile > leaves file') plt.subplot(122) plt.imshow(MemMap,cmap='gray') plt.title('numpy.memmap > destroys file') plt.show() print 'Only use "numpy.memmap" for displaying files! If you perform some',\ 'calculations on the files (e.g "File -= numpy.mean(File)") these',\ 'calculations are immediately saved to disk, essentially destroying the',\ 'file! In this case use "numpy.fromfile"!'
Add file to read .RAW images from Aptina
Add file to read .RAW images from Aptina Reading the files via numpy.memmap can destroy the files if one performs calculations like Image -= numpy.mean(Image), since those changes are saved to disk immediately... Thus loading the .RAW files via numpy.fromfile is a better idea. This tiny script shows how to read the images generated with DevWare for display or further processing, for example in "DetectWhichImageIsFocusedBestAndCalculateMTF"...
Python
unlicense
habi/GlobalDiagnostiX,habi/GlobalDiagnostiX,habi/GlobalDiagnostiX
Add file to read .RAW images from Aptina Reading the files via numpy.memmap can destroy the files if one performs calculations like Image -= numpy.mean(Image), since those changes are saved to disk immediately... Thus loading the .RAW files via numpy.fromfile is a better idea. This tiny script shows how to read the images generated with DevWare for display or further processing, for example in "DetectWhichImageIsFocusedBestAndCalculateMTF"...
import os import numpy import matplotlib.pyplot as plt Directory = '/scratch/tmp/DevWareX/MT9M001/DSL949A-NIR/' Folder = '1394629994_MT9M001_DSL949A-NIR_0.0_0.0f_040ms_090mm_to150mm' File = 'MT9M001_1280x1024_DSL949A-NIR_0.0_0.0f_040ms_090mm_to150mm_090mm.raw' Size = [int(File.split('_')[1].split('x')[1]), int(File.split('_')[1].split('x')[0])] # fromfile FileToLoad = os.path.join(Directory, Folder, File) FromFile = numpy.fromfile(FileToLoad, dtype=numpy.uint16).reshape(Size) #~ FromFile -= numpy.mean(FromFile) MemMap = numpy.memmap(FileToLoad, dtype=numpy.uint16, shape=(Size[0],Size[1])) #~ MemMap -= numpy.mean(MemMap) plt.figure(File) plt.subplot(121) plt.imshow(FromFile,cmap='gray') plt.title('numpy.fromfile > leaves file') plt.subplot(122) plt.imshow(MemMap,cmap='gray') plt.title('numpy.memmap > destroys file') plt.show() print 'Only use "numpy.memmap" for displaying files! If you perform some',\ 'calculations on the files (e.g "File -= numpy.mean(File)") these',\ 'calculations are immediately saved to disk, essentially destroying the',\ 'file! In this case use "numpy.fromfile"!'
<commit_before><commit_msg>Add file to read .RAW images from Aptina Reading the files via numpy.memmap can destroy the files if one performs calculations like Image -= numpy.mean(Image), since those changes are saved to disk immediately... Thus loading the .RAW files via numpy.fromfile is a better idea. This tiny script shows how to read the images generated with DevWare for display or further processing, for example in "DetectWhichImageIsFocusedBestAndCalculateMTF"...<commit_after>
import os import numpy import matplotlib.pyplot as plt Directory = '/scratch/tmp/DevWareX/MT9M001/DSL949A-NIR/' Folder = '1394629994_MT9M001_DSL949A-NIR_0.0_0.0f_040ms_090mm_to150mm' File = 'MT9M001_1280x1024_DSL949A-NIR_0.0_0.0f_040ms_090mm_to150mm_090mm.raw' Size = [int(File.split('_')[1].split('x')[1]), int(File.split('_')[1].split('x')[0])] # fromfile FileToLoad = os.path.join(Directory, Folder, File) FromFile = numpy.fromfile(FileToLoad, dtype=numpy.uint16).reshape(Size) #~ FromFile -= numpy.mean(FromFile) MemMap = numpy.memmap(FileToLoad, dtype=numpy.uint16, shape=(Size[0],Size[1])) #~ MemMap -= numpy.mean(MemMap) plt.figure(File) plt.subplot(121) plt.imshow(FromFile,cmap='gray') plt.title('numpy.fromfile > leaves file') plt.subplot(122) plt.imshow(MemMap,cmap='gray') plt.title('numpy.memmap > destroys file') plt.show() print 'Only use "numpy.memmap" for displaying files! If you perform some',\ 'calculations on the files (e.g "File -= numpy.mean(File)") these',\ 'calculations are immediately saved to disk, essentially destroying the',\ 'file! In this case use "numpy.fromfile"!'
Add file to read .RAW images from Aptina Reading the files via numpy.memmap can destroy the files if one performs calculations like Image -= numpy.mean(Image), since those changes are saved to disk immediately... Thus loading the .RAW files via numpy.fromfile is a better idea. This tiny script shows how to read the images generated with DevWare for display or further processing, for example in "DetectWhichImageIsFocusedBestAndCalculateMTF"...import os import numpy import matplotlib.pyplot as plt Directory = '/scratch/tmp/DevWareX/MT9M001/DSL949A-NIR/' Folder = '1394629994_MT9M001_DSL949A-NIR_0.0_0.0f_040ms_090mm_to150mm' File = 'MT9M001_1280x1024_DSL949A-NIR_0.0_0.0f_040ms_090mm_to150mm_090mm.raw' Size = [int(File.split('_')[1].split('x')[1]), int(File.split('_')[1].split('x')[0])] # fromfile FileToLoad = os.path.join(Directory, Folder, File) FromFile = numpy.fromfile(FileToLoad, dtype=numpy.uint16).reshape(Size) #~ FromFile -= numpy.mean(FromFile) MemMap = numpy.memmap(FileToLoad, dtype=numpy.uint16, shape=(Size[0],Size[1])) #~ MemMap -= numpy.mean(MemMap) plt.figure(File) plt.subplot(121) plt.imshow(FromFile,cmap='gray') plt.title('numpy.fromfile > leaves file') plt.subplot(122) plt.imshow(MemMap,cmap='gray') plt.title('numpy.memmap > destroys file') plt.show() print 'Only use "numpy.memmap" for displaying files! If you perform some',\ 'calculations on the files (e.g "File -= numpy.mean(File)") these',\ 'calculations are immediately saved to disk, essentially destroying the',\ 'file! In this case use "numpy.fromfile"!'
<commit_before><commit_msg>Add file to read .RAW images from Aptina Reading the files via numpy.memmap can destroy the files if one performs calculations like Image -= numpy.mean(Image), since those changes are saved to disk immediately... Thus loading the .RAW files via numpy.fromfile is a better idea. This tiny script shows how to read the images generated with DevWare for display or further processing, for example in "DetectWhichImageIsFocusedBestAndCalculateMTF"...<commit_after>import os import numpy import matplotlib.pyplot as plt Directory = '/scratch/tmp/DevWareX/MT9M001/DSL949A-NIR/' Folder = '1394629994_MT9M001_DSL949A-NIR_0.0_0.0f_040ms_090mm_to150mm' File = 'MT9M001_1280x1024_DSL949A-NIR_0.0_0.0f_040ms_090mm_to150mm_090mm.raw' Size = [int(File.split('_')[1].split('x')[1]), int(File.split('_')[1].split('x')[0])] # fromfile FileToLoad = os.path.join(Directory, Folder, File) FromFile = numpy.fromfile(FileToLoad, dtype=numpy.uint16).reshape(Size) #~ FromFile -= numpy.mean(FromFile) MemMap = numpy.memmap(FileToLoad, dtype=numpy.uint16, shape=(Size[0],Size[1])) #~ MemMap -= numpy.mean(MemMap) plt.figure(File) plt.subplot(121) plt.imshow(FromFile,cmap='gray') plt.title('numpy.fromfile > leaves file') plt.subplot(122) plt.imshow(MemMap,cmap='gray') plt.title('numpy.memmap > destroys file') plt.show() print 'Only use "numpy.memmap" for displaying files! If you perform some',\ 'calculations on the files (e.g "File -= numpy.mean(File)") these',\ 'calculations are immediately saved to disk, essentially destroying the',\ 'file! In this case use "numpy.fromfile"!'
56fd675e5bf0bd68a73e21c244807c39a87a3eee
heufybot/modules/util/commandhandler.py
heufybot/modules/util/commandhandler.py
from twisted.plugin import IPlugin from heufybot.moduleinterface import BotModule, IBotModule from zope.interface import implements class CommandHandler(BotModule): implements(IPlugin, IBotModule) name = "CommandHandler" def hookBot(self, bot): self.bot = bot def actions(self): return [ ("message-channel", 1, self.handleChannelMessage), ("message-user", 1, self.handlePrivateMessage) ] def handleChannelMessage(self, server, channel, user, messageBody): pass def handlePrivateMessage(self, server, user, messageBody): pass def handleCommand(self, message): pass
Implement the command handler framework
Implement the command handler framework
Python
mit
Heufneutje/PyHeufyBot,Heufneutje/PyHeufyBot
Implement the command handler framework
from twisted.plugin import IPlugin from heufybot.moduleinterface import BotModule, IBotModule from zope.interface import implements class CommandHandler(BotModule): implements(IPlugin, IBotModule) name = "CommandHandler" def hookBot(self, bot): self.bot = bot def actions(self): return [ ("message-channel", 1, self.handleChannelMessage), ("message-user", 1, self.handlePrivateMessage) ] def handleChannelMessage(self, server, channel, user, messageBody): pass def handlePrivateMessage(self, server, user, messageBody): pass def handleCommand(self, message): pass
<commit_before><commit_msg>Implement the command handler framework<commit_after>
from twisted.plugin import IPlugin from heufybot.moduleinterface import BotModule, IBotModule from zope.interface import implements class CommandHandler(BotModule): implements(IPlugin, IBotModule) name = "CommandHandler" def hookBot(self, bot): self.bot = bot def actions(self): return [ ("message-channel", 1, self.handleChannelMessage), ("message-user", 1, self.handlePrivateMessage) ] def handleChannelMessage(self, server, channel, user, messageBody): pass def handlePrivateMessage(self, server, user, messageBody): pass def handleCommand(self, message): pass
Implement the command handler frameworkfrom twisted.plugin import IPlugin from heufybot.moduleinterface import BotModule, IBotModule from zope.interface import implements class CommandHandler(BotModule): implements(IPlugin, IBotModule) name = "CommandHandler" def hookBot(self, bot): self.bot = bot def actions(self): return [ ("message-channel", 1, self.handleChannelMessage), ("message-user", 1, self.handlePrivateMessage) ] def handleChannelMessage(self, server, channel, user, messageBody): pass def handlePrivateMessage(self, server, user, messageBody): pass def handleCommand(self, message): pass
<commit_before><commit_msg>Implement the command handler framework<commit_after>from twisted.plugin import IPlugin from heufybot.moduleinterface import BotModule, IBotModule from zope.interface import implements class CommandHandler(BotModule): implements(IPlugin, IBotModule) name = "CommandHandler" def hookBot(self, bot): self.bot = bot def actions(self): return [ ("message-channel", 1, self.handleChannelMessage), ("message-user", 1, self.handlePrivateMessage) ] def handleChannelMessage(self, server, channel, user, messageBody): pass def handlePrivateMessage(self, server, user, messageBody): pass def handleCommand(self, message): pass
26cad83ebb6466d66f1e9fd87e963af4b5247ecc
sort/heap_sort/python/heap_sort_ccsc.py
sort/heap_sort/python/heap_sort_ccsc.py
# Python program for implementation of heap Sort # To heapify subtree rooted at index i. # n is size of heap def heapify(arr, n, i): largest = i # Initialize largest as root l = 2 * i + 1 # left = 2*i + 1 r = 2 * i + 2 # right = 2*i + 2 # See if left child of root exists and is # greater than root if l < n and arr[i] < arr[l]: largest = l # See if right child of root exists and is # greater than root if r < n and arr[largest] < arr[r]: largest = r # Change root, if needed if largest != i: arr[i],arr[largest] = arr[largest],arr[i] # swap # Heapify the root. heapify(arr, n, largest) # The main function to sort an array of given size def heapSort(arr): n = len(arr) # Build a maxheap. # Since last parent will be at ((n//2)-1) we can start at that location. for i in range(n // 2 - 1, -1, -1): heapify(arr, n, i) # One by one extract elements for i in range(n-1, 0, -1): arr[i], arr[0] = arr[0], arr[i] # swap heapify(arr, i, 0) # Driver code to test above arr = [ 12, 11, 13, 5, 6, 7] heapSort(arr) n = len(arr) print ("Sorted array is") for i in range(n): print ("%d" %arr[i]), # This code is contributed by Chirag Chopra
Add Heap sort implemented in python
Add Heap sort implemented in python
Python
cc0-1.0
ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms,ZoranPandovski/al-go-rithms
Add Heap sort implemented in python
# Python program for implementation of heap Sort # To heapify subtree rooted at index i. # n is size of heap def heapify(arr, n, i): largest = i # Initialize largest as root l = 2 * i + 1 # left = 2*i + 1 r = 2 * i + 2 # right = 2*i + 2 # See if left child of root exists and is # greater than root if l < n and arr[i] < arr[l]: largest = l # See if right child of root exists and is # greater than root if r < n and arr[largest] < arr[r]: largest = r # Change root, if needed if largest != i: arr[i],arr[largest] = arr[largest],arr[i] # swap # Heapify the root. heapify(arr, n, largest) # The main function to sort an array of given size def heapSort(arr): n = len(arr) # Build a maxheap. # Since last parent will be at ((n//2)-1) we can start at that location. for i in range(n // 2 - 1, -1, -1): heapify(arr, n, i) # One by one extract elements for i in range(n-1, 0, -1): arr[i], arr[0] = arr[0], arr[i] # swap heapify(arr, i, 0) # Driver code to test above arr = [ 12, 11, 13, 5, 6, 7] heapSort(arr) n = len(arr) print ("Sorted array is") for i in range(n): print ("%d" %arr[i]), # This code is contributed by Chirag Chopra
<commit_before><commit_msg>Add Heap sort implemented in python<commit_after>
# Python program for implementation of heap Sort # To heapify subtree rooted at index i. # n is size of heap def heapify(arr, n, i): largest = i # Initialize largest as root l = 2 * i + 1 # left = 2*i + 1 r = 2 * i + 2 # right = 2*i + 2 # See if left child of root exists and is # greater than root if l < n and arr[i] < arr[l]: largest = l # See if right child of root exists and is # greater than root if r < n and arr[largest] < arr[r]: largest = r # Change root, if needed if largest != i: arr[i],arr[largest] = arr[largest],arr[i] # swap # Heapify the root. heapify(arr, n, largest) # The main function to sort an array of given size def heapSort(arr): n = len(arr) # Build a maxheap. # Since last parent will be at ((n//2)-1) we can start at that location. for i in range(n // 2 - 1, -1, -1): heapify(arr, n, i) # One by one extract elements for i in range(n-1, 0, -1): arr[i], arr[0] = arr[0], arr[i] # swap heapify(arr, i, 0) # Driver code to test above arr = [ 12, 11, 13, 5, 6, 7] heapSort(arr) n = len(arr) print ("Sorted array is") for i in range(n): print ("%d" %arr[i]), # This code is contributed by Chirag Chopra
Add Heap sort implemented in python# Python program for implementation of heap Sort # To heapify subtree rooted at index i. # n is size of heap def heapify(arr, n, i): largest = i # Initialize largest as root l = 2 * i + 1 # left = 2*i + 1 r = 2 * i + 2 # right = 2*i + 2 # See if left child of root exists and is # greater than root if l < n and arr[i] < arr[l]: largest = l # See if right child of root exists and is # greater than root if r < n and arr[largest] < arr[r]: largest = r # Change root, if needed if largest != i: arr[i],arr[largest] = arr[largest],arr[i] # swap # Heapify the root. heapify(arr, n, largest) # The main function to sort an array of given size def heapSort(arr): n = len(arr) # Build a maxheap. # Since last parent will be at ((n//2)-1) we can start at that location. for i in range(n // 2 - 1, -1, -1): heapify(arr, n, i) # One by one extract elements for i in range(n-1, 0, -1): arr[i], arr[0] = arr[0], arr[i] # swap heapify(arr, i, 0) # Driver code to test above arr = [ 12, 11, 13, 5, 6, 7] heapSort(arr) n = len(arr) print ("Sorted array is") for i in range(n): print ("%d" %arr[i]), # This code is contributed by Chirag Chopra
<commit_before><commit_msg>Add Heap sort implemented in python<commit_after># Python program for implementation of heap Sort # To heapify subtree rooted at index i. # n is size of heap def heapify(arr, n, i): largest = i # Initialize largest as root l = 2 * i + 1 # left = 2*i + 1 r = 2 * i + 2 # right = 2*i + 2 # See if left child of root exists and is # greater than root if l < n and arr[i] < arr[l]: largest = l # See if right child of root exists and is # greater than root if r < n and arr[largest] < arr[r]: largest = r # Change root, if needed if largest != i: arr[i],arr[largest] = arr[largest],arr[i] # swap # Heapify the root. heapify(arr, n, largest) # The main function to sort an array of given size def heapSort(arr): n = len(arr) # Build a maxheap. # Since last parent will be at ((n//2)-1) we can start at that location. for i in range(n // 2 - 1, -1, -1): heapify(arr, n, i) # One by one extract elements for i in range(n-1, 0, -1): arr[i], arr[0] = arr[0], arr[i] # swap heapify(arr, i, 0) # Driver code to test above arr = [ 12, 11, 13, 5, 6, 7] heapSort(arr) n = len(arr) print ("Sorted array is") for i in range(n): print ("%d" %arr[i]), # This code is contributed by Chirag Chopra
083957302452bdd966286bfd8d37d53dce8db7d3
pykeg/contrib/facebook/fbutil.py
pykeg/contrib/facebook/fbutil.py
import facebook def profile_for_user(user): profile = user.facebookprofile_set.all() if not profile: return None return profile[0] def session_for_user(user): profile = profile_for_user(user) if not profile: return None session = profile.session.all() if not session: return None return session[0] def stream_publish(user, **kwargs): session = session_for_user(user) if not session: raise ValueError, "No session." fb = facebook.Facebook(settings.FACEBOOK_API_KEY, settings.FACEBOOK_SECRET_KEY) fb.session_key = session.session_id fb.session_key_expires = 0 return fb.stream.publish(**kwargs)
Add utility methods for facebook stuff.
Add utility methods for facebook stuff.
Python
mit
Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server,Kegbot/kegbot-server
Add utility methods for facebook stuff.
import facebook def profile_for_user(user): profile = user.facebookprofile_set.all() if not profile: return None return profile[0] def session_for_user(user): profile = profile_for_user(user) if not profile: return None session = profile.session.all() if not session: return None return session[0] def stream_publish(user, **kwargs): session = session_for_user(user) if not session: raise ValueError, "No session." fb = facebook.Facebook(settings.FACEBOOK_API_KEY, settings.FACEBOOK_SECRET_KEY) fb.session_key = session.session_id fb.session_key_expires = 0 return fb.stream.publish(**kwargs)
<commit_before><commit_msg>Add utility methods for facebook stuff.<commit_after>
import facebook def profile_for_user(user): profile = user.facebookprofile_set.all() if not profile: return None return profile[0] def session_for_user(user): profile = profile_for_user(user) if not profile: return None session = profile.session.all() if not session: return None return session[0] def stream_publish(user, **kwargs): session = session_for_user(user) if not session: raise ValueError, "No session." fb = facebook.Facebook(settings.FACEBOOK_API_KEY, settings.FACEBOOK_SECRET_KEY) fb.session_key = session.session_id fb.session_key_expires = 0 return fb.stream.publish(**kwargs)
Add utility methods for facebook stuff.import facebook def profile_for_user(user): profile = user.facebookprofile_set.all() if not profile: return None return profile[0] def session_for_user(user): profile = profile_for_user(user) if not profile: return None session = profile.session.all() if not session: return None return session[0] def stream_publish(user, **kwargs): session = session_for_user(user) if not session: raise ValueError, "No session." fb = facebook.Facebook(settings.FACEBOOK_API_KEY, settings.FACEBOOK_SECRET_KEY) fb.session_key = session.session_id fb.session_key_expires = 0 return fb.stream.publish(**kwargs)
<commit_before><commit_msg>Add utility methods for facebook stuff.<commit_after>import facebook def profile_for_user(user): profile = user.facebookprofile_set.all() if not profile: return None return profile[0] def session_for_user(user): profile = profile_for_user(user) if not profile: return None session = profile.session.all() if not session: return None return session[0] def stream_publish(user, **kwargs): session = session_for_user(user) if not session: raise ValueError, "No session." fb = facebook.Facebook(settings.FACEBOOK_API_KEY, settings.FACEBOOK_SECRET_KEY) fb.session_key = session.session_id fb.session_key_expires = 0 return fb.stream.publish(**kwargs)
f0ac1914790e69fe786d6d3182cf15fd09302c28
integration-test/912-missing-building-part.py
integration-test/912-missing-building-part.py
# http://www.openstreetmap.org/way/287494678 z = 18 x = 77193 y = 98529 while z >= 16: assert_has_feature( z, x, y, 'buildings', { 'kind': 'building', 'id': 287494678 }) z -= 1 x /= 2 y /= 2
Add test for missing building part seen in production.
Add test for missing building part seen in production.
Python
mit
mapzen/vector-datasource,mapzen/vector-datasource,mapzen/vector-datasource
Add test for missing building part seen in production.
# http://www.openstreetmap.org/way/287494678 z = 18 x = 77193 y = 98529 while z >= 16: assert_has_feature( z, x, y, 'buildings', { 'kind': 'building', 'id': 287494678 }) z -= 1 x /= 2 y /= 2
<commit_before><commit_msg>Add test for missing building part seen in production.<commit_after>
# http://www.openstreetmap.org/way/287494678 z = 18 x = 77193 y = 98529 while z >= 16: assert_has_feature( z, x, y, 'buildings', { 'kind': 'building', 'id': 287494678 }) z -= 1 x /= 2 y /= 2
Add test for missing building part seen in production.# http://www.openstreetmap.org/way/287494678 z = 18 x = 77193 y = 98529 while z >= 16: assert_has_feature( z, x, y, 'buildings', { 'kind': 'building', 'id': 287494678 }) z -= 1 x /= 2 y /= 2
<commit_before><commit_msg>Add test for missing building part seen in production.<commit_after># http://www.openstreetmap.org/way/287494678 z = 18 x = 77193 y = 98529 while z >= 16: assert_has_feature( z, x, y, 'buildings', { 'kind': 'building', 'id': 287494678 }) z -= 1 x /= 2 y /= 2
5e81fca928862b1c9574f1092a131337735b63f4
tests/integration/iam/test_connection.py
tests/integration/iam/test_connection.py
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. # All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import boto import time from boto.compat import unittest class TestIAM(unittest.TestCase): def test_group_users(self): # A very basic test to create a group, a user, add the user # to the group and then delete everything iam = boto.connect_iam() name = 'boto-test-%d' % time.time() username = 'boto-test-user-%d' % time.time() iam.create_group(name) iam.create_user(username) iam.add_user_to_group(name, username) iam.remove_user_from_group(name, username) iam.delete_user(username) iam.delete_group(name)
Add basic IAM integration test
Add basic IAM integration test
Python
mit
lra/boto,jotes/boto,Asana/boto,nexusz99/boto,serviceagility/boto,ekalosak/boto,shipci/boto,yangchaogit/boto,varunarya10/boto,kouk/boto,podhmo/boto,j-carl/boto,janslow/boto,disruptek/boto,nikhilraog/boto,bryx-inc/boto,khagler/boto,TiVoMaker/boto,s0enke/boto,stevenbrichards/boto,acourtney2015/boto,weka-io/boto,rayluo/boto,awatts/boto,appneta/boto,zzzirk/boto,dimdung/boto,alex/boto,trademob/boto,revmischa/boto,appneta/boto,ramitsurana/boto,ddzialak/boto,felix-d/boto,shaunbrady/boto,abridgett/boto,kouk/boto,tpodowd/boto,vijaylbais/boto,ocadotechnology/boto,Pretio/boto,disruptek/boto,garnaat/boto,israelbenatar/boto,zachmullen/boto,alex/boto,campenberger/boto,alfredodeza/boto,weebygames/boto,SaranyaKarthikeyan/boto,bleib1dj/boto,nishigori/boto,ryansb/boto,darjus-amzn/boto,tpodowd/boto,elainexmas/boto,drbild/boto,pfhayes/boto,vishnugonela/boto,drbild/boto,jindongh/boto,rosmo/boto,clouddocx/boto
Add basic IAM integration test
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. # All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import boto import time from boto.compat import unittest class TestIAM(unittest.TestCase): def test_group_users(self): # A very basic test to create a group, a user, add the user # to the group and then delete everything iam = boto.connect_iam() name = 'boto-test-%d' % time.time() username = 'boto-test-user-%d' % time.time() iam.create_group(name) iam.create_user(username) iam.add_user_to_group(name, username) iam.remove_user_from_group(name, username) iam.delete_user(username) iam.delete_group(name)
<commit_before><commit_msg>Add basic IAM integration test<commit_after>
# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. # All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import boto import time from boto.compat import unittest class TestIAM(unittest.TestCase): def test_group_users(self): # A very basic test to create a group, a user, add the user # to the group and then delete everything iam = boto.connect_iam() name = 'boto-test-%d' % time.time() username = 'boto-test-user-%d' % time.time() iam.create_group(name) iam.create_user(username) iam.add_user_to_group(name, username) iam.remove_user_from_group(name, username) iam.delete_user(username) iam.delete_group(name)
Add basic IAM integration test# Copyright (c) 2014 Amazon.com, Inc. or its affiliates. # All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import boto import time from boto.compat import unittest class TestIAM(unittest.TestCase): def test_group_users(self): # A very basic test to create a group, a user, add the user # to the group and then delete everything iam = boto.connect_iam() name = 'boto-test-%d' % time.time() username = 'boto-test-user-%d' % time.time() iam.create_group(name) iam.create_user(username) iam.add_user_to_group(name, username) iam.remove_user_from_group(name, username) iam.delete_user(username) iam.delete_group(name)
<commit_before><commit_msg>Add basic IAM integration test<commit_after># Copyright (c) 2014 Amazon.com, Inc. or its affiliates. # All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import boto import time from boto.compat import unittest class TestIAM(unittest.TestCase): def test_group_users(self): # A very basic test to create a group, a user, add the user # to the group and then delete everything iam = boto.connect_iam() name = 'boto-test-%d' % time.time() username = 'boto-test-user-%d' % time.time() iam.create_group(name) iam.create_user(username) iam.add_user_to_group(name, username) iam.remove_user_from_group(name, username) iam.delete_user(username) iam.delete_group(name)
c3d87e837c85284baa132104e4843c3fd8f429d3
day-04-2.py
day-04-2.py
import hashlib puzzle_input = b'iwrupvqb' number = 100000 while True: key = puzzle_input + str(number).encode() if hashlib.md5(key).hexdigest()[:6] == '000000': break number += 1 print(number) # Now that I think about it, starting with 100,000 was probably not the right # thing to do. I could've easily never found my answer. But I did, and I guess # it probably saved a little time. So okay. # My answer: 9958218
Complete day 4 part 2
Complete day 4 part 2
Python
mit
foxscotch/advent-of-code,foxscotch/advent-of-code
Complete day 4 part 2
import hashlib puzzle_input = b'iwrupvqb' number = 100000 while True: key = puzzle_input + str(number).encode() if hashlib.md5(key).hexdigest()[:6] == '000000': break number += 1 print(number) # Now that I think about it, starting with 100,000 was probably not the right # thing to do. I could've easily never found my answer. But I did, and I guess # it probably saved a little time. So okay. # My answer: 9958218
<commit_before><commit_msg>Complete day 4 part 2<commit_after>
import hashlib puzzle_input = b'iwrupvqb' number = 100000 while True: key = puzzle_input + str(number).encode() if hashlib.md5(key).hexdigest()[:6] == '000000': break number += 1 print(number) # Now that I think about it, starting with 100,000 was probably not the right # thing to do. I could've easily never found my answer. But I did, and I guess # it probably saved a little time. So okay. # My answer: 9958218
Complete day 4 part 2import hashlib puzzle_input = b'iwrupvqb' number = 100000 while True: key = puzzle_input + str(number).encode() if hashlib.md5(key).hexdigest()[:6] == '000000': break number += 1 print(number) # Now that I think about it, starting with 100,000 was probably not the right # thing to do. I could've easily never found my answer. But I did, and I guess # it probably saved a little time. So okay. # My answer: 9958218
<commit_before><commit_msg>Complete day 4 part 2<commit_after>import hashlib puzzle_input = b'iwrupvqb' number = 100000 while True: key = puzzle_input + str(number).encode() if hashlib.md5(key).hexdigest()[:6] == '000000': break number += 1 print(number) # Now that I think about it, starting with 100,000 was probably not the right # thing to do. I could've easily never found my answer. But I did, and I guess # it probably saved a little time. So okay. # My answer: 9958218
1dd1111bd1bab62ed900d74f347a7fe10d03eb03
test/release.py
test/release.py
from __future__ import absolute_import import user_agent import re def test_changelog(): """ Parse changelog and ensure that it contains * unreleased version younger than release date * release version has a date """ re_date = re.compile(r'^\d{4}-\d{2}-\d{2}$') ver_dates = {} ver_history = [] for line in open('CHANGELOG.md'): if line.startswith('## ['): ver = line.split('[')[1].split(']')[0] date = line.split('-', 1)[1].strip().lower() ver_dates[ver] = date ver_history.append(ver) release = user_agent.__version__ print(ver_dates) print(ver_history) assert 'unreleased' not in ver_dates[release] assert re_date.match(ver_dates[release]) assert ver_history.index(release) == 1
Test that changelog is not outdated
Test that changelog is not outdated
Python
mit
lorien/user_agent
Test that changelog is not outdated
from __future__ import absolute_import import user_agent import re def test_changelog(): """ Parse changelog and ensure that it contains * unreleased version younger than release date * release version has a date """ re_date = re.compile(r'^\d{4}-\d{2}-\d{2}$') ver_dates = {} ver_history = [] for line in open('CHANGELOG.md'): if line.startswith('## ['): ver = line.split('[')[1].split(']')[0] date = line.split('-', 1)[1].strip().lower() ver_dates[ver] = date ver_history.append(ver) release = user_agent.__version__ print(ver_dates) print(ver_history) assert 'unreleased' not in ver_dates[release] assert re_date.match(ver_dates[release]) assert ver_history.index(release) == 1
<commit_before><commit_msg>Test that changelog is not outdated<commit_after>
from __future__ import absolute_import import user_agent import re def test_changelog(): """ Parse changelog and ensure that it contains * unreleased version younger than release date * release version has a date """ re_date = re.compile(r'^\d{4}-\d{2}-\d{2}$') ver_dates = {} ver_history = [] for line in open('CHANGELOG.md'): if line.startswith('## ['): ver = line.split('[')[1].split(']')[0] date = line.split('-', 1)[1].strip().lower() ver_dates[ver] = date ver_history.append(ver) release = user_agent.__version__ print(ver_dates) print(ver_history) assert 'unreleased' not in ver_dates[release] assert re_date.match(ver_dates[release]) assert ver_history.index(release) == 1
Test that changelog is not outdatedfrom __future__ import absolute_import import user_agent import re def test_changelog(): """ Parse changelog and ensure that it contains * unreleased version younger than release date * release version has a date """ re_date = re.compile(r'^\d{4}-\d{2}-\d{2}$') ver_dates = {} ver_history = [] for line in open('CHANGELOG.md'): if line.startswith('## ['): ver = line.split('[')[1].split(']')[0] date = line.split('-', 1)[1].strip().lower() ver_dates[ver] = date ver_history.append(ver) release = user_agent.__version__ print(ver_dates) print(ver_history) assert 'unreleased' not in ver_dates[release] assert re_date.match(ver_dates[release]) assert ver_history.index(release) == 1
<commit_before><commit_msg>Test that changelog is not outdated<commit_after>from __future__ import absolute_import import user_agent import re def test_changelog(): """ Parse changelog and ensure that it contains * unreleased version younger than release date * release version has a date """ re_date = re.compile(r'^\d{4}-\d{2}-\d{2}$') ver_dates = {} ver_history = [] for line in open('CHANGELOG.md'): if line.startswith('## ['): ver = line.split('[')[1].split(']')[0] date = line.split('-', 1)[1].strip().lower() ver_dates[ver] = date ver_history.append(ver) release = user_agent.__version__ print(ver_dates) print(ver_history) assert 'unreleased' not in ver_dates[release] assert re_date.match(ver_dates[release]) assert ver_history.index(release) == 1
1b83a31090cd803d2eca0b9caed0f4cc9a149fbd
cubes/stores.py
cubes/stores.py
from .errors import * from .browser import AggregationBrowser from .extensions import get_namespace, initialize_namespace __all__ = ( "open_store", "Store" ) def open_store(name, **options): """Gets a new instance of a model provider with name `name`.""" ns = get_namespace("stores") if not ns: ns = initialize_namespace("stores", root_class=Store, suffix="_store") try: factory = ns[name] except KeyError: raise CubesError("Unable to find store '%s'" % name) return factory(**options) def create_browser(type_, cube, store, locale, **options): """Creates a new browser.""" ns = get_namespace("browsers") if not ns: ns = initialize_namespace("browsers", root_class=AggregationBrowser, suffix="_browser") try: factory = ns[type_] except KeyError: raise CubesError("Unable to find browser of type '%s'" % type_) return factory(cube=cube, store=store, locale=locale, **options) class Store(object): """Abstract class to find other stores through the class hierarchy.""" pass
from .errors import * from .browser import AggregationBrowser from .extensions import get_namespace, initialize_namespace __all__ = ( "open_store", "Store" ) def open_store(name, **options): """Gets a new instance of a model provider with name `name`.""" ns = get_namespace("stores") if not ns: ns = initialize_namespace("stores", root_class=Store, suffix="_store") try: factory = ns[name] except KeyError: raise ConfigurationError("Unknown store '%s'" % name) return factory(**options) def create_browser(type_, cube, store, locale, **options): """Creates a new browser.""" ns = get_namespace("browsers") if not ns: ns = initialize_namespace("browsers", root_class=AggregationBrowser, suffix="_browser") try: factory = ns[type_] except KeyError: raise ConfigurationError("Unable to find browser of type '%s'" % type_) return factory(cube=cube, store=store, locale=locale, **options) class Store(object): """Abstract class to find other stores through the class hierarchy.""" pass
Raise ConfigurationError error that causes server to fail and dump whole stacktrace
Raise ConfigurationError error that causes server to fail and dump whole stacktrace
Python
mit
noyeitan/cubes,jell0720/cubes,zejn/cubes,jell0720/cubes,she11c0de/cubes,cesarmarinhorj/cubes,jell0720/cubes,she11c0de/cubes,cesarmarinhorj/cubes,she11c0de/cubes,zejn/cubes,pombredanne/cubes,ubreddy/cubes,cesarmarinhorj/cubes,pombredanne/cubes,pombredanne/cubes,noyeitan/cubes,ubreddy/cubes,zejn/cubes,ubreddy/cubes,noyeitan/cubes
from .errors import * from .browser import AggregationBrowser from .extensions import get_namespace, initialize_namespace __all__ = ( "open_store", "Store" ) def open_store(name, **options): """Gets a new instance of a model provider with name `name`.""" ns = get_namespace("stores") if not ns: ns = initialize_namespace("stores", root_class=Store, suffix="_store") try: factory = ns[name] except KeyError: raise CubesError("Unable to find store '%s'" % name) return factory(**options) def create_browser(type_, cube, store, locale, **options): """Creates a new browser.""" ns = get_namespace("browsers") if not ns: ns = initialize_namespace("browsers", root_class=AggregationBrowser, suffix="_browser") try: factory = ns[type_] except KeyError: raise CubesError("Unable to find browser of type '%s'" % type_) return factory(cube=cube, store=store, locale=locale, **options) class Store(object): """Abstract class to find other stores through the class hierarchy.""" pass Raise ConfigurationError error that causes server to fail and dump whole stacktrace
from .errors import * from .browser import AggregationBrowser from .extensions import get_namespace, initialize_namespace __all__ = ( "open_store", "Store" ) def open_store(name, **options): """Gets a new instance of a model provider with name `name`.""" ns = get_namespace("stores") if not ns: ns = initialize_namespace("stores", root_class=Store, suffix="_store") try: factory = ns[name] except KeyError: raise ConfigurationError("Unknown store '%s'" % name) return factory(**options) def create_browser(type_, cube, store, locale, **options): """Creates a new browser.""" ns = get_namespace("browsers") if not ns: ns = initialize_namespace("browsers", root_class=AggregationBrowser, suffix="_browser") try: factory = ns[type_] except KeyError: raise ConfigurationError("Unable to find browser of type '%s'" % type_) return factory(cube=cube, store=store, locale=locale, **options) class Store(object): """Abstract class to find other stores through the class hierarchy.""" pass
<commit_before>from .errors import * from .browser import AggregationBrowser from .extensions import get_namespace, initialize_namespace __all__ = ( "open_store", "Store" ) def open_store(name, **options): """Gets a new instance of a model provider with name `name`.""" ns = get_namespace("stores") if not ns: ns = initialize_namespace("stores", root_class=Store, suffix="_store") try: factory = ns[name] except KeyError: raise CubesError("Unable to find store '%s'" % name) return factory(**options) def create_browser(type_, cube, store, locale, **options): """Creates a new browser.""" ns = get_namespace("browsers") if not ns: ns = initialize_namespace("browsers", root_class=AggregationBrowser, suffix="_browser") try: factory = ns[type_] except KeyError: raise CubesError("Unable to find browser of type '%s'" % type_) return factory(cube=cube, store=store, locale=locale, **options) class Store(object): """Abstract class to find other stores through the class hierarchy.""" pass <commit_msg>Raise ConfigurationError error that causes server to fail and dump whole stacktrace<commit_after>
from .errors import * from .browser import AggregationBrowser from .extensions import get_namespace, initialize_namespace __all__ = ( "open_store", "Store" ) def open_store(name, **options): """Gets a new instance of a model provider with name `name`.""" ns = get_namespace("stores") if not ns: ns = initialize_namespace("stores", root_class=Store, suffix="_store") try: factory = ns[name] except KeyError: raise ConfigurationError("Unknown store '%s'" % name) return factory(**options) def create_browser(type_, cube, store, locale, **options): """Creates a new browser.""" ns = get_namespace("browsers") if not ns: ns = initialize_namespace("browsers", root_class=AggregationBrowser, suffix="_browser") try: factory = ns[type_] except KeyError: raise ConfigurationError("Unable to find browser of type '%s'" % type_) return factory(cube=cube, store=store, locale=locale, **options) class Store(object): """Abstract class to find other stores through the class hierarchy.""" pass
from .errors import * from .browser import AggregationBrowser from .extensions import get_namespace, initialize_namespace __all__ = ( "open_store", "Store" ) def open_store(name, **options): """Gets a new instance of a model provider with name `name`.""" ns = get_namespace("stores") if not ns: ns = initialize_namespace("stores", root_class=Store, suffix="_store") try: factory = ns[name] except KeyError: raise CubesError("Unable to find store '%s'" % name) return factory(**options) def create_browser(type_, cube, store, locale, **options): """Creates a new browser.""" ns = get_namespace("browsers") if not ns: ns = initialize_namespace("browsers", root_class=AggregationBrowser, suffix="_browser") try: factory = ns[type_] except KeyError: raise CubesError("Unable to find browser of type '%s'" % type_) return factory(cube=cube, store=store, locale=locale, **options) class Store(object): """Abstract class to find other stores through the class hierarchy.""" pass Raise ConfigurationError error that causes server to fail and dump whole stacktracefrom .errors import * from .browser import AggregationBrowser from .extensions import get_namespace, initialize_namespace __all__ = ( "open_store", "Store" ) def open_store(name, **options): """Gets a new instance of a model provider with name `name`.""" ns = get_namespace("stores") if not ns: ns = initialize_namespace("stores", root_class=Store, suffix="_store") try: factory = ns[name] except KeyError: raise ConfigurationError("Unknown store '%s'" % name) return factory(**options) def create_browser(type_, cube, store, locale, **options): """Creates a new browser.""" ns = get_namespace("browsers") if not ns: ns = initialize_namespace("browsers", root_class=AggregationBrowser, suffix="_browser") try: factory = ns[type_] except KeyError: raise ConfigurationError("Unable to find browser of type '%s'" % type_) return factory(cube=cube, store=store, locale=locale, **options) class Store(object): """Abstract class to find other stores through the class hierarchy.""" pass
<commit_before>from .errors import * from .browser import AggregationBrowser from .extensions import get_namespace, initialize_namespace __all__ = ( "open_store", "Store" ) def open_store(name, **options): """Gets a new instance of a model provider with name `name`.""" ns = get_namespace("stores") if not ns: ns = initialize_namespace("stores", root_class=Store, suffix="_store") try: factory = ns[name] except KeyError: raise CubesError("Unable to find store '%s'" % name) return factory(**options) def create_browser(type_, cube, store, locale, **options): """Creates a new browser.""" ns = get_namespace("browsers") if not ns: ns = initialize_namespace("browsers", root_class=AggregationBrowser, suffix="_browser") try: factory = ns[type_] except KeyError: raise CubesError("Unable to find browser of type '%s'" % type_) return factory(cube=cube, store=store, locale=locale, **options) class Store(object): """Abstract class to find other stores through the class hierarchy.""" pass <commit_msg>Raise ConfigurationError error that causes server to fail and dump whole stacktrace<commit_after>from .errors import * from .browser import AggregationBrowser from .extensions import get_namespace, initialize_namespace __all__ = ( "open_store", "Store" ) def open_store(name, **options): """Gets a new instance of a model provider with name `name`.""" ns = get_namespace("stores") if not ns: ns = initialize_namespace("stores", root_class=Store, suffix="_store") try: factory = ns[name] except KeyError: raise ConfigurationError("Unknown store '%s'" % name) return factory(**options) def create_browser(type_, cube, store, locale, **options): """Creates a new browser.""" ns = get_namespace("browsers") if not ns: ns = initialize_namespace("browsers", root_class=AggregationBrowser, suffix="_browser") try: factory = ns[type_] except KeyError: raise ConfigurationError("Unable to find browser of type '%s'" % type_) return factory(cube=cube, store=store, locale=locale, **options) class Store(object): """Abstract class to find other stores through the class hierarchy.""" pass
4046c743323a4357864afcac482a5625ed71c184
euler006.py
euler006.py
#!/usr/bin/python limit = 100 sum_sq = ((limit + 1) * limit) / 2 sum_sq *= sum_sq sq_sum = (limit * (limit + 1) * ((limit * 2) + 1)) / 6 print (int (sum_sq - sq_sum))
Add solution for problem 6
Add solution for problem 6
Python
mit
cifvts/PyEuler
Add solution for problem 6
#!/usr/bin/python limit = 100 sum_sq = ((limit + 1) * limit) / 2 sum_sq *= sum_sq sq_sum = (limit * (limit + 1) * ((limit * 2) + 1)) / 6 print (int (sum_sq - sq_sum))
<commit_before><commit_msg>Add solution for problem 6<commit_after>
#!/usr/bin/python limit = 100 sum_sq = ((limit + 1) * limit) / 2 sum_sq *= sum_sq sq_sum = (limit * (limit + 1) * ((limit * 2) + 1)) / 6 print (int (sum_sq - sq_sum))
Add solution for problem 6#!/usr/bin/python limit = 100 sum_sq = ((limit + 1) * limit) / 2 sum_sq *= sum_sq sq_sum = (limit * (limit + 1) * ((limit * 2) + 1)) / 6 print (int (sum_sq - sq_sum))
<commit_before><commit_msg>Add solution for problem 6<commit_after>#!/usr/bin/python limit = 100 sum_sq = ((limit + 1) * limit) / 2 sum_sq *= sum_sq sq_sum = (limit * (limit + 1) * ((limit * 2) + 1)) / 6 print (int (sum_sq - sq_sum))
337e60c3d63b56b1237e3d5b052a96f3824cc6c2
corehq/apps/sms/management/commands/migrate_sms_to_sql.py
corehq/apps/sms/management/commands/migrate_sms_to_sql.py
from corehq.apps.sms.models import SMSLog, SMS from custom.fri.models import FRISMSLog from dimagi.utils.couch.database import iter_docs from django.core.management.base import BaseCommand, CommandError from optparse import make_option class Command(BaseCommand): args = "" help = ("Migrates SMSLog to SMS") option_list = BaseCommand.option_list + ( make_option("--balance_only", action="store_true", dest="balance_only", default=False, help="Include this option to only run the balancing step."), ) def get_sms_couch_ids(self): result = SMSLog.view( 'sms/by_domain', include_docs=False, reduce=False, ).all() return [row['id'] for row in result if row['key'][1] == 'SMSLog'] def run_migration(self): count = 0 ids = self.get_sms_couch_ids() total_count = len(ids) for doc in iter_docs(FRISMSLog.get_db(), ids): couch_sms = FRISMSLog.wrap(doc) try: couch_sms._migration_do_sync() except Exception as e: print 'Could not sync SMSLog %s: %s' % (couch_sms._id, e) count += 1 if (count % 10000) == 0: print 'Processed %s / %s documents' % (count, total_count) def balance(self): sql_count = SMS.objects.count() couch_count = len(self.get_sms_couch_ids()) print "SQL Count: %s, Couch Count: %s" % (sql_count, couch_count) def handle(self, *args, **options): if not options['balance_only']: self.run_migration() self.balance()
Add command to migrate SMSLog to SQL
Add command to migrate SMSLog to SQL
Python
bsd-3-clause
dimagi/commcare-hq,puttarajubr/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,qedsoftware/commcare-hq,qedsoftware/commcare-hq,puttarajubr/commcare-hq
Add command to migrate SMSLog to SQL
from corehq.apps.sms.models import SMSLog, SMS from custom.fri.models import FRISMSLog from dimagi.utils.couch.database import iter_docs from django.core.management.base import BaseCommand, CommandError from optparse import make_option class Command(BaseCommand): args = "" help = ("Migrates SMSLog to SMS") option_list = BaseCommand.option_list + ( make_option("--balance_only", action="store_true", dest="balance_only", default=False, help="Include this option to only run the balancing step."), ) def get_sms_couch_ids(self): result = SMSLog.view( 'sms/by_domain', include_docs=False, reduce=False, ).all() return [row['id'] for row in result if row['key'][1] == 'SMSLog'] def run_migration(self): count = 0 ids = self.get_sms_couch_ids() total_count = len(ids) for doc in iter_docs(FRISMSLog.get_db(), ids): couch_sms = FRISMSLog.wrap(doc) try: couch_sms._migration_do_sync() except Exception as e: print 'Could not sync SMSLog %s: %s' % (couch_sms._id, e) count += 1 if (count % 10000) == 0: print 'Processed %s / %s documents' % (count, total_count) def balance(self): sql_count = SMS.objects.count() couch_count = len(self.get_sms_couch_ids()) print "SQL Count: %s, Couch Count: %s" % (sql_count, couch_count) def handle(self, *args, **options): if not options['balance_only']: self.run_migration() self.balance()
<commit_before><commit_msg>Add command to migrate SMSLog to SQL<commit_after>
from corehq.apps.sms.models import SMSLog, SMS from custom.fri.models import FRISMSLog from dimagi.utils.couch.database import iter_docs from django.core.management.base import BaseCommand, CommandError from optparse import make_option class Command(BaseCommand): args = "" help = ("Migrates SMSLog to SMS") option_list = BaseCommand.option_list + ( make_option("--balance_only", action="store_true", dest="balance_only", default=False, help="Include this option to only run the balancing step."), ) def get_sms_couch_ids(self): result = SMSLog.view( 'sms/by_domain', include_docs=False, reduce=False, ).all() return [row['id'] for row in result if row['key'][1] == 'SMSLog'] def run_migration(self): count = 0 ids = self.get_sms_couch_ids() total_count = len(ids) for doc in iter_docs(FRISMSLog.get_db(), ids): couch_sms = FRISMSLog.wrap(doc) try: couch_sms._migration_do_sync() except Exception as e: print 'Could not sync SMSLog %s: %s' % (couch_sms._id, e) count += 1 if (count % 10000) == 0: print 'Processed %s / %s documents' % (count, total_count) def balance(self): sql_count = SMS.objects.count() couch_count = len(self.get_sms_couch_ids()) print "SQL Count: %s, Couch Count: %s" % (sql_count, couch_count) def handle(self, *args, **options): if not options['balance_only']: self.run_migration() self.balance()
Add command to migrate SMSLog to SQLfrom corehq.apps.sms.models import SMSLog, SMS from custom.fri.models import FRISMSLog from dimagi.utils.couch.database import iter_docs from django.core.management.base import BaseCommand, CommandError from optparse import make_option class Command(BaseCommand): args = "" help = ("Migrates SMSLog to SMS") option_list = BaseCommand.option_list + ( make_option("--balance_only", action="store_true", dest="balance_only", default=False, help="Include this option to only run the balancing step."), ) def get_sms_couch_ids(self): result = SMSLog.view( 'sms/by_domain', include_docs=False, reduce=False, ).all() return [row['id'] for row in result if row['key'][1] == 'SMSLog'] def run_migration(self): count = 0 ids = self.get_sms_couch_ids() total_count = len(ids) for doc in iter_docs(FRISMSLog.get_db(), ids): couch_sms = FRISMSLog.wrap(doc) try: couch_sms._migration_do_sync() except Exception as e: print 'Could not sync SMSLog %s: %s' % (couch_sms._id, e) count += 1 if (count % 10000) == 0: print 'Processed %s / %s documents' % (count, total_count) def balance(self): sql_count = SMS.objects.count() couch_count = len(self.get_sms_couch_ids()) print "SQL Count: %s, Couch Count: %s" % (sql_count, couch_count) def handle(self, *args, **options): if not options['balance_only']: self.run_migration() self.balance()
<commit_before><commit_msg>Add command to migrate SMSLog to SQL<commit_after>from corehq.apps.sms.models import SMSLog, SMS from custom.fri.models import FRISMSLog from dimagi.utils.couch.database import iter_docs from django.core.management.base import BaseCommand, CommandError from optparse import make_option class Command(BaseCommand): args = "" help = ("Migrates SMSLog to SMS") option_list = BaseCommand.option_list + ( make_option("--balance_only", action="store_true", dest="balance_only", default=False, help="Include this option to only run the balancing step."), ) def get_sms_couch_ids(self): result = SMSLog.view( 'sms/by_domain', include_docs=False, reduce=False, ).all() return [row['id'] for row in result if row['key'][1] == 'SMSLog'] def run_migration(self): count = 0 ids = self.get_sms_couch_ids() total_count = len(ids) for doc in iter_docs(FRISMSLog.get_db(), ids): couch_sms = FRISMSLog.wrap(doc) try: couch_sms._migration_do_sync() except Exception as e: print 'Could not sync SMSLog %s: %s' % (couch_sms._id, e) count += 1 if (count % 10000) == 0: print 'Processed %s / %s documents' % (count, total_count) def balance(self): sql_count = SMS.objects.count() couch_count = len(self.get_sms_couch_ids()) print "SQL Count: %s, Couch Count: %s" % (sql_count, couch_count) def handle(self, *args, **options): if not options['balance_only']: self.run_migration() self.balance()
d1588bdf0a672de8d7d4f4f9cddcc236f5b9026e
examples/plotting/file/properties_alpha.py
examples/plotting/file/properties_alpha.py
import bokeh.plotting as plt from itertools import product plt.output_file('properties_alpha.html') cats = ['RGB', 'RGBA', 'Alpha+RGB', 'Alpha+RGBA'] p = plt.figure(x_range=cats, y_range=cats, title="Fill and Line Color Property Combinations") alpha = 0.5 fill_color = (242, 44, 64) fill_color_alpha = (242, 44, 64, alpha) line_color = (64, 126, 231) line_color_alpha = (64, 126, 231, alpha) # define fill and line color combinations fill = [(1, {'fill_color': fill_color}), (2, {'fill_color': fill_color_alpha}), (3, {'fill_alpha': alpha, 'fill_color': fill_color}), (4, {'fill_alpha': alpha, 'fill_color': fill_color_alpha})] line = [(1, {'line_color': line_color}), (2, {'line_color': line_color_alpha}), (3, {'line_alpha': alpha, 'line_color': line_color}), (4, {'line_alpha': alpha, 'line_color': line_color_alpha})] # plot intersection of fill and line combinations combinations = product(fill, line) for comb in combinations: x, fill_options = comb[0] y, line_options = comb[1] options = fill_options.copy() options.update(line_options) p.circle(x, y, line_width=7, size=50, **options) p.xaxis[0].axis_label = "Fill Options" p.yaxis[0].axis_label = "Line Options" plt.show(p)
Add plot for color property combinations to examples.
Add plot for color property combinations to examples.
Python
bsd-3-clause
CrazyGuo/bokeh,ericdill/bokeh,saifrahmed/bokeh,stuart-knock/bokeh,schoolie/bokeh,PythonCharmers/bokeh,alan-unravel/bokeh,percyfal/bokeh,mindriot101/bokeh,satishgoda/bokeh,roxyboy/bokeh,daodaoliang/bokeh,ChristosChristofidis/bokeh,rs2/bokeh,Karel-van-de-Plassche/bokeh,aavanian/bokeh,ChristosChristofidis/bokeh,Karel-van-de-Plassche/bokeh,CrazyGuo/bokeh,msarahan/bokeh,stonebig/bokeh,mindriot101/bokeh,bokeh/bokeh,rothnic/bokeh,bokeh/bokeh,aiguofer/bokeh,laurent-george/bokeh,xguse/bokeh,timothydmorton/bokeh,stuart-knock/bokeh,mutirri/bokeh,CrazyGuo/bokeh,birdsarah/bokeh,draperjames/bokeh,justacec/bokeh,msarahan/bokeh,tacaswell/bokeh,bsipocz/bokeh,josherick/bokeh,jakirkham/bokeh,rs2/bokeh,timsnyder/bokeh,draperjames/bokeh,laurent-george/bokeh,ericmjl/bokeh,muku42/bokeh,Karel-van-de-Plassche/bokeh,phobson/bokeh,caseyclements/bokeh,clairetang6/bokeh,ChristosChristofidis/bokeh,KasperPRasmussen/bokeh,srinathv/bokeh,jakirkham/bokeh,maxalbert/bokeh,htygithub/bokeh,Karel-van-de-Plassche/bokeh,dennisobrien/bokeh,gpfreitas/bokeh,gpfreitas/bokeh,azjps/bokeh,aavanian/bokeh,ChinaQuants/bokeh,josherick/bokeh,tacaswell/bokeh,ericmjl/bokeh,muku42/bokeh,mutirri/bokeh,aiguofer/bokeh,stuart-knock/bokeh,aiguofer/bokeh,tacaswell/bokeh,timsnyder/bokeh,justacec/bokeh,aavanian/bokeh,dennisobrien/bokeh,quasiben/bokeh,stonebig/bokeh,dennisobrien/bokeh,PythonCharmers/bokeh,daodaoliang/bokeh,rhiever/bokeh,rothnic/bokeh,paultcochrane/bokeh,bsipocz/bokeh,mindriot101/bokeh,ptitjano/bokeh,akloster/bokeh,muku42/bokeh,paultcochrane/bokeh,carlvlewis/bokeh,justacec/bokeh,alan-unravel/bokeh,laurent-george/bokeh,phobson/bokeh,maxalbert/bokeh,schoolie/bokeh,alan-unravel/bokeh,xguse/bokeh,carlvlewis/bokeh,eteq/bokeh,aavanian/bokeh,azjps/bokeh,percyfal/bokeh,deeplook/bokeh,ericdill/bokeh,draperjames/bokeh,eteq/bokeh,carlvlewis/bokeh,timsnyder/bokeh,percyfal/bokeh,ptitjano/bokeh,xguse/bokeh,abele/bokeh,mindriot101/bokeh,matbra/bokeh,phobson/bokeh,stonebig/bokeh,josherick/bokeh,alan-unravel/bokeh,aavanian/bokeh,bsipocz/bokeh,dennisobrien/bokeh,evidation-health/bokeh,rothnic/bokeh,Karel-van-de-Plassche/bokeh,matbra/bokeh,birdsarah/bokeh,DuCorey/bokeh,KasperPRasmussen/bokeh,KasperPRasmussen/bokeh,aiguofer/bokeh,rs2/bokeh,htygithub/bokeh,ahmadia/bokeh,quasiben/bokeh,paultcochrane/bokeh,draperjames/bokeh,jakirkham/bokeh,timsnyder/bokeh,caseyclements/bokeh,evidation-health/bokeh,maxalbert/bokeh,timothydmorton/bokeh,azjps/bokeh,laurent-george/bokeh,philippjfr/bokeh,evidation-health/bokeh,saifrahmed/bokeh,ericdill/bokeh,azjps/bokeh,timsnyder/bokeh,satishgoda/bokeh,tacaswell/bokeh,bokeh/bokeh,jplourenco/bokeh,srinathv/bokeh,timothydmorton/bokeh,awanke/bokeh,phobson/bokeh,evidation-health/bokeh,srinathv/bokeh,satishgoda/bokeh,ptitjano/bokeh,mutirri/bokeh,ericdill/bokeh,philippjfr/bokeh,matbra/bokeh,ChinaQuants/bokeh,satishgoda/bokeh,akloster/bokeh,khkaminska/bokeh,rs2/bokeh,ericmjl/bokeh,birdsarah/bokeh,roxyboy/bokeh,xguse/bokeh,deeplook/bokeh,bokeh/bokeh,dennisobrien/bokeh,eteq/bokeh,ptitjano/bokeh,rhiever/bokeh,DuCorey/bokeh,paultcochrane/bokeh,PythonCharmers/bokeh,KasperPRasmussen/bokeh,jakirkham/bokeh,DuCorey/bokeh,ChinaQuants/bokeh,schoolie/bokeh,bsipocz/bokeh,stonebig/bokeh,jplourenco/bokeh,philippjfr/bokeh,schoolie/bokeh,rs2/bokeh,gpfreitas/bokeh,birdsarah/bokeh,ahmadia/bokeh,timothydmorton/bokeh,maxalbert/bokeh,khkaminska/bokeh,philippjfr/bokeh,aiguofer/bokeh,phobson/bokeh,eteq/bokeh,carlvlewis/bokeh,clairetang6/bokeh,PythonCharmers/bokeh,azjps/bokeh,clairetang6/bokeh,daodaoliang/bokeh,saifrahmed/bokeh,khkaminska/bokeh,clairetang6/bokeh,s
choolie/bokeh,awanke/bokeh,ericmjl/bokeh,akloster/bokeh,percyfal/bokeh,abele/bokeh,philippjfr/bokeh,daodaoliang/bokeh,gpfreitas/bokeh,justacec/bokeh,jplourenco/bokeh,jplourenco/bokeh,rhiever/bokeh,caseyclements/bokeh,htygithub/bokeh,deeplook/bokeh,awanke/bokeh,abele/bokeh,josherick/bokeh,caseyclements/bokeh,bokeh/bokeh,srinathv/bokeh,DuCorey/bokeh,percyfal/bokeh,KasperPRasmussen/bokeh,saifrahmed/bokeh,htygithub/bokeh,ChristosChristofidis/bokeh,rothnic/bokeh,ptitjano/bokeh,ericmjl/bokeh,quasiben/bokeh,awanke/bokeh,rhiever/bokeh,DuCorey/bokeh,ahmadia/bokeh,akloster/bokeh,msarahan/bokeh,deeplook/bokeh,ChinaQuants/bokeh,jakirkham/bokeh,msarahan/bokeh,muku42/bokeh,roxyboy/bokeh,abele/bokeh,ahmadia/bokeh,CrazyGuo/bokeh,stuart-knock/bokeh,matbra/bokeh,khkaminska/bokeh,roxyboy/bokeh,mutirri/bokeh,draperjames/bokeh
Add plot for color property combinations to examples.
import bokeh.plotting as plt from itertools import product plt.output_file('properties_alpha.html') cats = ['RGB', 'RGBA', 'Alpha+RGB', 'Alpha+RGBA'] p = plt.figure(x_range=cats, y_range=cats, title="Fill and Line Color Property Combinations") alpha = 0.5 fill_color = (242, 44, 64) fill_color_alpha = (242, 44, 64, alpha) line_color = (64, 126, 231) line_color_alpha = (64, 126, 231, alpha) # define fill and line color combinations fill = [(1, {'fill_color': fill_color}), (2, {'fill_color': fill_color_alpha}), (3, {'fill_alpha': alpha, 'fill_color': fill_color}), (4, {'fill_alpha': alpha, 'fill_color': fill_color_alpha})] line = [(1, {'line_color': line_color}), (2, {'line_color': line_color_alpha}), (3, {'line_alpha': alpha, 'line_color': line_color}), (4, {'line_alpha': alpha, 'line_color': line_color_alpha})] # plot intersection of fill and line combinations combinations = product(fill, line) for comb in combinations: x, fill_options = comb[0] y, line_options = comb[1] options = fill_options.copy() options.update(line_options) p.circle(x, y, line_width=7, size=50, **options) p.xaxis[0].axis_label = "Fill Options" p.yaxis[0].axis_label = "Line Options" plt.show(p)
<commit_before><commit_msg>Add plot for color property combinations to examples.<commit_after>
import bokeh.plotting as plt from itertools import product plt.output_file('properties_alpha.html') cats = ['RGB', 'RGBA', 'Alpha+RGB', 'Alpha+RGBA'] p = plt.figure(x_range=cats, y_range=cats, title="Fill and Line Color Property Combinations") alpha = 0.5 fill_color = (242, 44, 64) fill_color_alpha = (242, 44, 64, alpha) line_color = (64, 126, 231) line_color_alpha = (64, 126, 231, alpha) # define fill and line color combinations fill = [(1, {'fill_color': fill_color}), (2, {'fill_color': fill_color_alpha}), (3, {'fill_alpha': alpha, 'fill_color': fill_color}), (4, {'fill_alpha': alpha, 'fill_color': fill_color_alpha})] line = [(1, {'line_color': line_color}), (2, {'line_color': line_color_alpha}), (3, {'line_alpha': alpha, 'line_color': line_color}), (4, {'line_alpha': alpha, 'line_color': line_color_alpha})] # plot intersection of fill and line combinations combinations = product(fill, line) for comb in combinations: x, fill_options = comb[0] y, line_options = comb[1] options = fill_options.copy() options.update(line_options) p.circle(x, y, line_width=7, size=50, **options) p.xaxis[0].axis_label = "Fill Options" p.yaxis[0].axis_label = "Line Options" plt.show(p)
Add plot for color property combinations to examples.import bokeh.plotting as plt from itertools import product plt.output_file('properties_alpha.html') cats = ['RGB', 'RGBA', 'Alpha+RGB', 'Alpha+RGBA'] p = plt.figure(x_range=cats, y_range=cats, title="Fill and Line Color Property Combinations") alpha = 0.5 fill_color = (242, 44, 64) fill_color_alpha = (242, 44, 64, alpha) line_color = (64, 126, 231) line_color_alpha = (64, 126, 231, alpha) # define fill and line color combinations fill = [(1, {'fill_color': fill_color}), (2, {'fill_color': fill_color_alpha}), (3, {'fill_alpha': alpha, 'fill_color': fill_color}), (4, {'fill_alpha': alpha, 'fill_color': fill_color_alpha})] line = [(1, {'line_color': line_color}), (2, {'line_color': line_color_alpha}), (3, {'line_alpha': alpha, 'line_color': line_color}), (4, {'line_alpha': alpha, 'line_color': line_color_alpha})] # plot intersection of fill and line combinations combinations = product(fill, line) for comb in combinations: x, fill_options = comb[0] y, line_options = comb[1] options = fill_options.copy() options.update(line_options) p.circle(x, y, line_width=7, size=50, **options) p.xaxis[0].axis_label = "Fill Options" p.yaxis[0].axis_label = "Line Options" plt.show(p)
<commit_before><commit_msg>Add plot for color property combinations to examples.<commit_after>import bokeh.plotting as plt from itertools import product plt.output_file('properties_alpha.html') cats = ['RGB', 'RGBA', 'Alpha+RGB', 'Alpha+RGBA'] p = plt.figure(x_range=cats, y_range=cats, title="Fill and Line Color Property Combinations") alpha = 0.5 fill_color = (242, 44, 64) fill_color_alpha = (242, 44, 64, alpha) line_color = (64, 126, 231) line_color_alpha = (64, 126, 231, alpha) # define fill and line color combinations fill = [(1, {'fill_color': fill_color}), (2, {'fill_color': fill_color_alpha}), (3, {'fill_alpha': alpha, 'fill_color': fill_color}), (4, {'fill_alpha': alpha, 'fill_color': fill_color_alpha})] line = [(1, {'line_color': line_color}), (2, {'line_color': line_color_alpha}), (3, {'line_alpha': alpha, 'line_color': line_color}), (4, {'line_alpha': alpha, 'line_color': line_color_alpha})] # plot intersection of fill and line combinations combinations = product(fill, line) for comb in combinations: x, fill_options = comb[0] y, line_options = comb[1] options = fill_options.copy() options.update(line_options) p.circle(x, y, line_width=7, size=50, **options) p.xaxis[0].axis_label = "Fill Options" p.yaxis[0].axis_label = "Line Options" plt.show(p)
090c73c20e3a57f5b2710c270b0dfc139633d623
test/test_gamenode.py
test/test_gamenode.py
""" Tests for the GameNode module """ from contextlib import contextmanager from io import StringIO import sys import unittest from src import gamenode @contextmanager def captured_output(): """ Redirects stdout to StringIO so we can inspect Print statements """ new_out = StringIO() old_out = sys.stdout try: sys.stdout = new_out yield sys.stdout finally: sys.stdout = old_out class TestGameNode(unittest.TestCase): """ Tests for the GameNode module """ def test_default_instantiation(self): """ Test a known default instantiation """ gn_obj = gamenode.GameNode() result = gn_obj.gameState[0][0] self.assertEqual(result, -1) self.assertFalse(gn_obj.leafP) self.assertFalse(gn_obj.rootP) self.assertFalse(gn_obj.score) def test_initialize(self): """ Test initialization """ gn_obj = gamenode.GameNode() result = gn_obj.initialize() self.assertFalse(result) def test_getState_default(self): """ Test a known getState value """ gn_obj = gamenode.GameNode() result = gn_obj.getState(0, 0) self.assertEqual(result, -1) def test_getState_bad_location(self): """ Test getState with a bad location """ gn_obj = gamenode.GameNode() self.assertRaises(IndexError, gn_obj.getState, 0, 100) def test_setState_good_location(self): """ Test setState with good location """ gn_obj = gamenode.GameNode() gn_obj.setState(0, 0, 5) result = gn_obj.getState(0, 0) self.assertEqual(result, 5) def test_setState_bad_location(self): """ Test setState with bad location """ gn_obj = gamenode.GameNode() self.assertRaises(IndexError, gn_obj.setState, 0, 100, 5) def test_print_board(self): """Check that print_board works""" with captured_output() as out: gn_obj = gamenode.GameNode() gn_obj.print_board() actual_print = out.getvalue().strip() expected_print = ("-1-1-1 \n" " -1-1-1 \n" "-1-1-1-1-1-1-1\n" "-1-1-1-1-1-1-1\n" "-1-1-1-1-1-1-1\n" " -1-1-1 \n" " -1-1-1") self.assertEqual(actual_print, expected_print)
Add tests for GameNode module
Add tests for GameNode module
Python
mit
blairck/jaeger
Add tests for GameNode module
""" Tests for the GameNode module """ from contextlib import contextmanager from io import StringIO import sys import unittest from src import gamenode @contextmanager def captured_output(): """ Redirects stdout to StringIO so we can inspect Print statements """ new_out = StringIO() old_out = sys.stdout try: sys.stdout = new_out yield sys.stdout finally: sys.stdout = old_out class TestGameNode(unittest.TestCase): """ Tests for the GameNode module """ def test_default_instantiation(self): """ Test a known default instantiation """ gn_obj = gamenode.GameNode() result = gn_obj.gameState[0][0] self.assertEqual(result, -1) self.assertFalse(gn_obj.leafP) self.assertFalse(gn_obj.rootP) self.assertFalse(gn_obj.score) def test_initialize(self): """ Test initialization """ gn_obj = gamenode.GameNode() result = gn_obj.initialize() self.assertFalse(result) def test_getState_default(self): """ Test a known getState value """ gn_obj = gamenode.GameNode() result = gn_obj.getState(0, 0) self.assertEqual(result, -1) def test_getState_bad_location(self): """ Test getState with a bad location """ gn_obj = gamenode.GameNode() self.assertRaises(IndexError, gn_obj.getState, 0, 100) def test_setState_good_location(self): """ Test setState with good location """ gn_obj = gamenode.GameNode() gn_obj.setState(0, 0, 5) result = gn_obj.getState(0, 0) self.assertEqual(result, 5) def test_setState_bad_location(self): """ Test setState with bad location """ gn_obj = gamenode.GameNode() self.assertRaises(IndexError, gn_obj.setState, 0, 100, 5) def test_print_board(self): """Check that print_board works""" with captured_output() as out: gn_obj = gamenode.GameNode() gn_obj.print_board() actual_print = out.getvalue().strip() expected_print = ("-1-1-1 \n" " -1-1-1 \n" "-1-1-1-1-1-1-1\n" "-1-1-1-1-1-1-1\n" "-1-1-1-1-1-1-1\n" " -1-1-1 \n" " -1-1-1") self.assertEqual(actual_print, expected_print)
<commit_before><commit_msg>Add tests for GameNode module<commit_after>
""" Tests for the GameNode module """ from contextlib import contextmanager from io import StringIO import sys import unittest from src import gamenode @contextmanager def captured_output(): """ Redirects stdout to StringIO so we can inspect Print statements """ new_out = StringIO() old_out = sys.stdout try: sys.stdout = new_out yield sys.stdout finally: sys.stdout = old_out class TestGameNode(unittest.TestCase): """ Tests for the GameNode module """ def test_default_instantiation(self): """ Test a known default instantiation """ gn_obj = gamenode.GameNode() result = gn_obj.gameState[0][0] self.assertEqual(result, -1) self.assertFalse(gn_obj.leafP) self.assertFalse(gn_obj.rootP) self.assertFalse(gn_obj.score) def test_initialize(self): """ Test initialization """ gn_obj = gamenode.GameNode() result = gn_obj.initialize() self.assertFalse(result) def test_getState_default(self): """ Test a known getState value """ gn_obj = gamenode.GameNode() result = gn_obj.getState(0, 0) self.assertEqual(result, -1) def test_getState_bad_location(self): """ Test getState with a bad location """ gn_obj = gamenode.GameNode() self.assertRaises(IndexError, gn_obj.getState, 0, 100) def test_setState_good_location(self): """ Test setState with good location """ gn_obj = gamenode.GameNode() gn_obj.setState(0, 0, 5) result = gn_obj.getState(0, 0) self.assertEqual(result, 5) def test_setState_bad_location(self): """ Test setState with bad location """ gn_obj = gamenode.GameNode() self.assertRaises(IndexError, gn_obj.setState, 0, 100, 5) def test_print_board(self): """Check that print_board works""" with captured_output() as out: gn_obj = gamenode.GameNode() gn_obj.print_board() actual_print = out.getvalue().strip() expected_print = ("-1-1-1 \n" " -1-1-1 \n" "-1-1-1-1-1-1-1\n" "-1-1-1-1-1-1-1\n" "-1-1-1-1-1-1-1\n" " -1-1-1 \n" " -1-1-1") self.assertEqual(actual_print, expected_print)
Add tests for GameNode module""" Tests for the GameNode module """ from contextlib import contextmanager from io import StringIO import sys import unittest from src import gamenode @contextmanager def captured_output(): """ Redirects stdout to StringIO so we can inspect Print statements """ new_out = StringIO() old_out = sys.stdout try: sys.stdout = new_out yield sys.stdout finally: sys.stdout = old_out class TestGameNode(unittest.TestCase): """ Tests for the GameNode module """ def test_default_instantiation(self): """ Test a known default instantiation """ gn_obj = gamenode.GameNode() result = gn_obj.gameState[0][0] self.assertEqual(result, -1) self.assertFalse(gn_obj.leafP) self.assertFalse(gn_obj.rootP) self.assertFalse(gn_obj.score) def test_initialize(self): """ Test initialization """ gn_obj = gamenode.GameNode() result = gn_obj.initialize() self.assertFalse(result) def test_getState_default(self): """ Test a known getState value """ gn_obj = gamenode.GameNode() result = gn_obj.getState(0, 0) self.assertEqual(result, -1) def test_getState_bad_location(self): """ Test getState with a bad location """ gn_obj = gamenode.GameNode() self.assertRaises(IndexError, gn_obj.getState, 0, 100) def test_setState_good_location(self): """ Test setState with good location """ gn_obj = gamenode.GameNode() gn_obj.setState(0, 0, 5) result = gn_obj.getState(0, 0) self.assertEqual(result, 5) def test_setState_bad_location(self): """ Test setState with bad location """ gn_obj = gamenode.GameNode() self.assertRaises(IndexError, gn_obj.setState, 0, 100, 5) def test_print_board(self): """Check that print_board works""" with captured_output() as out: gn_obj = gamenode.GameNode() gn_obj.print_board() actual_print = out.getvalue().strip() expected_print = ("-1-1-1 \n" " -1-1-1 \n" "-1-1-1-1-1-1-1\n" "-1-1-1-1-1-1-1\n" "-1-1-1-1-1-1-1\n" " -1-1-1 \n" " -1-1-1") self.assertEqual(actual_print, expected_print)
<commit_before><commit_msg>Add tests for GameNode module<commit_after>""" Tests for the GameNode module """ from contextlib import contextmanager from io import StringIO import sys import unittest from src import gamenode @contextmanager def captured_output(): """ Redirects stdout to StringIO so we can inspect Print statements """ new_out = StringIO() old_out = sys.stdout try: sys.stdout = new_out yield sys.stdout finally: sys.stdout = old_out class TestGameNode(unittest.TestCase): """ Tests for the GameNode module """ def test_default_instantiation(self): """ Test a known default instantiation """ gn_obj = gamenode.GameNode() result = gn_obj.gameState[0][0] self.assertEqual(result, -1) self.assertFalse(gn_obj.leafP) self.assertFalse(gn_obj.rootP) self.assertFalse(gn_obj.score) def test_initialize(self): """ Test initialization """ gn_obj = gamenode.GameNode() result = gn_obj.initialize() self.assertFalse(result) def test_getState_default(self): """ Test a known getState value """ gn_obj = gamenode.GameNode() result = gn_obj.getState(0, 0) self.assertEqual(result, -1) def test_getState_bad_location(self): """ Test getState with a bad location """ gn_obj = gamenode.GameNode() self.assertRaises(IndexError, gn_obj.getState, 0, 100) def test_setState_good_location(self): """ Test setState with good location """ gn_obj = gamenode.GameNode() gn_obj.setState(0, 0, 5) result = gn_obj.getState(0, 0) self.assertEqual(result, 5) def test_setState_bad_location(self): """ Test setState with bad location """ gn_obj = gamenode.GameNode() self.assertRaises(IndexError, gn_obj.setState, 0, 100, 5) def test_print_board(self): """Check that print_board works""" with captured_output() as out: gn_obj = gamenode.GameNode() gn_obj.print_board() actual_print = out.getvalue().strip() expected_print = ("-1-1-1 \n" " -1-1-1 \n" "-1-1-1-1-1-1-1\n" "-1-1-1-1-1-1-1\n" "-1-1-1-1-1-1-1\n" " -1-1-1 \n" " -1-1-1") self.assertEqual(actual_print, expected_print)
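The `captured_output` helper in the record above hand-rolls the stdout swap; since Python 3.4 the standard library provides `contextlib.redirect_stdout`, which does the same job with less bookkeeping. A minimal sketch, assuming the same `src.gamenode` module is importable; the assertion is loosened to the first board row so it does not depend on exact trailing whitespace:

```python
from contextlib import redirect_stdout
from io import StringIO
import unittest

from src import gamenode  # import path assumed from the record above


class TestPrintBoardStdlib(unittest.TestCase):
    """Same stdout check as above, via contextlib.redirect_stdout."""

    def test_print_board(self):
        buf = StringIO()
        with redirect_stdout(buf):  # swaps sys.stdout for the buffer
            gamenode.GameNode().print_board()
        self.assertTrue(buf.getvalue().strip().startswith("-1-1-1"))


if __name__ == "__main__":
    unittest.main()
```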
3063044995a14921fd0da2ebbbd57942bb5ca24d
hubblestack/extmods/modules/safecommand.py
hubblestack/extmods/modules/safecommand.py
# -*- encoding: utf-8 -*- ''' Safe Command ============ The idea behind this module is to allow an arbitrary command to be executed safely, with the arguments to the specified binary (optionally) coming from the fileserver. For example, you might have some internal license auditing application for which you need the ability to modify the command line arguments from hubblestack_data. But what you don't want is the ability to execute arbitrary commands from hubblestack_data. You also want to avoid command injection. This module allows for this functionality. ''' from __future__ import absolute_import import logging from salt.exceptions import CommandExecutionError log = logging.getLogger(__name__) def run(command, args=None, override_file=None): ''' This function allows a specific command to be run, with the option to have command-line arguments for the command to be defined in hubblestack_data. The command is run with python_shell=False, which will prevent command injection. command The command to be run. Usually just the binary name, but can also include arguments/flags that need to be inserted to make the command safe, such as sandbox flags. args The rest of the args for the command. Can be a string or a list. override_file A fileserver location (``salt://this/is/a/path.txt``). The contents of the file at this location will be used *instead of* ``args`` ''' pass
Add the skeleton and docs
Add the skeleton and docs
Python
apache-2.0
basepi/hubble
Add the skeleton and docs
# -*- encoding: utf-8 -*- ''' Safe Command ============ The idea behind this module is to allow an arbitrary command to be executed safely, with the arguments to the specified binary (optionally) coming from the fileserver. For example, you might have some internal license auditing application for which you need the ability to modify the command line arguments from hubblestack_data. But what you don't want is the ability to execute arbitrary commands from hubblestack_data. You also want to avoid command injection. This module allows for this functionality. ''' from __future__ import absolute_import import logging from salt.exceptions import CommandExecutionError log = logging.getLogger(__name__) def run(command, args=None, override_file=None): ''' This function allows a specific command to be run, with the option to have command-line arguments for the command to be defined in hubblestack_data. The command is run with python_shell=False, which will prevent command injection. command The command to be run. Usually just the binary name, but can also include arguments/flags that need to be inserted to make the command safe, such as sandbox flags. args The rest of the args for the command. Can be a string or a list. override_file A fileserver location (``salt://this/is/a/path.txt``). The contents of the file at this location will be used *instead of* ``args`` ''' pass
<commit_before><commit_msg>Add the skeleton and docs<commit_after>
# -*- encoding: utf-8 -*- ''' Safe Command ============ The idea behind this module is to allow an arbitrary command to be executed safely, with the arguments to the specified binary (optionally) coming from the fileserver. For example, you might have some internal license auditing application for which you need the ability to modify the command line arguments from hubblestack_data. But what you don't want is the ability to execute arbitrary commands from hubblestack_data. You also want to avoid command injection. This module allows for this functionality. ''' from __future__ import absolute_import import logging from salt.exceptions import CommandExecutionError log = logging.getLogger(__name__) def run(command, args=None, override_file=None): ''' This function allows a specific command to be run, with the option to have command-line arguments for the command to be defined in hubblestack_data. The command is run with python_shell=False, which will prevent command injection. command The command to be run. Usually just the binary name, but can also include arguments/flags that need to be inserted to make the command safe, such as sandbox flags. args The rest of the args for the command. Can be a string or a list. override_file A fileserver location (``salt://this/is/a/path.txt``). The contents of the file at this location will be used *instead of* ``args`` ''' pass
Add the skeleton and docs# -*- encoding: utf-8 -*- ''' Safe Command ============ The idea behind this module is to allow an arbitrary command to be executed safely, with the arguments to the specified binary (optionally) coming from the fileserver. For example, you might have some internal license auditing application for which you need the ability to modify the command line arguments from hubblestack_data. But what you don't want is the ability to execute arbitrary commands from hubblestack_data. You also want to avoid command injection. This module allows for this functionality. ''' from __future__ import absolute_import import logging from salt.exceptions import CommandExecutionError log = logging.getLogger(__name__) def run(command, args=None, override_file=None): ''' This function allows a specific command to be run, with the option to have command-line arguments for the command to be defined in hubblestack_data. The command is run with python_shell=False, which will prevent command injection. command The command to be run. Usually just the binary name, but can also include arguments/flags that need to be inserted to make the command safe, such as sandbox flags. args The rest of the args for the command. Can be a string or a list. override_file A fileserver location (``salt://this/is/a/path.txt``). The contents of the file at this location will be used *instead of* ``args`` ''' pass
<commit_before><commit_msg>Add the skeleton and docs<commit_after># -*- encoding: utf-8 -*- ''' Safe Command ============ The idea behind this module is to allow an arbitrary command to be executed safely, with the arguments to the specified binary (optionally) coming from the fileserver. For example, you might have some internal license auditing application for which you need the ability to modify the command line arguments from hubblestack_data. But what you don't want is the ability to execute arbitrary commands from hubblestack_data. You also want to avoid command injection. This module allows for this functionality. ''' from __future__ import absolute_import import logging from salt.exceptions import CommandExecutionError log = logging.getLogger(__name__) def run(command, args=None, override_file=None): ''' This function allows a specific command to be run, with the option to have command-line arguments for the command to be defined in hubblestack_data. The command is run with python_shell=False, which will prevent command injection. command The command to be run. Usually just the binary name, but can also include arguments/flags that need to be inserted to make the command safe, such as sandbox flags. args The rest of the args for the command. Can be a string or a list. override_file A fileserver location (``salt://this/is/a/path.txt``). The contents of the file at this location will be used *instead of* ``args`` ''' pass
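The `run()` function in the record above is deliberately left as a stub (and `CommandExecutionError` is imported but not yet raised anywhere). The docstring's core idea, a fixed trusted command plus caller-supplied arguments executed without a shell, can be sketched with the standard library alone. The Salt-side fetch of `override_file` is out of scope here, so the sketch takes the already-fetched text as a plain string; this illustrates the `python_shell=False` principle, not the module's eventual implementation:

```python
import shlex
import subprocess


def run_sketch(command, args=None, override_text=None):
    """Illustrative only: build an argv list so no shell parses the input.

    ``override_text`` stands in for the contents that the real module
    would read from ``override_file`` via the fileserver.
    """
    argv = shlex.split(command)  # fixed, trusted part (may include sandbox flags)
    extra = override_text if override_text is not None else (args or [])
    if isinstance(extra, str):
        extra = shlex.split(extra)
    argv.extend(extra)
    # shell=False (the subprocess default) is the analogue of
    # python_shell=False: a payload like "; rm -rf /" arrives as a
    # literal argument, never as a second command.
    return subprocess.run(argv, capture_output=True, text=True).stdout
```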
53d0d5886670ba33a645fd8c82479fb4495d25d1
website/migrations/0002_auto_20150118_2210.py
website/migrations/0002_auto_20150118_2210.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('website', '0001_initial'), ] operations = [ migrations.AddField( model_name='query', name='cacheable', field=models.BooleanField(default=True, help_text=b'allows this query result to be cached'), preserve_default=True, ), migrations.AddField( model_name='querycache', name='hash', field=models.CharField(default='', max_length=1024), preserve_default=False, ), migrations.AlterField( model_name='querycache', name='run_time', field=models.DateTimeField(auto_now=True), preserve_default=True, ), ]
Add new migrations (use "" as default for hash)
Add new migrations (use "" as default for hash) Fixes #22
Python
mit
sqlviz/sqlviz
Add new migrations (use "" as default for hash) Fixes #22
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('website', '0001_initial'), ] operations = [ migrations.AddField( model_name='query', name='cacheable', field=models.BooleanField(default=True, help_text=b'allows this query result to be cached'), preserve_default=True, ), migrations.AddField( model_name='querycache', name='hash', field=models.CharField(default='', max_length=1024), preserve_default=False, ), migrations.AlterField( model_name='querycache', name='run_time', field=models.DateTimeField(auto_now=True), preserve_default=True, ), ]
<commit_before><commit_msg>Add new migrations (use "" as default for hash) Fixes #22<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('website', '0001_initial'), ] operations = [ migrations.AddField( model_name='query', name='cacheable', field=models.BooleanField(default=True, help_text=b'allows this query result to be cached'), preserve_default=True, ), migrations.AddField( model_name='querycache', name='hash', field=models.CharField(default='', max_length=1024), preserve_default=False, ), migrations.AlterField( model_name='querycache', name='run_time', field=models.DateTimeField(auto_now=True), preserve_default=True, ), ]
Add new migrations (use "" as default for hash) Fixes #22# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('website', '0001_initial'), ] operations = [ migrations.AddField( model_name='query', name='cacheable', field=models.BooleanField(default=True, help_text=b'allows this query result to be cached'), preserve_default=True, ), migrations.AddField( model_name='querycache', name='hash', field=models.CharField(default='', max_length=1024), preserve_default=False, ), migrations.AlterField( model_name='querycache', name='run_time', field=models.DateTimeField(auto_now=True), preserve_default=True, ), ]
<commit_before><commit_msg>Add new migrations (use "" as default for hash) Fixes #22<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('website', '0001_initial'), ] operations = [ migrations.AddField( model_name='query', name='cacheable', field=models.BooleanField(default=True, help_text=b'allows this query result to be cached'), preserve_default=True, ), migrations.AddField( model_name='querycache', name='hash', field=models.CharField(default='', max_length=1024), preserve_default=False, ), migrations.AlterField( model_name='querycache', name='run_time', field=models.DateTimeField(auto_now=True), preserve_default=True, ), ]
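The migration above only records schema operations; the model definitions it implies are not part of this record. The fragment below reconstructs what the `website` models plausibly look like after the migration, with field names and options read directly off the operations and all other fields omitted. Note that `preserve_default=False` means the `''` default exists only to backfill rows that predate the column; the model itself keeps no default:

```python
from django.db import models


class Query(models.Model):
    # AddField: a flag consumers can check before caching results.
    cacheable = models.BooleanField(
        default=True, help_text='allows this query result to be cached')


class QueryCache(models.Model):
    # The '' default lives only in the migration (preserve_default=False).
    hash = models.CharField(max_length=1024)
    # auto_now=True: refreshed on every save, matching the AlterField.
    run_time = models.DateTimeField(auto_now=True)
```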
37b5531e2cc969e1ee73a46bf372d89871f922a7
tools/gen_prime.py
tools/gen_prime.py
import argparse import sys # Sieve of Eratosthenes # Code by David Eppstein, UC Irvine, 28 Feb 2002 # http://code.activestate.com/recipes/117119/ def gen_primes(): """ Generate an infinite sequence of prime numbers. """ # Maps composites to primes witnessing their compositeness. # This is memory efficient, as the sieve is not "run forward" # indefinitely, but only as long as required by the current # number being tested. # D = {} # The running integer that's checked for primeness q = 2 while True: if q not in D: # q is a new prime. # Yield it and mark its first multiple that isn't # already marked in previous iterations # yield q D[q * q] = [q] else: # q is composite. D[q] is the list of primes that # divide it. Since we've reached q, we no longer # need it in the map, but we'll mark the next # multiples of its witnesses to prepare for larger # numbers # for p in D[q]: D.setdefault(p + q, []).append(p) del D[q] q += 1 if __name__ == "__main__": parser = argparse.ArgumentParser(description="Generate prime number array") parser.add_argument('n', metavar='N', nargs=1, type=int, help="Limit value") group = parser.add_mutually_exclusive_group() group.add_argument('--count', action='store_const', const=True, default=False, help='limit number of generated prime number (default)') group.add_argument('--value', action='store_const', const=True, default=False, help='limit max value of generated prime number') args = parser.parse_args() if args.value: limit = args.n[0] else: limit = args.n[0]-2 prime = iter(gen_primes()) sys.stdout.write("{"+str(prime.next())) for idx, val in enumerate(prime): if args.value and limit < val: break elif limit < idx: break sys.stdout.write(", "+str(val)) print("}")
Add array of prime number generator code
Add array of prime number generator code
Python
mit
everyevery/programming_study,everyevery/algorithm_code
Add array of prime number generator code
import argparse import sys # Sieve of Eratosthenes # Code by David Eppstein, UC Irvine, 28 Feb 2002 # http://code.activestate.com/recipes/117119/ def gen_primes(): """ Generate an infinite sequence of prime numbers. """ # Maps composites to primes witnessing their compositeness. # This is memory efficient, as the sieve is not "run forward" # indefinitely, but only as long as required by the current # number being tested. # D = {} # The running integer that's checked for primeness q = 2 while True: if q not in D: # q is a new prime. # Yield it and mark its first multiple that isn't # already marked in previous iterations # yield q D[q * q] = [q] else: # q is composite. D[q] is the list of primes that # divide it. Since we've reached q, we no longer # need it in the map, but we'll mark the next # multiples of its witnesses to prepare for larger # numbers # for p in D[q]: D.setdefault(p + q, []).append(p) del D[q] q += 1 if __name__ == "__main__": parser = argparse.ArgumentParser(description="Generate prime number array") parser.add_argument('n', metavar='N', nargs=1, type=int, help="Limit value") group = parser.add_mutually_exclusive_group() group.add_argument('--count', action='store_const', const=True, default=False, help='limit number of generated prime number (default)') group.add_argument('--value', action='store_const', const=True, default=False, help='limit max value of generated prime number') args = parser.parse_args() if args.value: limit = args.n[0] else: limit = args.n[0]-2 prime = iter(gen_primes()) sys.stdout.write("{"+str(prime.next())) for idx, val in enumerate(prime): if args.value and limit < val: break elif limit < idx: break sys.stdout.write(", "+str(val)) print("}")
<commit_before><commit_msg>Add array of prime number generator code<commit_after>
import argparse import sys # Sieve of Eratosthenes # Code by David Eppstein, UC Irvine, 28 Feb 2002 # http://code.activestate.com/recipes/117119/ def gen_primes(): """ Generate an infinite sequence of prime numbers. """ # Maps composites to primes witnessing their compositeness. # This is memory efficient, as the sieve is not "run forward" # indefinitely, but only as long as required by the current # number being tested. # D = {} # The running integer that's checked for primeness q = 2 while True: if q not in D: # q is a new prime. # Yield it and mark its first multiple that isn't # already marked in previous iterations # yield q D[q * q] = [q] else: # q is composite. D[q] is the list of primes that # divide it. Since we've reached q, we no longer # need it in the map, but we'll mark the next # multiples of its witnesses to prepare for larger # numbers # for p in D[q]: D.setdefault(p + q, []).append(p) del D[q] q += 1 if __name__ == "__main__": parser = argparse.ArgumentParser(description="Generate prime number array") parser.add_argument('n', metavar='N', nargs=1, type=int, help="Limit value") group = parser.add_mutually_exclusive_group() group.add_argument('--count', action='store_const', const=True, default=False, help='limit number of generated prime number (default)') group.add_argument('--value', action='store_const', const=True, default=False, help='limit max value of generated prime number') args = parser.parse_args() if args.value: limit = args.n[0] else: limit = args.n[0]-2 prime = iter(gen_primes()) sys.stdout.write("{"+str(prime.next())) for idx, val in enumerate(prime): if args.value and limit < val: break elif limit < idx: break sys.stdout.write(", "+str(val)) print("}")
Add array of prime number generator codeimport argparse import sys # Sieve of Eratosthenes # Code by David Eppstein, UC Irvine, 28 Feb 2002 # http://code.activestate.com/recipes/117119/ def gen_primes(): """ Generate an infinite sequence of prime numbers. """ # Maps composites to primes witnessing their compositeness. # This is memory efficient, as the sieve is not "run forward" # indefinitely, but only as long as required by the current # number being tested. # D = {} # The running integer that's checked for primeness q = 2 while True: if q not in D: # q is a new prime. # Yield it and mark its first multiple that isn't # already marked in previous iterations # yield q D[q * q] = [q] else: # q is composite. D[q] is the list of primes that # divide it. Since we've reached q, we no longer # need it in the map, but we'll mark the next # multiples of its witnesses to prepare for larger # numbers # for p in D[q]: D.setdefault(p + q, []).append(p) del D[q] q += 1 if __name__ == "__main__": parser = argparse.ArgumentParser(description="Generate prime number array") parser.add_argument('n', metavar='N', nargs=1, type=int, help="Limit value") group = parser.add_mutually_exclusive_group() group.add_argument('--count', action='store_const', const=True, default=False, help='limit number of generated prime number (default)') group.add_argument('--value', action='store_const', const=True, default=False, help='limit max value of generated prime number') args = parser.parse_args() if args.value: limit = args.n[0] else: limit = args.n[0]-2 prime = iter(gen_primes()) sys.stdout.write("{"+str(prime.next())) for idx, val in enumerate(prime): if args.value and limit < val: break elif limit < idx: break sys.stdout.write(", "+str(val)) print("}")
<commit_before><commit_msg>Add array of prime number generator code<commit_after>import argparse import sys # Sieve of Eratosthenes # Code by David Eppstein, UC Irvine, 28 Feb 2002 # http://code.activestate.com/recipes/117119/ def gen_primes(): """ Generate an infinite sequence of prime numbers. """ # Maps composites to primes witnessing their compositeness. # This is memory efficient, as the sieve is not "run forward" # indefinitely, but only as long as required by the current # number being tested. # D = {} # The running integer that's checked for primeness q = 2 while True: if q not in D: # q is a new prime. # Yield it and mark its first multiple that isn't # already marked in previous iterations # yield q D[q * q] = [q] else: # q is composite. D[q] is the list of primes that # divide it. Since we've reached q, we no longer # need it in the map, but we'll mark the next # multiples of its witnesses to prepare for larger # numbers # for p in D[q]: D.setdefault(p + q, []).append(p) del D[q] q += 1 if __name__ == "__main__": parser = argparse.ArgumentParser(description="Generate prime number array") parser.add_argument('n', metavar='N', nargs=1, type=int, help="Limit value") group = parser.add_mutually_exclusive_group() group.add_argument('--count', action='store_const', const=True, default=False, help='limit number of generated prime number (default)') group.add_argument('--value', action='store_const', const=True, default=False, help='limit max value of generated prime number') args = parser.parse_args() if args.value: limit = args.n[0] else: limit = args.n[0]-2 prime = iter(gen_primes()) sys.stdout.write("{"+str(prime.next())) for idx, val in enumerate(prime): if args.value and limit < val: break elif limit < idx: break sys.stdout.write(", "+str(val)) print("}")
47775471e2e8f0d88cb79d362114b5f49128a492
factory/main.py
factory/main.py
import random def get_digit(): return random.randint(1, 9) from zope.component.factory import Factory factory = Factory(get_digit, 'random_digit', 'Gives a random digit') from zope.component import getGlobalSiteManager from zope.component.interfaces import IFactory gsm = getGlobalSiteManager() gsm.registerUtility(factory, IFactory, 'random_digit') from zope.component import getUtility assert 1 <= getUtility(IFactory, 'random_digit')() <= 9 # creates digit from zope.component import createObject assert 1 <= createObject('random_digit') <= 9 # also creates a digit
Add example of factory usage
Add example of factory usage
Python
mit
duboviy/zca
Add example of factory usage
import random def get_digit(): return random.randint(1, 9) from zope.component.factory import Factory factory = Factory(get_digit, 'random_digit', 'Gives a random digit') from zope.component import getGlobalSiteManager from zope.component.interfaces import IFactory gsm = getGlobalSiteManager() gsm.registerUtility(factory, IFactory, 'random_digit') from zope.component import getUtility assert 1 <= getUtility(IFactory, 'random_digit')() <= 9 # creates digit from zope.component import createObject assert 1 <= createObject('random_digit') <= 9 # also creates a digit
<commit_before><commit_msg>Add example of factory usage<commit_after>
import random def get_digit(): return random.randint(1, 9) from zope.component.factory import Factory factory = Factory(get_digit, 'random_digit', 'Gives a random digit') from zope.component import getGlobalSiteManager from zope.component.interfaces import IFactory gsm = getGlobalSiteManager() gsm.registerUtility(factory, IFactory, 'random_digit') from zope.component import getUtility assert 1 <= getUtility(IFactory, 'random_digit')() <= 9 # creates digit from zope.component import createObject assert 1 <= createObject('random_digit') <= 9 # also creates a digit
Add example of factory usageimport random def get_digit(): return random.randint(1, 9) from zope.component.factory import Factory factory = Factory(get_digit, 'random_digit', 'Gives a random digit') from zope.component import getGlobalSiteManager from zope.component.interfaces import IFactory gsm = getGlobalSiteManager() gsm.registerUtility(factory, IFactory, 'random_digit') from zope.component import getUtility assert 1 <= getUtility(IFactory, 'random_digit')() <= 9 # creates digit from zope.component import createObject assert 1 <= createObject('random_digit') <= 9 # also creates a digit
<commit_before><commit_msg>Add example of factory usage<commit_after>import random def get_digit(): return random.randint(1, 9) from zope.component.factory import Factory factory = Factory(get_digit, 'random_digit', 'Gives a random digit') from zope.component import getGlobalSiteManager from zope.component.interfaces import IFactory gsm = getGlobalSiteManager() gsm.registerUtility(factory, IFactory, 'random_digit') from zope.component import getUtility assert 1 <= getUtility(IFactory, 'random_digit')() <= 9 # creates digit from zope.component import createObject assert 1 <= createObject('random_digit') <= 9 # also creates a digit
b9d167cf1eba2d55ab7710e78f38c3fa010d21ef
axelrod/strategies/__init__.py
axelrod/strategies/__init__.py
from cooperator import * from defector import * from grudger import * from rand import * from titfortat import * from gobymajority import * from alternator import * from averagecopier import * from grumpy import * strategies = [ Defector, Cooperator, TitForTat, Grudger, GoByMajority, Random, Alternator, AverageCopier, Grumpy, ]
from cooperator import * from defector import * from grudger import * from rand import * from titfortat import * from gobymajority import * from alternator import * from averagecopier import * from grumpy import * from inverse import * strategies = [ Defector, Cooperator, TitForTat, Grudger, GoByMajority, Random, Alternator, AverageCopier, Grumpy, Inverse ]
Change init to add inverse strategy
Change init to add inverse strategy
Python
mit
marcharper/Axelrod,ranjinidas/Axelrod,ranjinidas/Axelrod,marcharper/Axelrod
from cooperator import * from defector import * from grudger import * from rand import * from titfortat import * from gobymajority import * from alternator import * from averagecopier import * from grumpy import * strategies = [ Defector, Cooperator, TitForTat, Grudger, GoByMajority, Random, Alternator, AverageCopier, Grumpy, ] Change init to add inverse strategy
from cooperator import * from defector import * from grudger import * from rand import * from titfortat import * from gobymajority import * from alternator import * from averagecopier import * from grumpy import * from inverse import * strategies = [ Defector, Cooperator, TitForTat, Grudger, GoByMajority, Random, Alternator, AverageCopier, Grumpy, Inverse ]
<commit_before>from cooperator import * from defector import * from grudger import * from rand import * from titfortat import * from gobymajority import * from alternator import * from averagecopier import * from grumpy import * strategies = [ Defector, Cooperator, TitForTat, Grudger, GoByMajority, Random, Alternator, AverageCopier, Grumpy, ] <commit_msg>Change init to add inverse strategy<commit_after>
from cooperator import * from defector import * from grudger import * from rand import * from titfortat import * from gobymajority import * from alternator import * from averagecopier import * from grumpy import * from inverse import * strategies = [ Defector, Cooperator, TitForTat, Grudger, GoByMajority, Random, Alternator, AverageCopier, Grumpy, Inverse ]
from cooperator import * from defector import * from grudger import * from rand import * from titfortat import * from gobymajority import * from alternator import * from averagecopier import * from grumpy import * strategies = [ Defector, Cooperator, TitForTat, Grudger, GoByMajority, Random, Alternator, AverageCopier, Grumpy, ] Change init to add inverse strategyfrom cooperator import * from defector import * from grudger import * from rand import * from titfortat import * from gobymajority import * from alternator import * from averagecopier import * from grumpy import * from inverse import * strategies = [ Defector, Cooperator, TitForTat, Grudger, GoByMajority, Random, Alternator, AverageCopier, Grumpy, Inverse ]
<commit_before>from cooperator import * from defector import * from grudger import * from rand import * from titfortat import * from gobymajority import * from alternator import * from averagecopier import * from grumpy import * strategies = [ Defector, Cooperator, TitForTat, Grudger, GoByMajority, Random, Alternator, AverageCopier, Grumpy, ] <commit_msg>Change init to add inverse strategy<commit_after>from cooperator import * from defector import * from grudger import * from rand import * from titfortat import * from gobymajority import * from alternator import * from averagecopier import * from grumpy import * from inverse import * strategies = [ Defector, Cooperator, TitForTat, Grudger, GoByMajority, Random, Alternator, AverageCopier, Grumpy, Inverse ]
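The diff above shows the cost of the wildcard-import-plus-manual-list pattern: every new strategy touches the file twice, once for the import line and once for the `strategies` entry, which is exactly the pair of edits this commit makes. A decorator-based registry is a common alternative; the sketch below uses throwaway placeholder classes rather than real axelrod `Player` subclasses:

```python
strategies = []


def register(cls):
    """Class decorator: collect each strategy as it is defined."""
    strategies.append(cls)
    return cls


@register
class Grumpy(object):  # placeholder, not the real axelrod class
    name = 'Grumpy'


@register
class Inverse(object):  # the strategy this commit adds
    name = 'Inverse'


assert [s.name for s in strategies] == ['Grumpy', 'Inverse']
```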