Dataset schema (one row per commit; all fields are strings, with min/max value lengths or distinct-class counts as reported):

field            dtype    min length   max length
commit           string   40           40
old_file         string   4            118
new_file         string   4            118
old_contents     string   0            2.94k
new_contents     string   1            4.43k
subject          string   15           444
message          string   16           3.45k
lang             string   (1 distinct value)
license          string   (13 distinct values)
repos            string   5            43.2k
prompt           string   17           4.58k
response         string   1            4.43k
prompt_tagged    string   58           4.62k
response_tagged  string   1            4.43k
text             string   132          7.29k
text_tagged      string   173          7.33k

commit:   11bb0d7aa106e1cafee8b4f00bf75a2aa02e97cf
old_file: SecC/ErrMer_Proto.py
new_file: SecC/ErrMer_Proto.py
new_contents:

from __future__ import division
import numpy as np
from utilBMF.HTSUtils import pFastqProxy, pFastqFile

consInFq1 = pFastqFile("/home/brett/Projects/BMFTools_Devel/lamda_data/lamda-50di"
                       "v_S4_L001_R1_001.fastq.rescued.shaded.BS.fastq")
consInFq2 = pFastqFile("/home/brett/Projects/BMFTools_Devel/lamda_data/kmer_test/"
                       "")

subject: Add small script to start calculating error from kmer motifs, prototyping
message: Add small script to start calculating error from kmer motifs, prototyping
lang:    Python
license: mit
repos:   ARUP-NGS/BMFtools,ARUP-NGS/BMFtools,ARUP-NGS/BMFtools

commit:   7d59b4e21ed36c916c66de06488712decf96b110
old_file: tests/filter/test_distinct_random_filter.py
new_file: tests/filter/test_distinct_random_filter.py
new_contents:

from datetime import date

import pytest
from freezegun import freeze_time

from adhocracy4.filters.filters import DistinctOrderingFilter
from tests.apps.questions.models import Question


@pytest.mark.django_db
def test_random_distinct_ordering_no_seed(question_factory):
    questions = [question_factory() for i in range(5)]
    qs = Question.objects.all()
    random_filter = DistinctOrderingFilter()
    random_qs = random_filter.filter(qs, ['?'])
    assert random_qs[0] == questions[1]
    assert random_qs[1] == questions[3]
    assert random_qs[2] == questions[4]
    assert random_qs[3] == questions[2]
    assert random_qs[4] == questions[0]


@pytest.mark.django_db
def test_random_distinct_ordering_with_date(question_factory):
    questions = [question_factory() for i in range(5)]
    qs = Question.objects.all()

    with freeze_time('2020-01-01 00:00:00 UTC'):
        random_filter = DistinctOrderingFilter(random_seed=date.today())
        random_qs = random_filter.filter(qs, ['?'])
        assert random_qs[0] == questions[4]
        assert random_qs[1] == questions[3]
        assert random_qs[2] == questions[0]
        assert random_qs[3] == questions[2]
        assert random_qs[4] == questions[1]

    with freeze_time('2020-01-02 00:00:00 UTC'):
        random_filter = DistinctOrderingFilter(random_seed=date.today())
        random_qs = random_filter.filter(qs, ['?'])
        assert random_qs[0] == questions[4]
        assert random_qs[1] == questions[3]
        assert random_qs[2] == questions[2]
        assert random_qs[3] == questions[1]
        assert random_qs[4] == questions[0]

    with freeze_time('2020-01-03 00:00:00 UTC'):
        random_filter = DistinctOrderingFilter(random_seed=date.today())
        random_qs = random_filter.filter(qs, ['?'])
        assert random_qs[0] == questions[3]
        assert random_qs[1] == questions[1]
        assert random_qs[2] == questions[0]
        assert random_qs[3] == questions[2]
        assert random_qs[4] == questions[4]

subject: Add test for distinct random order filter
message: tests: Add test for distinct random order filter
lang:    Python
license: agpl-3.0
repos:   liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4

commit:   3081aa14230f7374e406e15be992235eaf961551
old_file: butter/utils.py
new_file: butter/utils.py
new_contents:

#!/usr/bin/env python

from cffi import FFI as _FFI
import fcntl
import array

_ffi = _FFI()
_ffi.cdef("""
#define FIONREAD ...
""")

_C = _ffi.verify("""
#include <sys/ioctl.h>
""", libraries=[])


def get_buffered_length(fd):
    buf = array.array("I", [0])
    fcntl.ioctl(fd, _C.FIONREAD, buf)
    return buf[0]

subject: Add function to read length of bytes in the buffer from the kernel
message: Add function to read length of bytes in the buffer from the kernel
lang:    Python
license: bsd-3-clause
repos:   arkaitzj/python-butter,dasSOZO/python-butter,wdv4758h/butter

commit:   1fdfff95e04678f107775870f0f1b4eda6af8073
old_file: bson/tests/test_binary.py
new_file: bson/tests/test_binary.py
new_contents:

#!/usr/bin/env python
from unittest import TestCase

from bson import dumps, loads


class TestBinary(TestCase):
    def setUp(self):
        lyrics = b"""
I've Had Enough - Earth Wind and Fire
Getting down, there's a party in motion
Everybody's on the scene
And I can hear the sound, like the roar of the ocean
As it rushes to the stream
Live it up, don't ya hear people screaming
Gotta do it all their way
Until they burn it up and the lights nowhere gleaming
What a price you have to pay
Why do we feel whe have to feed the fire
We're only caught up in our desire, ooh
I've had enough, it's just too tough
To keep it up, so I am calling out to you
To lift us up, the world is rough
I am so tired and I've had enough
Spinning' round in perpetual motion
Like a crystal ball of dreams
And moving in the crowd, there's a hint of a notion
That you never will be seen
Slow it down, feel some emotion
'Cause there's nothing in between
Reaching that higher ground, but your faith and devotion
To be on the winning team
Why do we feel we have to feed the fire
We're only caught up in our own desire, ooh
I've had enough, it's just too tough
To keep it up, so I am calling out to you
To lift us up, the world is rough
I am so tired and I've had enough
""".strip().split(b"\n")
        self.doc = {"lyrics": lyrics}

    def test_binary(self):
        dump = dumps(self.doc)
        decoded = loads(dump)
        self.assertEqual(decoded, self.doc)

subject: Add test for PR-56(Fix binary decode)
message: Add test for PR-56(Fix binary decode)
lang:    Python
license: bsd-3-clause
repos:   martinkou/bson

commit:   a2f9a972c5ccb4bcecff89c07ee8a9a73ca97fd1
old_file: tests/cache/test_mako.py
new_file: tests/cache/test_mako.py
new_contents:

from unittest import TestCase

from dogpile.cache import util


class MakoTest(TestCase):
    """ Test entry point for Mako """

    def test_entry_point(self):
        import pkg_resources
        for impl in pkg_resources.iter_entry_points("mako.cache",
                                                    "dogpile.cache"):
            print impl
            impl.load()
            return
        else:
            assert 0, "Missing entry point 'dogpile.cache' for 'mako.cache'"

subject: Add unit test to cover Mako entry point.
message: Add unit test to cover Mako entry point.
lang:    Python
license: bsd-3-clause
repos:   thruflo/dogpile.cache,thruflo/dogpile.cache

commit:   0eae8a12cbbc21469fd4401692223ef51b8bc7a7
old_file: tests/test_identifier.py
new_file: tests/test_identifier.py
new_contents:

import angr
import nose
import identifier

import os
bin_location = str(os.path.join(os.path.dirname(os.path.realpath(__file__)),
                                '../../binaries-private'))

import logging
logging.getLogger("identifier").setLevel("DEBUG")


def test_palindrome():
    '''
    Test identification of functions in palindrome.
    '''
    p = angr.Project(os.path.join(bin_location, "cgc_scored_event_1/cgc/0b32aa01_01"))
    idfer = identifier.Identifier(p)
    seen = dict()
    for addr, symbol in idfer.run():
        seen[addr] = symbol
    nose.tools.assert_equals(seen[134513765], "receive_n4")


def run_all():
    functions = globals()
    all_functions = dict(filter((lambda (k, v): k.startswith('test_')), functions.items()))
    for f in sorted(all_functions.keys()):
        if hasattr(all_functions[f], '__call__'):
            all_functions[f]()

if __name__ == "__main__":
    import sys
    if len(sys.argv) > 1:
        globals()['test_' + sys.argv[1]]()
    else:
        run_all()

subject: Add simple testcase for identifier
message: Add simple testcase for identifier
lang:    Python
license: bsd-2-clause
repos:   tyb0807/angr,iamahuman/angr,tyb0807/angr,angr/angr,f-prettyland/angr,chubbymaggie/angr,iamahuman/angr,chubbymaggie/angr,f-prettyland/angr,schieb/angr,chubbymaggie/angr,schieb/angr,axt/angr,iamahuman/angr,tyb0807/angr,axt/angr,axt/angr,angr/angr,f-prettyland/angr,angr/angr,schieb/angr

commit:   8af2e4a5023beeab22b87b4aab7133fd5a3c5be4
old_file: ConvertFiles.py
new_file: ConvertFiles.py
new_contents:

# -*- coding:utf-8 -*-
'''
This script creates a .txt file with the same name for every file
under the current directory (including subfolders).
'''
import os

def create_txt_file(fileSimpleName):
    fileName = (fileSimpleName + ".txt")
    fileopen = open(fileName, 'w')
    #fileopen.write(p)
    fileopen.close()

def convert_all_files(path):
    for dirPath, dirNames, fileNames in os.walk(path):
        for fileName in fileNames:
            partialNames = os.path.splitext(fileName)
            # Plain-text files need no conversion; .py files are skipped by default as well
            if partialNames[1] != ".txt" and partialNames[1] != ".py":
                create_txt_file(partialNames[0])

pathMessage = "Current path is: \"" + os.getcwd() + "\"\r\n" + "Proceed? (1:Y 0:N)"
isExecute = int(input(pathMessage))
if(isExecute):
    program = convert_all_files("./")

subject: Copy from my project create-txt-file-by-name.
message: Copy from my project create-txt-file-by-name.
lang:    Python
license: mit
repos:   YiFanChen99/file-walker-for-windows
3b852c14c76a95b386e6644b99a589c0ae4c19ed
course/migrations/0005_auto_20160622_2313.py
course/migrations/0005_auto_20160622_2313.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations import django.utils.timezone class Migration(migrations.Migration): dependencies = [ ('course', '0004_auto_20160409_2159'), ] operations = [ migrations.AlterField( model_name='course', name='end_time', field=models.DateField(default=django.utils.timezone.now), ), migrations.AlterField( model_name='course', name='start_time', field=models.DateField(default=django.utils.timezone.now), ), ]
Add migration mentioned in last commit...
Add migration mentioned in last commit...
Python
bsd-3-clause
fsr/course-management,fsr/course-management

commit:   c6b9bb93f268b7c1dc100c75a7c36326d63450d5
old_file: tests/all_test.py
new_file: tests/all_test.py
new_contents:

import glob
import unittest
import os, sys

if __name__ == '__main__':
    PROJECT_ROOT = os.path.dirname(__file__)
    test_file_strings = glob.glob(os.path.join(PROJECT_ROOT, 'test_*.py'))
    module_strings = [os.path.splitext(os.path.basename(str))[0] for str in test_file_strings]
    suites = [unittest.defaultTestLoader.loadTestsFromName(str) for str in module_strings]
    testSuite = unittest.TestSuite(suites)
    text_runner = unittest.TextTestRunner().run(testSuite)

subject: Add all test runner for command line. Now every test can be run for commandline
message: Add all test runner for command line. Now every test can be run for commandline
lang:    Python
license: mit
repos:   kyamaguchi/SublimeObjC2RubyMotion,kyamaguchi/SublimeObjC2RubyMotion

commit:   891c53eee0c6e6487ab0ba0e9d1e92d117678c9a
old_file: src/python/read_back.py
new_file: src/python/read_back.py
new_contents:

import lcio

fac = lcio.LCFactory.getInstance()
rdr = fac.createLCReader()
rdr.open("write_test.slcio")

evt = rdr.readNextEvent()

coll = evt.getSimCalorimeterHitCollection("hits")
print repr(coll)

hit = coll.getElementAt(0)
print repr(hit)

subject: Add example read of previously written LCIO using Python
message: JM: Add example read of previously written LCIO using Python
lang:    Python
license: bsd-3-clause
repos:   petricm/LCIO,petricm/LCIO,petricm/LCIO,iLCSoft/LCIO,petricm/LCIO,iLCSoft/LCIO,petricm/LCIO,petricm/LCIO,iLCSoft/LCIO,iLCSoft/LCIO,iLCSoft/LCIO,iLCSoft/LCIO

commit:   2a5c513c1916b42a044aef15f0d407229c7adc7e
old_file: utilities/decodetrace.py
new_file: utilities/decodetrace.py
new_contents:

'''
Script to decode an internal error backtrace
'''

import sys
from subprocess import Popen, PIPE

if len(sys.argv) != 2 and len(sys.argv) != 3:
    print 'Usage: %s <path to initium.elf> [<path to addr2line>] << <output>' % (sys.argv[0])
    sys.exit(1)

loader = sys.argv[1]
if len(sys.argv) == 3:
    addr2line = sys.argv[2]
else:
    addr2line = 'addr2line'

lines = sys.stdin.readlines()
for line in lines:
    if not line.startswith(" 0x"):
        continue

    line = line.strip()
    process = Popen([addr2line, '-f', '-e', loader, line], stdout=PIPE, stderr=PIPE)
    # addr2line -f prints two lines: the function name, then file:line.
    output = process.communicate()[0].strip('\n').split('\n')
    if process.returncode != 0:
        continue

    print '%s - %s @ %s' % (line, output[0], output[1])

subject: Add script to decode an internal error backtrace
message: Add script to decode an internal error backtrace
lang:    Python
license: mit
repos:   gil0mendes/Initium,gil0mendes/Initium,gil0mendes/Initium

commit:   23da61b3887b98df4d4f943101a2673f39920b7e
old_file: src/collectors/jsoncommon/jsoncommon.py
new_file: src/collectors/jsoncommon/jsoncommon.py
new_contents:

# coding=utf-8

"""
Simple collector which get JSON and parse it into flat metrics

#### Dependencies

 * urllib2

"""

import urllib2
import json

import diamond.collector


class JSONCommonCollector(diamond.collector.Collector):

    def get_default_config_help(self):
        config_help = super(JSONCommonCollector, self).get_default_config_help()
        config_help.update({
            'host': 'Hostname',
            'port': 'Port',
            'path': 'Path',
        })
        return config_help

    def get_default_config(self):
        default_config = super(JSONCommonCollector, self).get_default_config()
        default_config['host'] = 'localhost'
        default_config['port'] = 80
        default_config['path'] = '/stat'
        return default_config

    def _json_to_flat_metrics(self, prefix, data):
        for key, value in data.items():
            if isinstance(value, dict):
                for k, v in self._json_to_flat_metrics("%s.%s" % (prefix, key), value):
                    yield k, v
            else:
                try:
                    int(value)
                except ValueError:
                    value = None
                finally:
                    yield ("%s.%s" % (prefix, key), value)

    def collect(self):
        url = 'http://%s:%i/%s' % (self.config['host'],
                                   int(self.config['port']),
                                   self.config['path'])
        req = urllib2.Request(url)
        req.add_header('Content-type', 'application/json')
        try:
            resp = urllib2.urlopen(req)
        except urllib2.URLError as e:
            self.log.error("Can't open url ", e)
        else:
            content = resp.read()
            try:
                data = json.loads(content)
            except ValueError as e:
                self.log.error("Can't parse JSON object from %s" % url, e)
            else:
                for metric_name, metric_value in self._json_to_flat_metrics("", data):
                    self.publish(metric_name, metric_value)

subject: Add new collector Get JSON and transform it into flat metrics
message: Add new collector Get JSON and transform it into flat metrics
lang:    Python
license: mit
repos:   Ormod/Diamond,Netuitive/Diamond,zoidbergwill/Diamond,thardie/Diamond,sebbrandt87/Diamond,codepython/Diamond,stuartbfox/Diamond,MichaelDoyle/Diamond,thardie/Diamond,gg7/diamond,hamelg/Diamond,krbaker/Diamond,zoidbergwill/Diamond,gg7/diamond,cannium/Diamond,Netuitive/Diamond,jaingaurav/Diamond,tuenti/Diamond,disqus/Diamond,thardie/Diamond,zoidbergwill/Diamond,gg7/diamond,hvnsweeting/Diamond,russss/Diamond,dcsquared13/Diamond,Ensighten/Diamond,bmhatfield/Diamond,ceph/Diamond,TinLe/Diamond,python-diamond/Diamond,Precis/Diamond,bmhatfield/Diamond,signalfx/Diamond,python-diamond/Diamond,CYBERBUGJR/Diamond,Clever/Diamond,janisz/Diamond-1,anandbhoraskar/Diamond,acquia/Diamond,MichaelDoyle/Diamond,hvnsweeting/Diamond,works-mobile/Diamond,dcsquared13/Diamond,Netuitive/netuitive-diamond,timchenxiaoyu/Diamond,Precis/Diamond,jumping/Diamond,Basis/Diamond,actmd/Diamond,MichaelDoyle/Diamond,Precis/Diamond,rtoma/Diamond,russss/Diamond,socialwareinc/Diamond,stuartbfox/Diamond,jumping/Diamond,Ssawa/Diamond,janisz/Diamond-1,gg7/diamond,eMerzh/Diamond-1,rtoma/Diamond,mzupan/Diamond,hamelg/Diamond,dcsquared13/Diamond,Netuitive/Diamond,Clever/Diamond,acquia/Diamond,Ormod/Diamond,Nihn/Diamond-1,Netuitive/netuitive-diamond,TinLe/Diamond,joel-airspring/Diamond,cannium/Diamond,zoidbergwill/Diamond,tusharmakkar08/Diamond,h00dy/Diamond,tellapart/Diamond,russss/Diamond,szibis/Diamond,h00dy/Diamond,TAKEALOT/Diamond,socialwareinc/Diamond,eMerzh/Diamond-1,joel-airspring/Diamond,Slach/Diamond,mfriedenhagen/Diamond,krbaker/Diamond,Slach/Diamond,Ensighten/Diamond,hvnsweeting/Diamond,works-mobile/Diamond,skbkontur/Diamond,hvnsweeting/Diamond,janisz/Diamond-1,jriguera/Diamond,szibis/Diamond,h00dy/Diamond,sebbrandt87/Diamond,mfriedenhagen/Diamond,mfriedenhagen/Diamond,metamx/Diamond,mzupan/Diamond,metamx/Diamond,saucelabs/Diamond,timchenxiaoyu/Diamond,disqus/Diamond,Basis/Diamond,Ormod/Diamond,eMerzh/Diamond-1,bmhatfield/Diamond,krbaker/Diamond,CYBERBUGJR/Diamond,hvnsweeting/Diamond,TinLe/Diamond,skbkontur/Diamond,skbkontur/Diamond,socialwareinc/Diamond,EzyInsights/Diamond,Ssawa/Diamond,ramjothikumar/Diamond,actmd/Diamond,tellapart/Diamond,jriguera/Diamond,cannium/Diamond,Slach/Diamond,Netuitive/netuitive-diamond,CYBERBUGJR/Diamond,codepython/Diamond,thardie/Diamond,anandbhoraskar/Diamond,tuenti/Diamond,mzupan/Diamond,acquia/Diamond,janisz/Diamond-1,actmd/Diamond,jaingaurav/Diamond,works-mobile/Diamond,dcsquared13/Diamond,Nihn/Diamond-1,Nihn/Diamond-1,timchenxiaoyu/Diamond,codepython/Diamond,rtoma/Diamond,metamx/Diamond,Ssawa/Diamond,saucelabs/Diamond,EzyInsights/Diamond,Clever/Diamond,anandbhoraskar/Diamond,tusharmakkar08/Diamond,sebbrandt87/Diamond,rtoma/Diamond,skbkontur/Diamond,Nihn/Diamond-1,codepython/Diamond,jriguera/Diamond,sebbrandt87/Diamond,Netuitive/netuitive-diamond,mzupan/Diamond,Basis/Diamond,anandbhoraskar/Diamond,actmd/Diamond,jaingaurav/Diamond,Slach/Diamond,gg7/diamond,Netuitive/Diamond,EzyInsights/Diamond,cannium/Diamond,ceph/Diamond,MichaelDoyle/Diamond,works-mobile/Diamond,TAKEALOT/Diamond,signalfx/Diamond,timchenxiaoyu/Diamond,jaingaurav/Diamond,szibis/Diamond,saucelabs/Diamond,ramjothikumar/Diamond,disqus/Diamond,hamelg/Diamond,mfriedenhagen/Diamond,Basis/Diamond,tusharmakkar08/Diamond,szibis/Diamond,saucelabs/Diamond,CYBERBUGJR/Diamond,tellapart/Diamond,Ensighten/Diamond,jriguera/Diamond,Clever/Diamond,stuartbfox/Diamond,hamelg/Diamond,python-diamond/Diamond,TinLe/Diamond,joel-airspring/Diamond,Ormod/Diamond,ramjothikumar/Diamond,jumping/Diamond,stuartbfox/Diamond

commit:   4816e8a6b2c1c9ef416bab5cd7d53005cd6d72c2
old_file: hardware/sense_hat/demo_buttons.py
new_file: hardware/sense_hat/demo_buttons.py
new_contents:

#!/usr/bin/env python

# from https://pythonhosted.org/sense-hat/api/#joystick
from sense_hat import SenseHat, ACTION_PRESSED, ACTION_HELD, ACTION_RELEASED
from signal import pause

x = 3
y = 3
sense = SenseHat()


def clamp(value, min_value=0, max_value=7):
    return min(max_value, max(min_value, value))


def pushed_up(event):
    global y
    if event.action != ACTION_RELEASED:
        y = clamp(y - 1)


def pushed_down(event):
    global y
    if event.action != ACTION_RELEASED:
        y = clamp(y + 1)


def pushed_left(event):
    global x
    if event.action != ACTION_RELEASED:
        x = clamp(x - 1)


def pushed_right(event):
    global x
    if event.action != ACTION_RELEASED:
        x = clamp(x + 1)


def refresh():
    sense.clear()
    sense.set_pixel(x, y, 255, 255, 255)


sense.stick.direction_up = pushed_up
sense.stick.direction_down = pushed_down
sense.stick.direction_left = pushed_left
sense.stick.direction_right = pushed_right
sense.stick.direction_any = refresh

refresh()
pause()

subject: Add script to demo Sense HAT buttons
message: Add script to demo Sense HAT buttons
lang:    Python
license: mit
repos:   claremacrae/raspi_code,claremacrae/raspi_code,claremacrae/raspi_code
c93b8f20a65fbb2e6a2e23aad7de0c496778aa23
py/single-element-in-a-sorted-array.py
py/single-element-in-a-sorted-array.py
class Solution(object):
    def singleNonDuplicate(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        ln = len(nums)
        L, U = -1, ln
        while L + 1 < U:
            mid = L + (U - L) / 2
            if mid == ln - 1:
                return nums[mid]
            other = mid / 2 * 2 + 1 - (mid % 2)
            if nums[other] == nums[mid]:
                L = mid
            else:
                U = mid
        return nums[U / 2 * 2]
Add py solution for 540. Single Element in a Sorted Array
Add py solution for 540. Single Element in a Sorted Array 540. Single Element in a Sorted Array: https://leetcode.com/problems/single-element-in-a-sorted-array/
Python
apache-2.0
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
02c833fe31a31f0dd8cefe28f81a5feee0f9bedd
add_and_search_word_data_structure_design.py
add_and_search_word_data_structure_design.py
# coding: utf-8
# author: Fei Gao
#
# Add And Search Word Data Structure Design
# Design a data structure that supports the following two operations:
# void addWord(word)
# bool search(word)
# search(word) can search a literal word or a regular expression string containing only letters a-z or .. A . means it can represent any one letter.
# For example:
# addWord("bad")
# addWord("dad")
# addWord("mad")
# search("pad") -> false
# search("bad") -> true
# search(".ad") -> true
# search("b..") -> true
# Note:
# You may assume that all words are consist of lowercase letters a-z.
# click to show hint.
# You should be familiar with how a Trie works. If not, please work on this problem: Implement Trie (Prefix Tree) first.


class WordDictionary:
    def __init__(self):
        from collections import defaultdict
        self.words = defaultdict(set)

    # @param {string} word
    # @return {void}
    # Adds a word into the data structure.
    def addWord(self, word):
        self.words[len(word)].add(word)

    # @param {string} word
    # @return {boolean}
    # Returns if the word is in the data structure. A word could
    # contain the dot character '.' to represent any one letter.
    def search(self, word):
        import re
        for w in self.words[len(word)]:
            if re.findall(word, w):
                return True
        return False


def main():
    wordDictionary = WordDictionary()
    wordDictionary.addWord("word")
    print(wordDictionary.search("pattern"))


if __name__ == '__main__':
    main()
    pass
Add and Search Word - Data structure design: cheat with `re`
Add and Search Word - Data structure design: cheat with `re`
Python
mit
feigaochn/leetcode
a2b08ba7f575bde42c57d24b4effaceea0ac4829
django_banking/parsers.py
django_banking/parsers.py
# -*- coding: utf-8 -*-
from django_banking.models import MT940


def parse_mt940(input):
    messages = []
    message = None
    for line in input:
        if line.startswith(':20:'):
            # field :20: (transaction reference number) opens a new message
            message = MT940()
            message.trn = line[4:]
            continue
        if line.startswith('-'):
            # a dash terminates the current message block
            messages.append(message)
            message = None
            continue
    return messages
Add initial version of the MT-940 parser
Add initial version of the MT-940 parser
Python
bsd-3-clause
headcr4sh/django-banking
5d743690e1d236090064c7bb95f872d1fa279b52
php4dvd/test_deletefilm.py
php4dvd/test_deletefilm.py
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
import unittest


class AddFilm(unittest.TestCase):
    def setUp(self):
        self.driver = webdriver.Firefox()
        self.driver.implicitly_wait(10)
        self.base_url = "http://hub.wart.ru/"
        self.verificationErrors = []
        self.accept_next_alert = True

    def test_addfilm(self):
        driver = self.driver
        driver.get(self.base_url + "php4dvd/")
        driver.find_element_by_id("username").clear()
        driver.find_element_by_id("username").send_keys("admin")
        driver.find_element_by_name("password").clear()
        driver.find_element_by_name("password").send_keys("admin")
        driver.find_element_by_name("submit").click()
        driver.find_element_by_css_selector(u"img[alt=\"Солнце\"]").click()
        driver.find_element_by_css_selector("img[alt=\"Remove\"]").click()
        self.assertRegexpMatches(self.close_alert_and_get_its_text(), r"^Are you sure you want to remove this[\s\S]$")
        driver.find_element_by_link_text("Home").click()
        driver.find_element_by_link_text("Log out").click()
        self.assertRegexpMatches(self.close_alert_and_get_its_text(), r"^Are you sure you want to log out[\s\S]$")

    def is_element_present(self, how, what):
        try:
            self.driver.find_element(by=how, value=what)
        except NoSuchElementException, e:
            return False
        return True

    def close_alert_and_get_its_text(self):
        try:
            alert = self.driver.switch_to_alert()
            alert_text = alert.text
            if self.accept_next_alert:
                alert.accept()
            else:
                alert.dismiss()
            return alert_text
        finally:
            self.accept_next_alert = True

    def tearDown(self):
        self.driver.quit()
        self.assertEqual([], self.verificationErrors)


if __name__ == "__main__":
    unittest.main()
Add a test for a film deleting.
Add a test for a film deleting.
Python
bsd-2-clause
bsamorodov/selenium-py-training-samorodov
f9a9ce064f9a09ee0e039239aebdb742c8683a3c
readMouse.py
readMouse.py
import struct

file = open ("/dev/input/mice","rb");

def getMouseEvent():
    buf = file.read(3);
    x,y = struct.unpack("bb", buf[1:] );
    print ("x:%d, y:%d\n" % (x,y));

while(1):
    getMouseEvent()
file.close;
Add Python script to read mouse delta position events
Add Python script to read mouse delta position events
Python
mit
cschulee/ee542-code,cschulee/ee542-code,cschulee/ee542-code
4ffa049553050ab22c5c90f544bb03fcc4259bfe
pmdarima/tests/test_metrics.py
pmdarima/tests/test_metrics.py
# -*- coding: utf-8 -*-

from pmdarima.metrics import smape
import numpy as np

import pytest


@pytest.mark.parametrize(
    'actual,forecasted,expected', [
        pytest.param([0.07533, 0.07533, 0.07533, 0.07533,
                      0.07533, 0.07533, 0.0672, 0.0672],
                     [0.102, 0.107, 0.047, 0.1,
                      0.032, 0.047, 0.108, 0.089],
                     42.60306631890196),
    ]
)
def test_smape(actual, forecasted, expected):
    err = smape(actual, forecasted)
    assert np.allclose(expected, err)
Add unit test for SMAPE
Add unit test for SMAPE
Python
mit
tgsmith61591/pyramid,alkaline-ml/pmdarima,alkaline-ml/pmdarima,alkaline-ml/pmdarima,tgsmith61591/pyramid,tgsmith61591/pyramid
88470edf159ef662886a4af9dc782fe173e196bf
examples/python/burnin.py
examples/python/burnin.py
#!/usr/bin/env python

# Burn-in test: Keep LEDs at full brightness most of the time, but dim periodically
# so it's clear when there's a problem.

import opc, time, math

numLEDs = 512
client = opc.Client('localhost:7890')

t = 0
while True:
    t += 0.4
    brightness = int(min(1, 1.25 + math.sin(t)) * 255)
    frame = [ (brightness, brightness, brightness) ] * numLEDs
    client.put_pixels(frame)
    time.sleep(0.05)
Add a really simple burn-in test helper
Add a really simple burn-in test helper
Python
mit
fragmede/fadecandy,lincomatic/fadecandy,pixelmatix/fadecandy,PimentNoir/fadecandy,fragmede/fadecandy,poe/fadecandy,scanlime/fadecandy,poe/fadecandy,lincomatic/fadecandy,hakan42/fadecandy,nomis52/fadecandy,PimentNoir/fadecandy,lincomatic/fadecandy,adam-back/fadecandy,adam-back/fadecandy,adam-back/fadecandy,nomis52/fadecandy,piers7/fadecandy,Jorgen-VikingGod/fadecandy,scanlime/fadecandy,nomis52/fadecandy,adam-back/fadecandy,jsestrich/fadecandy,piers7/fadecandy,scanlime/fadecandy,jsestrich/fadecandy,hakan42/fadecandy,scanlime/fadecandy,PimentNoir/fadecandy,Protoneer/fadecandy,lincomatic/fadecandy,Jorgen-VikingGod/fadecandy,hakan42/fadecandy,PimentNoir/fadecandy,poe/fadecandy,PimentNoir/fadecandy,fragmede/fadecandy,piers7/fadecandy,scanlime/fadecandy,pixelmatix/fadecandy,nomis52/fadecandy,hakan42/fadecandy,nomis52/fadecandy,fragmede/fadecandy,piers7/fadecandy,Jorgen-VikingGod/fadecandy,fragmede/fadecandy,fragmede/fadecandy,Protoneer/fadecandy,nomis52/fadecandy,poe/fadecandy,pixelmatix/fadecandy,Jorgen-VikingGod/fadecandy,poe/fadecandy,Jorgen-VikingGod/fadecandy,jsestrich/fadecandy,PimentNoir/fadecandy,lincomatic/fadecandy,Protoneer/fadecandy,Protoneer/fadecandy,poe/fadecandy,lincomatic/fadecandy,hakan42/fadecandy,pixelmatix/fadecandy,nomis52/fadecandy,lincomatic/fadecandy,adam-back/fadecandy,nomis52/fadecandy,jsestrich/fadecandy,scanlime/fadecandy,fragmede/fadecandy,fragmede/fadecandy,Protoneer/fadecandy,poe/fadecandy,piers7/fadecandy,poe/fadecandy,pixelmatix/fadecandy,lincomatic/fadecandy,jsestrich/fadecandy
c06cffd3de56e5da4cf5d19bcb7f6e1982fc7621
generate_historic_wnaffect.py
generate_historic_wnaffect.py
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from time import sleep
import json
import codecs
from emotools.lexicon import get_spelling_variants
import pandas as pd


if __name__ == '__main__':
    # read file and convert byte strings to unicode
    with codecs.open('SWN-NL-voor-Janneke.txt', 'rb', 'latin1') as f:
        lines = f.readlines()

    count = 0
    spelling_vars = {}
    for line in lines:
        count += 1
        entry = line.split(';')

        # lexicon service needs lower case input
        term = entry[0].lower()
        term = term.replace('"', '')

        while True:
            try:
                sleep(1)
                words = get_spelling_variants(term, [], 1600, 1830)
                words = list(set(words))
                break
            except:
                print 'Retry!'
                sleep(5)
                pass

        if len(words) > 0:
            spelling_vars[term] = words

        if count % 1000 == 0:
            print count
            print term, words

    # write spelling variants to file
    with codecs.open('swn-nl_spelling_variants.json', 'w', 'utf8') as f:
        json.dump(spelling_vars, f, sort_keys=True, ensure_ascii=False,
                  indent=2)
Add script to generate a historic version of wna affect
Add script to generate a historic version of wna affect
Python
apache-2.0
NLeSC/embodied-emotions-scripts,NLeSC/embodied-emotions-scripts
75073c57d247a1424250ae98b870347af2725d1f
test/mbed_gt_cmake_handlers.py
test/mbed_gt_cmake_handlers.py
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""

import unittest
from mbed_greentea import cmake_handlers


class CmakeHandlers(unittest.TestCase):
    """ Basic true asserts to see that testing is executed
    """

    def setUp(self):
        self.ctesttestfile = """# CMake generated Testfile for
# Source directory: c:/Work2/mbed-client/test
# Build directory: c:/Work2/mbed-client/build/frdm-k64f-gcc/test
#
# This file includes the relevant testing commands required for
# testing this directory and lists subdirectories to be tested as well.
add_test(mbed-client-test-mbedclient-smokeTest "mbed-client-test-mbedclient-smokeTest")
add_test(mbed-client-test-helloworld-mbedclient "mbed-client-test-helloworld-mbedclient")
"""

    def tearDown(self):
        pass

    def test_example(self):
        self.assertEqual(True, True)
        self.assertNotEqual(True, False)

    def test_parse_ctesttestfile_line(self):
        link_target = '/dir/to/target'
        binary_type = '.bin'
        result = {}
        no_skipped_lines = 0
        for line in self.ctesttestfile.splitlines():
            line_parse = cmake_handlers.parse_ctesttestfile_line(
                link_target, binary_type, line, verbose=False)
            if line_parse:
                test_case, test_case_path = line_parse
                result[test_case] = test_case_path
            else:
                no_skipped_lines += 1
        self.assertIn('mbed-client-test-mbedclient-smokeTest', result)
        self.assertIn('mbed-client-test-helloworld-mbedclient', result)
        self.assertEqual(len(result), 2)       # We parse two entries
        self.assertEqual(no_skipped_lines, 6)  # We skip six lines in this file


if __name__ == '__main__':
    unittest.main()
Add unit tests to function parsing CTestTestfile.cmake
Add unit tests to function parsing CTestTestfile.cmake
Python
apache-2.0
ARMmbed/greentea
2697859df141b2dc6c34df5c0ac3ec87741d6b84
examples/tour_examples/bootstrap_google_tour.py
examples/tour_examples/bootstrap_google_tour.py
from seleniumbase import BaseCase


class MyTourClass(BaseCase):

    def test_google_tour(self):
        self.open('https://google.com')
        self.wait_for_element('input[title="Search"]')

        self.create_bootstrap_tour()  # OR self.create_tour(theme="bootstrap")
        self.add_tour_step(
            "Click to begin the Google Tour!", title="SeleniumBase Tours")
        self.add_tour_step(
            "Type in your search query here.", 'input[title="Search"]')
        self.add_tour_step(
            "Then click here to search!", 'input[value="Google Search"]',
            alignment="bottom")
        self.add_tour_step(
            "Or click here to see the top result.",
            '''[value="I'm Feeling Lucky"]''', alignment="bottom")
        self.add_tour_step("Here's an example Google search:")
        self.play_tour()

        self.highlight_update_text('input[title="Search"]', "GitHub")
        self.highlight_click('input[value="Google Search"]')

        self.create_bootstrap_tour()  # OR self.create_tour(theme="bootstrap")
        self.add_tour_step(
            "Search results appear here!", title="(5-second autoplay on)")
        self.add_tour_step("Let's take another tour:")
        self.play_tour(interval=5)  # tour automatically continues after 5 sec

        self.open("https://www.google.com/maps/@42.3598616,-71.0912631,15z")
        self.wait_for_element('input#searchboxinput')

        self.create_bootstrap_tour()  # OR self.create_tour(theme="bootstrap")
        self.add_tour_step("Welcome to Google Maps!")
        self.add_tour_step(
            "Type in a location here.", "#searchboxinput", title="Search Box")
        self.add_tour_step(
            "Then click here to show it on the map.",
            "#searchbox-searchbutton", alignment="bottom")
        self.add_tour_step(
            "Or click here to get driving directions.",
            "#searchbox-directions", alignment="bottom")
        self.add_tour_step(
            "Use this button to switch to Satellite view.",
            "div.widget-minimap", alignment="right")
        self.add_tour_step(
            "Click here to zoom in.", "#widget-zoom-in", alignment="left")
        self.add_tour_step(
            "Or click here to zoom out.", "#widget-zoom-out",
            alignment="left")
        self.add_tour_step(
            "Use the Menu button to see more options.",
            ".searchbox-hamburger-container", alignment="right")
        self.add_tour_step(
            "Or click here to see more Google apps.",
            '[title="Google apps"]', alignment="left")
        self.add_tour_step(
            "Thanks for trying out SeleniumBase Tours!",
            title="End of Guided Tour")
        self.play_tour()
Add Bootstrap Google Tour example
Add Bootstrap Google Tour example
Python
mit
mdmintz/SeleniumBase,mdmintz/seleniumspot,mdmintz/SeleniumBase,mdmintz/seleniumspot,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase,seleniumbase/SeleniumBase,seleniumbase/SeleniumBase,mdmintz/SeleniumBase
Add Bootstrap Google Tour example
from seleniumbase import BaseCase class MyTourClass(BaseCase): def test_google_tour(self): self.open('https://google.com') self.wait_for_element('input[title="Search"]') self.create_bootstrap_tour() # OR self.create_tour(theme="bootstrap") self.add_tour_step( "Click to begin the Google Tour!", title="SeleniumBase Tours") self.add_tour_step( "Type in your search query here.", 'input[title="Search"]') self.add_tour_step( "Then click here to search!", 'input[value="Google Search"]', alignment="bottom") self.add_tour_step( "Or click here to see the top result.", '''[value="I'm Feeling Lucky"]''', alignment="bottom") self.add_tour_step("Here's an example Google search:") self.play_tour() self.highlight_update_text('input[title="Search"]', "GitHub") self.highlight_click('input[value="Google Search"]') self.create_bootstrap_tour() # OR self.create_tour(theme="bootstrap") self.add_tour_step( "Search results appear here!", title="(5-second autoplay on)") self.add_tour_step("Let's take another tour:") self.play_tour(interval=5) # tour automatically continues after 5 sec self.open("https://www.google.com/maps/@42.3598616,-71.0912631,15z") self.wait_for_element('input#searchboxinput') self.create_bootstrap_tour() # OR self.create_tour(theme="bootstrap") self.add_tour_step("Welcome to Google Maps!") self.add_tour_step( "Type in a location here.", "#searchboxinput", title="Search Box") self.add_tour_step( "Then click here to show it on the map.", "#searchbox-searchbutton", alignment="bottom") self.add_tour_step( "Or click here to get driving directions.", "#searchbox-directions", alignment="bottom") self.add_tour_step( "Use this button to switch to Satellite view.", "div.widget-minimap", alignment="right") self.add_tour_step( "Click here to zoom in.", "#widget-zoom-in", alignment="left") self.add_tour_step( "Or click here to zoom out.", "#widget-zoom-out", alignment="left") self.add_tour_step( "Use the Menu button to see more options.", ".searchbox-hamburger-container", alignment="right") self.add_tour_step( "Or click here to see more Google apps.", '[title="Google apps"]', alignment="left") self.add_tour_step( "Thanks for trying out SeleniumBase Tours!", title="End of Guided Tour") self.play_tour()
<commit_before><commit_msg>Add Bootstrap Google Tour example<commit_after>
from seleniumbase import BaseCase class MyTourClass(BaseCase): def test_google_tour(self): self.open('https://google.com') self.wait_for_element('input[title="Search"]') self.create_bootstrap_tour() # OR self.create_tour(theme="bootstrap") self.add_tour_step( "Click to begin the Google Tour!", title="SeleniumBase Tours") self.add_tour_step( "Type in your search query here.", 'input[title="Search"]') self.add_tour_step( "Then click here to search!", 'input[value="Google Search"]', alignment="bottom") self.add_tour_step( "Or click here to see the top result.", '''[value="I'm Feeling Lucky"]''', alignment="bottom") self.add_tour_step("Here's an example Google search:") self.play_tour() self.highlight_update_text('input[title="Search"]', "GitHub") self.highlight_click('input[value="Google Search"]') self.create_bootstrap_tour() # OR self.create_tour(theme="bootstrap") self.add_tour_step( "Search results appear here!", title="(5-second autoplay on)") self.add_tour_step("Let's take another tour:") self.play_tour(interval=5) # tour automatically continues after 5 sec self.open("https://www.google.com/maps/@42.3598616,-71.0912631,15z") self.wait_for_element('input#searchboxinput') self.create_bootstrap_tour() # OR self.create_tour(theme="bootstrap") self.add_tour_step("Welcome to Google Maps!") self.add_tour_step( "Type in a location here.", "#searchboxinput", title="Search Box") self.add_tour_step( "Then click here to show it on the map.", "#searchbox-searchbutton", alignment="bottom") self.add_tour_step( "Or click here to get driving directions.", "#searchbox-directions", alignment="bottom") self.add_tour_step( "Use this button to switch to Satellite view.", "div.widget-minimap", alignment="right") self.add_tour_step( "Click here to zoom in.", "#widget-zoom-in", alignment="left") self.add_tour_step( "Or click here to zoom out.", "#widget-zoom-out", alignment="left") self.add_tour_step( "Use the Menu button to see more options.", ".searchbox-hamburger-container", alignment="right") self.add_tour_step( "Or click here to see more Google apps.", '[title="Google apps"]', alignment="left") self.add_tour_step( "Thanks for trying out SeleniumBase Tours!", title="End of Guided Tour") self.play_tour()
Add Bootstrap Google Tour examplefrom seleniumbase import BaseCase class MyTourClass(BaseCase): def test_google_tour(self): self.open('https://google.com') self.wait_for_element('input[title="Search"]') self.create_bootstrap_tour() # OR self.create_tour(theme="bootstrap") self.add_tour_step( "Click to begin the Google Tour!", title="SeleniumBase Tours") self.add_tour_step( "Type in your search query here.", 'input[title="Search"]') self.add_tour_step( "Then click here to search!", 'input[value="Google Search"]', alignment="bottom") self.add_tour_step( "Or click here to see the top result.", '''[value="I'm Feeling Lucky"]''', alignment="bottom") self.add_tour_step("Here's an example Google search:") self.play_tour() self.highlight_update_text('input[title="Search"]', "GitHub") self.highlight_click('input[value="Google Search"]') self.create_bootstrap_tour() # OR self.create_tour(theme="bootstrap") self.add_tour_step( "Search results appear here!", title="(5-second autoplay on)") self.add_tour_step("Let's take another tour:") self.play_tour(interval=5) # tour automatically continues after 5 sec self.open("https://www.google.com/maps/@42.3598616,-71.0912631,15z") self.wait_for_element('input#searchboxinput') self.create_bootstrap_tour() # OR self.create_tour(theme="bootstrap") self.add_tour_step("Welcome to Google Maps!") self.add_tour_step( "Type in a location here.", "#searchboxinput", title="Search Box") self.add_tour_step( "Then click here to show it on the map.", "#searchbox-searchbutton", alignment="bottom") self.add_tour_step( "Or click here to get driving directions.", "#searchbox-directions", alignment="bottom") self.add_tour_step( "Use this button to switch to Satellite view.", "div.widget-minimap", alignment="right") self.add_tour_step( "Click here to zoom in.", "#widget-zoom-in", alignment="left") self.add_tour_step( "Or click here to zoom out.", "#widget-zoom-out", alignment="left") self.add_tour_step( "Use the Menu button to see more options.", ".searchbox-hamburger-container", alignment="right") self.add_tour_step( "Or click here to see more Google apps.", '[title="Google apps"]', alignment="left") self.add_tour_step( "Thanks for trying out SeleniumBase Tours!", title="End of Guided Tour") self.play_tour()
<commit_before><commit_msg>Add Bootstrap Google Tour example<commit_after>from seleniumbase import BaseCase class MyTourClass(BaseCase): def test_google_tour(self): self.open('https://google.com') self.wait_for_element('input[title="Search"]') self.create_bootstrap_tour() # OR self.create_tour(theme="bootstrap") self.add_tour_step( "Click to begin the Google Tour!", title="SeleniumBase Tours") self.add_tour_step( "Type in your search query here.", 'input[title="Search"]') self.add_tour_step( "Then click here to search!", 'input[value="Google Search"]', alignment="bottom") self.add_tour_step( "Or click here to see the top result.", '''[value="I'm Feeling Lucky"]''', alignment="bottom") self.add_tour_step("Here's an example Google search:") self.play_tour() self.highlight_update_text('input[title="Search"]', "GitHub") self.highlight_click('input[value="Google Search"]') self.create_bootstrap_tour() # OR self.create_tour(theme="bootstrap") self.add_tour_step( "Search results appear here!", title="(5-second autoplay on)") self.add_tour_step("Let's take another tour:") self.play_tour(interval=5) # tour automatically continues after 5 sec self.open("https://www.google.com/maps/@42.3598616,-71.0912631,15z") self.wait_for_element('input#searchboxinput') self.create_bootstrap_tour() # OR self.create_tour(theme="bootstrap") self.add_tour_step("Welcome to Google Maps!") self.add_tour_step( "Type in a location here.", "#searchboxinput", title="Search Box") self.add_tour_step( "Then click here to show it on the map.", "#searchbox-searchbutton", alignment="bottom") self.add_tour_step( "Or click here to get driving directions.", "#searchbox-directions", alignment="bottom") self.add_tour_step( "Use this button to switch to Satellite view.", "div.widget-minimap", alignment="right") self.add_tour_step( "Click here to zoom in.", "#widget-zoom-in", alignment="left") self.add_tour_step( "Or click here to zoom out.", "#widget-zoom-out", alignment="left") self.add_tour_step( "Use the Menu button to see more options.", ".searchbox-hamburger-container", alignment="right") self.add_tour_step( "Or click here to see more Google apps.", '[title="Google apps"]', alignment="left") self.add_tour_step( "Thanks for trying out SeleniumBase Tours!", title="End of Guided Tour") self.play_tour()
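The record above exercises SeleniumBase's guided-tour API. Stripped to its core, the pattern is three calls: create a tour, add steps, play it. A minimal sketch of that pattern follows; the method names, the theme value, and the alignment parameter come straight from the record, while the URL and the "h1" selector are placeholders of mine. Like any BaseCase subclass, it runs under pytest (e.g. pytest minimal_tour.py).

# Minimal sketch of the tour pattern from the record above; URL and
# selector are illustrative placeholders, not from the dataset.
from seleniumbase import BaseCase


class MinimalTour(BaseCase):
    def test_minimal_tour(self):
        self.open("https://example.com")     # page the tour runs on
        self.create_tour(theme="bootstrap")  # same theme as the record
        self.add_tour_step("Welcome!", title="Demo")  # free-floating step
        self.add_tour_step("The main heading.", "h1", alignment="bottom")
        self.play_tour()  # pass interval=N to autoplay, as the record does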
69dd9057c1009d9c047f45908080a55ac3223e4c
examples/svm/plot_svm_regression.py
examples/svm/plot_svm_regression.py
"""Non linear regression with Support Vector Regression (SVR) using RBF kernel """ ############################################################################### # Generate sample data import numpy as np X = np.sort(5*np.random.rand(40, 1), axis=0) y = np.sin(X).ravel() ############################################################################### # Add noise to targets y[::5] += 3*(0.5 - np.random.rand(8)) ############################################################################### # Fit regression model from scikits.learn.svm import SVR svr_rbf = SVR(kernel='rbf', C=1e4, gamma=0.1) svr_lin = SVR(kernel='linear', C=1e4) svr_poly = SVR(kernel='poly', C=1e4, degree=2) y_rbf = svr_rbf.fit(X, y).predict(X) y_lin = svr_lin.fit(X, y).predict(X) y_poly = svr_poly.fit(X, y).predict(X) ############################################################################### # look at the results pl.scatter(X, y, c='k', label='data') pl.hold('on') pl.plot(X, y_rbf, c='g', label='RBF model') pl.plot(X, y_lin, c='r', label='Linear model') pl.plot(X, y_poly, c='b', label='Polynomial model') pl.xlabel('data') pl.ylabel('target') pl.title('Support Vector Regression') pl.legend() pl.show()
"""Non linear regression with Support Vector Regression (SVR) using RBF kernel """ ############################################################################### # Generate sample data import numpy as np X = np.sort(5*np.random.rand(40, 1), axis=0) y = np.sin(X).ravel() ############################################################################### # Add noise to targets y[::5] += 3*(0.5 - np.random.rand(8)) ############################################################################### # Fit regression model from scikits.learn.svm import SVR svr_rbf = SVR(kernel='rbf', C=1e4, gamma=0.1) svr_lin = SVR(kernel='linear', C=1e4) svr_poly = SVR(kernel='poly', C=1e4, degree=2) y_rbf = svr_rbf.fit(X, y).predict(X) y_lin = svr_lin.fit(X, y).predict(X) y_poly = svr_poly.fit(X, y).predict(X) ############################################################################### # look at the results import pylab as pl pl.scatter(X, y, c='k', label='data') pl.hold('on') pl.plot(X, y_rbf, c='g', label='RBF model') pl.plot(X, y_lin, c='r', label='Linear model') pl.plot(X, y_poly, c='b', label='Polynomial model') pl.xlabel('data') pl.ylabel('target') pl.title('Support Vector Regression') pl.legend() pl.show()
Fix forgotten import in example
BUG: Fix forgotten import in example
Python
bsd-3-clause
NunoEdgarGub1/scikit-learn,f3r/scikit-learn,mehdidc/scikit-learn,icdishb/scikit-learn,xiaoxiamii/scikit-learn,MatthieuBizien/scikit-learn,Vimos/scikit-learn,walterreade/scikit-learn,lesteve/scikit-learn,hugobowne/scikit-learn,cauchycui/scikit-learn,joshloyal/scikit-learn,shangwuhencc/scikit-learn,mattilyra/scikit-learn,vermouthmjl/scikit-learn,mlyundin/scikit-learn,mugizico/scikit-learn,mhue/scikit-learn,fabioticconi/scikit-learn,chrsrds/scikit-learn,3manuek/scikit-learn,spallavolu/scikit-learn,CforED/Machine-Learning,OshynSong/scikit-learn,adamgreenhall/scikit-learn,henrykironde/scikit-learn,shyamalschandra/scikit-learn,billy-inn/scikit-learn,samuel1208/scikit-learn,HolgerPeters/scikit-learn,justincassidy/scikit-learn,rsivapr/scikit-learn,yonglehou/scikit-learn,rahuldhote/scikit-learn,fzalkow/scikit-learn,zuku1985/scikit-learn,smartscheduling/scikit-learn-categorical-tree,jpautom/scikit-learn,gclenaghan/scikit-learn,liangz0707/scikit-learn,ky822/scikit-learn,shikhardb/scikit-learn,quheng/scikit-learn,chrsrds/scikit-learn,dhruv13J/scikit-learn,AlexRobson/scikit-learn,nvoron23/scikit-learn,ky822/scikit-learn,mayblue9/scikit-learn,tomlof/scikit-learn,jmschrei/scikit-learn,alexeyum/scikit-learn,shikhardb/scikit-learn,hsuantien/scikit-learn,sumspr/scikit-learn,lazywei/scikit-learn,yunfeilu/scikit-learn,lesteve/scikit-learn,pkruskal/scikit-learn,beepee14/scikit-learn,ashhher3/scikit-learn,toastedcornflakes/scikit-learn,glouppe/scikit-learn,466152112/scikit-learn,gclenaghan/scikit-learn,rahul-c1/scikit-learn,nikitasingh981/scikit-learn,ivannz/scikit-learn,PrashntS/scikit-learn,ishanic/scikit-learn,mojoboss/scikit-learn,jaidevd/scikit-learn,nmayorov/scikit-learn,hugobowne/scikit-learn,mikebenfield/scikit-learn,pianomania/scikit-learn,zihua/scikit-learn,ahoyosid/scikit-learn,joshloyal/scikit-learn,Aasmi/scikit-learn,wlamond/scikit-learn,cybernet14/scikit-learn,sarahgrogan/scikit-learn,shangwuhencc/scikit-learn,PatrickChrist/scikit-learn,AIML/scikit-learn,aewhatley/scikit-learn,lenovor/scikit-learn,xavierwu/scikit-learn,terkkila/scikit-learn,henrykironde/scikit-learn,mjudsp/Tsallis,zaxtax/scikit-learn,RPGOne/scikit-learn,macks22/scikit-learn,procoder317/scikit-learn,PatrickChrist/scikit-learn,LiaoPan/scikit-learn,mehdidc/scikit-learn,plissonf/scikit-learn,deepesch/scikit-learn,nomadcube/scikit-learn,r-mart/scikit-learn,hainm/scikit-learn,maheshakya/scikit-learn,DonBeo/scikit-learn,LohithBlaze/scikit-learn,kjung/scikit-learn,xiaoxiamii/scikit-learn,zuku1985/scikit-learn,glennq/scikit-learn,spallavolu/scikit-learn,saiwing-yeung/scikit-learn,Aasmi/scikit-learn,wzbozon/scikit-learn,glennq/scikit-learn,simon-pepin/scikit-learn,etkirsch/scikit-learn,equialgo/scikit-learn,idlead/scikit-learn,arabenjamin/scikit-learn,Achuth17/scikit-learn,yunfeilu/scikit-learn,anntzer/scikit-learn,arjoly/scikit-learn,massmutual/scikit-learn,kagayakidan/scikit-learn,samuel1208/scikit-learn,ZENGXH/scikit-learn,pythonvietnam/scikit-learn,joernhees/scikit-learn,bigdataelephants/scikit-learn,jaidevd/scikit-learn,Titan-C/scikit-learn,mhue/scikit-learn,quheng/scikit-learn,heli522/scikit-learn,spallavolu/scikit-learn,dsullivan7/scikit-learn,untom/scikit-learn,pratapvardhan/scikit-learn,ahoyosid/scikit-learn,florian-f/sklearn,dsquareindia/scikit-learn,mxjl620/scikit-learn,roxyboy/scikit-learn,fyffyt/scikit-learn,abimannans/scikit-learn,shenzebang/scikit-learn,MartinDelzant/scikit-learn,moutai/scikit-learn,tmhm/scikit-learn,maheshakya/scikit-learn,Akshay0724/scikit-learn,LohithBlaze/scikit-learn,LiaoPan/scikit-learn,mrshu/scikit-lear
n,justincassidy/scikit-learn,alexsavio/scikit-learn,rsivapr/scikit-learn,f3r/scikit-learn,jmetzen/scikit-learn,cybernet14/scikit-learn,ngoix/OCRF,Srisai85/scikit-learn,treycausey/scikit-learn,bnaul/scikit-learn,hainm/scikit-learn,Windy-Ground/scikit-learn,RPGOne/scikit-learn,cl4rke/scikit-learn,bikong2/scikit-learn,cwu2011/scikit-learn,jseabold/scikit-learn,xuewei4d/scikit-learn,mehdidc/scikit-learn,akionakamura/scikit-learn,CforED/Machine-Learning,liangz0707/scikit-learn,phdowling/scikit-learn,pv/scikit-learn,hrjn/scikit-learn,abimannans/scikit-learn,anurag313/scikit-learn,andrewnc/scikit-learn,bikong2/scikit-learn,PrashntS/scikit-learn,voxlol/scikit-learn,tawsifkhan/scikit-learn,akionakamura/scikit-learn,Garrett-R/scikit-learn,jzt5132/scikit-learn,vibhorag/scikit-learn,wazeerzulfikar/scikit-learn,ephes/scikit-learn,rajat1994/scikit-learn,lin-credible/scikit-learn,thilbern/scikit-learn,victorbergelin/scikit-learn,MohammedWasim/scikit-learn,PrashntS/scikit-learn,jayflo/scikit-learn,sanketloke/scikit-learn,espg/scikit-learn,aflaxman/scikit-learn,shangwuhencc/scikit-learn,cauchycui/scikit-learn,meduz/scikit-learn,sumspr/scikit-learn,ningchi/scikit-learn,sinhrks/scikit-learn,shusenl/scikit-learn,UNR-AERIAL/scikit-learn,0asa/scikit-learn,poryfly/scikit-learn,xubenben/scikit-learn,xavierwu/scikit-learn,Sentient07/scikit-learn,Titan-C/scikit-learn,vshtanko/scikit-learn,RayMick/scikit-learn,cdegroc/scikit-learn,nikitasingh981/scikit-learn,mblondel/scikit-learn,plissonf/scikit-learn,ningchi/scikit-learn,rishikksh20/scikit-learn,mjudsp/Tsallis,dingocuster/scikit-learn,imaculate/scikit-learn,costypetrisor/scikit-learn,ycaihua/scikit-learn,fbagirov/scikit-learn,aetilley/scikit-learn,heli522/scikit-learn,poryfly/scikit-learn,clemkoa/scikit-learn,robin-lai/scikit-learn,LohithBlaze/scikit-learn,giorgiop/scikit-learn,arjoly/scikit-learn,MartinSavc/scikit-learn,trungnt13/scikit-learn,Nyker510/scikit-learn,ldirer/scikit-learn,costypetrisor/scikit-learn,pypot/scikit-learn,terkkila/scikit-learn,rohanp/scikit-learn,toastedcornflakes/scikit-learn,mwv/scikit-learn,yunfeilu/scikit-learn,zaxtax/scikit-learn,ashhher3/scikit-learn,maheshakya/scikit-learn,plissonf/scikit-learn,aflaxman/scikit-learn,aabadie/scikit-learn,NelisVerhoef/scikit-learn,beepee14/scikit-learn,kmike/scikit-learn,jakirkham/scikit-learn,tawsifkhan/scikit-learn,mugizico/scikit-learn,mattgiguere/scikit-learn,pkruskal/scikit-learn,thientu/scikit-learn,ycaihua/scikit-learn,Barmaley-exe/scikit-learn,fbagirov/scikit-learn,MechCoder/scikit-learn,NunoEdgarGub1/scikit-learn,frank-tancf/scikit-learn,cl4rke/scikit-learn,murali-munna/scikit-learn,anirudhjayaraman/scikit-learn,ivannz/scikit-learn,nelson-liu/scikit-learn,jjx02230808/project0223,larsmans/scikit-learn,sarahgrogan/scikit-learn,vinayak-mehta/scikit-learn,glemaitre/scikit-learn,RachitKansal/scikit-learn,UNR-AERIAL/scikit-learn,simon-pepin/scikit-learn,petosegan/scikit-learn,dsullivan7/scikit-learn,fredhusser/scikit-learn,khkaminska/scikit-learn,Adai0808/scikit-learn,yonglehou/scikit-learn,hsuantien/scikit-learn,cauchycui/scikit-learn,shahankhatch/scikit-learn,TomDLT/scikit-learn,ChanderG/scikit-learn,khkaminska/scikit-learn,nhejazi/scikit-learn,russel1237/scikit-learn,schets/scikit-learn,kmike/scikit-learn,RachitKansal/scikit-learn,kjung/scikit-learn,rrohan/scikit-learn,roxyboy/scikit-learn,schets/scikit-learn,nomadcube/scikit-learn,untom/scikit-learn,xuewei4d/scikit-learn,0asa/scikit-learn,IssamLaradji/scikit-learn,victorbergelin/scikit-learn,idlead/scikit-learn,vortex-ape/scikit-learn,andrewnc/sc
ikit-learn,scikit-learn/scikit-learn,alvarofierroclavero/scikit-learn,procoder317/scikit-learn,Lawrence-Liu/scikit-learn,vigilv/scikit-learn,qifeigit/scikit-learn,nvoron23/scikit-learn,lucidfrontier45/scikit-learn,q1ang/scikit-learn,btabibian/scikit-learn,zorojean/scikit-learn,abhishekgahlot/scikit-learn,0x0all/scikit-learn,JPFrancoia/scikit-learn,aetilley/scikit-learn,pypot/scikit-learn,0asa/scikit-learn,icdishb/scikit-learn,pnedunuri/scikit-learn,AlexandreAbraham/scikit-learn,nrhine1/scikit-learn,kmike/scikit-learn,MechCoder/scikit-learn,andrewnc/scikit-learn,mojoboss/scikit-learn,voxlol/scikit-learn,AlexandreAbraham/scikit-learn,ClimbsRocks/scikit-learn,aminert/scikit-learn,lenovor/scikit-learn,gotomypc/scikit-learn,adamgreenhall/scikit-learn,Vimos/scikit-learn,rajat1994/scikit-learn,belltailjp/scikit-learn,devanshdalal/scikit-learn,jakobworldpeace/scikit-learn,aetilley/scikit-learn,abhishekgahlot/scikit-learn,Myasuka/scikit-learn,henrykironde/scikit-learn,murali-munna/scikit-learn,NunoEdgarGub1/scikit-learn,jorik041/scikit-learn,Achuth17/scikit-learn,ivannz/scikit-learn,Adai0808/scikit-learn,ngoix/OCRF,pkruskal/scikit-learn,mehdidc/scikit-learn,RomainBrault/scikit-learn,sinhrks/scikit-learn,jpautom/scikit-learn,bhargav/scikit-learn,massmutual/scikit-learn,ElDeveloper/scikit-learn,iismd17/scikit-learn,imaculate/scikit-learn,mikebenfield/scikit-learn,manashmndl/scikit-learn,simon-pepin/scikit-learn,bthirion/scikit-learn,sgenoud/scikit-learn,olologin/scikit-learn,ngoix/OCRF,glennq/scikit-learn,yyjiang/scikit-learn,jakirkham/scikit-learn,cwu2011/scikit-learn,huzq/scikit-learn,walterreade/scikit-learn,TomDLT/scikit-learn,untom/scikit-learn,xzh86/scikit-learn,etkirsch/scikit-learn,yyjiang/scikit-learn,B3AU/waveTree,kevin-intel/scikit-learn,shahankhatch/scikit-learn,waterponey/scikit-learn,hdmetor/scikit-learn,f3r/scikit-learn,r-mart/scikit-learn,pythonvietnam/scikit-learn,marcocaccin/scikit-learn,PatrickOReilly/scikit-learn,kylerbrown/scikit-learn,ankurankan/scikit-learn,eickenberg/scikit-learn,jmschrei/scikit-learn,akionakamura/scikit-learn,jayflo/scikit-learn,schets/scikit-learn,MechCoder/scikit-learn,glouppe/scikit-learn,icdishb/scikit-learn,Fireblend/scikit-learn,jm-begon/scikit-learn,mblondel/scikit-learn,xwolf12/scikit-learn,bhargav/scikit-learn,wzbozon/scikit-learn,0x0all/scikit-learn,jlegendary/scikit-learn,MatthieuBizien/scikit-learn,jblackburne/scikit-learn,PatrickOReilly/scikit-learn,abhishekgahlot/scikit-learn,depet/scikit-learn,sonnyhu/scikit-learn,mwv/scikit-learn,nikitasingh981/scikit-learn,tawsifkhan/scikit-learn,NelisVerhoef/scikit-learn,hsiaoyi0504/scikit-learn,vinayak-mehta/scikit-learn,UNR-AERIAL/scikit-learn,q1ang/scikit-learn,huobaowangxi/scikit-learn,luo66/scikit-learn,chrsrds/scikit-learn,mwv/scikit-learn,BiaDarkia/scikit-learn,marcocaccin/scikit-learn,luo66/scikit-learn,appapantula/scikit-learn,rsivapr/scikit-learn,OshynSong/scikit-learn,toastedcornflakes/scikit-learn,pompiduskus/scikit-learn,jereze/scikit-learn,depet/scikit-learn,0asa/scikit-learn,tomlof/scikit-learn,ndingwall/scikit-learn,altairpearl/scikit-learn,devanshdalal/scikit-learn,samuel1208/scikit-learn,zaxtax/scikit-learn,jorge2703/scikit-learn,Djabbz/scikit-learn,466152112/scikit-learn,Djabbz/scikit-learn,vermouthmjl/scikit-learn,fabioticconi/scikit-learn,jmetzen/scikit-learn,kevin-intel/scikit-learn,madjelan/scikit-learn,pianomania/scikit-learn,lazywei/scikit-learn,IndraVikas/scikit-learn,ChanderG/scikit-learn,jpautom/scikit-learn,alvarofierroclavero/scikit-learn,belltailjp/scikit-learn,mjgrav2001/scikit
-learn,treycausey/scikit-learn,manashmndl/scikit-learn,herilalaina/scikit-learn,liangz0707/scikit-learn,Jimmy-Morzaria/scikit-learn,aminert/scikit-learn,theoryno3/scikit-learn,arahuja/scikit-learn,kmike/scikit-learn,loli/semisupervisedforests,gotomypc/scikit-learn,petosegan/scikit-learn,pythonvietnam/scikit-learn,henridwyer/scikit-learn,shyamalschandra/scikit-learn,hrjn/scikit-learn,sinhrks/scikit-learn,vibhorag/scikit-learn,ankurankan/scikit-learn,idlead/scikit-learn,anurag313/scikit-learn,liyu1990/sklearn,giorgiop/scikit-learn,maheshakya/scikit-learn,poryfly/scikit-learn,OshynSong/scikit-learn,hitszxp/scikit-learn,jereze/scikit-learn,siutanwong/scikit-learn,ycaihua/scikit-learn,xiaoxiamii/scikit-learn,belltailjp/scikit-learn,BiaDarkia/scikit-learn,hlin117/scikit-learn,ZENGXH/scikit-learn,pianomania/scikit-learn,trungnt13/scikit-learn,kaichogami/scikit-learn,bhargav/scikit-learn,jorik041/scikit-learn,henrykironde/scikit-learn,meduz/scikit-learn,equialgo/scikit-learn,florian-f/sklearn,fabioticconi/scikit-learn,andaag/scikit-learn,jayflo/scikit-learn,Myasuka/scikit-learn,larsmans/scikit-learn,adamgreenhall/scikit-learn,jakobworldpeace/scikit-learn,sanketloke/scikit-learn,tomlof/scikit-learn,depet/scikit-learn,abhishekkrthakur/scikit-learn,lucidfrontier45/scikit-learn,466152112/scikit-learn,jm-begon/scikit-learn,mikebenfield/scikit-learn,potash/scikit-learn,themrmax/scikit-learn,florian-f/sklearn,manhhomienbienthuy/scikit-learn,sinhrks/scikit-learn,zaxtax/scikit-learn,sgenoud/scikit-learn,tdhopper/scikit-learn,marcocaccin/scikit-learn,alexsavio/scikit-learn,nvoron23/scikit-learn,NelisVerhoef/scikit-learn,MohammedWasim/scikit-learn,cl4rke/scikit-learn,nesterione/scikit-learn,Sentient07/scikit-learn,ephes/scikit-learn,fabioticconi/scikit-learn,mikebenfield/scikit-learn,zhenv5/scikit-learn,rajat1994/scikit-learn,yask123/scikit-learn,shahankhatch/scikit-learn,nelson-liu/scikit-learn,PatrickOReilly/scikit-learn,beepee14/scikit-learn,BiaDarkia/scikit-learn,russel1237/scikit-learn,rvraghav93/scikit-learn,djgagne/scikit-learn,PrashntS/scikit-learn,ominux/scikit-learn,robin-lai/scikit-learn,robin-lai/scikit-learn,ltiao/scikit-learn,vivekmishra1991/scikit-learn,rahul-c1/scikit-learn,michigraber/scikit-learn,rvraghav93/scikit-learn,themrmax/scikit-learn,mattilyra/scikit-learn,0x0all/scikit-learn,elkingtonmcb/scikit-learn,rishikksh20/scikit-learn,yunfeilu/scikit-learn,scikit-learn/scikit-learn,ankurankan/scikit-learn,Lawrence-Liu/scikit-learn,rahul-c1/scikit-learn,vigilv/scikit-learn,hlin117/scikit-learn,h2educ/scikit-learn,pypot/scikit-learn,eickenberg/scikit-learn,xuewei4d/scikit-learn,AlexRobson/scikit-learn,heli522/scikit-learn,liyu1990/sklearn,harshaneelhg/scikit-learn,ningchi/scikit-learn,jseabold/scikit-learn,carrillo/scikit-learn,rohanp/scikit-learn,nomadcube/scikit-learn,0x0all/scikit-learn,zorroblue/scikit-learn,YinongLong/scikit-learn,rishikksh20/scikit-learn,luo66/scikit-learn,nvoron23/scikit-learn,glouppe/scikit-learn,xubenben/scikit-learn,fabianp/scikit-learn,sumspr/scikit-learn,betatim/scikit-learn,mattgiguere/scikit-learn,glennq/scikit-learn,vybstat/scikit-learn,shahankhatch/scikit-learn,Sentient07/scikit-learn,trankmichael/scikit-learn,aetilley/scikit-learn,trankmichael/scikit-learn,smartscheduling/scikit-learn-categorical-tree,JosmanPS/scikit-learn,jakobworldpeace/scikit-learn,amueller/scikit-learn,jzt5132/scikit-learn,cwu2011/scikit-learn,xuewei4d/scikit-learn,kevin-intel/scikit-learn,BiaDarkia/scikit-learn,mugizico/scikit-learn,rahuldhote/scikit-learn,ClimbsRocks/scikit-learn,kagayaki
dan/scikit-learn,manhhomienbienthuy/scikit-learn,andaag/scikit-learn,kjung/scikit-learn,Titan-C/scikit-learn,JPFrancoia/scikit-learn,rrohan/scikit-learn,Srisai85/scikit-learn,quheng/scikit-learn,potash/scikit-learn,murali-munna/scikit-learn,3manuek/scikit-learn,sonnyhu/scikit-learn,yyjiang/scikit-learn,zorojean/scikit-learn,ycaihua/scikit-learn,lucidfrontier45/scikit-learn,wlamond/scikit-learn,krez13/scikit-learn,JosmanPS/scikit-learn,mxjl620/scikit-learn,vivekmishra1991/scikit-learn,michigraber/scikit-learn,ZENGXH/scikit-learn,murali-munna/scikit-learn,ZenDevelopmentSystems/scikit-learn,aflaxman/scikit-learn,YinongLong/scikit-learn,CforED/Machine-Learning,jblackburne/scikit-learn,MartinDelzant/scikit-learn,aewhatley/scikit-learn,wazeerzulfikar/scikit-learn,thilbern/scikit-learn,jjx02230808/project0223,eickenberg/scikit-learn,mrshu/scikit-learn,stylianos-kampakis/scikit-learn,nhejazi/scikit-learn,aabadie/scikit-learn,mlyundin/scikit-learn,B3AU/waveTree,fyffyt/scikit-learn,ominux/scikit-learn,ZenDevelopmentSystems/scikit-learn,altairpearl/scikit-learn,ZenDevelopmentSystems/scikit-learn,xyguo/scikit-learn,cainiaocome/scikit-learn,RayMick/scikit-learn,terkkila/scikit-learn,Garrett-R/scikit-learn,MatthieuBizien/scikit-learn,eickenberg/scikit-learn,Barmaley-exe/scikit-learn,RPGOne/scikit-learn,hsuantien/scikit-learn,devanshdalal/scikit-learn,justincassidy/scikit-learn,JosmanPS/scikit-learn,vivekmishra1991/scikit-learn,tdhopper/scikit-learn,AIML/scikit-learn,ogrisel/scikit-learn,loli/sklearn-ensembletrees,JeanKossaifi/scikit-learn,yonglehou/scikit-learn,equialgo/scikit-learn,raghavrv/scikit-learn,vortex-ape/scikit-learn,fbagirov/scikit-learn,jpautom/scikit-learn,shikhardb/scikit-learn,loli/semisupervisedforests,dsullivan7/scikit-learn,liyu1990/sklearn,arahuja/scikit-learn,ZenDevelopmentSystems/scikit-learn,mfjb/scikit-learn,ky822/scikit-learn,pianomania/scikit-learn,AlexandreAbraham/scikit-learn,procoder317/scikit-learn,jblackburne/scikit-learn,idlead/scikit-learn,ilo10/scikit-learn,samzhang111/scikit-learn,florian-f/sklearn,robbymeals/scikit-learn,siutanwong/scikit-learn,giorgiop/scikit-learn,hsiaoyi0504/scikit-learn,depet/scikit-learn,abhishekgahlot/scikit-learn,maheshakya/scikit-learn,mwv/scikit-learn,Windy-Ground/scikit-learn,nrhine1/scikit-learn,sergeyf/scikit-learn,rvraghav93/scikit-learn,jereze/scikit-learn,dhruv13J/scikit-learn,Djabbz/scikit-learn,zuku1985/scikit-learn,0x0all/scikit-learn,kaichogami/scikit-learn,dsquareindia/scikit-learn,ankurankan/scikit-learn,abhishekgahlot/scikit-learn,Aasmi/scikit-learn,larsmans/scikit-learn,shyamalschandra/scikit-learn,fzalkow/scikit-learn,Obus/scikit-learn,khkaminska/scikit-learn,bigdataelephants/scikit-learn,IndraVikas/scikit-learn,costypetrisor/scikit-learn,bthirion/scikit-learn,hrjn/scikit-learn,ChanChiChoi/scikit-learn,aabadie/scikit-learn,xwolf12/scikit-learn,abhishekkrthakur/scikit-learn,TomDLT/scikit-learn,hlin117/scikit-learn,fabianp/scikit-learn,mojoboss/scikit-learn,hsiaoyi0504/scikit-learn,glemaitre/scikit-learn,3manuek/scikit-learn,lucidfrontier45/scikit-learn,nesterione/scikit-learn,waterponey/scikit-learn,Nyker510/scikit-learn,YinongLong/scikit-learn,andrewnc/scikit-learn,MartinSavc/scikit-learn,alvarofierroclavero/scikit-learn,rsivapr/scikit-learn,hsuantien/scikit-learn,mattilyra/scikit-learn,chrsrds/scikit-learn,macks22/scikit-learn,ilyes14/scikit-learn,mxjl620/scikit-learn,huobaowangxi/scikit-learn,dhruv13J/scikit-learn,florian-f/sklearn,IndraVikas/scikit-learn,evgchz/scikit-learn,jorge2703/scikit-learn,AlexRobson/scikit-learn,lin-c
redible/scikit-learn,ishanic/scikit-learn,wazeerzulfikar/scikit-learn,stylianos-kampakis/scikit-learn,mhdella/scikit-learn,raghavrv/scikit-learn,jaidevd/scikit-learn,bthirion/scikit-learn,kashif/scikit-learn,frank-tancf/scikit-learn,Lawrence-Liu/scikit-learn,jlegendary/scikit-learn,JeanKossaifi/scikit-learn,RomainBrault/scikit-learn,betatim/scikit-learn,mjudsp/Tsallis,ChanChiChoi/scikit-learn,andaag/scikit-learn,deepesch/scikit-learn,petosegan/scikit-learn,scikit-learn/scikit-learn,cainiaocome/scikit-learn,nmayorov/scikit-learn,DSLituiev/scikit-learn,eg-zhang/scikit-learn,moutai/scikit-learn,kaichogami/scikit-learn,mjudsp/Tsallis,dingocuster/scikit-learn,AlexRobson/scikit-learn,MohammedWasim/scikit-learn,vermouthmjl/scikit-learn,jorik041/scikit-learn,shenzebang/scikit-learn,kylerbrown/scikit-learn,ilo10/scikit-learn,Jimmy-Morzaria/scikit-learn,ssaeger/scikit-learn,cauchycui/scikit-learn,yonglehou/scikit-learn,h2educ/scikit-learn,lbishal/scikit-learn,cwu2011/scikit-learn,appapantula/scikit-learn,akionakamura/scikit-learn,bnaul/scikit-learn,anntzer/scikit-learn,pv/scikit-learn,treycausey/scikit-learn,Myasuka/scikit-learn,abimannans/scikit-learn,zhenv5/scikit-learn,sanketloke/scikit-learn,jakirkham/scikit-learn,herilalaina/scikit-learn,UNR-AERIAL/scikit-learn,poryfly/scikit-learn,frank-tancf/scikit-learn,r-mart/scikit-learn,Fireblend/scikit-learn,espg/scikit-learn,anirudhjayaraman/scikit-learn,glemaitre/scikit-learn,pompiduskus/scikit-learn,aminert/scikit-learn,qifeigit/scikit-learn,phdowling/scikit-learn,fengzhyuan/scikit-learn,rahuldhote/scikit-learn,massmutual/scikit-learn,xyguo/scikit-learn,3manuek/scikit-learn,arahuja/scikit-learn,potash/scikit-learn,ashhher3/scikit-learn,vermouthmjl/scikit-learn,Fireblend/scikit-learn,loli/sklearn-ensembletrees,alexsavio/scikit-learn,yanlend/scikit-learn,bnaul/scikit-learn,zorroblue/scikit-learn,jkarnows/scikit-learn,cdegroc/scikit-learn,pv/scikit-learn,davidgbe/scikit-learn,rohanp/scikit-learn,billy-inn/scikit-learn,beepee14/scikit-learn,AnasGhrab/scikit-learn,sonnyhu/scikit-learn,cdegroc/scikit-learn,eg-zhang/scikit-learn,kagayakidan/scikit-learn,chrisburr/scikit-learn,mrshu/scikit-learn,depet/scikit-learn,dsullivan7/scikit-learn,elkingtonmcb/scikit-learn,hainm/scikit-learn,michigraber/scikit-learn,abimannans/scikit-learn,stylianos-kampakis/scikit-learn,CforED/Machine-Learning,ltiao/scikit-learn,tosolveit/scikit-learn,qifeigit/scikit-learn,jorge2703/scikit-learn,clemkoa/scikit-learn,manhhomienbienthuy/scikit-learn,bigdataelephants/scikit-learn,jmschrei/scikit-learn,Obus/scikit-learn,nhejazi/scikit-learn,ahoyosid/scikit-learn,xyguo/scikit-learn,sanketloke/scikit-learn,ldirer/scikit-learn,herilalaina/scikit-learn,saiwing-yeung/scikit-learn,Garrett-R/scikit-learn,pompiduskus/scikit-learn,loli/sklearn-ensembletrees,Achuth17/scikit-learn,btabibian/scikit-learn,ahoyosid/scikit-learn,sumspr/scikit-learn,betatim/scikit-learn,saiwing-yeung/scikit-learn,siutanwong/scikit-learn,ClimbsRocks/scikit-learn,samzhang111/scikit-learn,fredhusser/scikit-learn,theoryno3/scikit-learn,aflaxman/scikit-learn,olologin/scikit-learn,mblondel/scikit-learn,liyu1990/sklearn,treycausey/scikit-learn,DonBeo/scikit-learn,ephes/scikit-learn,elkingtonmcb/scikit-learn,justincassidy/scikit-learn,alexeyum/scikit-learn,henridwyer/scikit-learn,MatthieuBizien/scikit-learn,arabenjamin/scikit-learn,carrillo/scikit-learn,xavierwu/scikit-learn,zorojean/scikit-learn,robbymeals/scikit-learn,tmhm/scikit-learn,jlegendary/scikit-learn,NelisVerhoef/scikit-learn,heli522/scikit-learn,vinayak-mehta/scikit-l
earn,ilo10/scikit-learn,pnedunuri/scikit-learn,iismd17/scikit-learn,petosegan/scikit-learn,yanlend/scikit-learn,chrisburr/scikit-learn,ElDeveloper/scikit-learn,Obus/scikit-learn,mlyundin/scikit-learn,henridwyer/scikit-learn,phdowling/scikit-learn,rahul-c1/scikit-learn,mjgrav2001/scikit-learn,sarahgrogan/scikit-learn,ominux/scikit-learn,xiaoxiamii/scikit-learn,mrshu/scikit-learn,Clyde-fare/scikit-learn,abhishekkrthakur/scikit-learn,lazywei/scikit-learn,wlamond/scikit-learn,IshankGulati/scikit-learn,DonBeo/scikit-learn,Djabbz/scikit-learn,hitszxp/scikit-learn,rexshihaoren/scikit-learn,lbishal/scikit-learn,ogrisel/scikit-learn,imaculate/scikit-learn,massmutual/scikit-learn,jmetzen/scikit-learn,harshaneelhg/scikit-learn,eg-zhang/scikit-learn,mhdella/scikit-learn,herilalaina/scikit-learn,billy-inn/scikit-learn,hdmetor/scikit-learn,ogrisel/scikit-learn,roxyboy/scikit-learn,macks22/scikit-learn,liberatorqjw/scikit-learn,Akshay0724/scikit-learn,loli/sklearn-ensembletrees,HolgerPeters/scikit-learn,fredhusser/scikit-learn,xzh86/scikit-learn,iismd17/scikit-learn,jzt5132/scikit-learn,AIML/scikit-learn,harshaneelhg/scikit-learn,Garrett-R/scikit-learn,nrhine1/scikit-learn,raghavrv/scikit-learn,huzq/scikit-learn,pnedunuri/scikit-learn,manashmndl/scikit-learn,etkirsch/scikit-learn,raghavrv/scikit-learn,meduz/scikit-learn,davidgbe/scikit-learn,ChanChiChoi/scikit-learn,pratapvardhan/scikit-learn,rishikksh20/scikit-learn,qifeigit/scikit-learn,LiaoPan/scikit-learn,terkkila/scikit-learn,fengzhyuan/scikit-learn,anntzer/scikit-learn,wanggang3333/scikit-learn,mjgrav2001/scikit-learn,ChanderG/scikit-learn,trungnt13/scikit-learn,procoder317/scikit-learn,CVML/scikit-learn,r-mart/scikit-learn,vortex-ape/scikit-learn,walterreade/scikit-learn,kylerbrown/scikit-learn,glemaitre/scikit-learn,mugizico/scikit-learn,stylianos-kampakis/scikit-learn,altairpearl/scikit-learn,mhue/scikit-learn,YinongLong/scikit-learn,hlin117/scikit-learn,cl4rke/scikit-learn,AlexanderFabisch/scikit-learn,pratapvardhan/scikit-learn,gotomypc/scikit-learn,MartinSavc/scikit-learn,olologin/scikit-learn,altairpearl/scikit-learn,jaidevd/scikit-learn,treycausey/scikit-learn,yask123/scikit-learn,amueller/scikit-learn,jmetzen/scikit-learn,marcocaccin/scikit-learn,RomainBrault/scikit-learn,ningchi/scikit-learn,khkaminska/scikit-learn,roxyboy/scikit-learn,kjung/scikit-learn,vibhorag/scikit-learn,gclenaghan/scikit-learn,CVML/scikit-learn,krez13/scikit-learn,plissonf/scikit-learn,ChanChiChoi/scikit-learn,anurag313/scikit-learn,jorge2703/scikit-learn,shyamalschandra/scikit-learn,ominux/scikit-learn,victorbergelin/scikit-learn,MechCoder/scikit-learn,mattilyra/scikit-learn,ElDeveloper/scikit-learn,zorroblue/scikit-learn,joernhees/scikit-learn,icdishb/scikit-learn,robbymeals/scikit-learn,ankurankan/scikit-learn,zihua/scikit-learn,PatrickOReilly/scikit-learn,IshankGulati/scikit-learn,mhdella/scikit-learn,nrhine1/scikit-learn,smartscheduling/scikit-learn-categorical-tree,MartinSavc/scikit-learn,Jimmy-Morzaria/scikit-learn,HolgerPeters/scikit-learn,vinayak-mehta/scikit-learn,nelson-liu/scikit-learn,glouppe/scikit-learn,vshtanko/scikit-learn,samuel1208/scikit-learn,evgchz/scikit-learn,shangwuhencc/scikit-learn,gotomypc/scikit-learn,OshynSong/scikit-learn,hdmetor/scikit-learn,mjudsp/Tsallis,russel1237/scikit-learn,ldirer/scikit-learn,bikong2/scikit-learn,ishanic/scikit-learn,ssaeger/scikit-learn,fengzhyuan/scikit-learn,lin-credible/scikit-learn,victorbergelin/scikit-learn,billy-inn/scikit-learn,nhejazi/scikit-learn,JsNoNo/scikit-learn,themrmax/scikit-learn,hsiaoyi0504/s
cikit-learn,alexeyum/scikit-learn,waterponey/scikit-learn,liberatorqjw/scikit-learn,dingocuster/scikit-learn,potash/scikit-learn,clemkoa/scikit-learn,PatrickChrist/scikit-learn,IssamLaradji/scikit-learn,aminert/scikit-learn,h2educ/scikit-learn,rexshihaoren/scikit-learn,fredhusser/scikit-learn,rohanp/scikit-learn,fyffyt/scikit-learn,wzbozon/scikit-learn,zorroblue/scikit-learn,schets/scikit-learn,IshankGulati/scikit-learn,hdmetor/scikit-learn,q1ang/scikit-learn,frank-tancf/scikit-learn,JsNoNo/scikit-learn,kashif/scikit-learn,nelson-liu/scikit-learn,arjoly/scikit-learn,robbymeals/scikit-learn,Windy-Ground/scikit-learn,ngoix/OCRF,rsivapr/scikit-learn,mfjb/scikit-learn,andaag/scikit-learn,mblondel/scikit-learn,ilyes14/scikit-learn,q1ang/scikit-learn,ilyes14/scikit-learn,shenzebang/scikit-learn,thientu/scikit-learn,AlexandreAbraham/scikit-learn,jzt5132/scikit-learn,ndingwall/scikit-learn,vshtanko/scikit-learn,JsNoNo/scikit-learn,sergeyf/scikit-learn,moutai/scikit-learn,toastedcornflakes/scikit-learn,pv/scikit-learn,JPFrancoia/scikit-learn,manashmndl/scikit-learn,nesterione/scikit-learn,kagayakidan/scikit-learn,kevin-intel/scikit-learn,saiwing-yeung/scikit-learn,abhishekkrthakur/scikit-learn,DSLituiev/scikit-learn,rrohan/scikit-learn,mrshu/scikit-learn,B3AU/waveTree,466152112/scikit-learn,wanggang3333/scikit-learn,B3AU/waveTree,bigdataelephants/scikit-learn,bthirion/scikit-learn,xzh86/scikit-learn,dingocuster/scikit-learn,manhhomienbienthuy/scikit-learn,tmhm/scikit-learn,Akshay0724/scikit-learn,xzh86/scikit-learn,aabadie/scikit-learn,betatim/scikit-learn,voxlol/scikit-learn,jblackburne/scikit-learn,Clyde-fare/scikit-learn,kashif/scikit-learn,dhruv13J/scikit-learn,RomainBrault/scikit-learn,thientu/scikit-learn,vybstat/scikit-learn,eickenberg/scikit-learn,djgagne/scikit-learn,krez13/scikit-learn,hitszxp/scikit-learn,ndingwall/scikit-learn,wlamond/scikit-learn,NunoEdgarGub1/scikit-learn,madjelan/scikit-learn,appapantula/scikit-learn,evgchz/scikit-learn,sergeyf/scikit-learn,hugobowne/scikit-learn,vybstat/scikit-learn,chrisburr/scikit-learn,mattgiguere/scikit-learn,ElDeveloper/scikit-learn,IndraVikas/scikit-learn,arabenjamin/scikit-learn,wzbozon/scikit-learn,rajat1994/scikit-learn,vybstat/scikit-learn,imaculate/scikit-learn,jkarnows/scikit-learn,tmhm/scikit-learn,RayMick/scikit-learn,mjgrav2001/scikit-learn,nmayorov/scikit-learn,macks22/scikit-learn,mayblue9/scikit-learn,davidgbe/scikit-learn,lbishal/scikit-learn,MohammedWasim/scikit-learn,mojoboss/scikit-learn,lenovor/scikit-learn,iismd17/scikit-learn,krez13/scikit-learn,jjx02230808/project0223,arabenjamin/scikit-learn,huzq/scikit-learn,ltiao/scikit-learn,mayblue9/scikit-learn,mattilyra/scikit-learn,jayflo/scikit-learn,xubenben/scikit-learn,pratapvardhan/scikit-learn,espg/scikit-learn,AlexanderFabisch/scikit-learn,Windy-Ground/scikit-learn,shikhardb/scikit-learn,ilyes14/scikit-learn,joshloyal/scikit-learn,pkruskal/scikit-learn,sergeyf/scikit-learn,madjelan/scikit-learn,JsNoNo/scikit-learn,trungnt13/scikit-learn,belltailjp/scikit-learn,Adai0808/scikit-learn,RayMick/scikit-learn,meduz/scikit-learn,Srisai85/scikit-learn,cdegroc/scikit-learn,shusenl/scikit-learn,ogrisel/scikit-learn,tdhopper/scikit-learn,quheng/scikit-learn,Jimmy-Morzaria/scikit-learn,bhargav/scikit-learn,etkirsch/scikit-learn,AnasGhrab/scikit-learn,ngoix/OCRF,vshtanko/scikit-learn,anirudhjayaraman/scikit-learn,cainiaocome/scikit-learn,kashif/scikit-learn,JeanKossaifi/scikit-learn,fengzhyuan/scikit-learn,yanlend/scikit-learn,ChanderG/scikit-learn,lbishal/scikit-learn,thilbern/scikit-learn
,Fireblend/scikit-learn,mxjl620/scikit-learn,anirudhjayaraman/scikit-learn,trankmichael/scikit-learn,hainm/scikit-learn,hrjn/scikit-learn,lucidfrontier45/scikit-learn,Achuth17/scikit-learn,nmayorov/scikit-learn,vigilv/scikit-learn,zihua/scikit-learn,IssamLaradji/scikit-learn,luo66/scikit-learn,siutanwong/scikit-learn,zuku1985/scikit-learn,shusenl/scikit-learn,themrmax/scikit-learn,michigraber/scikit-learn,pythonvietnam/scikit-learn,untom/scikit-learn,Srisai85/scikit-learn,ssaeger/scikit-learn,ZENGXH/scikit-learn,Nyker510/scikit-learn,jmschrei/scikit-learn,Vimos/scikit-learn,MartinDelzant/scikit-learn,theoryno3/scikit-learn,dsquareindia/scikit-learn,rrohan/scikit-learn,yask123/scikit-learn,aewhatley/scikit-learn,amueller/scikit-learn,trankmichael/scikit-learn,equialgo/scikit-learn,liberatorqjw/scikit-learn,ishanic/scikit-learn,DSLituiev/scikit-learn,jereze/scikit-learn,ngoix/OCRF,0asa/scikit-learn,hitszxp/scikit-learn,scikit-learn/scikit-learn,Titan-C/scikit-learn,mlyundin/scikit-learn,Adai0808/scikit-learn,PatrickChrist/scikit-learn,bikong2/scikit-learn,ssaeger/scikit-learn,Myasuka/scikit-learn,ndingwall/scikit-learn,CVML/scikit-learn,mayblue9/scikit-learn,ltiao/scikit-learn,Clyde-fare/scikit-learn,lazywei/scikit-learn,ilo10/scikit-learn,mfjb/scikit-learn,joernhees/scikit-learn,carrillo/scikit-learn,jkarnows/scikit-learn,fzalkow/scikit-learn,Nyker510/scikit-learn,AIML/scikit-learn,HolgerPeters/scikit-learn,aewhatley/scikit-learn,vibhorag/scikit-learn,walterreade/scikit-learn,fyffyt/scikit-learn,ivannz/scikit-learn,anurag313/scikit-learn,chrisburr/scikit-learn,rvraghav93/scikit-learn,spallavolu/scikit-learn,davidgbe/scikit-learn,joshloyal/scikit-learn,waterponey/scikit-learn,jkarnows/scikit-learn,deepesch/scikit-learn,Aasmi/scikit-learn,cainiaocome/scikit-learn,tosolveit/scikit-learn,bnaul/scikit-learn,Vimos/scikit-learn,fabianp/scikit-learn,JeanKossaifi/scikit-learn,RachitKansal/scikit-learn,appapantula/scikit-learn,B3AU/waveTree,IshankGulati/scikit-learn,henridwyer/scikit-learn,robin-lai/scikit-learn,amueller/scikit-learn,Garrett-R/scikit-learn,yanlend/scikit-learn,tomlof/scikit-learn,Barmaley-exe/scikit-learn,pypot/scikit-learn,joernhees/scikit-learn,phdowling/scikit-learn,lesteve/scikit-learn,zhenv5/scikit-learn,mhue/scikit-learn,fzalkow/scikit-learn,loli/semisupervisedforests,xyguo/scikit-learn,xubenben/scikit-learn,madjelan/scikit-learn,sgenoud/scikit-learn,jorik041/scikit-learn,thilbern/scikit-learn,AnasGhrab/scikit-learn,Barmaley-exe/scikit-learn,thientu/scikit-learn,voxlol/scikit-learn,CVML/scikit-learn,clemkoa/scikit-learn,yask123/scikit-learn,carrillo/scikit-learn,nesterione/scikit-learn,JPFrancoia/scikit-learn,Lawrence-Liu/scikit-learn,huobaowangxi/scikit-learn,Clyde-fare/scikit-learn,ashhher3/scikit-learn,ClimbsRocks/scikit-learn,costypetrisor/scikit-learn,LohithBlaze/scikit-learn,AlexanderFabisch/scikit-learn,theoryno3/scikit-learn,kaichogami/scikit-learn,russel1237/scikit-learn,shenzebang/scikit-learn,simon-pepin/scikit-learn,sgenoud/scikit-learn,samzhang111/scikit-learn,AnasGhrab/scikit-learn,evgchz/scikit-learn,jakirkham/scikit-learn,wanggang3333/scikit-learn,ldirer/scikit-learn,Akshay0724/scikit-learn,anntzer/scikit-learn,nikitasingh981/scikit-learn,evgchz/scikit-learn,jseabold/scikit-learn,lin-credible/scikit-learn,ephes/scikit-learn,liangz0707/scikit-learn,olologin/scikit-learn,wazeerzulfikar/scikit-learn,larsmans/scikit-learn,eg-zhang/scikit-learn,mfjb/scikit-learn,jjx02230808/project0223,alexeyum/scikit-learn,vortex-ape/scikit-learn,Obus/scikit-learn,btabibian/scikit-le
arn,ky822/scikit-learn,gclenaghan/scikit-learn,h2educ/scikit-learn,cybernet14/scikit-learn,ycaihua/scikit-learn,mattgiguere/scikit-learn,DonBeo/scikit-learn,yyjiang/scikit-learn,rexshihaoren/scikit-learn,sgenoud/scikit-learn,DSLituiev/scikit-learn,rahuldhote/scikit-learn,mhdella/scikit-learn,espg/scikit-learn,samzhang111/scikit-learn,shusenl/scikit-learn,hugobowne/scikit-learn,MartinDelzant/scikit-learn,wanggang3333/scikit-learn,moutai/scikit-learn,Sentient07/scikit-learn,larsmans/scikit-learn,lesteve/scikit-learn,arjoly/scikit-learn,btabibian/scikit-learn,hitszxp/scikit-learn,jm-begon/scikit-learn,xwolf12/scikit-learn,jakobworldpeace/scikit-learn,dsquareindia/scikit-learn,arahuja/scikit-learn,zhenv5/scikit-learn,alvarofierroclavero/scikit-learn,smartscheduling/scikit-learn-categorical-tree,vivekmishra1991/scikit-learn,liberatorqjw/scikit-learn,LiaoPan/scikit-learn,giorgiop/scikit-learn,deepesch/scikit-learn,TomDLT/scikit-learn,devanshdalal/scikit-learn,tosolveit/scikit-learn,djgagne/scikit-learn,pnedunuri/scikit-learn,RPGOne/scikit-learn,xavierwu/scikit-learn,rexshihaoren/scikit-learn,loli/sklearn-ensembletrees,fabianp/scikit-learn,vigilv/scikit-learn,alexsavio/scikit-learn,fbagirov/scikit-learn,kmike/scikit-learn,jseabold/scikit-learn,xwolf12/scikit-learn,lenovor/scikit-learn,tosolveit/scikit-learn,djgagne/scikit-learn,zihua/scikit-learn,adamgreenhall/scikit-learn,IssamLaradji/scikit-learn,tawsifkhan/scikit-learn,loli/semisupervisedforests,kylerbrown/scikit-learn,pompiduskus/scikit-learn,JosmanPS/scikit-learn,f3r/scikit-learn,huzq/scikit-learn,huobaowangxi/scikit-learn,harshaneelhg/scikit-learn,sarahgrogan/scikit-learn,RachitKansal/scikit-learn,AlexanderFabisch/scikit-learn,elkingtonmcb/scikit-learn,nomadcube/scikit-learn,tdhopper/scikit-learn,zorojean/scikit-learn,sonnyhu/scikit-learn,cybernet14/scikit-learn,jm-begon/scikit-learn,jlegendary/scikit-learn
"""Non linear regression with Support Vector Regression (SVR) using RBF kernel """ ############################################################################### # Generate sample data import numpy as np X = np.sort(5*np.random.rand(40, 1), axis=0) y = np.sin(X).ravel() ############################################################################### # Add noise to targets y[::5] += 3*(0.5 - np.random.rand(8)) ############################################################################### # Fit regression model from scikits.learn.svm import SVR svr_rbf = SVR(kernel='rbf', C=1e4, gamma=0.1) svr_lin = SVR(kernel='linear', C=1e4) svr_poly = SVR(kernel='poly', C=1e4, degree=2) y_rbf = svr_rbf.fit(X, y).predict(X) y_lin = svr_lin.fit(X, y).predict(X) y_poly = svr_poly.fit(X, y).predict(X) ############################################################################### # look at the results pl.scatter(X, y, c='k', label='data') pl.hold('on') pl.plot(X, y_rbf, c='g', label='RBF model') pl.plot(X, y_lin, c='r', label='Linear model') pl.plot(X, y_poly, c='b', label='Polynomial model') pl.xlabel('data') pl.ylabel('target') pl.title('Support Vector Regression') pl.legend() pl.show() BUG: Fix forgotten import in example
"""Non linear regression with Support Vector Regression (SVR) using RBF kernel """ ############################################################################### # Generate sample data import numpy as np X = np.sort(5*np.random.rand(40, 1), axis=0) y = np.sin(X).ravel() ############################################################################### # Add noise to targets y[::5] += 3*(0.5 - np.random.rand(8)) ############################################################################### # Fit regression model from scikits.learn.svm import SVR svr_rbf = SVR(kernel='rbf', C=1e4, gamma=0.1) svr_lin = SVR(kernel='linear', C=1e4) svr_poly = SVR(kernel='poly', C=1e4, degree=2) y_rbf = svr_rbf.fit(X, y).predict(X) y_lin = svr_lin.fit(X, y).predict(X) y_poly = svr_poly.fit(X, y).predict(X) ############################################################################### # look at the results import pylab as pl pl.scatter(X, y, c='k', label='data') pl.hold('on') pl.plot(X, y_rbf, c='g', label='RBF model') pl.plot(X, y_lin, c='r', label='Linear model') pl.plot(X, y_poly, c='b', label='Polynomial model') pl.xlabel('data') pl.ylabel('target') pl.title('Support Vector Regression') pl.legend() pl.show()
<commit_before>"""Non linear regression with Support Vector Regression (SVR) using RBF kernel """ ############################################################################### # Generate sample data import numpy as np X = np.sort(5*np.random.rand(40, 1), axis=0) y = np.sin(X).ravel() ############################################################################### # Add noise to targets y[::5] += 3*(0.5 - np.random.rand(8)) ############################################################################### # Fit regression model from scikits.learn.svm import SVR svr_rbf = SVR(kernel='rbf', C=1e4, gamma=0.1) svr_lin = SVR(kernel='linear', C=1e4) svr_poly = SVR(kernel='poly', C=1e4, degree=2) y_rbf = svr_rbf.fit(X, y).predict(X) y_lin = svr_lin.fit(X, y).predict(X) y_poly = svr_poly.fit(X, y).predict(X) ############################################################################### # look at the results pl.scatter(X, y, c='k', label='data') pl.hold('on') pl.plot(X, y_rbf, c='g', label='RBF model') pl.plot(X, y_lin, c='r', label='Linear model') pl.plot(X, y_poly, c='b', label='Polynomial model') pl.xlabel('data') pl.ylabel('target') pl.title('Support Vector Regression') pl.legend() pl.show() <commit_msg>BUG: Fix forgotten import in example<commit_after>
"""Non linear regression with Support Vector Regression (SVR) using RBF kernel """ ############################################################################### # Generate sample data import numpy as np X = np.sort(5*np.random.rand(40, 1), axis=0) y = np.sin(X).ravel() ############################################################################### # Add noise to targets y[::5] += 3*(0.5 - np.random.rand(8)) ############################################################################### # Fit regression model from scikits.learn.svm import SVR svr_rbf = SVR(kernel='rbf', C=1e4, gamma=0.1) svr_lin = SVR(kernel='linear', C=1e4) svr_poly = SVR(kernel='poly', C=1e4, degree=2) y_rbf = svr_rbf.fit(X, y).predict(X) y_lin = svr_lin.fit(X, y).predict(X) y_poly = svr_poly.fit(X, y).predict(X) ############################################################################### # look at the results import pylab as pl pl.scatter(X, y, c='k', label='data') pl.hold('on') pl.plot(X, y_rbf, c='g', label='RBF model') pl.plot(X, y_lin, c='r', label='Linear model') pl.plot(X, y_poly, c='b', label='Polynomial model') pl.xlabel('data') pl.ylabel('target') pl.title('Support Vector Regression') pl.legend() pl.show()
"""Non linear regression with Support Vector Regression (SVR) using RBF kernel """ ############################################################################### # Generate sample data import numpy as np X = np.sort(5*np.random.rand(40, 1), axis=0) y = np.sin(X).ravel() ############################################################################### # Add noise to targets y[::5] += 3*(0.5 - np.random.rand(8)) ############################################################################### # Fit regression model from scikits.learn.svm import SVR svr_rbf = SVR(kernel='rbf', C=1e4, gamma=0.1) svr_lin = SVR(kernel='linear', C=1e4) svr_poly = SVR(kernel='poly', C=1e4, degree=2) y_rbf = svr_rbf.fit(X, y).predict(X) y_lin = svr_lin.fit(X, y).predict(X) y_poly = svr_poly.fit(X, y).predict(X) ############################################################################### # look at the results pl.scatter(X, y, c='k', label='data') pl.hold('on') pl.plot(X, y_rbf, c='g', label='RBF model') pl.plot(X, y_lin, c='r', label='Linear model') pl.plot(X, y_poly, c='b', label='Polynomial model') pl.xlabel('data') pl.ylabel('target') pl.title('Support Vector Regression') pl.legend() pl.show() BUG: Fix forgotten import in example"""Non linear regression with Support Vector Regression (SVR) using RBF kernel """ ############################################################################### # Generate sample data import numpy as np X = np.sort(5*np.random.rand(40, 1), axis=0) y = np.sin(X).ravel() ############################################################################### # Add noise to targets y[::5] += 3*(0.5 - np.random.rand(8)) ############################################################################### # Fit regression model from scikits.learn.svm import SVR svr_rbf = SVR(kernel='rbf', C=1e4, gamma=0.1) svr_lin = SVR(kernel='linear', C=1e4) svr_poly = SVR(kernel='poly', C=1e4, degree=2) y_rbf = svr_rbf.fit(X, y).predict(X) y_lin = svr_lin.fit(X, y).predict(X) y_poly = svr_poly.fit(X, y).predict(X) ############################################################################### # look at the results import pylab as pl pl.scatter(X, y, c='k', label='data') pl.hold('on') pl.plot(X, y_rbf, c='g', label='RBF model') pl.plot(X, y_lin, c='r', label='Linear model') pl.plot(X, y_poly, c='b', label='Polynomial model') pl.xlabel('data') pl.ylabel('target') pl.title('Support Vector Regression') pl.legend() pl.show()
<commit_before>"""Non linear regression with Support Vector Regression (SVR) using RBF kernel """ ############################################################################### # Generate sample data import numpy as np X = np.sort(5*np.random.rand(40, 1), axis=0) y = np.sin(X).ravel() ############################################################################### # Add noise to targets y[::5] += 3*(0.5 - np.random.rand(8)) ############################################################################### # Fit regression model from scikits.learn.svm import SVR svr_rbf = SVR(kernel='rbf', C=1e4, gamma=0.1) svr_lin = SVR(kernel='linear', C=1e4) svr_poly = SVR(kernel='poly', C=1e4, degree=2) y_rbf = svr_rbf.fit(X, y).predict(X) y_lin = svr_lin.fit(X, y).predict(X) y_poly = svr_poly.fit(X, y).predict(X) ############################################################################### # look at the results pl.scatter(X, y, c='k', label='data') pl.hold('on') pl.plot(X, y_rbf, c='g', label='RBF model') pl.plot(X, y_lin, c='r', label='Linear model') pl.plot(X, y_poly, c='b', label='Polynomial model') pl.xlabel('data') pl.ylabel('target') pl.title('Support Vector Regression') pl.legend() pl.show() <commit_msg>BUG: Fix forgotten import in example<commit_after>"""Non linear regression with Support Vector Regression (SVR) using RBF kernel """ ############################################################################### # Generate sample data import numpy as np X = np.sort(5*np.random.rand(40, 1), axis=0) y = np.sin(X).ravel() ############################################################################### # Add noise to targets y[::5] += 3*(0.5 - np.random.rand(8)) ############################################################################### # Fit regression model from scikits.learn.svm import SVR svr_rbf = SVR(kernel='rbf', C=1e4, gamma=0.1) svr_lin = SVR(kernel='linear', C=1e4) svr_poly = SVR(kernel='poly', C=1e4, degree=2) y_rbf = svr_rbf.fit(X, y).predict(X) y_lin = svr_lin.fit(X, y).predict(X) y_poly = svr_poly.fit(X, y).predict(X) ############################################################################### # look at the results import pylab as pl pl.scatter(X, y, c='k', label='data') pl.hold('on') pl.plot(X, y_rbf, c='g', label='RBF model') pl.plot(X, y_lin, c='r', label='Linear model') pl.plot(X, y_poly, c='b', label='Polynomial model') pl.xlabel('data') pl.ylabel('target') pl.title('Support Vector Regression') pl.legend() pl.show()
029c3f46731cd3a4746043833e912447ababf1a7
build/extra_gitignore.py
build/extra_gitignore.py
#!/usr/bin/env python
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

"""
Adds extra patterns to the root .gitignore file.

Reads the contents of the filename given as the first argument and appends
them to the root .gitignore file. The new entries are intended to be additional
ignoring patterns, or negating patterns to override existing entries (man
gitignore for more details).
"""

import os
import sys

MODIFY_STRING = '# The following added by %s\n'


def main(argv):
  if not argv[1]:
    # Special case; do nothing.
    return 0

  modify_string = (MODIFY_STRING % argv[0])

  gitignore_file = os.path.dirname(argv[0]) + '/../.gitignore'
  lines = open(gitignore_file, 'r').readlines()
  for i, line in enumerate(lines):
    if line == modify_string:
      lines = lines[:i]
      break
  lines.append(modify_string)

  f = open(gitignore_file, 'w')
  f.write(''.join(lines))
  f.write(open(argv[1], 'r').read())
  f.close()

if __name__ == '__main__':
  sys.exit(main(sys.argv))
Add script for appending entries to .gitignore.
Add script for appending entries to .gitignore. TBR=kjellander Review URL: https://webrtc-codereview.appspot.com/1629004 git-svn-id: 03ae4fbe531b1eefc9d815f31e49022782c42458@4193 4adac7df-926f-26a2-2b94-8c16560cd09d
Python
bsd-3-clause
jchavanton/webrtc,lukeweber/webrtc-src-override,AOSPU/external_chromium_org_third_party_webrtc,lukeweber/webrtc-src-override,MIPS/external-chromium_org-third_party-webrtc,bpsinc-native/src_third_party_webrtc,krieger-od/webrtc,AOSPU/external_chromium_org_third_party_webrtc,MIPS/external-chromium_org-third_party-webrtc,CyanogenMod/android_external_chromium_org_third_party_webrtc,android-ia/platform_external_chromium_org_third_party_webrtc,svn2github/webrtc-Revision-8758,krieger-od/webrtc,jgcaaprom/android_external_chromium_org_third_party_webrtc,bpsinc-native/src_third_party_webrtc,Omegaphora/external_chromium_org_third_party_webrtc,SlimXperiments/external_chromium_org_third_party_webrtc,SlimXperiments/external_chromium_org_third_party_webrtc,MIPS/external-chromium_org-third_party-webrtc,aleonliao/webrtc-trunk,jchavanton/webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,bpsinc-native/src_third_party_webrtc,CyanogenMod/android_external_chromium_org_third_party_webrtc,CyanogenMod/android_external_chromium_org_third_party_webrtc,PersonifyInc/chromium_webrtc,jgcaaprom/android_external_chromium_org_third_party_webrtc,sippet/webrtc,krieger-od/nwjs_chromium_webrtc,svn2github/webrtc-Revision-8758,jchavanton/webrtc,krieger-od/webrtc,lukeweber/webrtc-src-override,android-ia/platform_external_chromium_org_third_party_webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,sippet/webrtc,krieger-od/nwjs_chromium_webrtc,android-ia/platform_external_chromium_org_third_party_webrtc,lukeweber/webrtc-src-override,krieger-od/webrtc,android-ia/platform_external_chromium_org_third_party_webrtc,PersonifyInc/chromium_webrtc,AOSPU/external_chromium_org_third_party_webrtc,krieger-od/webrtc,Omegaphora/external_chromium_org_third_party_webrtc,aleonliao/webrtc-trunk,android-ia/platform_external_chromium_org_third_party_webrtc,bpsinc-native/src_third_party_webrtc,svn2github/webrtc-Revision-8758,aleonliao/webrtc-trunk,MIPS/external-chromium_org-third_party-webrtc,Omegaphora/external_chromium_org_third_party_webrtc,MIPS/external-chromium_org-third_party-webrtc,jchavanton/webrtc,SlimXperiments/external_chromium_org_third_party_webrtc,SlimXperiments/external_chromium_org_third_party_webrtc,lukeweber/webrtc-src-override,bpsinc-native/src_third_party_webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,svn2github/webrtc-Revision-8758,PersonifyInc/chromium_webrtc,jgcaaprom/android_external_chromium_org_third_party_webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,CyanogenMod/android_external_chromium_org_third_party_webrtc,bpsinc-native/src_third_party_webrtc,Omegaphora/external_chromium_org_third_party_webrtc,bpsinc-native/src_third_party_webrtc,CyanogenMod/android_external_chromium_org_third_party_webrtc,Alkalyne/webrtctrunk,CyanogenMod/android_external_chromium_org_third_party_webrtc,AOSPU/external_chromium_org_third_party_webrtc,SlimXperiments/external_chromium_org_third_party_webrtc,android-ia/platform_external_chromium_org_third_party_webrtc,lukeweber/webrtc-src-override,Omegaphora/external_chromium_org_third_party_webrtc,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,Alkalyne/webrtctrunk,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,Alkalyne/webrtctrunk,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,android-ia/platform_external_chromium_org_third_party_webrtc,svn2github/webrtc-Revision-8758,jgcaaprom/android_external_chromium_org_third_party_webrtc,jchavanton/webrtc,sippet/webrtc,sippet/webrtc,Omegaphora/external_chromium_org_third_party_webrtc,Omegaphora/external_chromium_org_third_party_webrtc,Omegaphora/external_chromium_org_third_party_webrtc,sippet/webrtc,Alkalyne/webrtctrunk,jgcaaprom/android_external_chromium_org_third_party_webrtc,SlimXperiments/external_chromium_org_third_party_webrtc,krieger-od/nwjs_chromium_webrtc,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,Alkalyne/webrtctrunk,aleonliao/webrtc-trunk,PersonifyInc/chromium_webrtc,jgcaaprom/android_external_chromium_org_third_party_webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,CyanogenMod/android_external_chromium_org_third_party_webrtc,aleonliao/webrtc-trunk,android-ia/platform_external_chromium_org_third_party_webrtc,Alkalyne/webrtctrunk,bpsinc-native/src_third_party_webrtc,svn2github/webrtc-Revision-8758,aleonliao/webrtc-trunk,PersonifyInc/chromium_webrtc,krieger-od/nwjs_chromium_webrtc,AOSPU/external_chromium_org_third_party_webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,MIPS/external-chromium_org-third_party-webrtc,SlimXperiments/external_chromium_org_third_party_webrtc,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,xin3liang/platform_external_chromium_org_third_party_webrtc,sippet/webrtc,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,lukeweber/webrtc-src-override,MIPS/external-chromium_org-third_party-webrtc,krieger-od/nwjs_chromium_webrtc,krieger-od/nwjs_chromium_webrtc,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,Alkalyne/webrtctrunk,jchavanton/webrtc,jchavanton/webrtc,krieger-od/webrtc,PersonifyInc/chromium_webrtc,MIPS/external-chromium_org-third_party-webrtc,geekboxzone/lollipop_external_chromium_org_third_party_webrtc,Alkalyne/webrtctrunk,jgcaaprom/android_external_chromium_org_third_party_webrtc,AOSPU/external_chromium_org_third_party_webrtc,jgcaaprom/android_external_chromium_org_third_party_webrtc
Add script for appending entries to .gitignore. TBR=kjellander Review URL: https://webrtc-codereview.appspot.com/1629004 git-svn-id: 03ae4fbe531b1eefc9d815f31e49022782c42458@4193 4adac7df-926f-26a2-2b94-8c16560cd09d
#!/usr/bin/env python
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

"""
Adds extra patterns to the root .gitignore file.

Reads the contents of the filename given as the first argument and appends
them to the root .gitignore file. The new entries are intended to be additional
ignoring patterns, or negating patterns to override existing entries (man
gitignore for more details).
"""

import os
import sys

MODIFY_STRING = '# The following added by %s\n'


def main(argv):
  if not argv[1]:
    # Special case; do nothing.
    return 0

  modify_string = (MODIFY_STRING % argv[0])

  gitignore_file = os.path.dirname(argv[0]) + '/../.gitignore'
  lines = open(gitignore_file, 'r').readlines()
  for i, line in enumerate(lines):
    if line == modify_string:
      lines = lines[:i]
      break
  lines.append(modify_string)

  f = open(gitignore_file, 'w')
  f.write(''.join(lines))
  f.write(open(argv[1], 'r').read())
  f.close()

if __name__ == '__main__':
  sys.exit(main(sys.argv))
<commit_before><commit_msg>Add script for appending entries to .gitignore. TBR=kjellander Review URL: https://webrtc-codereview.appspot.com/1629004 git-svn-id: 03ae4fbe531b1eefc9d815f31e49022782c42458@4193 4adac7df-926f-26a2-2b94-8c16560cd09d<commit_after>
#!/usr/bin/env python
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

"""Adds extra patterns to the root .gitignore file.

Reads the contents of the filename given as the first argument and appends
them to the root .gitignore file. The new entries are intended to be
additional ignoring patterns, or negating patterns to override existing
entries (man gitignore for more details).
"""

import os
import sys

MODIFY_STRING = '# The following added by %s\n'


def main(argv):
    if not argv[1]:
        # Special case; do nothing.
        return 0

    modify_string = (MODIFY_STRING % argv[0])

    gitignore_file = os.path.dirname(argv[0]) + '/../.gitignore'
    lines = open(gitignore_file, 'r').readlines()
    for i, line in enumerate(lines):
        # Truncate at a previous run's marker so repeated runs do not
        # keep appending duplicate patterns.
        if line == modify_string:
            lines = lines[:i]
            break
    lines.append(modify_string)

    f = open(gitignore_file, 'w')
    f.write(''.join(lines))
    f.write(open(argv[1], 'r').read())
    f.close()


if __name__ == '__main__':
    sys.exit(main(sys.argv))
Add script for appending entries to .gitignore. TBR=kjellander Review URL: https://webrtc-codereview.appspot.com/1629004 git-svn-id: 03ae4fbe531b1eefc9d815f31e49022782c42458@4193 4adac7df-926f-26a2-2b94-8c16560cd09d
#!/usr/bin/env python
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

"""Adds extra patterns to the root .gitignore file.

Reads the contents of the filename given as the first argument and appends
them to the root .gitignore file. The new entries are intended to be
additional ignoring patterns, or negating patterns to override existing
entries (man gitignore for more details).
"""

import os
import sys

MODIFY_STRING = '# The following added by %s\n'


def main(argv):
    if not argv[1]:
        # Special case; do nothing.
        return 0

    modify_string = (MODIFY_STRING % argv[0])

    gitignore_file = os.path.dirname(argv[0]) + '/../.gitignore'
    lines = open(gitignore_file, 'r').readlines()
    for i, line in enumerate(lines):
        # Truncate at a previous run's marker so repeated runs do not
        # keep appending duplicate patterns.
        if line == modify_string:
            lines = lines[:i]
            break
    lines.append(modify_string)

    f = open(gitignore_file, 'w')
    f.write(''.join(lines))
    f.write(open(argv[1], 'r').read())
    f.close()


if __name__ == '__main__':
    sys.exit(main(sys.argv))
<commit_before><commit_msg>Add script for appending entries to .gitignore. TBR=kjellander Review URL: https://webrtc-codereview.appspot.com/1629004 git-svn-id: 03ae4fbe531b1eefc9d815f31e49022782c42458@4193 4adac7df-926f-26a2-2b94-8c16560cd09d<commit_after>
#!/usr/bin/env python
# Copyright (c) 2012 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.

"""Adds extra patterns to the root .gitignore file.

Reads the contents of the filename given as the first argument and appends
them to the root .gitignore file. The new entries are intended to be
additional ignoring patterns, or negating patterns to override existing
entries (man gitignore for more details).
"""

import os
import sys

MODIFY_STRING = '# The following added by %s\n'


def main(argv):
    if not argv[1]:
        # Special case; do nothing.
        return 0

    modify_string = (MODIFY_STRING % argv[0])

    gitignore_file = os.path.dirname(argv[0]) + '/../.gitignore'
    lines = open(gitignore_file, 'r').readlines()
    for i, line in enumerate(lines):
        # Truncate at a previous run's marker so repeated runs do not
        # keep appending duplicate patterns.
        if line == modify_string:
            lines = lines[:i]
            break
    lines.append(modify_string)

    f = open(gitignore_file, 'w')
    f.write(''.join(lines))
    f.write(open(argv[1], 'r').read())
    f.close()


if __name__ == '__main__':
    sys.exit(main(sys.argv))
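A note on the script in this record: writing a marker line and truncating at it on the next run is what makes the append idempotent, and the trick generalizes beyond .gitignore. A minimal standalone sketch of just that pattern; the marker text and function name are illustrative, not taken from the record:

MARKER = '# managed block below; do not edit\n'

def replace_managed_block(path, new_lines):
    # Drop everything from the marker onward, then re-append the marker
    # plus the freshly managed lines, so repeated runs are safe.
    with open(path) as f:
        kept = f.readlines()
    if MARKER in kept:
        kept = kept[:kept.index(MARKER)]
    with open(path, 'w') as f:
        f.writelines(kept + [MARKER] + new_lines)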
45bb10691ba7021bfb834a8941ac8492ae8509af
igor/plugins/copy/copy.py
igor/plugins/copy/copy.py
"""Copy values or subtrees, either locally or remotely. Currently a quick hack using either direct database access or httplib2, synchronously. Should use callUrl, so local/remote becomes similar, and some form of callback mechanism so it can run asynchronously. """ import requests import web import httplib2 DATABASE_ACCESS=None def myWebError(msg): return web.HTTPError(msg, {"Content-type": "text/plain"}, msg+'\n\n') def copy(src=None, dst=None, mimetype="text/plain", method='PUT'): if not src: raise myWebError("401 Required argument name missing") if not dst: raise myWebError("401 Required argument dst missing") srcParsed = urlparse.urlparse(src) if srcParsed.scheme == '' and srcParsed.netloc == '': # Local source srcValue = DATABASE_ACCESS.get_key(srcParsed.path, mimetype, None) else: # Remote source h = httplib2.Http() resp, srcValue = h.request(src, headers=dict(Accepts=mimetype) if resp.status != 200: raise myWebError("%d %s (%s)" % (resp.status, resp.reason, src)) dstParsed = urlparse.urlparse(dst) if dstParsed.scheme == '' and dstParsed.netloc == '': rv = DATABASE_ACCESS.put_key(dstParsed.path, None, srcValue, mimetype, method=='PUT') else: headers = {'Content-type' : mimetype} h = httplib2.Http() resp, rv = h.request(dst, method=method, headers=headers, data=srcValue) if resp.status != 200: raise myWebError("%d %s (%s)" % (resp.status, resp.reason, dst)) return rv
Copy subtree plugin. Unfinished and untested.
Copy subtree plugin. Unfinished and untested.
Python
mit
cwi-dis/igor,cwi-dis/igor,cwi-dis/igor
Copy subtree plugin. Unfinished and untested.
"""Copy values or subtrees, either locally or remotely. Currently a quick hack using either direct database access or httplib2, synchronously. Should use callUrl, so local/remote becomes similar, and some form of callback mechanism so it can run asynchronously. """ import requests import web import httplib2 DATABASE_ACCESS=None def myWebError(msg): return web.HTTPError(msg, {"Content-type": "text/plain"}, msg+'\n\n') def copy(src=None, dst=None, mimetype="text/plain", method='PUT'): if not src: raise myWebError("401 Required argument name missing") if not dst: raise myWebError("401 Required argument dst missing") srcParsed = urlparse.urlparse(src) if srcParsed.scheme == '' and srcParsed.netloc == '': # Local source srcValue = DATABASE_ACCESS.get_key(srcParsed.path, mimetype, None) else: # Remote source h = httplib2.Http() resp, srcValue = h.request(src, headers=dict(Accepts=mimetype) if resp.status != 200: raise myWebError("%d %s (%s)" % (resp.status, resp.reason, src)) dstParsed = urlparse.urlparse(dst) if dstParsed.scheme == '' and dstParsed.netloc == '': rv = DATABASE_ACCESS.put_key(dstParsed.path, None, srcValue, mimetype, method=='PUT') else: headers = {'Content-type' : mimetype} h = httplib2.Http() resp, rv = h.request(dst, method=method, headers=headers, data=srcValue) if resp.status != 200: raise myWebError("%d %s (%s)" % (resp.status, resp.reason, dst)) return rv
<commit_before><commit_msg>Copy subtree plugin. Unfinished and untested.<commit_after>
"""Copy values or subtrees, either locally or remotely. Currently a quick hack using either direct database access or httplib2, synchronously. Should use callUrl, so local/remote becomes similar, and some form of callback mechanism so it can run asynchronously. """ import requests import web import httplib2 DATABASE_ACCESS=None def myWebError(msg): return web.HTTPError(msg, {"Content-type": "text/plain"}, msg+'\n\n') def copy(src=None, dst=None, mimetype="text/plain", method='PUT'): if not src: raise myWebError("401 Required argument name missing") if not dst: raise myWebError("401 Required argument dst missing") srcParsed = urlparse.urlparse(src) if srcParsed.scheme == '' and srcParsed.netloc == '': # Local source srcValue = DATABASE_ACCESS.get_key(srcParsed.path, mimetype, None) else: # Remote source h = httplib2.Http() resp, srcValue = h.request(src, headers=dict(Accepts=mimetype) if resp.status != 200: raise myWebError("%d %s (%s)" % (resp.status, resp.reason, src)) dstParsed = urlparse.urlparse(dst) if dstParsed.scheme == '' and dstParsed.netloc == '': rv = DATABASE_ACCESS.put_key(dstParsed.path, None, srcValue, mimetype, method=='PUT') else: headers = {'Content-type' : mimetype} h = httplib2.Http() resp, rv = h.request(dst, method=method, headers=headers, data=srcValue) if resp.status != 200: raise myWebError("%d %s (%s)" % (resp.status, resp.reason, dst)) return rv
Copy subtree plugin. Unfinished and untested.
"""Copy values or subtrees, either locally or remotely.

Currently a quick hack using either direct database access or httplib2,
synchronously. Should use callUrl, so local/remote becomes similar, and
some form of callback mechanism so it can run asynchronously.
"""
import urlparse

import web
import httplib2

DATABASE_ACCESS = None


def myWebError(msg):
    return web.HTTPError(msg, {"Content-type": "text/plain"}, msg + '\n\n')


def copy(src=None, dst=None, mimetype="text/plain", method='PUT'):
    if not src:
        raise myWebError("401 Required argument src missing")
    if not dst:
        raise myWebError("401 Required argument dst missing")
    srcParsed = urlparse.urlparse(src)
    if srcParsed.scheme == '' and srcParsed.netloc == '':
        # Local source
        srcValue = DATABASE_ACCESS.get_key(srcParsed.path, mimetype, None)
    else:
        # Remote source
        h = httplib2.Http()
        resp, srcValue = h.request(src, headers=dict(Accept=mimetype))
        if resp.status != 200:
            raise myWebError("%d %s (%s)" % (resp.status, resp.reason, src))
    dstParsed = urlparse.urlparse(dst)
    if dstParsed.scheme == '' and dstParsed.netloc == '':
        # Local destination
        rv = DATABASE_ACCESS.put_key(dstParsed.path, None, srcValue,
                                     mimetype, method == 'PUT')
    else:
        # Remote destination
        headers = {'Content-type': mimetype}
        h = httplib2.Http()
        resp, rv = h.request(dst, method=method, headers=headers,
                             body=srcValue)
        if resp.status != 200:
            raise myWebError("%d %s (%s)" % (resp.status, resp.reason, dst))
    return rv
<commit_before><commit_msg>Copy subtree plugin. Unfinished and untested.<commit_after>
"""Copy values or subtrees, either locally or remotely.

Currently a quick hack using either direct database access or httplib2,
synchronously. Should use callUrl, so local/remote becomes similar, and
some form of callback mechanism so it can run asynchronously.
"""
import urlparse

import web
import httplib2

DATABASE_ACCESS = None


def myWebError(msg):
    return web.HTTPError(msg, {"Content-type": "text/plain"}, msg + '\n\n')


def copy(src=None, dst=None, mimetype="text/plain", method='PUT'):
    if not src:
        raise myWebError("401 Required argument src missing")
    if not dst:
        raise myWebError("401 Required argument dst missing")
    srcParsed = urlparse.urlparse(src)
    if srcParsed.scheme == '' and srcParsed.netloc == '':
        # Local source
        srcValue = DATABASE_ACCESS.get_key(srcParsed.path, mimetype, None)
    else:
        # Remote source
        h = httplib2.Http()
        resp, srcValue = h.request(src, headers=dict(Accept=mimetype))
        if resp.status != 200:
            raise myWebError("%d %s (%s)" % (resp.status, resp.reason, src))
    dstParsed = urlparse.urlparse(dst)
    if dstParsed.scheme == '' and dstParsed.netloc == '':
        # Local destination
        rv = DATABASE_ACCESS.put_key(dstParsed.path, None, srcValue,
                                     mimetype, method == 'PUT')
    else:
        # Remote destination
        headers = {'Content-type': mimetype}
        h = httplib2.Http()
        resp, rv = h.request(dst, method=method, headers=headers,
                             body=srcValue)
        if resp.status != 200:
            raise myWebError("%d %s (%s)" % (resp.status, resp.reason, dst))
    return rv
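The scheme check in this plugin, where an empty scheme and netloc mean a local path, is a compact way to route one call to either local or remote handling. As a standalone sketch of just that dispatch; the two handler callables are hypothetical placeholders, not part of the igor codebase:

try:
    from urllib.parse import urlparse  # Python 3
except ImportError:
    from urlparse import urlparse  # Python 2

def fetch(src, local_get, remote_get):
    parsed = urlparse(src)
    if not parsed.scheme and not parsed.netloc:
        # No scheme or host: treat as a local (database/file) path.
        return local_get(parsed.path)
    return remote_get(src)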
0f39968ee0cfd4b38a599b6fc9dc6d8369392513
benchmarks/benchmark3.py
benchmarks/benchmark3.py
from time import clock from random import choice, randint, seed from sys import stdout import ahocorasick def write(str): stdout.write(str) stdout.flush() def writeln(str): stdout.write(str) stdout.write('\n') class ElapsedTime: def __init__(self, msg): self.msg = msg def __enter__(self): write("%-40s: " % self.msg) self.start = clock() def __exit__(self, a1, a2, a3): self.stop = clock() writeln("%0.3f s" % self.get_time()) def get_time(self): return self.stop - self.start class Test: def __init__(self, max_word_length, count): self.min_word_length = 3 self.max_word_length = max_word_length self.count = count self.words = [] self.inexisting = [] self.input = "" self.automaton = None seed(0) # make sure that tests will be repeatable def init_data(self): def random_word(length): chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" return ''.join(choice(chars) for _ in range(length)) for i in range(self.count): length = randint(self.min_word_length, self.max_word_length) self.words.append(random_word(length)) length = randint(self.min_word_length, self.max_word_length) self.inexisting.append(random_word(length)) self.input = random_word(self.count) assert(len(self.words) == len(self.inexisting)) def add_words(self): self.automaton = ahocorasick.Automaton() A = self.automaton for word in self.words: A.add_word(word, word) def build(self): self.automaton.make_automaton() def lookup(self): n = len(self.words) A = self.automaton for i in range(n): A.get(self.words[i]) A.get(self.inexisting[i], "unknown") def search(self): A = self.automaton n = 0 for item in A.iter(self.input): n += 1 def run(self): with ElapsedTime("Generating data (%d words)" % self.count): self.init_data() with ElapsedTime("Add words"): self.add_words() with ElapsedTime("Building automaton"): self.build() with ElapsedTime("Look up"): self.lookup() with ElapsedTime("Search"): self.search() def main(): test = Test(32, 1000000) test.run() if __name__ == '__main__': main()
Add benchmark script for python 3
Add benchmark script for python 3
Python
bsd-3-clause
WojciechMula/pyahocorasick,WojciechMula/pyahocorasick,pombredanne/pyahocorasick,WojciechMula/pyahocorasick,pombredanne/pyahocorasick,pombredanne/pyahocorasick,pombredanne/pyahocorasick,WojciechMula/pyahocorasick,woakesd/pyahocorasick,woakesd/pyahocorasick
Add benchmark script for python 3
from time import clock from random import choice, randint, seed from sys import stdout import ahocorasick def write(str): stdout.write(str) stdout.flush() def writeln(str): stdout.write(str) stdout.write('\n') class ElapsedTime: def __init__(self, msg): self.msg = msg def __enter__(self): write("%-40s: " % self.msg) self.start = clock() def __exit__(self, a1, a2, a3): self.stop = clock() writeln("%0.3f s" % self.get_time()) def get_time(self): return self.stop - self.start class Test: def __init__(self, max_word_length, count): self.min_word_length = 3 self.max_word_length = max_word_length self.count = count self.words = [] self.inexisting = [] self.input = "" self.automaton = None seed(0) # make sure that tests will be repeatable def init_data(self): def random_word(length): chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" return ''.join(choice(chars) for _ in range(length)) for i in range(self.count): length = randint(self.min_word_length, self.max_word_length) self.words.append(random_word(length)) length = randint(self.min_word_length, self.max_word_length) self.inexisting.append(random_word(length)) self.input = random_word(self.count) assert(len(self.words) == len(self.inexisting)) def add_words(self): self.automaton = ahocorasick.Automaton() A = self.automaton for word in self.words: A.add_word(word, word) def build(self): self.automaton.make_automaton() def lookup(self): n = len(self.words) A = self.automaton for i in range(n): A.get(self.words[i]) A.get(self.inexisting[i], "unknown") def search(self): A = self.automaton n = 0 for item in A.iter(self.input): n += 1 def run(self): with ElapsedTime("Generating data (%d words)" % self.count): self.init_data() with ElapsedTime("Add words"): self.add_words() with ElapsedTime("Building automaton"): self.build() with ElapsedTime("Look up"): self.lookup() with ElapsedTime("Search"): self.search() def main(): test = Test(32, 1000000) test.run() if __name__ == '__main__': main()
<commit_before><commit_msg>Add benchmark script for python 3<commit_after>
from time import clock from random import choice, randint, seed from sys import stdout import ahocorasick def write(str): stdout.write(str) stdout.flush() def writeln(str): stdout.write(str) stdout.write('\n') class ElapsedTime: def __init__(self, msg): self.msg = msg def __enter__(self): write("%-40s: " % self.msg) self.start = clock() def __exit__(self, a1, a2, a3): self.stop = clock() writeln("%0.3f s" % self.get_time()) def get_time(self): return self.stop - self.start class Test: def __init__(self, max_word_length, count): self.min_word_length = 3 self.max_word_length = max_word_length self.count = count self.words = [] self.inexisting = [] self.input = "" self.automaton = None seed(0) # make sure that tests will be repeatable def init_data(self): def random_word(length): chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" return ''.join(choice(chars) for _ in range(length)) for i in range(self.count): length = randint(self.min_word_length, self.max_word_length) self.words.append(random_word(length)) length = randint(self.min_word_length, self.max_word_length) self.inexisting.append(random_word(length)) self.input = random_word(self.count) assert(len(self.words) == len(self.inexisting)) def add_words(self): self.automaton = ahocorasick.Automaton() A = self.automaton for word in self.words: A.add_word(word, word) def build(self): self.automaton.make_automaton() def lookup(self): n = len(self.words) A = self.automaton for i in range(n): A.get(self.words[i]) A.get(self.inexisting[i], "unknown") def search(self): A = self.automaton n = 0 for item in A.iter(self.input): n += 1 def run(self): with ElapsedTime("Generating data (%d words)" % self.count): self.init_data() with ElapsedTime("Add words"): self.add_words() with ElapsedTime("Building automaton"): self.build() with ElapsedTime("Look up"): self.lookup() with ElapsedTime("Search"): self.search() def main(): test = Test(32, 1000000) test.run() if __name__ == '__main__': main()
Add benchmark script for python 3from time import clock from random import choice, randint, seed from sys import stdout import ahocorasick def write(str): stdout.write(str) stdout.flush() def writeln(str): stdout.write(str) stdout.write('\n') class ElapsedTime: def __init__(self, msg): self.msg = msg def __enter__(self): write("%-40s: " % self.msg) self.start = clock() def __exit__(self, a1, a2, a3): self.stop = clock() writeln("%0.3f s" % self.get_time()) def get_time(self): return self.stop - self.start class Test: def __init__(self, max_word_length, count): self.min_word_length = 3 self.max_word_length = max_word_length self.count = count self.words = [] self.inexisting = [] self.input = "" self.automaton = None seed(0) # make sure that tests will be repeatable def init_data(self): def random_word(length): chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" return ''.join(choice(chars) for _ in range(length)) for i in range(self.count): length = randint(self.min_word_length, self.max_word_length) self.words.append(random_word(length)) length = randint(self.min_word_length, self.max_word_length) self.inexisting.append(random_word(length)) self.input = random_word(self.count) assert(len(self.words) == len(self.inexisting)) def add_words(self): self.automaton = ahocorasick.Automaton() A = self.automaton for word in self.words: A.add_word(word, word) def build(self): self.automaton.make_automaton() def lookup(self): n = len(self.words) A = self.automaton for i in range(n): A.get(self.words[i]) A.get(self.inexisting[i], "unknown") def search(self): A = self.automaton n = 0 for item in A.iter(self.input): n += 1 def run(self): with ElapsedTime("Generating data (%d words)" % self.count): self.init_data() with ElapsedTime("Add words"): self.add_words() with ElapsedTime("Building automaton"): self.build() with ElapsedTime("Look up"): self.lookup() with ElapsedTime("Search"): self.search() def main(): test = Test(32, 1000000) test.run() if __name__ == '__main__': main()
<commit_before><commit_msg>Add benchmark script for python 3<commit_after>from time import clock from random import choice, randint, seed from sys import stdout import ahocorasick def write(str): stdout.write(str) stdout.flush() def writeln(str): stdout.write(str) stdout.write('\n') class ElapsedTime: def __init__(self, msg): self.msg = msg def __enter__(self): write("%-40s: " % self.msg) self.start = clock() def __exit__(self, a1, a2, a3): self.stop = clock() writeln("%0.3f s" % self.get_time()) def get_time(self): return self.stop - self.start class Test: def __init__(self, max_word_length, count): self.min_word_length = 3 self.max_word_length = max_word_length self.count = count self.words = [] self.inexisting = [] self.input = "" self.automaton = None seed(0) # make sure that tests will be repeatable def init_data(self): def random_word(length): chars = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789" return ''.join(choice(chars) for _ in range(length)) for i in range(self.count): length = randint(self.min_word_length, self.max_word_length) self.words.append(random_word(length)) length = randint(self.min_word_length, self.max_word_length) self.inexisting.append(random_word(length)) self.input = random_word(self.count) assert(len(self.words) == len(self.inexisting)) def add_words(self): self.automaton = ahocorasick.Automaton() A = self.automaton for word in self.words: A.add_word(word, word) def build(self): self.automaton.make_automaton() def lookup(self): n = len(self.words) A = self.automaton for i in range(n): A.get(self.words[i]) A.get(self.inexisting[i], "unknown") def search(self): A = self.automaton n = 0 for item in A.iter(self.input): n += 1 def run(self): with ElapsedTime("Generating data (%d words)" % self.count): self.init_data() with ElapsedTime("Add words"): self.add_words() with ElapsedTime("Building automaton"): self.build() with ElapsedTime("Look up"): self.lookup() with ElapsedTime("Search"): self.search() def main(): test = Test(32, 1000000) test.run() if __name__ == '__main__': main()
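One portability caveat for this benchmark: time.clock, which it imports, was deprecated in Python 3.3 and removed in 3.8, so the script no longer imports on current interpreters. A small shim that would keep the rest of the code unchanged:

import time

# time.clock is gone as of Python 3.8; perf_counter is the usual
# replacement for wall-clock benchmark timing. The fallback is only
# evaluated on interpreters old enough to still have time.clock.
clock = getattr(time, 'perf_counter', None) or time.clock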
d707fd1593b47d228d33bb283bb8634075df12a7
telethon_examples/print_updates.py
telethon_examples/print_updates.py
#!/usr/bin/env python3
# A simple script to print all updates received

from telethon import TelegramClient
from telethon.errors import SessionPasswordNeededError
from getpass import getpass
from os import environ

# environ is used to get API information from environment variables
# You could also use a config file, pass them as arguments,
# or even hardcode them (not recommended)


def main():
    session_name = environ.get('TG_SESSION', 'session')
    user_phone = environ['TG_PHONE']
    client = TelegramClient(session_name,
                            int(environ['TG_API_ID']),
                            environ['TG_API_HASH'],
                            proxy=None,
                            update_workers=4)

    print('INFO: Connecting to Telegram Servers...', end='', flush=True)
    client.connect()
    print('Done!')

    if not client.is_user_authorized():
        print('INFO: Unauthorized user')
        client.send_code_request(user_phone)
        code_ok = False
        while not code_ok:
            code = input('Enter the auth code: ')
            try:
                code_ok = client.sign_in(user_phone, code)
            except SessionPasswordNeededError:
                pw = getpass('Two step verification enabled. '
                             'Please enter your password: ')
                code_ok = client.sign_in(password=pw)
    print('INFO: Client initialized successfully!')

    client.add_update_handler(update_handler)
    input('Press Enter to stop this!\n')


def update_handler(update):
    print(update)
    print('Press Enter to stop this!')


if __name__ == '__main__':
    main()
Add example script to print out all updates
Add example script to print out all updates
Python
mit
LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,LonamiWebs/Telethon,expectocode/Telethon,andr-04/Telethon
Add example script to print out all updates
#!/usr/bin/env python3
# A simple script to print all updates received

from telethon import TelegramClient
from telethon.errors import SessionPasswordNeededError
from getpass import getpass
from os import environ

# environ is used to get API information from environment variables
# You could also use a config file, pass them as arguments,
# or even hardcode them (not recommended)


def main():
    session_name = environ.get('TG_SESSION', 'session')
    user_phone = environ['TG_PHONE']
    client = TelegramClient(session_name,
                            int(environ['TG_API_ID']),
                            environ['TG_API_HASH'],
                            proxy=None,
                            update_workers=4)

    print('INFO: Connecting to Telegram Servers...', end='', flush=True)
    client.connect()
    print('Done!')

    if not client.is_user_authorized():
        print('INFO: Unauthorized user')
        client.send_code_request(user_phone)
        code_ok = False
        while not code_ok:
            code = input('Enter the auth code: ')
            try:
                code_ok = client.sign_in(user_phone, code)
            except SessionPasswordNeededError:
                pw = getpass('Two step verification enabled. '
                             'Please enter your password: ')
                code_ok = client.sign_in(password=pw)
    print('INFO: Client initialized successfully!')

    client.add_update_handler(update_handler)
    input('Press Enter to stop this!\n')


def update_handler(update):
    print(update)
    print('Press Enter to stop this!')


if __name__ == '__main__':
    main()
<commit_before><commit_msg>Add example script to print out all updates<commit_after>
#!/usr/bin/env python3
# A simple script to print all updates received

from telethon import TelegramClient
from telethon.errors import SessionPasswordNeededError
from getpass import getpass
from os import environ

# environ is used to get API information from environment variables
# You could also use a config file, pass them as arguments,
# or even hardcode them (not recommended)


def main():
    session_name = environ.get('TG_SESSION', 'session')
    user_phone = environ['TG_PHONE']
    client = TelegramClient(session_name,
                            int(environ['TG_API_ID']),
                            environ['TG_API_HASH'],
                            proxy=None,
                            update_workers=4)

    print('INFO: Connecting to Telegram Servers...', end='', flush=True)
    client.connect()
    print('Done!')

    if not client.is_user_authorized():
        print('INFO: Unauthorized user')
        client.send_code_request(user_phone)
        code_ok = False
        while not code_ok:
            code = input('Enter the auth code: ')
            try:
                code_ok = client.sign_in(user_phone, code)
            except SessionPasswordNeededError:
                pw = getpass('Two step verification enabled. '
                             'Please enter your password: ')
                code_ok = client.sign_in(password=pw)
    print('INFO: Client initialized successfully!')

    client.add_update_handler(update_handler)
    input('Press Enter to stop this!\n')


def update_handler(update):
    print(update)
    print('Press Enter to stop this!')


if __name__ == '__main__':
    main()
Add example script to print out all updates
#!/usr/bin/env python3
# A simple script to print all updates received

from telethon import TelegramClient
from telethon.errors import SessionPasswordNeededError
from getpass import getpass
from os import environ

# environ is used to get API information from environment variables
# You could also use a config file, pass them as arguments,
# or even hardcode them (not recommended)


def main():
    session_name = environ.get('TG_SESSION', 'session')
    user_phone = environ['TG_PHONE']
    client = TelegramClient(session_name,
                            int(environ['TG_API_ID']),
                            environ['TG_API_HASH'],
                            proxy=None,
                            update_workers=4)

    print('INFO: Connecting to Telegram Servers...', end='', flush=True)
    client.connect()
    print('Done!')

    if not client.is_user_authorized():
        print('INFO: Unauthorized user')
        client.send_code_request(user_phone)
        code_ok = False
        while not code_ok:
            code = input('Enter the auth code: ')
            try:
                code_ok = client.sign_in(user_phone, code)
            except SessionPasswordNeededError:
                pw = getpass('Two step verification enabled. '
                             'Please enter your password: ')
                code_ok = client.sign_in(password=pw)
    print('INFO: Client initialized successfully!')

    client.add_update_handler(update_handler)
    input('Press Enter to stop this!\n')


def update_handler(update):
    print(update)
    print('Press Enter to stop this!')


if __name__ == '__main__':
    main()
<commit_before><commit_msg>Add example script to print out all updates<commit_after>
#!/usr/bin/env python3
# A simple script to print all updates received

from telethon import TelegramClient
from telethon.errors import SessionPasswordNeededError
from getpass import getpass
from os import environ

# environ is used to get API information from environment variables
# You could also use a config file, pass them as arguments,
# or even hardcode them (not recommended)


def main():
    session_name = environ.get('TG_SESSION', 'session')
    user_phone = environ['TG_PHONE']
    client = TelegramClient(session_name,
                            int(environ['TG_API_ID']),
                            environ['TG_API_HASH'],
                            proxy=None,
                            update_workers=4)

    print('INFO: Connecting to Telegram Servers...', end='', flush=True)
    client.connect()
    print('Done!')

    if not client.is_user_authorized():
        print('INFO: Unauthorized user')
        client.send_code_request(user_phone)
        code_ok = False
        while not code_ok:
            code = input('Enter the auth code: ')
            try:
                code_ok = client.sign_in(user_phone, code)
            except SessionPasswordNeededError:
                pw = getpass('Two step verification enabled. '
                             'Please enter your password: ')
                code_ok = client.sign_in(password=pw)
    print('INFO: Client initialized successfully!')

    client.add_update_handler(update_handler)
    input('Press Enter to stop this!\n')


def update_handler(update):
    print(update)
    print('Press Enter to stop this!')


if __name__ == '__main__':
    main()
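For reference, later Telethon releases (the 1.x line) fold the connect/sign-in loop above into a single client.start() call and deliver updates through event decorators. A rough sketch under that newer API, with the same environment variables assumed; this is not the code from the record, and the exact event class to use (events.Raw for unprocessed updates) is from memory of the 1.x docs:

from os import environ
from telethon import TelegramClient, events

client = TelegramClient(environ.get('TG_SESSION', 'session'),
                        int(environ['TG_API_ID']),
                        environ['TG_API_HASH'])

@client.on(events.Raw)
async def handler(update):
    print(update)

client.start()  # prompts for phone, code and 2FA password as needed
client.run_until_disconnected()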
4f5b495a0051b39e1e6fdd54f13bad0bd2b1bd29
myria/test/test_logs.py
myria/test/test_logs.py
from httmock import urlmatch, HTTMock import unittest from myria import MyriaConnection @urlmatch(netloc=r'localhost:8753') def local_mock(url, request): print url if url.path == '/logs/sent': body = 'foo,bar\nbaz,ban' return {'status_code': 200, 'content': body} return None class TestQuery(unittest.TestCase): def __init__(self, args): with HTTMock(local_mock): self.connection = MyriaConnection(hostname='localhost', port=8753) unittest.TestCase.__init__(self, args) def test_sent_logs(self): with HTTMock(local_mock): logs = self.connection.get_sent_logs(42) self.assertEquals(list(logs), [['foo', 'bar'], ['baz', 'ban']])
Test getting the logs (broken)
Test getting the logs (broken)
Python
bsd-3-clause
uwescience/myria-python,uwescience/myria-python
Test getting the logs (broken)
from httmock import urlmatch, HTTMock import unittest from myria import MyriaConnection @urlmatch(netloc=r'localhost:8753') def local_mock(url, request): print url if url.path == '/logs/sent': body = 'foo,bar\nbaz,ban' return {'status_code': 200, 'content': body} return None class TestQuery(unittest.TestCase): def __init__(self, args): with HTTMock(local_mock): self.connection = MyriaConnection(hostname='localhost', port=8753) unittest.TestCase.__init__(self, args) def test_sent_logs(self): with HTTMock(local_mock): logs = self.connection.get_sent_logs(42) self.assertEquals(list(logs), [['foo', 'bar'], ['baz', 'ban']])
<commit_before><commit_msg>Test getting the logs (broken)<commit_after>
from httmock import urlmatch, HTTMock import unittest from myria import MyriaConnection @urlmatch(netloc=r'localhost:8753') def local_mock(url, request): print url if url.path == '/logs/sent': body = 'foo,bar\nbaz,ban' return {'status_code': 200, 'content': body} return None class TestQuery(unittest.TestCase): def __init__(self, args): with HTTMock(local_mock): self.connection = MyriaConnection(hostname='localhost', port=8753) unittest.TestCase.__init__(self, args) def test_sent_logs(self): with HTTMock(local_mock): logs = self.connection.get_sent_logs(42) self.assertEquals(list(logs), [['foo', 'bar'], ['baz', 'ban']])
Test getting the logs (broken)from httmock import urlmatch, HTTMock import unittest from myria import MyriaConnection @urlmatch(netloc=r'localhost:8753') def local_mock(url, request): print url if url.path == '/logs/sent': body = 'foo,bar\nbaz,ban' return {'status_code': 200, 'content': body} return None class TestQuery(unittest.TestCase): def __init__(self, args): with HTTMock(local_mock): self.connection = MyriaConnection(hostname='localhost', port=8753) unittest.TestCase.__init__(self, args) def test_sent_logs(self): with HTTMock(local_mock): logs = self.connection.get_sent_logs(42) self.assertEquals(list(logs), [['foo', 'bar'], ['baz', 'ban']])
<commit_before><commit_msg>Test getting the logs (broken)<commit_after>from httmock import urlmatch, HTTMock import unittest from myria import MyriaConnection @urlmatch(netloc=r'localhost:8753') def local_mock(url, request): print url if url.path == '/logs/sent': body = 'foo,bar\nbaz,ban' return {'status_code': 200, 'content': body} return None class TestQuery(unittest.TestCase): def __init__(self, args): with HTTMock(local_mock): self.connection = MyriaConnection(hostname='localhost', port=8753) unittest.TestCase.__init__(self, args) def test_sent_logs(self): with HTTMock(local_mock): logs = self.connection.get_sent_logs(42) self.assertEquals(list(logs), [['foo', 'bar'], ['baz', 'ban']])
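The shape this test expects, each CSV row of the mocked body becoming a list of strings, is exactly what the stdlib csv module yields, which is presumably how the connection parses the response. A quick illustration of that parsing step on its own (plain Python 3, independent of myria):

import csv
import io

body = 'foo,bar\nbaz,ban'
rows = list(csv.reader(io.StringIO(body)))
assert rows == [['foo', 'bar'], ['baz', 'ban']]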
111b296112cf3b21f9abc1da37b047c1d0bc0ab8
tests/scoring_engine/web/test_about.py
tests/scoring_engine/web/test_about.py
from tests.scoring_engine.web.web_test import WebTest from scoring_engine.version import version from scoring_engine.engine.config import config class TestAbout(WebTest): # def setup(self): # super(TestWelcome, self).setup() # self.expected_sponsorship_images = OrderedDict() # self.expected_sponsorship_images['diamond'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['platinum'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['somecustomlevel'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['gold'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] def test_about(self): resp = self.client.get('/about') assert self.mock_obj.call_args == self.build_args('about.html', version=version, config_about_content=config.web_about_us_page_content) assert resp.status_code == 200 # def test_home_index(self): # resp = self.client.get('/index') # assert self.mock_obj.call_args == self.build_args('welcome.html', sponsorship_images=self.expected_sponsorship_images) # assert resp.status_code == 200
Add test for about view
Add test for about view
Python
mit
pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine
Add test for about view
from tests.scoring_engine.web.web_test import WebTest from scoring_engine.version import version from scoring_engine.engine.config import config class TestAbout(WebTest): # def setup(self): # super(TestWelcome, self).setup() # self.expected_sponsorship_images = OrderedDict() # self.expected_sponsorship_images['diamond'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['platinum'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['somecustomlevel'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['gold'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] def test_about(self): resp = self.client.get('/about') assert self.mock_obj.call_args == self.build_args('about.html', version=version, config_about_content=config.web_about_us_page_content) assert resp.status_code == 200 # def test_home_index(self): # resp = self.client.get('/index') # assert self.mock_obj.call_args == self.build_args('welcome.html', sponsorship_images=self.expected_sponsorship_images) # assert resp.status_code == 200
<commit_before><commit_msg>Add test for about view<commit_after>
from tests.scoring_engine.web.web_test import WebTest from scoring_engine.version import version from scoring_engine.engine.config import config class TestAbout(WebTest): # def setup(self): # super(TestWelcome, self).setup() # self.expected_sponsorship_images = OrderedDict() # self.expected_sponsorship_images['diamond'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['platinum'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['somecustomlevel'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['gold'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] def test_about(self): resp = self.client.get('/about') assert self.mock_obj.call_args == self.build_args('about.html', version=version, config_about_content=config.web_about_us_page_content) assert resp.status_code == 200 # def test_home_index(self): # resp = self.client.get('/index') # assert self.mock_obj.call_args == self.build_args('welcome.html', sponsorship_images=self.expected_sponsorship_images) # assert resp.status_code == 200
Add test for about viewfrom tests.scoring_engine.web.web_test import WebTest from scoring_engine.version import version from scoring_engine.engine.config import config class TestAbout(WebTest): # def setup(self): # super(TestWelcome, self).setup() # self.expected_sponsorship_images = OrderedDict() # self.expected_sponsorship_images['diamond'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['platinum'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['somecustomlevel'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['gold'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] def test_about(self): resp = self.client.get('/about') assert self.mock_obj.call_args == self.build_args('about.html', version=version, config_about_content=config.web_about_us_page_content) assert resp.status_code == 200 # def test_home_index(self): # resp = self.client.get('/index') # assert self.mock_obj.call_args == self.build_args('welcome.html', sponsorship_images=self.expected_sponsorship_images) # assert resp.status_code == 200
<commit_before><commit_msg>Add test for about view<commit_after>from tests.scoring_engine.web.web_test import WebTest from scoring_engine.version import version from scoring_engine.engine.config import config class TestAbout(WebTest): # def setup(self): # super(TestWelcome, self).setup() # self.expected_sponsorship_images = OrderedDict() # self.expected_sponsorship_images['diamond'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['platinum'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['somecustomlevel'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] # self.expected_sponsorship_images['gold'] = ['/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg', '/static/images/logo-placeholder.jpg'] def test_about(self): resp = self.client.get('/about') assert self.mock_obj.call_args == self.build_args('about.html', version=version, config_about_content=config.web_about_us_page_content) assert resp.status_code == 200 # def test_home_index(self): # resp = self.client.get('/index') # assert self.mock_obj.call_args == self.build_args('welcome.html', sponsorship_images=self.expected_sponsorship_images) # assert resp.status_code == 200
3a6bb4b7c282ee6c3ede1f3b662a70c9dc3ca638
benchmarks/bench_nbody.py
benchmarks/bench_nbody.py
""" Benchmark an implementation of the N-body simulation. As in the CUDA version, we only compute accelerations and don't care to update speeds and positions. """ from __future__ import division import math import sys import numpy as np from numba import jit, float32, float64 eps_2 = np.float32(1e-6) zero = np.float32(0.0) one = np.float32(1.0) @jit def run_numba_nbody(positions, weights): accelerations = np.zeros_like(positions) n = weights.shape[0] for i in range(n): ax = zero ay = zero for j in range(n): rx = positions[j,0] - positions[i,0] ry = positions[j,1] - positions[i,1] sqr_dist = rx * rx + ry * ry + eps_2 sixth_dist = sqr_dist * sqr_dist * sqr_dist inv_dist_cube = one / math.sqrt(sixth_dist) s = weights[j] * inv_dist_cube ax += s * rx ay += s * ry accelerations[i,0] = ax accelerations[i,1] = ay return accelerations def run_numpy_nbody(positions, weights): accelerations = np.zeros_like(positions) n = weights.size for j in range(n): # Compute influence of j'th body on all bodies r = positions[j] - positions rx = r[:,0] ry = r[:,1] sqr_dist = rx * rx + ry * ry + eps_2 sixth_dist = sqr_dist * sqr_dist * sqr_dist inv_dist_cube = one / np.sqrt(sixth_dist) s = weights[j] * inv_dist_cube accelerations += (r.transpose() * s).transpose() return accelerations def make_nbody_samples(n_bodies): positions = np.random.RandomState(0).uniform(-1.0, 1.0, (n_bodies, 2)) weights = np.random.RandomState(0).uniform(1.0, 2.0, n_bodies) return positions.astype(np.float32), weights.astype(np.float32) class NBody: n_bodies = 4096 def setup(self): # Sanity check our implementation p, w = make_nbody_samples(10) numba_res = run_numba_nbody(p, w) numpy_res = run_numpy_nbody(p, w) assert np.allclose(numba_res, numpy_res, 1e-4), (numba_res, numpy_res) # Actual benchmark samples self.positions, self.weights = make_nbody_samples(self.n_bodies) def time_numba_nbody(self): run_numba_nbody(self.positions, self.weights)
Add a Numba n-body benchmark
Add a Numba n-body benchmark
Python
bsd-2-clause
numba/numba-benchmark,gmarkall/numba-benchmark
Add a Numba n-body benchmark
""" Benchmark an implementation of the N-body simulation. As in the CUDA version, we only compute accelerations and don't care to update speeds and positions. """ from __future__ import division import math import sys import numpy as np from numba import jit, float32, float64 eps_2 = np.float32(1e-6) zero = np.float32(0.0) one = np.float32(1.0) @jit def run_numba_nbody(positions, weights): accelerations = np.zeros_like(positions) n = weights.shape[0] for i in range(n): ax = zero ay = zero for j in range(n): rx = positions[j,0] - positions[i,0] ry = positions[j,1] - positions[i,1] sqr_dist = rx * rx + ry * ry + eps_2 sixth_dist = sqr_dist * sqr_dist * sqr_dist inv_dist_cube = one / math.sqrt(sixth_dist) s = weights[j] * inv_dist_cube ax += s * rx ay += s * ry accelerations[i,0] = ax accelerations[i,1] = ay return accelerations def run_numpy_nbody(positions, weights): accelerations = np.zeros_like(positions) n = weights.size for j in range(n): # Compute influence of j'th body on all bodies r = positions[j] - positions rx = r[:,0] ry = r[:,1] sqr_dist = rx * rx + ry * ry + eps_2 sixth_dist = sqr_dist * sqr_dist * sqr_dist inv_dist_cube = one / np.sqrt(sixth_dist) s = weights[j] * inv_dist_cube accelerations += (r.transpose() * s).transpose() return accelerations def make_nbody_samples(n_bodies): positions = np.random.RandomState(0).uniform(-1.0, 1.0, (n_bodies, 2)) weights = np.random.RandomState(0).uniform(1.0, 2.0, n_bodies) return positions.astype(np.float32), weights.astype(np.float32) class NBody: n_bodies = 4096 def setup(self): # Sanity check our implementation p, w = make_nbody_samples(10) numba_res = run_numba_nbody(p, w) numpy_res = run_numpy_nbody(p, w) assert np.allclose(numba_res, numpy_res, 1e-4), (numba_res, numpy_res) # Actual benchmark samples self.positions, self.weights = make_nbody_samples(self.n_bodies) def time_numba_nbody(self): run_numba_nbody(self.positions, self.weights)
<commit_before><commit_msg>Add a Numba n-body benchmark<commit_after>
""" Benchmark an implementation of the N-body simulation. As in the CUDA version, we only compute accelerations and don't care to update speeds and positions. """ from __future__ import division import math import sys import numpy as np from numba import jit, float32, float64 eps_2 = np.float32(1e-6) zero = np.float32(0.0) one = np.float32(1.0) @jit def run_numba_nbody(positions, weights): accelerations = np.zeros_like(positions) n = weights.shape[0] for i in range(n): ax = zero ay = zero for j in range(n): rx = positions[j,0] - positions[i,0] ry = positions[j,1] - positions[i,1] sqr_dist = rx * rx + ry * ry + eps_2 sixth_dist = sqr_dist * sqr_dist * sqr_dist inv_dist_cube = one / math.sqrt(sixth_dist) s = weights[j] * inv_dist_cube ax += s * rx ay += s * ry accelerations[i,0] = ax accelerations[i,1] = ay return accelerations def run_numpy_nbody(positions, weights): accelerations = np.zeros_like(positions) n = weights.size for j in range(n): # Compute influence of j'th body on all bodies r = positions[j] - positions rx = r[:,0] ry = r[:,1] sqr_dist = rx * rx + ry * ry + eps_2 sixth_dist = sqr_dist * sqr_dist * sqr_dist inv_dist_cube = one / np.sqrt(sixth_dist) s = weights[j] * inv_dist_cube accelerations += (r.transpose() * s).transpose() return accelerations def make_nbody_samples(n_bodies): positions = np.random.RandomState(0).uniform(-1.0, 1.0, (n_bodies, 2)) weights = np.random.RandomState(0).uniform(1.0, 2.0, n_bodies) return positions.astype(np.float32), weights.astype(np.float32) class NBody: n_bodies = 4096 def setup(self): # Sanity check our implementation p, w = make_nbody_samples(10) numba_res = run_numba_nbody(p, w) numpy_res = run_numpy_nbody(p, w) assert np.allclose(numba_res, numpy_res, 1e-4), (numba_res, numpy_res) # Actual benchmark samples self.positions, self.weights = make_nbody_samples(self.n_bodies) def time_numba_nbody(self): run_numba_nbody(self.positions, self.weights)
Add a Numba n-body benchmark""" Benchmark an implementation of the N-body simulation. As in the CUDA version, we only compute accelerations and don't care to update speeds and positions. """ from __future__ import division import math import sys import numpy as np from numba import jit, float32, float64 eps_2 = np.float32(1e-6) zero = np.float32(0.0) one = np.float32(1.0) @jit def run_numba_nbody(positions, weights): accelerations = np.zeros_like(positions) n = weights.shape[0] for i in range(n): ax = zero ay = zero for j in range(n): rx = positions[j,0] - positions[i,0] ry = positions[j,1] - positions[i,1] sqr_dist = rx * rx + ry * ry + eps_2 sixth_dist = sqr_dist * sqr_dist * sqr_dist inv_dist_cube = one / math.sqrt(sixth_dist) s = weights[j] * inv_dist_cube ax += s * rx ay += s * ry accelerations[i,0] = ax accelerations[i,1] = ay return accelerations def run_numpy_nbody(positions, weights): accelerations = np.zeros_like(positions) n = weights.size for j in range(n): # Compute influence of j'th body on all bodies r = positions[j] - positions rx = r[:,0] ry = r[:,1] sqr_dist = rx * rx + ry * ry + eps_2 sixth_dist = sqr_dist * sqr_dist * sqr_dist inv_dist_cube = one / np.sqrt(sixth_dist) s = weights[j] * inv_dist_cube accelerations += (r.transpose() * s).transpose() return accelerations def make_nbody_samples(n_bodies): positions = np.random.RandomState(0).uniform(-1.0, 1.0, (n_bodies, 2)) weights = np.random.RandomState(0).uniform(1.0, 2.0, n_bodies) return positions.astype(np.float32), weights.astype(np.float32) class NBody: n_bodies = 4096 def setup(self): # Sanity check our implementation p, w = make_nbody_samples(10) numba_res = run_numba_nbody(p, w) numpy_res = run_numpy_nbody(p, w) assert np.allclose(numba_res, numpy_res, 1e-4), (numba_res, numpy_res) # Actual benchmark samples self.positions, self.weights = make_nbody_samples(self.n_bodies) def time_numba_nbody(self): run_numba_nbody(self.positions, self.weights)
<commit_before><commit_msg>Add a Numba n-body benchmark<commit_after>""" Benchmark an implementation of the N-body simulation. As in the CUDA version, we only compute accelerations and don't care to update speeds and positions. """ from __future__ import division import math import sys import numpy as np from numba import jit, float32, float64 eps_2 = np.float32(1e-6) zero = np.float32(0.0) one = np.float32(1.0) @jit def run_numba_nbody(positions, weights): accelerations = np.zeros_like(positions) n = weights.shape[0] for i in range(n): ax = zero ay = zero for j in range(n): rx = positions[j,0] - positions[i,0] ry = positions[j,1] - positions[i,1] sqr_dist = rx * rx + ry * ry + eps_2 sixth_dist = sqr_dist * sqr_dist * sqr_dist inv_dist_cube = one / math.sqrt(sixth_dist) s = weights[j] * inv_dist_cube ax += s * rx ay += s * ry accelerations[i,0] = ax accelerations[i,1] = ay return accelerations def run_numpy_nbody(positions, weights): accelerations = np.zeros_like(positions) n = weights.size for j in range(n): # Compute influence of j'th body on all bodies r = positions[j] - positions rx = r[:,0] ry = r[:,1] sqr_dist = rx * rx + ry * ry + eps_2 sixth_dist = sqr_dist * sqr_dist * sqr_dist inv_dist_cube = one / np.sqrt(sixth_dist) s = weights[j] * inv_dist_cube accelerations += (r.transpose() * s).transpose() return accelerations def make_nbody_samples(n_bodies): positions = np.random.RandomState(0).uniform(-1.0, 1.0, (n_bodies, 2)) weights = np.random.RandomState(0).uniform(1.0, 2.0, n_bodies) return positions.astype(np.float32), weights.astype(np.float32) class NBody: n_bodies = 4096 def setup(self): # Sanity check our implementation p, w = make_nbody_samples(10) numba_res = run_numba_nbody(p, w) numpy_res = run_numpy_nbody(p, w) assert np.allclose(numba_res, numpy_res, 1e-4), (numba_res, numpy_res) # Actual benchmark samples self.positions, self.weights = make_nbody_samples(self.n_bodies) def time_numba_nbody(self): run_numba_nbody(self.positions, self.weights)
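Both implementations in this record evaluate the same softened pairwise sum; reading the loops back into notation, with w_j the weights and epsilon the softening constant eps_2 (the j = i term contributes nothing, since its displacement is zero):

\mathbf{a}_i = \sum_{j=1}^{n} \frac{w_j\,(\mathbf{r}_j - \mathbf{r}_i)}{\left(\lVert \mathbf{r}_j - \mathbf{r}_i \rVert^2 + \varepsilon\right)^{3/2}}

The softening term is what lets both versions skip an explicit i != j check: the self-interaction denominator is epsilon to the 3/2 rather than zero.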
c75fec9298be07a9162db3b9218013661af7c5b5
raiden/tests/unit/transfer/mediated_transfer/test_events.py
raiden/tests/unit/transfer/mediated_transfer/test_events.py
from raiden.tests.utils.factories import make_address, make_channel_identifier, make_transfer from raiden.transfer.mediated_transfer.events import SendRefundTransfer def test_send_refund_transfer_contains_balance_proof(): recipient = make_address() transfer = make_transfer() message_identifier = 1 channel_identifier = make_channel_identifier() event = SendRefundTransfer( recipient=recipient, channel_identifier=channel_identifier, message_identifier=message_identifier, transfer=transfer, ) assert hasattr(event, 'balance_proof') assert SendRefundTransfer.from_dict(event.to_dict()) == event
Add test that SendRefundTransfer contains a balance proof
Add test that SendRefundTransfer contains a balance proof
Python
mit
hackaugusto/raiden,hackaugusto/raiden
Add test that SendRefundTransfer contains a balance proof
from raiden.tests.utils.factories import make_address, make_channel_identifier, make_transfer from raiden.transfer.mediated_transfer.events import SendRefundTransfer def test_send_refund_transfer_contains_balance_proof(): recipient = make_address() transfer = make_transfer() message_identifier = 1 channel_identifier = make_channel_identifier() event = SendRefundTransfer( recipient=recipient, channel_identifier=channel_identifier, message_identifier=message_identifier, transfer=transfer, ) assert hasattr(event, 'balance_proof') assert SendRefundTransfer.from_dict(event.to_dict()) == event
<commit_before><commit_msg>Add test that SendRefundTransfer contains a balance proof<commit_after>
from raiden.tests.utils.factories import make_address, make_channel_identifier, make_transfer from raiden.transfer.mediated_transfer.events import SendRefundTransfer def test_send_refund_transfer_contains_balance_proof(): recipient = make_address() transfer = make_transfer() message_identifier = 1 channel_identifier = make_channel_identifier() event = SendRefundTransfer( recipient=recipient, channel_identifier=channel_identifier, message_identifier=message_identifier, transfer=transfer, ) assert hasattr(event, 'balance_proof') assert SendRefundTransfer.from_dict(event.to_dict()) == event
Add test that SendRefundTransfer contains a balance prooffrom raiden.tests.utils.factories import make_address, make_channel_identifier, make_transfer from raiden.transfer.mediated_transfer.events import SendRefundTransfer def test_send_refund_transfer_contains_balance_proof(): recipient = make_address() transfer = make_transfer() message_identifier = 1 channel_identifier = make_channel_identifier() event = SendRefundTransfer( recipient=recipient, channel_identifier=channel_identifier, message_identifier=message_identifier, transfer=transfer, ) assert hasattr(event, 'balance_proof') assert SendRefundTransfer.from_dict(event.to_dict()) == event
<commit_before><commit_msg>Add test that SendRefundTransfer contains a balance proof<commit_after>from raiden.tests.utils.factories import make_address, make_channel_identifier, make_transfer from raiden.transfer.mediated_transfer.events import SendRefundTransfer def test_send_refund_transfer_contains_balance_proof(): recipient = make_address() transfer = make_transfer() message_identifier = 1 channel_identifier = make_channel_identifier() event = SendRefundTransfer( recipient=recipient, channel_identifier=channel_identifier, message_identifier=message_identifier, transfer=transfer, ) assert hasattr(event, 'balance_proof') assert SendRefundTransfer.from_dict(event.to_dict()) == event
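The final assertion in this test encodes a general property, that serialization round-trips losslessly, which is worth extracting once a codebase grows many to_dict/from_dict pairs. A hedged generic helper in that spirit; it is not part of raiden's test utilities:

def assert_roundtrips(obj):
    # Rebuilding an object from its dict form must yield an equal object,
    # otherwise serialization silently loses state.
    assert type(obj).from_dict(obj.to_dict()) == obj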
59c20c07d01ae1ebde8a6a8bb3d6fd4652507929
bluebottle/time_based/migrations/0007_auto_20201023_1433.py
bluebottle/time_based/migrations/0007_auto_20201023_1433.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.17 on 2020-10-23 12:33 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('time_based', '0006_auto_20201021_1315'), ] operations = [ migrations.AddField( model_name='ongoingactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='Start of activity'), ), migrations.AddField( model_name='withadeadlineactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='Start of activity'), ), migrations.AlterField( model_name='onadateactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='activity date'), ), migrations.AlterField( model_name='ongoingactivity', name='duration_period', field=models.CharField(blank=True, choices=[('overall', 'overall'), ('day', 'per day'), ('week', 'per week'), ('month', 'per month')], max_length=20, null=True, verbose_name='duration period'), ), migrations.AlterField( model_name='withadeadlineactivity', name='duration_period', field=models.CharField(blank=True, choices=[('overall', 'overall'), ('day', 'per day'), ('week', 'per week'), ('month', 'per month')], max_length=20, null=True, verbose_name='duration period'), ), ]
Add optional start of ongoing and with a deadline activities
Add optional start of ongoing and with a deadline activities
Python
bsd-3-clause
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
Add optional start of ongoing and with a deadline activities
# -*- coding: utf-8 -*- # Generated by Django 1.11.17 on 2020-10-23 12:33 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('time_based', '0006_auto_20201021_1315'), ] operations = [ migrations.AddField( model_name='ongoingactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='Start of activity'), ), migrations.AddField( model_name='withadeadlineactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='Start of activity'), ), migrations.AlterField( model_name='onadateactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='activity date'), ), migrations.AlterField( model_name='ongoingactivity', name='duration_period', field=models.CharField(blank=True, choices=[('overall', 'overall'), ('day', 'per day'), ('week', 'per week'), ('month', 'per month')], max_length=20, null=True, verbose_name='duration period'), ), migrations.AlterField( model_name='withadeadlineactivity', name='duration_period', field=models.CharField(blank=True, choices=[('overall', 'overall'), ('day', 'per day'), ('week', 'per week'), ('month', 'per month')], max_length=20, null=True, verbose_name='duration period'), ), ]
<commit_before><commit_msg>Add optional start of ongoing and with a deadline activities<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.11.17 on 2020-10-23 12:33 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('time_based', '0006_auto_20201021_1315'), ] operations = [ migrations.AddField( model_name='ongoingactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='Start of activity'), ), migrations.AddField( model_name='withadeadlineactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='Start of activity'), ), migrations.AlterField( model_name='onadateactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='activity date'), ), migrations.AlterField( model_name='ongoingactivity', name='duration_period', field=models.CharField(blank=True, choices=[('overall', 'overall'), ('day', 'per day'), ('week', 'per week'), ('month', 'per month')], max_length=20, null=True, verbose_name='duration period'), ), migrations.AlterField( model_name='withadeadlineactivity', name='duration_period', field=models.CharField(blank=True, choices=[('overall', 'overall'), ('day', 'per day'), ('week', 'per week'), ('month', 'per month')], max_length=20, null=True, verbose_name='duration period'), ), ]
Add optional start of ongoing and with a deadline activities# -*- coding: utf-8 -*- # Generated by Django 1.11.17 on 2020-10-23 12:33 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('time_based', '0006_auto_20201021_1315'), ] operations = [ migrations.AddField( model_name='ongoingactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='Start of activity'), ), migrations.AddField( model_name='withadeadlineactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='Start of activity'), ), migrations.AlterField( model_name='onadateactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='activity date'), ), migrations.AlterField( model_name='ongoingactivity', name='duration_period', field=models.CharField(blank=True, choices=[('overall', 'overall'), ('day', 'per day'), ('week', 'per week'), ('month', 'per month')], max_length=20, null=True, verbose_name='duration period'), ), migrations.AlterField( model_name='withadeadlineactivity', name='duration_period', field=models.CharField(blank=True, choices=[('overall', 'overall'), ('day', 'per day'), ('week', 'per week'), ('month', 'per month')], max_length=20, null=True, verbose_name='duration period'), ), ]
<commit_before><commit_msg>Add optional start of ongoing and with a deadline activities<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.11.17 on 2020-10-23 12:33 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('time_based', '0006_auto_20201021_1315'), ] operations = [ migrations.AddField( model_name='ongoingactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='Start of activity'), ), migrations.AddField( model_name='withadeadlineactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='Start of activity'), ), migrations.AlterField( model_name='onadateactivity', name='start', field=models.DateTimeField(blank=True, null=True, verbose_name='activity date'), ), migrations.AlterField( model_name='ongoingactivity', name='duration_period', field=models.CharField(blank=True, choices=[('overall', 'overall'), ('day', 'per day'), ('week', 'per week'), ('month', 'per month')], max_length=20, null=True, verbose_name='duration period'), ), migrations.AlterField( model_name='withadeadlineactivity', name='duration_period', field=models.CharField(blank=True, choices=[('overall', 'overall'), ('day', 'per day'), ('week', 'per week'), ('month', 'per month')], max_length=20, null=True, verbose_name='duration period'), ), ]
727143357853591862335eb9e556855e6056a6a8
build/android/pylib/uiautomator/test_runner.py
build/android/pylib/uiautomator/test_runner.py
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for running uiautomator tests on a single device.""" from pylib.instrumentation import test_runner as instr_test_runner class TestRunner(instr_test_runner.TestRunner): """Responsible for running a series of tests connected to a single device.""" def __init__(self, options, device, shard_index, test_pkg, ports_to_forward): """Create a new TestRunner. Args: options: An options object similar to the one in parent class plus: - package_name: Application package name under test. """ options.ensure_value('install_apk', True) options.ensure_value('wait_for_debugger', False) super(TestRunner, self).__init__( options, device, shard_index, test_pkg, ports_to_forward) self.package_name = options.package_name #override def InstallTestPackage(self): self.test_pkg.Install(self.adb) #override def _RunTest(self, test, timeout): self.adb.ClearApplicationState(self.package_name) if 'Feature:FirstRunExperience' in self.test_pkg.GetTestAnnotations(test): self.flags.RemoveFlags(['--disable-fre']) else: self.flags.AddFlags(['--disable-fre']) return self.adb.RunUIAutomatorTest( test, self.test_pkg.GetPackageName(), timeout)
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for running uiautomator tests on a single device.""" from pylib.instrumentation import test_runner as instr_test_runner class TestRunner(instr_test_runner.TestRunner): """Responsible for running a series of tests connected to a single device.""" def __init__(self, options, device, shard_index, test_pkg, ports_to_forward): """Create a new TestRunner. Args: options: An options object similar to the one in parent class plus: - package_name: Application package name under test. """ options.ensure_value('install_apk', True) options.ensure_value('wait_for_debugger', False) super(TestRunner, self).__init__( options, device, shard_index, test_pkg, ports_to_forward) self.package_name = options.package_name #override def InstallTestPackage(self): self.test_pkg.Install(self.adb) #override def PushDataDeps(self): pass #override def _RunTest(self, test, timeout): self.adb.ClearApplicationState(self.package_name) if 'Feature:FirstRunExperience' in self.test_pkg.GetTestAnnotations(test): self.flags.RemoveFlags(['--disable-fre']) else: self.flags.AddFlags(['--disable-fre']) return self.adb.RunUIAutomatorTest( test, self.test_pkg.GetPackageName(), timeout)
Fix uiautomator test runner after r206096
[Android] Fix uiautomator test runner after r206096 TBR=craigdh@chromium.org NOTRY=True BUG= Review URL: https://chromiumcodereview.appspot.com/17004003 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@206257 0039d316-1c4b-4281-b951-d872f2087c98
Python
bsd-3-clause
mogoweb/chromium-crosswalk,jaruba/chromium.src,Pluto-tv/chromium-crosswalk,patrickm/chromium.src,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,Fireblend/chromium-crosswalk,axinging/chromium-crosswalk,fujunwei/chromium-crosswalk,patrickm/chromium.src,Jonekee/chromium.src,mogoweb/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,ondra-novak/chromium.src,Chilledheart/chromium,mogoweb/chromium-crosswalk,jaruba/chromium.src,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,M4sse/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,krieger-od/nwjs_chromium.src,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,chuan9/chromium-crosswalk,hujiajie/pa-chromium,anirudhSK/chromium,krieger-od/nwjs_chromium.src,anirudhSK/chromium,dednal/chromium.src,markYoungH/chromium.src,dushu1203/chromium.src,hujiajie/pa-chromium,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,M4sse/chromium.src,krieger-od/nwjs_chromium.src,anirudhSK/chromium,Pluto-tv/chromium-crosswalk,ondra-novak/chromium.src,pozdnyakov/chromium-crosswalk,patrickm/chromium.src,jaruba/chromium.src,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,mogoweb/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,markYoungH/chromium.src,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,M4sse/chromium.src,dednal/chromium.src,pozdnyakov/chromium-crosswalk,Chilledheart/chromium,ondra-novak/chromium.src,markYoungH/chromium.src,littlstar/chromium.src,krieger-od/nwjs_chromium.src,Chilledheart/chromium,markYoungH/chromium.src,fujunwei/chromium-crosswalk,anirudhSK/chromium,fujunwei/chromium-crosswalk,anirudhSK/chromium,patrickm/chromium.src,fujunwei/chromium-crosswalk,bright-sparks/chromium-spacewalk,littlstar/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,fujunwei/chromium-crosswalk,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,crosswalk-project/chromium-crosswalk-efl,jaruba/chromium.src,Jonekee/chromium.src,pozdnyakov/chromium-crosswalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,M4sse/chromium.src,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,Just-D/chromium-1,crosswalk-project/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,patrickm/chromium.src,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,TheTypoMaster/chromium-crosswalk,hujiajie/pa-chromium,axinging/chromium-crosswalk,Jonekee/chromium.src,TheTypoMaster/chromium-crosswalk,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,PeterWangIntel/chromium-crosswalk,hgl888/chromium-crosswalk,chuan9/chromium-crosswalk,ltilve/chromium,M4sse/chromium.src,chuan9/chromium-crosswalk,ltilve/chromium,anirudhSK/chromium,axinging/chromium-crosswalk,hgl888/chromium-crosswalk,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,littlstar/chromium.src,Chilledheart/chromium,markYoungH/chromium.src,ChromiumWebApps/chromium,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,Just-D/chromium-1,dednal/chromium.src,anirudhSK/chromium,dushu1203/chromium.src,littlstar/chromium.src,ltilve/chromium,PeterWangIntel/chromium-crosswalk,ChromiumWebApps/chromium,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,markYoungH/chromium.src,M4sse/chromium.src,ChromiumWebApps/chromium,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ondra-novak/chromium.src,littlstar/chromium.src,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,fujunwei/chromium-crosswalk,dushu1203/chromium.src,krieger-od/nwjs_chromium.src,pozdnyakov/chromium-crosswalk,ChromiumWebApps/chromium,hgl888/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,Just-D/chromium-1,ChromiumWebApps/chromium,jaruba/chromium.src,markYoungH/chromium.src,hujiajie/pa-chromium,mohamed--abdel-maksoud/chromium.src,ltilve/chromium,chuan9/chromium-crosswalk,M4sse/chromium.src,mogoweb/chromium-crosswalk,anirudhSK/chromium,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,ondra-novak/chromium.src,dednal/chromium.src,dednal/chromium.src,bright-sparks/chromium-spacewalk,hgl888/chromium-crosswalk-efl,PeterWangIntel/chromium-crosswalk,Jonekee/chromium.src,ltilve/chromium,patrickm/chromium.src,mogoweb/chromium-crosswalk,ltilve/chromium,crosswalk-project/chromium-crosswalk-efl,dednal/chromium.src,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,dednal/chromium.src,anirudhSK/chromium,markYoungH/chromium.src,mohamed--abdel-maksoud/chromium.src,pozdnyakov/chromium-crosswalk,Fireblend/chromium-crosswalk,dednal/chromium.src,dushu1203/chromium.src,bright-sparks/chromium-spacewalk,fujunwei/chromium-crosswalk,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,axinging/chromium-crosswalk,chuan9/chromium-crosswalk,Jonekee/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk-efl,patrickm/chromium.src,Jonekee/chromium.src,Pluto-tv/chromium-crosswalk,mogoweb/chromium-crosswalk,Jonekee/chromium.src,hujiajie/pa-chromium,ltilve/chromium,pozdnyakov/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Chilledheart/chromium,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,dednal/chromium.src,ChromiumWebApps/chromium,M4sse/chromium.src,jaruba/chromium.src,dushu1203/chromium.src,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,Chilledheart/chromium,Just-D/chromium-1,Just-D/chromium-1,Just-D/chromium-1,dushu1203/chromium.src,axinging/chromium-crosswalk,Chilledheart/chromium,Chilledheart/chromium,axinging/chromium-crosswalk,ChromiumWebApps/chromium,hujiajie/pa-chromium,littlstar/chromium.src,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,Jonekee/chromium.src,Fireblend/chromium-crosswalk,Jonekee/chromium.src,hujiajie/pa-chromium,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,littlstar/chromium.src,Jonekee/chromium.src,M4sse/chromium.src,anirudhSK/chromium,patrickm/chromium.src,mohamed--abdel-maksoud/chromium.src,fujunwei/chromium-crosswalk,hgl888/chromium-crosswalk-efl,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,ondra-novak/chromium.src,Just-D/chromium-1,PeterWangIntel/chromium-crosswalk,jaruba/chromium.src,dushu1203/chromium.src,ChromiumWebApps/chromium,mohamed--abdel-maksoud/chromium.src,hujiajie/pa-chromium,patrickm/chromium.src,jaruba/chromium.src,jaruba/chromium.src,Just-D/chromium-1,Fireblend/chromium-crosswalk,mogoweb/chromium-crosswalk,ChromiumWebApps/chromium
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for running uiautomator tests on a single device.""" from pylib.instrumentation import test_runner as instr_test_runner class TestRunner(instr_test_runner.TestRunner): """Responsible for running a series of tests connected to a single device.""" def __init__(self, options, device, shard_index, test_pkg, ports_to_forward): """Create a new TestRunner. Args: options: An options object similar to the one in parent class plus: - package_name: Application package name under test. """ options.ensure_value('install_apk', True) options.ensure_value('wait_for_debugger', False) super(TestRunner, self).__init__( options, device, shard_index, test_pkg, ports_to_forward) self.package_name = options.package_name #override def InstallTestPackage(self): self.test_pkg.Install(self.adb) #override def _RunTest(self, test, timeout): self.adb.ClearApplicationState(self.package_name) if 'Feature:FirstRunExperience' in self.test_pkg.GetTestAnnotations(test): self.flags.RemoveFlags(['--disable-fre']) else: self.flags.AddFlags(['--disable-fre']) return self.adb.RunUIAutomatorTest( test, self.test_pkg.GetPackageName(), timeout) [Android] Fix uiautomator test runner after r206096 TBR=craigdh@chromium.org NOTRY=True BUG= Review URL: https://chromiumcodereview.appspot.com/17004003 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@206257 0039d316-1c4b-4281-b951-d872f2087c98
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for running uiautomator tests on a single device.""" from pylib.instrumentation import test_runner as instr_test_runner class TestRunner(instr_test_runner.TestRunner): """Responsible for running a series of tests connected to a single device.""" def __init__(self, options, device, shard_index, test_pkg, ports_to_forward): """Create a new TestRunner. Args: options: An options object similar to the one in parent class plus: - package_name: Application package name under test. """ options.ensure_value('install_apk', True) options.ensure_value('wait_for_debugger', False) super(TestRunner, self).__init__( options, device, shard_index, test_pkg, ports_to_forward) self.package_name = options.package_name #override def InstallTestPackage(self): self.test_pkg.Install(self.adb) #override def PushDataDeps(self): pass #override def _RunTest(self, test, timeout): self.adb.ClearApplicationState(self.package_name) if 'Feature:FirstRunExperience' in self.test_pkg.GetTestAnnotations(test): self.flags.RemoveFlags(['--disable-fre']) else: self.flags.AddFlags(['--disable-fre']) return self.adb.RunUIAutomatorTest( test, self.test_pkg.GetPackageName(), timeout)
<commit_before># Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for running uiautomator tests on a single device.""" from pylib.instrumentation import test_runner as instr_test_runner class TestRunner(instr_test_runner.TestRunner): """Responsible for running a series of tests connected to a single device.""" def __init__(self, options, device, shard_index, test_pkg, ports_to_forward): """Create a new TestRunner. Args: options: An options object similar to the one in parent class plus: - package_name: Application package name under test. """ options.ensure_value('install_apk', True) options.ensure_value('wait_for_debugger', False) super(TestRunner, self).__init__( options, device, shard_index, test_pkg, ports_to_forward) self.package_name = options.package_name #override def InstallTestPackage(self): self.test_pkg.Install(self.adb) #override def _RunTest(self, test, timeout): self.adb.ClearApplicationState(self.package_name) if 'Feature:FirstRunExperience' in self.test_pkg.GetTestAnnotations(test): self.flags.RemoveFlags(['--disable-fre']) else: self.flags.AddFlags(['--disable-fre']) return self.adb.RunUIAutomatorTest( test, self.test_pkg.GetPackageName(), timeout) <commit_msg>[Android] Fix uiautomator test runner after r206096 TBR=craigdh@chromium.org NOTRY=True BUG= Review URL: https://chromiumcodereview.appspot.com/17004003 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@206257 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for running uiautomator tests on a single device.""" from pylib.instrumentation import test_runner as instr_test_runner class TestRunner(instr_test_runner.TestRunner): """Responsible for running a series of tests connected to a single device.""" def __init__(self, options, device, shard_index, test_pkg, ports_to_forward): """Create a new TestRunner. Args: options: An options object similar to the one in parent class plus: - package_name: Application package name under test. """ options.ensure_value('install_apk', True) options.ensure_value('wait_for_debugger', False) super(TestRunner, self).__init__( options, device, shard_index, test_pkg, ports_to_forward) self.package_name = options.package_name #override def InstallTestPackage(self): self.test_pkg.Install(self.adb) #override def PushDataDeps(self): pass #override def _RunTest(self, test, timeout): self.adb.ClearApplicationState(self.package_name) if 'Feature:FirstRunExperience' in self.test_pkg.GetTestAnnotations(test): self.flags.RemoveFlags(['--disable-fre']) else: self.flags.AddFlags(['--disable-fre']) return self.adb.RunUIAutomatorTest( test, self.test_pkg.GetPackageName(), timeout)
# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for running uiautomator tests on a single device.""" from pylib.instrumentation import test_runner as instr_test_runner class TestRunner(instr_test_runner.TestRunner): """Responsible for running a series of tests connected to a single device.""" def __init__(self, options, device, shard_index, test_pkg, ports_to_forward): """Create a new TestRunner. Args: options: An options object similar to the one in parent class plus: - package_name: Application package name under test. """ options.ensure_value('install_apk', True) options.ensure_value('wait_for_debugger', False) super(TestRunner, self).__init__( options, device, shard_index, test_pkg, ports_to_forward) self.package_name = options.package_name #override def InstallTestPackage(self): self.test_pkg.Install(self.adb) #override def _RunTest(self, test, timeout): self.adb.ClearApplicationState(self.package_name) if 'Feature:FirstRunExperience' in self.test_pkg.GetTestAnnotations(test): self.flags.RemoveFlags(['--disable-fre']) else: self.flags.AddFlags(['--disable-fre']) return self.adb.RunUIAutomatorTest( test, self.test_pkg.GetPackageName(), timeout) [Android] Fix uiautomator test runner after r206096 TBR=craigdh@chromium.org NOTRY=True BUG= Review URL: https://chromiumcodereview.appspot.com/17004003 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@206257 0039d316-1c4b-4281-b951-d872f2087c98# Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for running uiautomator tests on a single device.""" from pylib.instrumentation import test_runner as instr_test_runner class TestRunner(instr_test_runner.TestRunner): """Responsible for running a series of tests connected to a single device.""" def __init__(self, options, device, shard_index, test_pkg, ports_to_forward): """Create a new TestRunner. Args: options: An options object similar to the one in parent class plus: - package_name: Application package name under test. """ options.ensure_value('install_apk', True) options.ensure_value('wait_for_debugger', False) super(TestRunner, self).__init__( options, device, shard_index, test_pkg, ports_to_forward) self.package_name = options.package_name #override def InstallTestPackage(self): self.test_pkg.Install(self.adb) #override def PushDataDeps(self): pass #override def _RunTest(self, test, timeout): self.adb.ClearApplicationState(self.package_name) if 'Feature:FirstRunExperience' in self.test_pkg.GetTestAnnotations(test): self.flags.RemoveFlags(['--disable-fre']) else: self.flags.AddFlags(['--disable-fre']) return self.adb.RunUIAutomatorTest( test, self.test_pkg.GetPackageName(), timeout)
<commit_before># Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for running uiautomator tests on a single device.""" from pylib.instrumentation import test_runner as instr_test_runner class TestRunner(instr_test_runner.TestRunner): """Responsible for running a series of tests connected to a single device.""" def __init__(self, options, device, shard_index, test_pkg, ports_to_forward): """Create a new TestRunner. Args: options: An options object similar to the one in parent class plus: - package_name: Application package name under test. """ options.ensure_value('install_apk', True) options.ensure_value('wait_for_debugger', False) super(TestRunner, self).__init__( options, device, shard_index, test_pkg, ports_to_forward) self.package_name = options.package_name #override def InstallTestPackage(self): self.test_pkg.Install(self.adb) #override def _RunTest(self, test, timeout): self.adb.ClearApplicationState(self.package_name) if 'Feature:FirstRunExperience' in self.test_pkg.GetTestAnnotations(test): self.flags.RemoveFlags(['--disable-fre']) else: self.flags.AddFlags(['--disable-fre']) return self.adb.RunUIAutomatorTest( test, self.test_pkg.GetPackageName(), timeout) <commit_msg>[Android] Fix uiautomator test runner after r206096 TBR=craigdh@chromium.org NOTRY=True BUG= Review URL: https://chromiumcodereview.appspot.com/17004003 git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@206257 0039d316-1c4b-4281-b951-d872f2087c98<commit_after># Copyright (c) 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Class for running uiautomator tests on a single device.""" from pylib.instrumentation import test_runner as instr_test_runner class TestRunner(instr_test_runner.TestRunner): """Responsible for running a series of tests connected to a single device.""" def __init__(self, options, device, shard_index, test_pkg, ports_to_forward): """Create a new TestRunner. Args: options: An options object similar to the one in parent class plus: - package_name: Application package name under test. """ options.ensure_value('install_apk', True) options.ensure_value('wait_for_debugger', False) super(TestRunner, self).__init__( options, device, shard_index, test_pkg, ports_to_forward) self.package_name = options.package_name #override def InstallTestPackage(self): self.test_pkg.Install(self.adb) #override def PushDataDeps(self): pass #override def _RunTest(self, test, timeout): self.adb.ClearApplicationState(self.package_name) if 'Feature:FirstRunExperience' in self.test_pkg.GetTestAnnotations(test): self.flags.RemoveFlags(['--disable-fre']) else: self.flags.AddFlags(['--disable-fre']) return self.adb.RunUIAutomatorTest( test, self.test_pkg.GetPackageName(), timeout)
87e4d3bc9d3efd9f84025b25a8bd975ab73626f9
pythran/tests/cases/calculate_u.py
pythran/tests/cases/calculate_u.py
# from the paper `using cython to speedup numerical python programs' #pythran export timeloop(float, float, float, float, float, float list list, float list list, float list list) #runas A=[range(2000) for i in xrange(100)] ; B=[range(2000) for i in xrange(100)] ; C=[range(2000) for i in xrange(100)] ; timeloop(1,2,.01,.1,.18, A,B,C ) def timeloop(t, t_stop, dt, dx, dy, u, um, k): while t <= t_stop: t += dt new_u = calculate_u(dt, dx, dy, u, um, k) um = u u = new_u return u def calculate_u(dt, dx, dy, u, um, k): up = [ [0.]*len(u[0]) for i in xrange(len(u)) ] "omp parallel for private(i,j)" for i in xrange(1, len(u)-1): for j in xrange(1, len(u[0])-1): up[i][j] = 2*u[i][j] - um[i][j] + \ (dt/dx)**2*( (0.5*(k[i+1][j] + k[i][j])*(u[i+1][j] - u[i][j]) - 0.5*(k[i][j] + k[i-1][j])*(u[i][j] - u[i-1][j]))) + \ (dt/dy)**2*( (0.5*(k[i][j+1] + k[i][j])*(u[i][j+1] - u[i][j]) - 0.5*(k[i][j] + k[i][j-1])*(u[i][j] - u[i][j-1]))) return up
Add a new test case, extracted from a cython paper.
Add a new test case, extracted from a cython paper.
Python
bsd-3-clause
serge-sans-paille/pythran,pbrunet/pythran,serge-sans-paille/pythran,pombredanne/pythran,pombredanne/pythran,artas360/pythran,artas360/pythran,pombredanne/pythran,pbrunet/pythran,hainm/pythran,pbrunet/pythran,hainm/pythran,hainm/pythran,artas360/pythran
Add a new test case, extracted from a cython paper.
# from the paper `using cython to speedup numerical python programs' #pythran export timeloop(float, float, float, float, float, float list list, float list list, float list list) #runas A=[range(2000) for i in xrange(100)] ; B=[range(2000) for i in xrange(100)] ; C=[range(2000) for i in xrange(100)] ; timeloop(1,2,.01,.1,.18, A,B,C ) def timeloop(t, t_stop, dt, dx, dy, u, um, k): while t <= t_stop: t += dt new_u = calculate_u(dt, dx, dy, u, um, k) um = u u = new_u return u def calculate_u(dt, dx, dy, u, um, k): up = [ [0.]*len(u[0]) for i in xrange(len(u)) ] "omp parallel for private(i,j)" for i in xrange(1, len(u)-1): for j in xrange(1, len(u[0])-1): up[i][j] = 2*u[i][j] - um[i][j] + \ (dt/dx)**2*( (0.5*(k[i+1][j] + k[i][j])*(u[i+1][j] - u[i][j]) - 0.5*(k[i][j] + k[i-1][j])*(u[i][j] - u[i-1][j]))) + \ (dt/dy)**2*( (0.5*(k[i][j+1] + k[i][j])*(u[i][j+1] - u[i][j]) - 0.5*(k[i][j] + k[i][j-1])*(u[i][j] - u[i][j-1]))) return up
<commit_before><commit_msg>Add a new test case, extracted from a cython paper.<commit_after>
# from the paper `using cython to speedup numerical python programs' #pythran export timeloop(float, float, float, float, float, float list list, float list list, float list list) #runas A=[range(2000) for i in xrange(100)] ; B=[range(2000) for i in xrange(100)] ; C=[range(2000) for i in xrange(100)] ; timeloop(1,2,.01,.1,.18, A,B,C ) def timeloop(t, t_stop, dt, dx, dy, u, um, k): while t <= t_stop: t += dt new_u = calculate_u(dt, dx, dy, u, um, k) um = u u = new_u return u def calculate_u(dt, dx, dy, u, um, k): up = [ [0.]*len(u[0]) for i in xrange(len(u)) ] "omp parallel for private(i,j)" for i in xrange(1, len(u)-1): for j in xrange(1, len(u[0])-1): up[i][j] = 2*u[i][j] - um[i][j] + \ (dt/dx)**2*( (0.5*(k[i+1][j] + k[i][j])*(u[i+1][j] - u[i][j]) - 0.5*(k[i][j] + k[i-1][j])*(u[i][j] - u[i-1][j]))) + \ (dt/dy)**2*( (0.5*(k[i][j+1] + k[i][j])*(u[i][j+1] - u[i][j]) - 0.5*(k[i][j] + k[i][j-1])*(u[i][j] - u[i][j-1]))) return up
Add a new test case, extracted from a cython paper.# from the paper `using cython to speedup numerical python programs' #pythran export timeloop(float, float, float, float, float, float list list, float list list, float list list) #runas A=[range(2000) for i in xrange(100)] ; B=[range(2000) for i in xrange(100)] ; C=[range(2000) for i in xrange(100)] ; timeloop(1,2,.01,.1,.18, A,B,C ) def timeloop(t, t_stop, dt, dx, dy, u, um, k): while t <= t_stop: t += dt new_u = calculate_u(dt, dx, dy, u, um, k) um = u u = new_u return u def calculate_u(dt, dx, dy, u, um, k): up = [ [0.]*len(u[0]) for i in xrange(len(u)) ] "omp parallel for private(i,j)" for i in xrange(1, len(u)-1): for j in xrange(1, len(u[0])-1): up[i][j] = 2*u[i][j] - um[i][j] + \ (dt/dx)**2*( (0.5*(k[i+1][j] + k[i][j])*(u[i+1][j] - u[i][j]) - 0.5*(k[i][j] + k[i-1][j])*(u[i][j] - u[i-1][j]))) + \ (dt/dy)**2*( (0.5*(k[i][j+1] + k[i][j])*(u[i][j+1] - u[i][j]) - 0.5*(k[i][j] + k[i][j-1])*(u[i][j] - u[i][j-1]))) return up
<commit_before><commit_msg>Add a new test case, extracted from a cython paper.<commit_after># from the paper `using cython to speedup numerical python programs' #pythran export timeloop(float, float, float, float, float, float list list, float list list, float list list) #runas A=[range(2000) for i in xrange(100)] ; B=[range(2000) for i in xrange(100)] ; C=[range(2000) for i in xrange(100)] ; timeloop(1,2,.01,.1,.18, A,B,C ) def timeloop(t, t_stop, dt, dx, dy, u, um, k): while t <= t_stop: t += dt new_u = calculate_u(dt, dx, dy, u, um, k) um = u u = new_u return u def calculate_u(dt, dx, dy, u, um, k): up = [ [0.]*len(u[0]) for i in xrange(len(u)) ] "omp parallel for private(i,j)" for i in xrange(1, len(u)-1): for j in xrange(1, len(u[0])-1): up[i][j] = 2*u[i][j] - um[i][j] + \ (dt/dx)**2*( (0.5*(k[i+1][j] + k[i][j])*(u[i+1][j] - u[i][j]) - 0.5*(k[i][j] + k[i-1][j])*(u[i][j] - u[i-1][j]))) + \ (dt/dy)**2*( (0.5*(k[i][j+1] + k[i][j])*(u[i][j+1] - u[i][j]) - 0.5*(k[i][j] + k[i][j-1])*(u[i][j] - u[i][j-1]))) return up
5e14a02a9f670f3558a07e6c3ef592753bda3f50
getFile.py
getFile.py
import os.path from FormatConverter import * import timeSeriesFrame extmap = {'.csv':1, '.txt':2, '.xls':3, '.sql':4} dir = "C:\Documents and Settings\MARY\My Documents\Test\Data" #Conversion function def doConv(file, id): f = FormatConverter(file) #creates a FormatConverter object for the file #reads in file with correct function if id == 1: f.readCSV() elif id == 2: f.readTXT() elif id == 3: f.readXLS() elif id == 4: f.readSQL() #converts to all other file types ## f.toCSV() ## f.toTXT() f.toXLS() ## f.toSQL() print f.toTSF() if __name__ =="__main__": #traverse directories and search for applicable file types def callback( arg, dirname, fnames ): for file in fnames: ext = os.path.splitext(file)[1] if ext in extmap: print file doConv(file, extmap[ext]) arglist = [] os.path.walk(dir,callback,arglist)
Use as main with FormatConverter; walks directory
Use as main with FormatConverter; walks directory
Python
bsd-3-clause
wingsit/KF,wingsit/KF
Use as main with FormatConverter; walks directory
import os.path from FormatConverter import * import timeSeriesFrame extmap = {'.csv':1, '.txt':2, '.xls':3, '.sql':4} dir = "C:\Documents and Settings\MARY\My Documents\Test\Data" #Conversion function def doConv(file, id): f = FormatConverter(file) #creates a FormatConverter object for the file #reads in file with correct function if id == 1: f.readCSV() elif id == 2: f.readTXT() elif id == 3: f.readXLS() elif id == 4: f.readSQL() #converts to all other file types ## f.toCSV() ## f.toTXT() f.toXLS() ## f.toSQL() print f.toTSF() if __name__ =="__main__": #traverse directories and search for applicable file types def callback( arg, dirname, fnames ): for file in fnames: ext = os.path.splitext(file)[1] if ext in extmap: print file doConv(file, extmap[ext]) arglist = [] os.path.walk(dir,callback,arglist)
<commit_before><commit_msg>Use as main with FormatConverter; walks directory<commit_after>
import os.path from FormatConverter import * import timeSeriesFrame extmap = {'.csv':1, '.txt':2, '.xls':3, '.sql':4} dir = "C:\Documents and Settings\MARY\My Documents\Test\Data" #Conversion function def doConv(file, id): f = FormatConverter(file) #creates a FormatConverter object for the file #reads in file with correct function if id == 1: f.readCSV() elif id == 2: f.readTXT() elif id == 3: f.readXLS() elif id == 4: f.readSQL() #converts to all other file types ## f.toCSV() ## f.toTXT() f.toXLS() ## f.toSQL() print f.toTSF() if __name__ =="__main__": #traverse directories and search for applicable file types def callback( arg, dirname, fnames ): for file in fnames: ext = os.path.splitext(file)[1] if ext in extmap: print file doConv(file, extmap[ext]) arglist = [] os.path.walk(dir,callback,arglist)
Use as main with FormatConverter; walks directoryimport os.path from FormatConverter import * import timeSeriesFrame extmap = {'.csv':1, '.txt':2, '.xls':3, '.sql':4} dir = "C:\Documents and Settings\MARY\My Documents\Test\Data" #Conversion function def doConv(file, id): f = FormatConverter(file) #creates a FormatConverter object for the file #reads in file with correct function if id == 1: f.readCSV() elif id == 2: f.readTXT() elif id == 3: f.readXLS() elif id == 4: f.readSQL() #converts to all other file types ## f.toCSV() ## f.toTXT() f.toXLS() ## f.toSQL() print f.toTSF() if __name__ =="__main__": #traverse directories and search for applicable file types def callback( arg, dirname, fnames ): for file in fnames: ext = os.path.splitext(file)[1] if ext in extmap: print file doConv(file, extmap[ext]) arglist = [] os.path.walk(dir,callback,arglist)
<commit_before><commit_msg>Use as main with FormatConverter; walks directory<commit_after>import os.path from FormatConverter import * import timeSeriesFrame extmap = {'.csv':1, '.txt':2, '.xls':3, '.sql':4} dir = "C:\Documents and Settings\MARY\My Documents\Test\Data" #Conversion function def doConv(file, id): f = FormatConverter(file) #creates a FormatConverter object for the file #reads in file with correct function if id == 1: f.readCSV() elif id == 2: f.readTXT() elif id == 3: f.readXLS() elif id == 4: f.readSQL() #converts to all other file types ## f.toCSV() ## f.toTXT() f.toXLS() ## f.toSQL() print f.toTSF() if __name__ =="__main__": #traverse directories and search for applicable file types def callback( arg, dirname, fnames ): for file in fnames: ext = os.path.splitext(file)[1] if ext in extmap: print file doConv(file, extmap[ext]) arglist = [] os.path.walk(dir,callback,arglist)
5d12703a706498f24da09c368f626a78e0269afd
parsers/event_parser.py
parsers/event_parser.py
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from collections import defaultdict logger = logging.getLogger(__name__) class EventParser(): def __init__(self, raw_data): self.raw_data = raw_data self.score = defaultdict(int) # self.score['road'] = 0 # self.score['home'] = 0 self.score_diff = 0 self.curr_period = 0 def create_events(self, game, rosters): self.game = game self.rosters = rosters self.load_data for event_data_item in self.event_data: self.get_event(event_data_item) def get_event(self, event_data_item): # retrieving data item contents as list tokens = event_data_item.xpath("td/text()") print(tokens) def load_data(self): """ Loads structured raw data and pre-processes it. """ self.event_data = list() # finding all table rows on play-by-play page for tr in self.raw_data.xpath("body/table/tr"): # adding table row to play-by-play info if the first entry is a # digit, i.e. an in-game event id try: int(tr.xpath("td[1]/text()")[0]) self.event_data.append(tr) # checking whether exactly eight table cells are located in row if len(tr.xpath("td")) != 8: # TODO: proper logging print len(tr.xpath("td")) except: pass
Add initial version of event parser
Add initial version of event parser
Python
mit
leaffan/pynhldb
Add initial version of event parser
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from collections import defaultdict logger = logging.getLogger(__name__) class EventParser(): def __init__(self, raw_data): self.raw_data = raw_data self.score = defaultdict(int) # self.score['road'] = 0 # self.score['home'] = 0 self.score_diff = 0 self.curr_period = 0 def create_events(self, game, rosters): self.game = game self.rosters = rosters self.load_data for event_data_item in self.event_data: self.get_event(event_data_item) def get_event(self, event_data_item): # retrieving data item contents as list tokens = event_data_item.xpath("td/text()") print(tokens) def load_data(self): """ Loads structured raw data and pre-processes it. """ self.event_data = list() # finding all table rows on play-by-play page for tr in self.raw_data.xpath("body/table/tr"): # adding table row to play-by-play info if the first entry is a # digit, i.e. an in-game event id try: int(tr.xpath("td[1]/text()")[0]) self.event_data.append(tr) # checking whether exactly eight table cells are located in row if len(tr.xpath("td")) != 8: # TODO: proper logging print len(tr.xpath("td")) except: pass
<commit_before><commit_msg>Add initial version of event parser<commit_after>
#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from collections import defaultdict logger = logging.getLogger(__name__) class EventParser(): def __init__(self, raw_data): self.raw_data = raw_data self.score = defaultdict(int) # self.score['road'] = 0 # self.score['home'] = 0 self.score_diff = 0 self.curr_period = 0 def create_events(self, game, rosters): self.game = game self.rosters = rosters self.load_data for event_data_item in self.event_data: self.get_event(event_data_item) def get_event(self, event_data_item): # retrieving data item contents as list tokens = event_data_item.xpath("td/text()") print(tokens) def load_data(self): """ Loads structured raw data and pre-processes it. """ self.event_data = list() # finding all table rows on play-by-play page for tr in self.raw_data.xpath("body/table/tr"): # adding table row to play-by-play info if the first entry is a # digit, i.e. an in-game event id try: int(tr.xpath("td[1]/text()")[0]) self.event_data.append(tr) # checking whether exactly eight table cells are located in row if len(tr.xpath("td")) != 8: # TODO: proper logging print len(tr.xpath("td")) except: pass
Add initial version of event parser#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from collections import defaultdict logger = logging.getLogger(__name__) class EventParser(): def __init__(self, raw_data): self.raw_data = raw_data self.score = defaultdict(int) # self.score['road'] = 0 # self.score['home'] = 0 self.score_diff = 0 self.curr_period = 0 def create_events(self, game, rosters): self.game = game self.rosters = rosters self.load_data for event_data_item in self.event_data: self.get_event(event_data_item) def get_event(self, event_data_item): # retrieving data item contents as list tokens = event_data_item.xpath("td/text()") print(tokens) def load_data(self): """ Loads structured raw data and pre-processes it. """ self.event_data = list() # finding all table rows on play-by-play page for tr in self.raw_data.xpath("body/table/tr"): # adding table row to play-by-play info if the first entry is a # digit, i.e. an in-game event id try: int(tr.xpath("td[1]/text()")[0]) self.event_data.append(tr) # checking whether exactly eight table cells are located in row if len(tr.xpath("td")) != 8: # TODO: proper logging print len(tr.xpath("td")) except: pass
<commit_before><commit_msg>Add initial version of event parser<commit_after>#!/usr/bin/env python # -*- coding: utf-8 -*- import logging from collections import defaultdict logger = logging.getLogger(__name__) class EventParser(): def __init__(self, raw_data): self.raw_data = raw_data self.score = defaultdict(int) # self.score['road'] = 0 # self.score['home'] = 0 self.score_diff = 0 self.curr_period = 0 def create_events(self, game, rosters): self.game = game self.rosters = rosters self.load_data for event_data_item in self.event_data: self.get_event(event_data_item) def get_event(self, event_data_item): # retrieving data item contents as list tokens = event_data_item.xpath("td/text()") print(tokens) def load_data(self): """ Loads structured raw data and pre-processes it. """ self.event_data = list() # finding all table rows on play-by-play page for tr in self.raw_data.xpath("body/table/tr"): # adding table row to play-by-play info if the first entry is a # digit, i.e. an in-game event id try: int(tr.xpath("td[1]/text()")[0]) self.event_data.append(tr) # checking whether exactly eight table cells are located in row if len(tr.xpath("td")) != 8: # TODO: proper logging print len(tr.xpath("td")) except: pass
996c9428ce3f56a5f3914d2b02d670c88a198230
morse_trainer/test_grouping.py
morse_trainer/test_grouping.py
#!/usr/bin/python3 # -*- coding: utf-8 -*- """ Test code for 'grouping' widget used by Morse Trainer. """ import sys from grouping import Grouping from PyQt5.QtWidgets import (QApplication, QWidget, QHBoxLayout, QVBoxLayout, QPushButton) class TestGrouping(QWidget): """Application to demonstrate the Morse Trainer 'grouping' widget.""" def __init__(self): super().__init__() self.initUI() def initUI(self): self.grouping = Grouping() vbox = QVBoxLayout() vbox.addWidget(self.grouping) self.setLayout(vbox) self.setGeometry(100, 100, 800, 200) self.setWindowTitle('Example of Grouping widget') self.show() app = QApplication(sys.argv) ex = TestGrouping() sys.exit(app.exec())
Test code for 'grouping' module
Test code for 'grouping' module
Python
mit
rzzzwilson/morse,rzzzwilson/morse
Test code for 'grouping' module
#!/usr/bin/python3 # -*- coding: utf-8 -*- """ Test code for 'grouping' widget used by Morse Trainer. """ import sys from grouping import Grouping from PyQt5.QtWidgets import (QApplication, QWidget, QHBoxLayout, QVBoxLayout, QPushButton) class TestGrouping(QWidget): """Application to demonstrate the Morse Trainer 'grouping' widget.""" def __init__(self): super().__init__() self.initUI() def initUI(self): self.grouping = Grouping() vbox = QVBoxLayout() vbox.addWidget(self.grouping) self.setLayout(vbox) self.setGeometry(100, 100, 800, 200) self.setWindowTitle('Example of Grouping widget') self.show() app = QApplication(sys.argv) ex = TestGrouping() sys.exit(app.exec())
<commit_before><commit_msg>Test code for 'grouping' module<commit_after>
#!/usr/bin/python3 # -*- coding: utf-8 -*- """ Test code for 'grouping' widget used by Morse Trainer. """ import sys from grouping import Grouping from PyQt5.QtWidgets import (QApplication, QWidget, QHBoxLayout, QVBoxLayout, QPushButton) class TestGrouping(QWidget): """Application to demonstrate the Morse Trainer 'grouping' widget.""" def __init__(self): super().__init__() self.initUI() def initUI(self): self.grouping = Grouping() vbox = QVBoxLayout() vbox.addWidget(self.grouping) self.setLayout(vbox) self.setGeometry(100, 100, 800, 200) self.setWindowTitle('Example of Grouping widget') self.show() app = QApplication(sys.argv) ex = TestGrouping() sys.exit(app.exec())
Test code for 'grouping' module#!/usr/bin/python3 # -*- coding: utf-8 -*- """ Test code for 'grouping' widget used by Morse Trainer. """ import sys from grouping import Grouping from PyQt5.QtWidgets import (QApplication, QWidget, QHBoxLayout, QVBoxLayout, QPushButton) class TestGrouping(QWidget): """Application to demonstrate the Morse Trainer 'grouping' widget.""" def __init__(self): super().__init__() self.initUI() def initUI(self): self.grouping = Grouping() vbox = QVBoxLayout() vbox.addWidget(self.grouping) self.setLayout(vbox) self.setGeometry(100, 100, 800, 200) self.setWindowTitle('Example of Grouping widget') self.show() app = QApplication(sys.argv) ex = TestGrouping() sys.exit(app.exec())
<commit_before><commit_msg>Test code for 'grouping' module<commit_after>#!/usr/bin/python3 # -*- coding: utf-8 -*- """ Test code for 'grouping' widget used by Morse Trainer. """ import sys from grouping import Grouping from PyQt5.QtWidgets import (QApplication, QWidget, QHBoxLayout, QVBoxLayout, QPushButton) class TestGrouping(QWidget): """Application to demonstrate the Morse Trainer 'grouping' widget.""" def __init__(self): super().__init__() self.initUI() def initUI(self): self.grouping = Grouping() vbox = QVBoxLayout() vbox.addWidget(self.grouping) self.setLayout(vbox) self.setGeometry(100, 100, 800, 200) self.setWindowTitle('Example of Grouping widget') self.show() app = QApplication(sys.argv) ex = TestGrouping() sys.exit(app.exec())
ecce482fd99263c6b2f9aa8eddc668954ab41cc6
tests/test_strategy.py
tests/test_strategy.py
import unittest from decimal import Decimal as D from oscar_vat_moss.partner.strategy import * # noqa from mock import Mock class DeferredVATSelectorTest(unittest.TestCase): def test_selector(self): selector = DeferredVATSelector() strategy = selector.strategy() self.assertEqual(strategy.__class__, DeferredVATStrategy) with self.assertRaises(AttributeError): strategy.getRate(None, None) class PerUserVATSelectorTest(unittest.TestCase): def test_selector(self): selector = PerUserVATSelector() strategy = selector.strategy() self.assertEqual(strategy.__class__, PerUserVATStrategy) self.assertTrue(hasattr(strategy, 'get_rate')) class PerUserVATStrategyTest(unittest.TestCase): def test_valid_user(self): address = Mock() address.country = Mock() address.country.code = 'AT' address.line4 = 'Vienna' address.postcode = '1010' address.phone_number = '+43 1 234 5678' address.line1 = 'hastexo Professional Services GmbH' address.vatin = '' request = Mock() request.user = Mock() request.user.addresses = Mock() request.user.addresses.order_by = Mock(return_value=[address]) request.user.is_authenticated = Mock(return_value=True) selector = PerUserVATSelector() strategy = selector.strategy(request=request) result_rate = strategy.get_rate(None, None) self.assertEqual(result_rate, D('0.20')) address.vatin = 'ATU66688202' result_rate = strategy.get_rate(None, None) self.assertEqual(result_rate, D('0.00'))
Add unit tests for Strategy and Selector classes
Add unit tests for Strategy and Selector classes
Python
bsd-3-clause
fghaas/django-oscar-vat_moss,arbrandes/django-oscar-vat_moss,fghaas/django-oscar-vat_moss,hastexo/django-oscar-vat_moss,hastexo/django-oscar-vat_moss,arbrandes/django-oscar-vat_moss
Add unit tests for Strategy and Selector classes
import unittest from decimal import Decimal as D from oscar_vat_moss.partner.strategy import * # noqa from mock import Mock class DeferredVATSelectorTest(unittest.TestCase): def test_selector(self): selector = DeferredVATSelector() strategy = selector.strategy() self.assertEqual(strategy.__class__, DeferredVATStrategy) with self.assertRaises(AttributeError): strategy.getRate(None, None) class PerUserVATSelectorTest(unittest.TestCase): def test_selector(self): selector = PerUserVATSelector() strategy = selector.strategy() self.assertEqual(strategy.__class__, PerUserVATStrategy) self.assertTrue(hasattr(strategy, 'get_rate')) class PerUserVATStrategyTest(unittest.TestCase): def test_valid_user(self): address = Mock() address.country = Mock() address.country.code = 'AT' address.line4 = 'Vienna' address.postcode = '1010' address.phone_number = '+43 1 234 5678' address.line1 = 'hastexo Professional Services GmbH' address.vatin = '' request = Mock() request.user = Mock() request.user.addresses = Mock() request.user.addresses.order_by = Mock(return_value=[address]) request.user.is_authenticated = Mock(return_value=True) selector = PerUserVATSelector() strategy = selector.strategy(request=request) result_rate = strategy.get_rate(None, None) self.assertEqual(result_rate, D('0.20')) address.vatin = 'ATU66688202' result_rate = strategy.get_rate(None, None) self.assertEqual(result_rate, D('0.00'))
<commit_before><commit_msg>Add unit tests for Strategy and Selector classes<commit_after>
import unittest from decimal import Decimal as D from oscar_vat_moss.partner.strategy import * # noqa from mock import Mock class DeferredVATSelectorTest(unittest.TestCase): def test_selector(self): selector = DeferredVATSelector() strategy = selector.strategy() self.assertEqual(strategy.__class__, DeferredVATStrategy) with self.assertRaises(AttributeError): strategy.getRate(None, None) class PerUserVATSelectorTest(unittest.TestCase): def test_selector(self): selector = PerUserVATSelector() strategy = selector.strategy() self.assertEqual(strategy.__class__, PerUserVATStrategy) self.assertTrue(hasattr(strategy, 'get_rate')) class PerUserVATStrategyTest(unittest.TestCase): def test_valid_user(self): address = Mock() address.country = Mock() address.country.code = 'AT' address.line4 = 'Vienna' address.postcode = '1010' address.phone_number = '+43 1 234 5678' address.line1 = 'hastexo Professional Services GmbH' address.vatin = '' request = Mock() request.user = Mock() request.user.addresses = Mock() request.user.addresses.order_by = Mock(return_value=[address]) request.user.is_authenticated = Mock(return_value=True) selector = PerUserVATSelector() strategy = selector.strategy(request=request) result_rate = strategy.get_rate(None, None) self.assertEqual(result_rate, D('0.20')) address.vatin = 'ATU66688202' result_rate = strategy.get_rate(None, None) self.assertEqual(result_rate, D('0.00'))
Add unit tests for Strategy and Selector classesimport unittest from decimal import Decimal as D from oscar_vat_moss.partner.strategy import * # noqa from mock import Mock class DeferredVATSelectorTest(unittest.TestCase): def test_selector(self): selector = DeferredVATSelector() strategy = selector.strategy() self.assertEqual(strategy.__class__, DeferredVATStrategy) with self.assertRaises(AttributeError): strategy.getRate(None, None) class PerUserVATSelectorTest(unittest.TestCase): def test_selector(self): selector = PerUserVATSelector() strategy = selector.strategy() self.assertEqual(strategy.__class__, PerUserVATStrategy) self.assertTrue(hasattr(strategy, 'get_rate')) class PerUserVATStrategyTest(unittest.TestCase): def test_valid_user(self): address = Mock() address.country = Mock() address.country.code = 'AT' address.line4 = 'Vienna' address.postcode = '1010' address.phone_number = '+43 1 234 5678' address.line1 = 'hastexo Professional Services GmbH' address.vatin = '' request = Mock() request.user = Mock() request.user.addresses = Mock() request.user.addresses.order_by = Mock(return_value=[address]) request.user.is_authenticated = Mock(return_value=True) selector = PerUserVATSelector() strategy = selector.strategy(request=request) result_rate = strategy.get_rate(None, None) self.assertEqual(result_rate, D('0.20')) address.vatin = 'ATU66688202' result_rate = strategy.get_rate(None, None) self.assertEqual(result_rate, D('0.00'))
<commit_before><commit_msg>Add unit tests for Strategy and Selector classes<commit_after>import unittest from decimal import Decimal as D from oscar_vat_moss.partner.strategy import * # noqa from mock import Mock class DeferredVATSelectorTest(unittest.TestCase): def test_selector(self): selector = DeferredVATSelector() strategy = selector.strategy() self.assertEqual(strategy.__class__, DeferredVATStrategy) with self.assertRaises(AttributeError): strategy.getRate(None, None) class PerUserVATSelectorTest(unittest.TestCase): def test_selector(self): selector = PerUserVATSelector() strategy = selector.strategy() self.assertEqual(strategy.__class__, PerUserVATStrategy) self.assertTrue(hasattr(strategy, 'get_rate')) class PerUserVATStrategyTest(unittest.TestCase): def test_valid_user(self): address = Mock() address.country = Mock() address.country.code = 'AT' address.line4 = 'Vienna' address.postcode = '1010' address.phone_number = '+43 1 234 5678' address.line1 = 'hastexo Professional Services GmbH' address.vatin = '' request = Mock() request.user = Mock() request.user.addresses = Mock() request.user.addresses.order_by = Mock(return_value=[address]) request.user.is_authenticated = Mock(return_value=True) selector = PerUserVATSelector() strategy = selector.strategy(request=request) result_rate = strategy.get_rate(None, None) self.assertEqual(result_rate, D('0.20')) address.vatin = 'ATU66688202' result_rate = strategy.get_rate(None, None) self.assertEqual(result_rate, D('0.00'))
889e6d72fed0b51fbca9ca717279ec31aa3f12b0
kpi/management/commands/is_database_empty.py
kpi/management/commands/is_database_empty.py
# coding: utf-8 from django.core.management.base import BaseCommand, CommandError from django.db import connections from django.db.utils import ConnectionDoesNotExist, OperationalError class Command(BaseCommand): help = ( 'Determine if one or more databases are empty, returning a ' 'tab-separated list of True or False. Non-existent databases are ' 'considered empty.' ) def add_arguments(self, parser): parser.add_argument( 'database', type=str, nargs='+', help='a database configured in django.conf.settings.DATABASES' ) @staticmethod def test_table_exists_and_has_any_row(cursor, table): cursor.execute( 'SELECT (1) AS "exists" FROM "pg_tables" ' 'WHERE "tablename" = %s ' 'LIMIT 1;', [table] ) if not cursor.fetchone(): return False cursor.execute( f'SELECT (1) AS "exists" FROM "{table}" LIMIT 1;' ) return cursor.fetchone() is not None def handle(self, *args, **options): connection_keys = options.get('database') connection_keys = [ # For convenience, allow 'kpi' to be an alias for 'default' 'default' if x == 'kpi' else x for x in connection_keys ] table_to_test_for_connection_key = { 'default': 'kpi_asset', 'kobocat': 'logger_xform', } results = [] for connection_key in connection_keys: try: connection = connections[connection_key] except ConnectionDoesNotExist: raise CommandError( f'{connection_key} is not a configured database' ) try: table_to_test = table_to_test_for_connection_key[ connection_key ] except KeyError: raise CommandError( f"I don't know how to handle {connection_key}. Sorry!" ) try: with connection.cursor() as cursor: results.append( not self.test_table_exists_and_has_any_row( cursor, table_to_test ) ) except OperationalError as e: if str(e).strip().endswith('does not exist'): results.append(True) self.stdout.write('\t'.join([str(x) for x in results]))
Add management command to check for empty database
Add management command to check for empty database Will be used by kobo-install; see kobotoolbox/kobo-install#65
Python
agpl-3.0
kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi,kobotoolbox/kpi
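The two-step emptiness probe in this record (first consult pg_tables, then the table itself) is easy to unit-test with a stubbed cursor. A minimal sketch follows; the import path is an assumption, since the dump above does not show where the command file lives in the kpi tree.

from unittest import mock

from kpi.management.commands.is_database_empty import Command  # hypothetical path


def test_missing_table_counts_as_empty():
    cursor = mock.Mock()
    # First fetchone() answers the pg_tables lookup: no such table
    cursor.fetchone.side_effect = [None]
    assert Command.test_table_exists_and_has_any_row(cursor, 'kpi_asset') is False


def test_table_with_rows_is_not_empty():
    cursor = mock.Mock()
    # pg_tables says the table exists, then the table itself yields a row
    cursor.fetchone.side_effect = [(1,), (1,)]
    assert Command.test_table_exists_and_has_any_row(cursor, 'kpi_asset') is True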
121eefa0dd37b2dac75c56a33f1f96258b206c4c
record_cubes.py
record_cubes.py
from __future__ import print_function

import os
import sys

# Hack to get parent folder in path
sys.path.insert(1, os.path.join(sys.path[0], '..'))

import magmo


def main():
    #for day in range(11, 30+1):
    for day in range(21, 21+1):
        sources = magmo.get_day_obs_data(day)
        print(sources)
        for src in sources:
            cube_path = "day{}/1420/magmo-{}_1420_sl_restor.fits" \
                .format(day, src['source'])
            cube_found = os.path.exists(cube_path)
            print("{},{},{}".format(day, src['source'],
                                    "Y" if cube_found else "N"))

    return 0


# Run the script if it is called from the command line
if __name__ == "__main__":
    exit(main())
Add stats on which fields had cubes produced or had spectra used. Tidy up console output. Add zoomed-in l-v plot.
Add stats on which fields had cubes produced or had spectra used. Tidy up console output. Add zoomed-in l-v plot.
Python
apache-2.0
jd-au/magmo-HI,jd-au/magmo-HI
7405c342522cf3686b5946fb30a59c74c410c655
tests/site/pages/migrations/0002_regularpage.py
tests/site/pages/migrations/0002_regularpage.py
# -*- coding: utf-8 -*- # Generated by Django 1.11.1 on 2017-06-02 04:26 from __future__ import unicode_literals from django.db import migrations, models import django.db.models.deletion import wagtail.wagtailcore.fields class Migration(migrations.Migration): dependencies = [ ('wagtailcore', '0033_remove_golive_expiry_help_text'), ('pages', '0001_initial'), ] operations = [ migrations.CreateModel( name='RegularPage', fields=[ ('page_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='wagtailcore.Page')), ('subtitle', models.CharField(blank=True, default='', max_length=255)), ('body', wagtail.wagtailcore.fields.RichTextField(blank=True, default='')), ], options={ 'abstract': False, }, bases=('wagtailcore.page',), ), ]
Add missing migration for tests.app.pages (fixes build)
Add missing migration for tests.app.pages (fixes build)
Python
mit
LabD/wagtail-personalisation,LabD/wagtail-personalisation,LabD/wagtail-personalisation
254403f507ea8ae075a791f24a031eaa79fc2447
tools/dev/wc-format.py
tools/dev/wc-format.py
#!/usr/bin/env python

import os
import sqlite3
import sys

# helper
def usage():
  # substitute the program name so the "%s" is not printed literally
  sys.stderr.write(("USAGE: %s [PATH]\n"
                    "\n"
                    "Prints to stdout the format of the working copy at PATH.\n")
                   % os.path.basename(sys.argv[0]))

# parse argv
wc = (sys.argv[1:] + ['.'])[0]

# main()
entries = os.path.join(wc, '.svn', 'entries')
wc_db = os.path.join(wc, '.svn', 'wc.db')
if os.path.exists(entries):
  formatno = int(open(entries).readline())
elif os.path.exists(wc_db):
  formatno = sqlite3.connect(wc_db).execute('pragma user_version;').fetchone()[0]
else:
  usage()
  sys.exit(1)

# 1.0.x -> 1.3.x: format 4
# 1.4.x: format 8
# 1.5.x: format 9
# 1.6.x: format 10
# 1.7.x: format XXX
print("%s: %d" % (wc, formatno))
Add a helper script, ported to Python.
Add a helper script, ported to Python. * tools/dev/wc-format.py: New. Prints the working copy format of a given directory.
Python
apache-2.0
jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion,jmckaskill/subversion
ab5c5b5d7c214b17b48add9caeaa36a81e3886f5
tests/test_exc.py
tests/test_exc.py
# Copyright 2012 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import testtools from glanceclient import exc FakeResponse = collections.namedtuple('HTTPResponse', ['status']) class TestHTTPExceptions(testtools.TestCase): def test_from_response(self): """exc.from_response should return instance of an HTTP exception.""" out = exc.from_response(FakeResponse(400)) self.assertTrue(isinstance(out, exc.HTTPBadRequest))
# Copyright 2012 OpenStack Foundation # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import collections import testtools from glanceclient import exc FakeResponse = collections.namedtuple('HTTPResponse', ['status']) class TestHTTPExceptions(testtools.TestCase): def test_from_response(self): """exc.from_response should return instance of an HTTP exception.""" out = exc.from_response(FakeResponse(400)) self.assertIsInstance(out, exc.HTTPBadRequest)
Change assertTrue(isinstance()) to the optimal assert
Change assertTrue(isinstance()) to the optimal assert

assertTrue(isinstance(A, B)) or assertEqual(type(A), B) in tests should be replaced by assertIsInstance(A, B), provided by testtools. I have searched all the tests; there is only one wrong usage.

Change-Id: Ib1db1a2dca7b5d8cbfe823973e4b571d0f0925c5
Closes-bug: #1268480
Python
apache-2.0
klmitch/python-glanceclient,alexpilotti/python-glanceclient,JioCloud/python-glanceclient,mmasaki/python-glanceclient,mmasaki/python-glanceclient,openstack/python-glanceclient,JioCloud/python-glanceclient,varunarya10/python-glanceclient,varunarya10/python-glanceclient,klmitch/python-glanceclient,openstack/python-glanceclient,alexpilotti/python-glanceclient
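For readers unfamiliar with why this change matters: assertIsInstance is not just shorter, it produces a self-explanatory failure message, whereas assertTrue(isinstance(...)) can only report "False is not true". A minimal illustration, independent of the glanceclient code above:

import unittest

class Demo(unittest.TestCase):
    def test_old_style(self):
        # Fails with: AssertionError: False is not true
        self.assertTrue(isinstance(1, str))

    def test_new_style(self):
        # Fails with: AssertionError: 1 is not an instance of <class 'str'>
        self.assertIsInstance(1, str)

if __name__ == '__main__':
    unittest.main()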
576fa1969c09554d6d5b6ceaf4c9a2b33fbc2238
git-get.py
git-get.py
#!/usr/bin/env python3

import logging
import os
import subprocess


def git(command):
    """Runs a git command, logging any failure."""
    command = "git " + command
    logger.debug("Running: " + command)
    try:
        # check=True makes a non-zero git exit status raise CalledProcessError,
        # so the except branch below actually fires on failed clones
        subprocess.run(command.split(" "), stdout=subprocess.PIPE, check=True)
    except Exception as error_message:
        logger.error("Failed to run command: " + command)
        logger.debug("Error message: " + str(error_message))


def set_logger():
    """Initialises logger"""
    global logger
    logger = logging.getLogger()
    handler = logging.StreamHandler()
    formatter = logging.Formatter(
        "%(asctime)s %(name)-8s %(levelname)-8s %(message)s")
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    logger.setLevel(logging.DEBUG)


def install():
    target = os.path.abspath(os.path.expanduser("~/pro/git-get/files"))
    if not os.path.exists(target):
        os.makedirs(target)
    # Clone into the directory prepared above; without an explicit destination
    # the repository would land in the current working directory instead
    git("clone https://github.com/abactel/smhwr " +
        os.path.join(target, "smhwr"))


def main():
    set_logger()
    install()


if __name__ == '__main__':
    main()
Allow basic downloading of repos
Allow basic downloading of repos
Python
mit
abactel/git-get
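A note on the command construction in this record: git("clone https://github.com/abactel/smhwr") becomes the argv list ['git', 'clone', 'https://github.com/abactel/smhwr'] via str.split(" "). That is fine for the URLs used here, but naive whitespace-splitting breaks on arguments that contain spaces; shlex handles those. A small sketch (the quoted directory name is made up for illustration):

import shlex

command = 'git clone https://github.com/abactel/smhwr "my target dir"'
print(command.split(" "))    # quotes are kept and the directory splits in two
print(shlex.split(command))  # ['git', 'clone', 'https://...', 'my target dir']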
2cc44f5b97653d4b1b2070a90f079bab57116ebf
edisgo/opf/results/opf_expand_network.py
edisgo/opf/results/opf_expand_network.py
import numpy as np


def expand_network(edisgo, tolerance=1e-6):
    """
    Apply network expansion factors that were obtained by optimization
    to eDisGo MVGrid

    Parameters
    ----------
    edisgo : :class:`~.edisgo.EDisGo`
    tolerance : float
        The acceptable margin with which an expansion factor can deviate
        from the nearest Integer before it gets rounded up

    """
    if edisgo.opf_results is None:
        raise ValueError("OPF results not found. Run optimization first.")

    nep_factor = edisgo.opf_results.lines.nep.values.astype('float')

    # Only round up numbers that are reasonably far away from the nearest
    # Integer
    nep_factor = np.ceil(nep_factor - tolerance)

    # Get the names of all MV grid lines
    mv_lines = edisgo.topology.mv_grid.lines_df.index

    # Increase the number of parallel lines; scale down resistance and
    # reactance accordingly
    edisgo.topology.lines_df.loc[mv_lines, 'num_parallel'] *= nep_factor
    edisgo.topology.lines_df.loc[mv_lines, 'r'] /= nep_factor
    edisgo.topology.lines_df.loc[mv_lines, 'x'] /= nep_factor
Add function to map OPF results to eDisGo network
Add function to map OPF results to eDisGo network This adds a function that applies the OPF network expansion factors to the MV grid of an eDisGo object
Python
agpl-3.0
openego/eDisGo,openego/eDisGo
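The tolerance trick in expand_network is easiest to see with numbers: an optimizer result of 2.0000004 should stay at 2 parallel lines, while 2.3 genuinely needs a third. A standalone illustration:

import numpy as np

tolerance = 1e-6
nep_factor = np.array([1.0, 2.0000004, 2.3])
print(np.ceil(nep_factor - tolerance))  # [1. 2. 3.]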
3bcfcc4717014227de3775a2870e65d157862852
unioncal.py
unioncal.py
import os.path import pytz import icalendar import datetime from urllib.request import urlopen url1 = 'https://www.google.com/calendar/ical/loganlyf%40gmail.com/private-d45c0973da1e18ebc6394c484ac5bbfb/basic.ics' url2 = 'https://www.google.com/calendar/ical/6v928aad58pqdh360ruh1t9dps%40group.calendar.google.com/private-b5a1100cbe0c252c82aed028c4d91c59/basic.ics' cal1 = icalendar.Calendar.from_ical(urlopen(url1).read()) cal2 = icalendar.Calendar.from_ical(urlopen(url2).read()) def getAllEvents(calendar): events = [] for comp in calendar.subcomponents: if type(comp) == icalendar.cal.Event: events.append(comp) return events events1 = getAllEvents(cal1) print(cal1.subcomponents[0]['tzid']) print(events1[0]['summary'],events1[0]['dtstart'],events1[0]['dtend'])
Read ics, explore data structure.
Read ics, explore data structure.
Python
agpl-3.0
louy2/Calenssist,asm-products/calenssist
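The getAllEvents helper in this record filters subcomponents by exact type. The icalendar package also ships a recursive walk() method that does the same filtering, including nested components, in one call, which is usually what exploration scripts like this one want:

import icalendar

def get_all_events(calendar):
    # walk('VEVENT') yields every VEVENT component, however deeply nested
    return calendar.walk('VEVENT')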
0f50bcddeeb0f7c63e7885b2bd306509654460f1
dear_astrid/parser.py
dear_astrid/parser.py
"""Parse Astrid xml backup file into simple data structures.""" from datetime import datetime import re # TODO: ArgumentError? class AstridValueError(Exception): """Value does not match expected format and cannot be parsed""" def __init__(self, key, val): Exception.__init__(self, 'Unknown format for Astrid {}: {}'.format(key, val) ) def parse_date(due): """Parse astrid date value to object. >>> parse_date('1361905200000') datetime.datetime(2013, 2, 26, 12, 0) >>> parse_date('1389812400000').isoformat() '2014-01-15T12:00:00' """ # astrid dates have three extra digits on the end (ms?) sec, tail = due[0:-3], due[-3:] # TODO: check for minimum length? # TODO: check that result is between 1900 and 2100? if not tail == '000': raise AstridValueError('date', due) # NOTE: this uses local timezone, which is probably what you want # but I'm not entirely sure about that yet return datetime.fromtimestamp(int(sec)) # TODO: consider parsing with https://github.com/collective/icalendar def parse_recurrence(rule): """Convert astrid recurrence rule into dictionary >>> parse_recurrence('RRULE:FREQ=MONTHLY;INTERVAL=12') {'FREQ': 'MONTHLY', 'INTERVAL': '12'} """ if not rule: return matched = re.match(r'^RRULE:((?:[A-Z]+=[^;]+;?)+)$', rule) if not matched: raise AstridValueError('recurrence', rule) # TODO: use constants for normalization? # TODO: see icalendar.prop (vWeekday, etc) for parsing? return dict(s.split('=') for s in matched.group(1).split(';'))
Create functions for date and recurrence parsing
Create functions for date and recurrence parsing
Python
mit
rwstauner/dear_astrid,rwstauner/dear_astrid
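The doctests above cover the happy path; the AstridValueError branch deserves one example too. Astrid timestamps are expected to end in a literal '000' of milliseconds, so anything else is rejected rather than silently mis-parsed. The import path follows the file layout shown in this record:

from dear_astrid.parser import AstridValueError, parse_date

try:
    parse_date('1361905200500')  # 500 ms tail: not a value Astrid writes
except AstridValueError as e:
    print(e)  # Unknown format for Astrid date: 1361905200500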
73fd98aee14ff800ec53bd4296a5ea97c6b754b8
test/unittests/skills/test_fallback_skill.py
test/unittests/skills/test_fallback_skill.py
from unittest import TestCase, mock from mycroft.skills import FallbackSkill def setup_fallback(fb_class): fb_skill = fb_class() fb_skill.bind(mock.Mock(name='bus')) fb_skill.initialize() return fb_skill class TestFallbackSkill(TestCase): def test_life_cycle(self): """Test startup and shutdown of a fallback skill. Ensure that an added handler is removed as part of default shutdown. """ self.assertEqual(len(FallbackSkill.fallback_handlers), 0) fb_skill = setup_fallback(SimpleFallback) self.assertEqual(len(FallbackSkill.fallback_handlers), 1) self.assertEqual(FallbackSkill.wrapper_map[0][0], fb_skill.fallback_handler) self.assertEqual(len(FallbackSkill.wrapper_map), 1) fb_skill.default_shutdown() self.assertEqual(len(FallbackSkill.fallback_handlers), 0) self.assertEqual(len(FallbackSkill.wrapper_map), 0) def test_manual_removal(self): """Test that the call to remove_fallback() removes the handler""" self.assertEqual(len(FallbackSkill.fallback_handlers), 0) # Create skill adding a single handler fb_skill = setup_fallback(SimpleFallback) self.assertEqual(len(FallbackSkill.fallback_handlers), 1) fb_skill.remove_fallback(fb_skill.fallback_handler) # Both internal trackers of handlers should be cleared now self.assertEqual(len(FallbackSkill.fallback_handlers), 0) self.assertEqual(len(FallbackSkill.wrapper_map), 0) class SimpleFallback(FallbackSkill): """Simple fallback skill used for test.""" def initialize(self): self.register_fallback(self.fallback_handler, 42) def fallback_handler(self): pass
Add test cases for adding / removing fallbacks
Add test cases for adding / removing fallbacks
Python
apache-2.0
MycroftAI/mycroft-core,MycroftAI/mycroft-core,forslund/mycroft-core,forslund/mycroft-core
43ecf3f61feef5d046770c2ff816ba98ef88aad4
python/leetcode/test/test_ex771.py
python/leetcode/test/test_ex771.py
from nose.tools import raises, assert_raises import unittest from ex771 import Solution class TestClass: def setup(self): self.solution = Solution() def test_empty_jewels(self): result = self.solution.numJewelsInStones("", "ABC") assert result == 0 def test_non_empty_jewels(self): result = self.solution.numJewelsInStones("AB", "AAACBBL") assert result == 5 def test_mixed_case_jewels(self): result = self.solution.numJewelsInStones("aA", "aAAbbbb") assert result == 3 if __name__ == "__main__": unittest.main()
Add tests for leetcode exercise (771)
Add tests for leetcode exercise (771)
Python
mit
vilisimo/ads,vilisimo/ads
Add tests for leetcode exercise (771)
from nose.tools import raises, assert_raises import unittest from ex771 import Solution class TestClass: def setup(self): self.solution = Solution() def test_empty_jewels(self): result = self.solution.numJewelsInStones("", "ABC") assert result == 0 def test_non_empty_jewels(self): result = self.solution.numJewelsInStones("AB", "AAACBBL") assert result == 5 def test_mixed_case_jewels(self): result = self.solution.numJewelsInStones("aA", "aAAbbbb") assert result == 3 if __name__ == "__main__": unittest.main()
<commit_before><commit_msg>Add tests for leetcode exercise (771)<commit_after>
from nose.tools import raises, assert_raises import unittest from ex771 import Solution class TestClass: def setup(self): self.solution = Solution() def test_empty_jewels(self): result = self.solution.numJewelsInStones("", "ABC") assert result == 0 def test_non_empty_jewels(self): result = self.solution.numJewelsInStones("AB", "AAACBBL") assert result == 5 def test_mixed_case_jewels(self): result = self.solution.numJewelsInStones("aA", "aAAbbbb") assert result == 3 if __name__ == "__main__": unittest.main()
Add tests for leetcode exercise (771)from nose.tools import raises, assert_raises import unittest from ex771 import Solution class TestClass: def setup(self): self.solution = Solution() def test_empty_jewels(self): result = self.solution.numJewelsInStones("", "ABC") assert result == 0 def test_non_empty_jewels(self): result = self.solution.numJewelsInStones("AB", "AAACBBL") assert result == 5 def test_mixed_case_jewels(self): result = self.solution.numJewelsInStones("aA", "aAAbbbb") assert result == 3 if __name__ == "__main__": unittest.main()
<commit_before><commit_msg>Add tests for leetcode exercise (771)<commit_after>from nose.tools import raises, assert_raises import unittest from ex771 import Solution class TestClass: def setup(self): self.solution = Solution() def test_empty_jewels(self): result = self.solution.numJewelsInStones("", "ABC") assert result == 0 def test_non_empty_jewels(self): result = self.solution.numJewelsInStones("AB", "AAACBBL") assert result == 5 def test_mixed_case_jewels(self): result = self.solution.numJewelsInStones("aA", "aAAbbbb") assert result == 3 if __name__ == "__main__": unittest.main()
6ad9395de430fedbabf79ec4bb136e5f5e2da7f7
tests/test_group_create.py
tests/test_group_create.py
import argparse import getpass import logging import pyics try: import httplib except ImportError: import http.client as httplib # super debug mode - print all HTTP requests/responses #httplib.HTTPConnection.debuglevel = 1 TEST_GROUP_NAME = 'pyics-test-group' def parse_args(): p = argparse.ArgumentParser() p.add_argument('--user', '-u', required=True) p.add_argument('--space-id', '-s', required=True) return p.parse_args() def get_client(): args = parse_args() passwd = getpass.getpass() client = pyics.Client(args.user, passwd, space_id=args.space_id) return client def main(): svc = get_client() try: group = svc.groups.show(TEST_GROUP_NAME) except Exception as ex: if ex.response.status_code == 404: group = None else: raise if group is None: print "Creating group '{0}'".format(TEST_GROUP_NAME) try: resp = svc.groups.create( name=TEST_GROUP_NAME, image='ibmliberty', port=9080, memory=128, number_instances={'Desired': 1,'Min': 1, 'Max': 2}) except Exception as ex: print str(ex) print ex.response.text raise print resp print 'Listing groups...' print svc.groups.list() print "Finding group %s" % TEST_GROUP_NAME print svc.groups.show(TEST_GROUP_NAME) if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) main()
Add basic testing script for groups
Add basic testing script for groups
Python
apache-2.0
locke105/pyics
Add basic testing script for groups
import argparse import getpass import logging import pyics try: import httplib except ImportError: import http.client as httplib # super debug mode - print all HTTP requests/responses #httplib.HTTPConnection.debuglevel = 1 TEST_GROUP_NAME = 'pyics-test-group' def parse_args(): p = argparse.ArgumentParser() p.add_argument('--user', '-u', required=True) p.add_argument('--space-id', '-s', required=True) return p.parse_args() def get_client(): args = parse_args() passwd = getpass.getpass() client = pyics.Client(args.user, passwd, space_id=args.space_id) return client def main(): svc = get_client() try: group = svc.groups.show(TEST_GROUP_NAME) except Exception as ex: if ex.response.status_code == 404: group = None else: raise if group is None: print "Creating group '{0}'".format(TEST_GROUP_NAME) try: resp = svc.groups.create( name=TEST_GROUP_NAME, image='ibmliberty', port=9080, memory=128, number_instances={'Desired': 1,'Min': 1, 'Max': 2}) except Exception as ex: print str(ex) print ex.response.text raise print resp print 'Listing groups...' print svc.groups.list() print "Finding group %s" % TEST_GROUP_NAME print svc.groups.show(TEST_GROUP_NAME) if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) main()
<commit_before><commit_msg>Add basic testing script for groups<commit_after>
import argparse import getpass import logging import pyics try: import httplib except ImportError: import http.client as httplib # super debug mode - print all HTTP requests/responses #httplib.HTTPConnection.debuglevel = 1 TEST_GROUP_NAME = 'pyics-test-group' def parse_args(): p = argparse.ArgumentParser() p.add_argument('--user', '-u', required=True) p.add_argument('--space-id', '-s', required=True) return p.parse_args() def get_client(): args = parse_args() passwd = getpass.getpass() client = pyics.Client(args.user, passwd, space_id=args.space_id) return client def main(): svc = get_client() try: group = svc.groups.show(TEST_GROUP_NAME) except Exception as ex: if ex.response.status_code == 404: group = None else: raise if group is None: print "Creating group '{0}'".format(TEST_GROUP_NAME) try: resp = svc.groups.create( name=TEST_GROUP_NAME, image='ibmliberty', port=9080, memory=128, number_instances={'Desired': 1,'Min': 1, 'Max': 2}) except Exception as ex: print str(ex) print ex.response.text raise print resp print 'Listing groups...' print svc.groups.list() print "Finding group %s" % TEST_GROUP_NAME print svc.groups.show(TEST_GROUP_NAME) if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) main()
Add basic testing script for groupsimport argparse import getpass import logging import pyics try: import httplib except ImportError: import http.client as httplib # super debug mode - print all HTTP requests/responses #httplib.HTTPConnection.debuglevel = 1 TEST_GROUP_NAME = 'pyics-test-group' def parse_args(): p = argparse.ArgumentParser() p.add_argument('--user', '-u', required=True) p.add_argument('--space-id', '-s', required=True) return p.parse_args() def get_client(): args = parse_args() passwd = getpass.getpass() client = pyics.Client(args.user, passwd, space_id=args.space_id) return client def main(): svc = get_client() try: group = svc.groups.show(TEST_GROUP_NAME) except Exception as ex: if ex.response.status_code == 404: group = None else: raise if group is None: print "Creating group '{0}'".format(TEST_GROUP_NAME) try: resp = svc.groups.create( name=TEST_GROUP_NAME, image='ibmliberty', port=9080, memory=128, number_instances={'Desired': 1,'Min': 1, 'Max': 2}) except Exception as ex: print str(ex) print ex.response.text raise print resp print 'Listing groups...' print svc.groups.list() print "Finding group %s" % TEST_GROUP_NAME print svc.groups.show(TEST_GROUP_NAME) if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) main()
<commit_before><commit_msg>Add basic testing script for groups<commit_after>import argparse import getpass import logging import pyics try: import httplib except ImportError: import http.client as httplib # super debug mode - print all HTTP requests/responses #httplib.HTTPConnection.debuglevel = 1 TEST_GROUP_NAME = 'pyics-test-group' def parse_args(): p = argparse.ArgumentParser() p.add_argument('--user', '-u', required=True) p.add_argument('--space-id', '-s', required=True) return p.parse_args() def get_client(): args = parse_args() passwd = getpass.getpass() client = pyics.Client(args.user, passwd, space_id=args.space_id) return client def main(): svc = get_client() try: group = svc.groups.show(TEST_GROUP_NAME) except Exception as ex: if ex.response.status_code == 404: group = None else: raise if group is None: print "Creating group '{0}'".format(TEST_GROUP_NAME) try: resp = svc.groups.create( name=TEST_GROUP_NAME, image='ibmliberty', port=9080, memory=128, number_instances={'Desired': 1,'Min': 1, 'Max': 2}) except Exception as ex: print str(ex) print ex.response.text raise print resp print 'Listing groups...' print svc.groups.list() print "Finding group %s" % TEST_GROUP_NAME print svc.groups.show(TEST_GROUP_NAME) if __name__ == '__main__': logging.basicConfig(level=logging.DEBUG) main()
e023f8786765d8a57f45a77f0acfe70b90c2e098
2018/python/aoc_common.py
2018/python/aoc_common.py
"""aoc_common Common utility functions for Advent of Code solutions """ import pathlib def load_puzzle_input(day): """Return the puzzle input for the day’s puzzle""" input_directory = pathlib.Path(__file__).parent.with_name('input') year = input_directory.parent.name input_filename = f'{year}-{day:02}.txt' return input_directory.joinpath(input_filename).read_text()
Add utility module to load puzzle input
Add utility module to load puzzle input
Python
mit
robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions
Add utility module to load puzzle input
"""aoc_common Common utility functions for Advent of Code solutions """ import pathlib def load_puzzle_input(day): """Return the puzzle input for the day’s puzzle""" input_directory = pathlib.Path(__file__).parent.with_name('input') year = input_directory.parent.name input_filename = f'{year}-{day:02}.txt' return input_directory.joinpath(input_filename).read_text()
<commit_before><commit_msg>Add utility module to load puzzle input<commit_after>
"""aoc_common Common utility functions for Advent of Code solutions """ import pathlib def load_puzzle_input(day): """Return the puzzle input for the day’s puzzle""" input_directory = pathlib.Path(__file__).parent.with_name('input') year = input_directory.parent.name input_filename = f'{year}-{day:02}.txt' return input_directory.joinpath(input_filename).read_text()
Add utility module to load puzzle input"""aoc_common Common utility functions for Advent of Code solutions """ import pathlib def load_puzzle_input(day): """Return the puzzle input for the day’s puzzle""" input_directory = pathlib.Path(__file__).parent.with_name('input') year = input_directory.parent.name input_filename = f'{year}-{day:02}.txt' return input_directory.joinpath(input_filename).read_text()
<commit_before><commit_msg>Add utility module to load puzzle input<commit_after>"""aoc_common Common utility functions for Advent of Code solutions """ import pathlib def load_puzzle_input(day): """Return the puzzle input for the day’s puzzle""" input_directory = pathlib.Path(__file__).parent.with_name('input') year = input_directory.parent.name input_filename = f'{year}-{day:02}.txt' return input_directory.joinpath(input_filename).read_text()
2d5f39bd68481c81ecf676eb99d1d0e88e9540f7
test_version.py
test_version.py
# -*- coding: utf-8 -*- # # test_version.py # Part of ‘python-daemon’, an implementation of PEP 3143. # # Copyright © 2008–2014 Ben Finney <ben+python@benfinney.id.au> # # This is free software: you may copy, modify, and/or distribute this work # under the terms of the GNU General Public License as published by the # Free Software Foundation; version 3 of that license or any later version. # No warranty expressed or implied. See the file ‘LICENSE.GPL-3’ for details. """ Unit test for ‘version’ packaging module. """ from __future__ import (absolute_import, unicode_literals) import mock import testtools import docutils import version from version import (basestring, unicode) class VersionInfoWriter_TestCase(testtools.TestCase): """ Test cases for ‘VersionInfoWriter’ class. """ def setUp(self): """ Set up test fixtures. """ super(VersionInfoWriter_TestCase, self).setUp() self.test_instance = version.VersionInfoWriter() def test_declares_version_info_support(self): """ Should declare support for ‘version_info’. """ instance = self.test_instance expected_support = "version_info" result = instance.supports(expected_support) self.assertTrue(result) class VersionInfoWriter_translate_TestCase(testtools.TestCase): """ Test cases for ‘VersionInfoWriter.translate’ method. """ def setUp(self): """ Set up test fixtures. """ super(VersionInfoWriter_translate_TestCase, self).setUp() patcher_translator = mock.patch.object( version, 'VersionInfoTranslator') self.mock_class_translator = patcher_translator.start() self.addCleanup(patcher_translator.stop) self.mock_translator = self.mock_class_translator.return_value self.test_instance = version.VersionInfoWriter() patcher_document = mock.patch.object( self.test_instance, 'document') patcher_document.start() self.addCleanup(patcher_document.stop) def test_creates_translator_with_document(self): """ Should create a translator with the writer's document. """ instance = self.test_instance expected_document = self.test_instance.document instance.translate() self.mock_class_translator.assert_called_with(expected_document) def test_calls_document_walkabout_with_translator(self): """ Should call document.walkabout with the translator. """ instance = self.test_instance instance.translate() instance.document.walkabout.assert_called_with(self.mock_translator) def test_output_from_translator_astext(self): """ Should have output from translator.astext(). """ instance = self.test_instance instance.translate() expected_output = self.mock_translator.astext.return_value self.assertEqual(expected_output, instance.output) # Local variables: # coding: utf-8 # mode: python # End: # vim: fileencoding=utf-8 filetype=python :
Add test cases for ‘VersionInfoWriter’.
Add test cases for ‘VersionInfoWriter’.
Python
apache-2.0
wting/python-daemon,eaufavor/python-daemon
Add test cases for ‘VersionInfoWriter’.
# -*- coding: utf-8 -*- # # test_version.py # Part of ‘python-daemon’, an implementation of PEP 3143. # # Copyright © 2008–2014 Ben Finney <ben+python@benfinney.id.au> # # This is free software: you may copy, modify, and/or distribute this work # under the terms of the GNU General Public License as published by the # Free Software Foundation; version 3 of that license or any later version. # No warranty expressed or implied. See the file ‘LICENSE.GPL-3’ for details. """ Unit test for ‘version’ packaging module. """ from __future__ import (absolute_import, unicode_literals) import mock import testtools import docutils import version from version import (basestring, unicode) class VersionInfoWriter_TestCase(testtools.TestCase): """ Test cases for ‘VersionInfoWriter’ class. """ def setUp(self): """ Set up test fixtures. """ super(VersionInfoWriter_TestCase, self).setUp() self.test_instance = version.VersionInfoWriter() def test_declares_version_info_support(self): """ Should declare support for ‘version_info’. """ instance = self.test_instance expected_support = "version_info" result = instance.supports(expected_support) self.assertTrue(result) class VersionInfoWriter_translate_TestCase(testtools.TestCase): """ Test cases for ‘VersionInfoWriter.translate’ method. """ def setUp(self): """ Set up test fixtures. """ super(VersionInfoWriter_translate_TestCase, self).setUp() patcher_translator = mock.patch.object( version, 'VersionInfoTranslator') self.mock_class_translator = patcher_translator.start() self.addCleanup(patcher_translator.stop) self.mock_translator = self.mock_class_translator.return_value self.test_instance = version.VersionInfoWriter() patcher_document = mock.patch.object( self.test_instance, 'document') patcher_document.start() self.addCleanup(patcher_document.stop) def test_creates_translator_with_document(self): """ Should create a translator with the writer's document. """ instance = self.test_instance expected_document = self.test_instance.document instance.translate() self.mock_class_translator.assert_called_with(expected_document) def test_calls_document_walkabout_with_translator(self): """ Should call document.walkabout with the translator. """ instance = self.test_instance instance.translate() instance.document.walkabout.assert_called_with(self.mock_translator) def test_output_from_translator_astext(self): """ Should have output from translator.astext(). """ instance = self.test_instance instance.translate() expected_output = self.mock_translator.astext.return_value self.assertEqual(expected_output, instance.output) # Local variables: # coding: utf-8 # mode: python # End: # vim: fileencoding=utf-8 filetype=python :
<commit_before><commit_msg>Add test cases for ‘VersionInfoWriter’.<commit_after>
# -*- coding: utf-8 -*- # # test_version.py # Part of ‘python-daemon’, an implementation of PEP 3143. # # Copyright © 2008–2014 Ben Finney <ben+python@benfinney.id.au> # # This is free software: you may copy, modify, and/or distribute this work # under the terms of the GNU General Public License as published by the # Free Software Foundation; version 3 of that license or any later version. # No warranty expressed or implied. See the file ‘LICENSE.GPL-3’ for details. """ Unit test for ‘version’ packaging module. """ from __future__ import (absolute_import, unicode_literals) import mock import testtools import docutils import version from version import (basestring, unicode) class VersionInfoWriter_TestCase(testtools.TestCase): """ Test cases for ‘VersionInfoWriter’ class. """ def setUp(self): """ Set up test fixtures. """ super(VersionInfoWriter_TestCase, self).setUp() self.test_instance = version.VersionInfoWriter() def test_declares_version_info_support(self): """ Should declare support for ‘version_info’. """ instance = self.test_instance expected_support = "version_info" result = instance.supports(expected_support) self.assertTrue(result) class VersionInfoWriter_translate_TestCase(testtools.TestCase): """ Test cases for ‘VersionInfoWriter.translate’ method. """ def setUp(self): """ Set up test fixtures. """ super(VersionInfoWriter_translate_TestCase, self).setUp() patcher_translator = mock.patch.object( version, 'VersionInfoTranslator') self.mock_class_translator = patcher_translator.start() self.addCleanup(patcher_translator.stop) self.mock_translator = self.mock_class_translator.return_value self.test_instance = version.VersionInfoWriter() patcher_document = mock.patch.object( self.test_instance, 'document') patcher_document.start() self.addCleanup(patcher_document.stop) def test_creates_translator_with_document(self): """ Should create a translator with the writer's document. """ instance = self.test_instance expected_document = self.test_instance.document instance.translate() self.mock_class_translator.assert_called_with(expected_document) def test_calls_document_walkabout_with_translator(self): """ Should call document.walkabout with the translator. """ instance = self.test_instance instance.translate() instance.document.walkabout.assert_called_with(self.mock_translator) def test_output_from_translator_astext(self): """ Should have output from translator.astext(). """ instance = self.test_instance instance.translate() expected_output = self.mock_translator.astext.return_value self.assertEqual(expected_output, instance.output) # Local variables: # coding: utf-8 # mode: python # End: # vim: fileencoding=utf-8 filetype=python :
Add test cases for ‘VersionInfoWriter’.# -*- coding: utf-8 -*- # # test_version.py # Part of ‘python-daemon’, an implementation of PEP 3143. # # Copyright © 2008–2014 Ben Finney <ben+python@benfinney.id.au> # # This is free software: you may copy, modify, and/or distribute this work # under the terms of the GNU General Public License as published by the # Free Software Foundation; version 3 of that license or any later version. # No warranty expressed or implied. See the file ‘LICENSE.GPL-3’ for details. """ Unit test for ‘version’ packaging module. """ from __future__ import (absolute_import, unicode_literals) import mock import testtools import docutils import version from version import (basestring, unicode) class VersionInfoWriter_TestCase(testtools.TestCase): """ Test cases for ‘VersionInfoWriter’ class. """ def setUp(self): """ Set up test fixtures. """ super(VersionInfoWriter_TestCase, self).setUp() self.test_instance = version.VersionInfoWriter() def test_declares_version_info_support(self): """ Should declare support for ‘version_info’. """ instance = self.test_instance expected_support = "version_info" result = instance.supports(expected_support) self.assertTrue(result) class VersionInfoWriter_translate_TestCase(testtools.TestCase): """ Test cases for ‘VersionInfoWriter.translate’ method. """ def setUp(self): """ Set up test fixtures. """ super(VersionInfoWriter_translate_TestCase, self).setUp() patcher_translator = mock.patch.object( version, 'VersionInfoTranslator') self.mock_class_translator = patcher_translator.start() self.addCleanup(patcher_translator.stop) self.mock_translator = self.mock_class_translator.return_value self.test_instance = version.VersionInfoWriter() patcher_document = mock.patch.object( self.test_instance, 'document') patcher_document.start() self.addCleanup(patcher_document.stop) def test_creates_translator_with_document(self): """ Should create a translator with the writer's document. """ instance = self.test_instance expected_document = self.test_instance.document instance.translate() self.mock_class_translator.assert_called_with(expected_document) def test_calls_document_walkabout_with_translator(self): """ Should call document.walkabout with the translator. """ instance = self.test_instance instance.translate() instance.document.walkabout.assert_called_with(self.mock_translator) def test_output_from_translator_astext(self): """ Should have output from translator.astext(). """ instance = self.test_instance instance.translate() expected_output = self.mock_translator.astext.return_value self.assertEqual(expected_output, instance.output) # Local variables: # coding: utf-8 # mode: python # End: # vim: fileencoding=utf-8 filetype=python :
<commit_before><commit_msg>Add test cases for ‘VersionInfoWriter’.<commit_after># -*- coding: utf-8 -*- # # test_version.py # Part of ‘python-daemon’, an implementation of PEP 3143. # # Copyright © 2008–2014 Ben Finney <ben+python@benfinney.id.au> # # This is free software: you may copy, modify, and/or distribute this work # under the terms of the GNU General Public License as published by the # Free Software Foundation; version 3 of that license or any later version. # No warranty expressed or implied. See the file ‘LICENSE.GPL-3’ for details. """ Unit test for ‘version’ packaging module. """ from __future__ import (absolute_import, unicode_literals) import mock import testtools import docutils import version from version import (basestring, unicode) class VersionInfoWriter_TestCase(testtools.TestCase): """ Test cases for ‘VersionInfoWriter’ class. """ def setUp(self): """ Set up test fixtures. """ super(VersionInfoWriter_TestCase, self).setUp() self.test_instance = version.VersionInfoWriter() def test_declares_version_info_support(self): """ Should declare support for ‘version_info’. """ instance = self.test_instance expected_support = "version_info" result = instance.supports(expected_support) self.assertTrue(result) class VersionInfoWriter_translate_TestCase(testtools.TestCase): """ Test cases for ‘VersionInfoWriter.translate’ method. """ def setUp(self): """ Set up test fixtures. """ super(VersionInfoWriter_translate_TestCase, self).setUp() patcher_translator = mock.patch.object( version, 'VersionInfoTranslator') self.mock_class_translator = patcher_translator.start() self.addCleanup(patcher_translator.stop) self.mock_translator = self.mock_class_translator.return_value self.test_instance = version.VersionInfoWriter() patcher_document = mock.patch.object( self.test_instance, 'document') patcher_document.start() self.addCleanup(patcher_document.stop) def test_creates_translator_with_document(self): """ Should create a translator with the writer's document. """ instance = self.test_instance expected_document = self.test_instance.document instance.translate() self.mock_class_translator.assert_called_with(expected_document) def test_calls_document_walkabout_with_translator(self): """ Should call document.walkabout with the translator. """ instance = self.test_instance instance.translate() instance.document.walkabout.assert_called_with(self.mock_translator) def test_output_from_translator_astext(self): """ Should have output from translator.astext(). """ instance = self.test_instance instance.translate() expected_output = self.mock_translator.astext.return_value self.assertEqual(expected_output, instance.output) # Local variables: # coding: utf-8 # mode: python # End: # vim: fileencoding=utf-8 filetype=python :
301a7ff10f4a630ca403571aee8624bf98329b16
zipfsong_old.py
zipfsong_old.py
#!/usr/bin/python2
"""
Zipf's song problem v1.0 without using any class structure
like creating a Song class
Jose Antonio Navarrete
@joseanavarrete
"""
import sys


def process_info(n_played, song_name, song_number, songs_array):
    """
    Inserts into songs_array song_name processed with its zipf coefficient
    """
    zipf = n_played*song_number
    song = {'zipf': zipf, 'name': song_name, 'song_number': song_number}
    songs_array.append(song)


def my_reverse_multi_sort(x, y):
    """
    Reverse order for zipf and song_number sorting
    """
    n_cmp = cmp(y['zipf'], x['zipf'])
    if n_cmp == 0:
        return cmp(x['song_number'], y['song_number'])
    else:
        return n_cmp


def sort_and_print_list(songs_array, max_length):
    returning_list = sorted(songs_array, cmp=my_reverse_multi_sort)[:max_length]
    print "\n".join([s['name'] for s in returning_list])


def main():
    songs_array = []
    options = map(int, sys.stdin.readline().split())
    for x in xrange(options[0]):
        spl_line = map(lambda s: s.strip(), sys.stdin.readline().split())
        process_info(long(spl_line[0]), spl_line[1], x+1.0, songs_array)
    sort_and_print_list(songs_array, options[1])

if __name__ == '__main__':
    main()
Update zipfsong with unit test refactor and python3
Update zipfsong with unit test refactor and python3
Python
mit
josenava/spotify_puzzle
Update zipfsong with unit test refactor and python3
#!/usr/bin/python2
"""
Zipf's song problem v1.0 without using any class structure
like creating a Song class
Jose Antonio Navarrete
@joseanavarrete
"""
import sys


def process_info(n_played, song_name, song_number, songs_array):
    """
    Inserts into songs_array song_name processed with its zipf coefficient
    """
    zipf = n_played*song_number
    song = {'zipf': zipf, 'name': song_name, 'song_number': song_number}
    songs_array.append(song)


def my_reverse_multi_sort(x, y):
    """
    Reverse order for zipf and song_number sorting
    """
    n_cmp = cmp(y['zipf'], x['zipf'])
    if n_cmp == 0:
        return cmp(x['song_number'], y['song_number'])
    else:
        return n_cmp


def sort_and_print_list(songs_array, max_length):
    returning_list = sorted(songs_array, cmp=my_reverse_multi_sort)[:max_length]
    print "\n".join([s['name'] for s in returning_list])


def main():
    songs_array = []
    options = map(int, sys.stdin.readline().split())
    for x in xrange(options[0]):
        spl_line = map(lambda s: s.strip(), sys.stdin.readline().split())
        process_info(long(spl_line[0]), spl_line[1], x+1.0, songs_array)
    sort_and_print_list(songs_array, options[1])

if __name__ == '__main__':
    main()
<commit_before><commit_msg>Update zipfsong with unit test refactor and python3<commit_after>
#!/usr/bin/python2
"""
Zipf's song problem v1.0 without using any class structure
like creating a Song class
Jose Antonio Navarrete
@joseanavarrete
"""
import sys


def process_info(n_played, song_name, song_number, songs_array):
    """
    Inserts into songs_array song_name processed with its zipf coefficient
    """
    zipf = n_played*song_number
    song = {'zipf': zipf, 'name': song_name, 'song_number': song_number}
    songs_array.append(song)


def my_reverse_multi_sort(x, y):
    """
    Reverse order for zipf and song_number sorting
    """
    n_cmp = cmp(y['zipf'], x['zipf'])
    if n_cmp == 0:
        return cmp(x['song_number'], y['song_number'])
    else:
        return n_cmp


def sort_and_print_list(songs_array, max_length):
    returning_list = sorted(songs_array, cmp=my_reverse_multi_sort)[:max_length]
    print "\n".join([s['name'] for s in returning_list])


def main():
    songs_array = []
    options = map(int, sys.stdin.readline().split())
    for x in xrange(options[0]):
        spl_line = map(lambda s: s.strip(), sys.stdin.readline().split())
        process_info(long(spl_line[0]), spl_line[1], x+1.0, songs_array)
    sort_and_print_list(songs_array, options[1])

if __name__ == '__main__':
    main()
Update zipfsong with unit test refactor and python3#!/usr/bin/python2
"""
Zipf's song problem v1.0 without using any class structure
like creating a Song class
Jose Antonio Navarrete
@joseanavarrete
"""
import sys


def process_info(n_played, song_name, song_number, songs_array):
    """
    Inserts into songs_array song_name processed with its zipf coefficient
    """
    zipf = n_played*song_number
    song = {'zipf': zipf, 'name': song_name, 'song_number': song_number}
    songs_array.append(song)


def my_reverse_multi_sort(x, y):
    """
    Reverse order for zipf and song_number sorting
    """
    n_cmp = cmp(y['zipf'], x['zipf'])
    if n_cmp == 0:
        return cmp(x['song_number'], y['song_number'])
    else:
        return n_cmp


def sort_and_print_list(songs_array, max_length):
    returning_list = sorted(songs_array, cmp=my_reverse_multi_sort)[:max_length]
    print "\n".join([s['name'] for s in returning_list])


def main():
    songs_array = []
    options = map(int, sys.stdin.readline().split())
    for x in xrange(options[0]):
        spl_line = map(lambda s: s.strip(), sys.stdin.readline().split())
        process_info(long(spl_line[0]), spl_line[1], x+1.0, songs_array)
    sort_and_print_list(songs_array, options[1])

if __name__ == '__main__':
    main()
<commit_before><commit_msg>Update zipfsong with unit test refactor and python3<commit_after>#!/usr/bin/python2
"""
Zipf's song problem v1.0 without using any class structure
like creating a Song class
Jose Antonio Navarrete
@joseanavarrete
"""
import sys


def process_info(n_played, song_name, song_number, songs_array):
    """
    Inserts into songs_array song_name processed with its zipf coefficient
    """
    zipf = n_played*song_number
    song = {'zipf': zipf, 'name': song_name, 'song_number': song_number}
    songs_array.append(song)


def my_reverse_multi_sort(x, y):
    """
    Reverse order for zipf and song_number sorting
    """
    n_cmp = cmp(y['zipf'], x['zipf'])
    if n_cmp == 0:
        return cmp(x['song_number'], y['song_number'])
    else:
        return n_cmp


def sort_and_print_list(songs_array, max_length):
    returning_list = sorted(songs_array, cmp=my_reverse_multi_sort)[:max_length]
    print "\n".join([s['name'] for s in returning_list])


def main():
    songs_array = []
    options = map(int, sys.stdin.readline().split())
    for x in xrange(options[0]):
        spl_line = map(lambda s: s.strip(), sys.stdin.readline().split())
        process_info(long(spl_line[0]), spl_line[1], x+1.0, songs_array)
    sort_and_print_list(songs_array, options[1])

if __name__ == '__main__':
    main()
e39187779b0bd2a10290ef019a331a8a64a57a25
generate_nodes_module.py
generate_nodes_module.py
#!/usr/bin/env python3 from viper.parser.ast.generate_nodes_module import generate_text_from_parsed_rules from viper.parser.grammar import GRAMMAR_FILE from viper.parser.grammar_parsing.parse_grammar import parse_grammar_file from os.path import dirname, join basedir = dirname(__file__) output = join(basedir, 'viper', 'parser', 'ast', 'nodes.py') def generate_nodes_module(): parsed_rules = parse_grammar_file(GRAMMAR_FILE) text = generate_text_from_parsed_rules(parsed_rules) with open(output, 'w') as of: of.write(text) if __name__ == '__main__': generate_nodes_module()
Add script to generate nodes.py module for AST
Add script to generate nodes.py module for AST
Python
apache-2.0
pdarragh/Viper
Add script to generate nodes.py module for AST
#!/usr/bin/env python3 from viper.parser.ast.generate_nodes_module import generate_text_from_parsed_rules from viper.parser.grammar import GRAMMAR_FILE from viper.parser.grammar_parsing.parse_grammar import parse_grammar_file from os.path import dirname, join basedir = dirname(__file__) output = join(basedir, 'viper', 'parser', 'ast', 'nodes.py') def generate_nodes_module(): parsed_rules = parse_grammar_file(GRAMMAR_FILE) text = generate_text_from_parsed_rules(parsed_rules) with open(output, 'w') as of: of.write(text) if __name__ == '__main__': generate_nodes_module()
<commit_before><commit_msg>Add script to generate nodes.py module for AST<commit_after>
#!/usr/bin/env python3 from viper.parser.ast.generate_nodes_module import generate_text_from_parsed_rules from viper.parser.grammar import GRAMMAR_FILE from viper.parser.grammar_parsing.parse_grammar import parse_grammar_file from os.path import dirname, join basedir = dirname(__file__) output = join(basedir, 'viper', 'parser', 'ast', 'nodes.py') def generate_nodes_module(): parsed_rules = parse_grammar_file(GRAMMAR_FILE) text = generate_text_from_parsed_rules(parsed_rules) with open(output, 'w') as of: of.write(text) if __name__ == '__main__': generate_nodes_module()
Add script to generate nodes.py module for AST#!/usr/bin/env python3 from viper.parser.ast.generate_nodes_module import generate_text_from_parsed_rules from viper.parser.grammar import GRAMMAR_FILE from viper.parser.grammar_parsing.parse_grammar import parse_grammar_file from os.path import dirname, join basedir = dirname(__file__) output = join(basedir, 'viper', 'parser', 'ast', 'nodes.py') def generate_nodes_module(): parsed_rules = parse_grammar_file(GRAMMAR_FILE) text = generate_text_from_parsed_rules(parsed_rules) with open(output, 'w') as of: of.write(text) if __name__ == '__main__': generate_nodes_module()
<commit_before><commit_msg>Add script to generate nodes.py module for AST<commit_after>#!/usr/bin/env python3 from viper.parser.ast.generate_nodes_module import generate_text_from_parsed_rules from viper.parser.grammar import GRAMMAR_FILE from viper.parser.grammar_parsing.parse_grammar import parse_grammar_file from os.path import dirname, join basedir = dirname(__file__) output = join(basedir, 'viper', 'parser', 'ast', 'nodes.py') def generate_nodes_module(): parsed_rules = parse_grammar_file(GRAMMAR_FILE) text = generate_text_from_parsed_rules(parsed_rules) with open(output, 'w') as of: of.write(text) if __name__ == '__main__': generate_nodes_module()
74cf0ba2d329475870d72574da32cd134c5bef97
examples/gen_dhcpconf.py
examples/gen_dhcpconf.py
#!/usr/bin/python """A script which generates DHCP configuration for hosts matching a regex. Usage: gen_dhcpconf.py <regex> <compute_resource> e.g. gen_dhcpconf.py 'ssi2+' 'Online Engineering' """ import re import sys from psphere.client import Client client = Client() host_regex = sys.argv[1] p = re.compile(host_regex) compute_resource = sys.argv[2] cr = client.find_entity_view("ComputeResource", filter={"name": compute_resource}) for vm in sorted(cr.resourcePool.vm): if p.match(vm.name) is None: continue print("host %s {" % vm.name) print(" option host-name \"%s\";" % vm.name) nic_found = False for device in vm.config.hardware.device: if "macAddress" in device: print(" hardware ethernet %s;" % device["macAddress"]) nic_found = True if nic_found is False: print("ERROR: Did not find a NIC to get MAC address from.") sys.exit(1) try: print(" fixed-address %s;" % vm.guest.ipAddress) except AttributeError: print(" fixed-address ") print("}") client.logout()
Add an example which generates DHCP configuration
Add an example which generates DHCP configuration
Python
apache-2.0
jkinred/psphere,graphite-server/psphere
Add an example which generates DHCP configuration
#!/usr/bin/python """A script which generates DHCP configuration for hosts matching a regex. Usage: gen_dhcpconf.py <regex> <compute_resource> e.g. gen_dhcpconf.py 'ssi2+' 'Online Engineering' """ import re import sys from psphere.client import Client client = Client() host_regex = sys.argv[1] p = re.compile(host_regex) compute_resource = sys.argv[2] cr = client.find_entity_view("ComputeResource", filter={"name": compute_resource}) for vm in sorted(cr.resourcePool.vm): if p.match(vm.name) is None: continue print("host %s {" % vm.name) print(" option host-name \"%s\";" % vm.name) nic_found = False for device in vm.config.hardware.device: if "macAddress" in device: print(" hardware ethernet %s;" % device["macAddress"]) nic_found = True if nic_found is False: print("ERROR: Did not find a NIC to get MAC address from.") sys.exit(1) try: print(" fixed-address %s;" % vm.guest.ipAddress) except AttributeError: print(" fixed-address ") print("}") client.logout()
<commit_before><commit_msg>Add an example which generates DHCP configuration<commit_after>
#!/usr/bin/python """A script which generates DHCP configuration for hosts matching a regex. Usage: gen_dhcpconf.py <regex> <compute_resource> e.g. gen_dhcpconf.py 'ssi2+' 'Online Engineering' """ import re import sys from psphere.client import Client client = Client() host_regex = sys.argv[1] p = re.compile(host_regex) compute_resource = sys.argv[2] cr = client.find_entity_view("ComputeResource", filter={"name": compute_resource}) for vm in sorted(cr.resourcePool.vm): if p.match(vm.name) is None: continue print("host %s {" % vm.name) print(" option host-name \"%s\";" % vm.name) nic_found = False for device in vm.config.hardware.device: if "macAddress" in device: print(" hardware ethernet %s;" % device["macAddress"]) nic_found = True if nic_found is False: print("ERROR: Did not find a NIC to get MAC address from.") sys.exit(1) try: print(" fixed-address %s;" % vm.guest.ipAddress) except AttributeError: print(" fixed-address ") print("}") client.logout()
Add an example which generates DHCP configuration#!/usr/bin/python """A script which generates DHCP configuration for hosts matching a regex. Usage: gen_dhcpconf.py <regex> <compute_resource> e.g. gen_dhcpconf.py 'ssi2+' 'Online Engineering' """ import re import sys from psphere.client import Client client = Client() host_regex = sys.argv[1] p = re.compile(host_regex) compute_resource = sys.argv[2] cr = client.find_entity_view("ComputeResource", filter={"name": compute_resource}) for vm in sorted(cr.resourcePool.vm): if p.match(vm.name) is None: continue print("host %s {" % vm.name) print(" option host-name \"%s\";" % vm.name) nic_found = False for device in vm.config.hardware.device: if "macAddress" in device: print(" hardware ethernet %s;" % device["macAddress"]) nic_found = True if nic_found is False: print("ERROR: Did not find a NIC to get MAC address from.") sys.exit(1) try: print(" fixed-address %s;" % vm.guest.ipAddress) except AttributeError: print(" fixed-address ") print("}") client.logout()
<commit_before><commit_msg>Add an example which generates DHCP configuration<commit_after>#!/usr/bin/python """A script which generates DHCP configuration for hosts matching a regex. Usage: gen_dhcpconf.py <regex> <compute_resource> e.g. gen_dhcpconf.py 'ssi2+' 'Online Engineering' """ import re import sys from psphere.client import Client client = Client() host_regex = sys.argv[1] p = re.compile(host_regex) compute_resource = sys.argv[2] cr = client.find_entity_view("ComputeResource", filter={"name": compute_resource}) for vm in sorted(cr.resourcePool.vm): if p.match(vm.name) is None: continue print("host %s {" % vm.name) print(" option host-name \"%s\";" % vm.name) nic_found = False for device in vm.config.hardware.device: if "macAddress" in device: print(" hardware ethernet %s;" % device["macAddress"]) nic_found = True if nic_found is False: print("ERROR: Did not find a NIC to get MAC address from.") sys.exit(1) try: print(" fixed-address %s;" % vm.guest.ipAddress) except AttributeError: print(" fixed-address ") print("}") client.logout()
47e1b1e2a023c0298f56b61159e76b27135b09ba
nuage_neutron/tests/unit/test_nuage_redirect_target.py
nuage_neutron/tests/unit/test_nuage_redirect_target.py
# Copyright 2020 NOKIA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock import testtools from nuage_neutron.plugins.common.base_plugin import RootNuagePlugin from nuage_neutron.plugins.common.service_plugins.port_attributes \ import nuage_redirect_target from nuage_neutron.vsdclient import restproxy class TestNuageRedirectTarget(testtools.TestCase): @mock.patch.object(RootNuagePlugin, 'init_vsd_client') @mock.patch.object(nuage_redirect_target.NuageRedirectTarget, 'core_plugin') def test_create_pg_for_rt(self, *_): driver = nuage_redirect_target.NuageRedirectTarget() fake_sg = { 'security_group_rules': [] } driver.core_plugin.get_security_group = mock.MagicMock( return_value=fake_sg) vsdclient_mock = mock.MagicMock() driver.vsdclient = vsdclient_mock vsdclient_mock.create_security_group_using_parent.side_effect = ( restproxy.RESTProxyError( vsd_code=restproxy.REST_PG_EXISTS_ERR_CODE)) rt = {'parentID': '1', 'parentType': 'l2domain'} pg = driver._create_pg_for_rt(mock.MagicMock(), mock.MagicMock(), rt, mock.MagicMock()) self.assertIsNotNone(pg) vsdclient_mock.create_security_group_rules.assert_not_called() vsdclient_mock.get_nuage_l2domain_policy_groups.assert_called()
Add unittest for concurrent redirect target create
Add unittest for concurrent redirect target create Change-Id: I445901a6c1fd0105fdd4a82454c8767da751d1bc Closes-Bug: OPENSTACK-2865
Python
apache-2.0
nuagenetworks/nuage-openstack-neutron,nuagenetworks/nuage-openstack-neutron
Add unittest for concurrent redirect target create Change-Id: I445901a6c1fd0105fdd4a82454c8767da751d1bc Closes-Bug: OPENSTACK-2865
# Copyright 2020 NOKIA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock import testtools from nuage_neutron.plugins.common.base_plugin import RootNuagePlugin from nuage_neutron.plugins.common.service_plugins.port_attributes \ import nuage_redirect_target from nuage_neutron.vsdclient import restproxy class TestNuageRedirectTarget(testtools.TestCase): @mock.patch.object(RootNuagePlugin, 'init_vsd_client') @mock.patch.object(nuage_redirect_target.NuageRedirectTarget, 'core_plugin') def test_create_pg_for_rt(self, *_): driver = nuage_redirect_target.NuageRedirectTarget() fake_sg = { 'security_group_rules': [] } driver.core_plugin.get_security_group = mock.MagicMock( return_value=fake_sg) vsdclient_mock = mock.MagicMock() driver.vsdclient = vsdclient_mock vsdclient_mock.create_security_group_using_parent.side_effect = ( restproxy.RESTProxyError( vsd_code=restproxy.REST_PG_EXISTS_ERR_CODE)) rt = {'parentID': '1', 'parentType': 'l2domain'} pg = driver._create_pg_for_rt(mock.MagicMock(), mock.MagicMock(), rt, mock.MagicMock()) self.assertIsNotNone(pg) vsdclient_mock.create_security_group_rules.assert_not_called() vsdclient_mock.get_nuage_l2domain_policy_groups.assert_called()
<commit_before><commit_msg>Add unittest for concurrent redirect target create Change-Id: I445901a6c1fd0105fdd4a82454c8767da751d1bc Closes-Bug: OPENSTACK-2865<commit_after>
# Copyright 2020 NOKIA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock import testtools from nuage_neutron.plugins.common.base_plugin import RootNuagePlugin from nuage_neutron.plugins.common.service_plugins.port_attributes \ import nuage_redirect_target from nuage_neutron.vsdclient import restproxy class TestNuageRedirectTarget(testtools.TestCase): @mock.patch.object(RootNuagePlugin, 'init_vsd_client') @mock.patch.object(nuage_redirect_target.NuageRedirectTarget, 'core_plugin') def test_create_pg_for_rt(self, *_): driver = nuage_redirect_target.NuageRedirectTarget() fake_sg = { 'security_group_rules': [] } driver.core_plugin.get_security_group = mock.MagicMock( return_value=fake_sg) vsdclient_mock = mock.MagicMock() driver.vsdclient = vsdclient_mock vsdclient_mock.create_security_group_using_parent.side_effect = ( restproxy.RESTProxyError( vsd_code=restproxy.REST_PG_EXISTS_ERR_CODE)) rt = {'parentID': '1', 'parentType': 'l2domain'} pg = driver._create_pg_for_rt(mock.MagicMock(), mock.MagicMock(), rt, mock.MagicMock()) self.assertIsNotNone(pg) vsdclient_mock.create_security_group_rules.assert_not_called() vsdclient_mock.get_nuage_l2domain_policy_groups.assert_called()
Add unittest for concurrent redirect target create Change-Id: I445901a6c1fd0105fdd4a82454c8767da751d1bc Closes-Bug: OPENSTACK-2865# Copyright 2020 NOKIA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock import testtools from nuage_neutron.plugins.common.base_plugin import RootNuagePlugin from nuage_neutron.plugins.common.service_plugins.port_attributes \ import nuage_redirect_target from nuage_neutron.vsdclient import restproxy class TestNuageRedirectTarget(testtools.TestCase): @mock.patch.object(RootNuagePlugin, 'init_vsd_client') @mock.patch.object(nuage_redirect_target.NuageRedirectTarget, 'core_plugin') def test_create_pg_for_rt(self, *_): driver = nuage_redirect_target.NuageRedirectTarget() fake_sg = { 'security_group_rules': [] } driver.core_plugin.get_security_group = mock.MagicMock( return_value=fake_sg) vsdclient_mock = mock.MagicMock() driver.vsdclient = vsdclient_mock vsdclient_mock.create_security_group_using_parent.side_effect = ( restproxy.RESTProxyError( vsd_code=restproxy.REST_PG_EXISTS_ERR_CODE)) rt = {'parentID': '1', 'parentType': 'l2domain'} pg = driver._create_pg_for_rt(mock.MagicMock(), mock.MagicMock(), rt, mock.MagicMock()) self.assertIsNotNone(pg) vsdclient_mock.create_security_group_rules.assert_not_called() vsdclient_mock.get_nuage_l2domain_policy_groups.assert_called()
<commit_before><commit_msg>Add unittest for concurrent redirect target create Change-Id: I445901a6c1fd0105fdd4a82454c8767da751d1bc Closes-Bug: OPENSTACK-2865<commit_after># Copyright 2020 NOKIA # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import mock import testtools from nuage_neutron.plugins.common.base_plugin import RootNuagePlugin from nuage_neutron.plugins.common.service_plugins.port_attributes \ import nuage_redirect_target from nuage_neutron.vsdclient import restproxy class TestNuageRedirectTarget(testtools.TestCase): @mock.patch.object(RootNuagePlugin, 'init_vsd_client') @mock.patch.object(nuage_redirect_target.NuageRedirectTarget, 'core_plugin') def test_create_pg_for_rt(self, *_): driver = nuage_redirect_target.NuageRedirectTarget() fake_sg = { 'security_group_rules': [] } driver.core_plugin.get_security_group = mock.MagicMock( return_value=fake_sg) vsdclient_mock = mock.MagicMock() driver.vsdclient = vsdclient_mock vsdclient_mock.create_security_group_using_parent.side_effect = ( restproxy.RESTProxyError( vsd_code=restproxy.REST_PG_EXISTS_ERR_CODE)) rt = {'parentID': '1', 'parentType': 'l2domain'} pg = driver._create_pg_for_rt(mock.MagicMock(), mock.MagicMock(), rt, mock.MagicMock()) self.assertIsNotNone(pg) vsdclient_mock.create_security_group_rules.assert_not_called() vsdclient_mock.get_nuage_l2domain_policy_groups.assert_called()
e3e5f1862651e2809e0178bc19f79d67f30a86f6
tests/cpydiff/types_dict_keys_set.py
tests/cpydiff/types_dict_keys_set.py
""" categories: Types,dict description: Dictionary keys view does not behave as a set. cause: Not implemented. workaround: Explicitly convert keys to a set before using set operations. """ print({1:2, 3:4}.keys() & {1})
Add CPy diff-test for using dict.keys() as a set.
tests/cpydiff: Add CPy diff-test for using dict.keys() as a set. See issue #5493.
Python
mit
pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython,pfalcon/micropython
tests/cpydiff: Add CPy diff-test for using dict.keys() as a set. See issue #5493.
""" categories: Types,dict description: Dictionary keys view does not behave as a set. cause: Not implemented. workaround: Explicitly convert keys to a set before using set operations. """ print({1:2, 3:4}.keys() & {1})
<commit_before><commit_msg>tests/cpydiff: Add CPy diff-test for using dict.keys() as a set. See issue #5493.<commit_after>
""" categories: Types,dict description: Dictionary keys view does not behave as a set. cause: Not implemented. workaround: Explicitly convert keys to a set before using set operations. """ print({1:2, 3:4}.keys() & {1})
tests/cpydiff: Add CPy diff-test for using dict.keys() as a set. See issue #5493.""" categories: Types,dict description: Dictionary keys view does not behave as a set. cause: Not implemented. workaround: Explicitly convert keys to a set before using set operations. """ print({1:2, 3:4}.keys() & {1})
<commit_before><commit_msg>tests/cpydiff: Add CPy diff-test for using dict.keys() as a set. See issue #5493.<commit_after>""" categories: Types,dict description: Dictionary keys view does not behave as a set. cause: Not implemented. workaround: Explicitly convert keys to a set before using set operations. """ print({1:2, 3:4}.keys() & {1})
6bd6203813bc4ca377baed842d7934a50f86f37e
Server/encryptedServer.py
Server/encryptedServer.py
__author__ = 'masudurrahman' import sys from twisted.protocols import ftp from twisted.protocols.ftp import FTPFactory, FTPAnonymousShell, FTPRealm, FTP, FTPShell, IFTPShell from twisted.cred.portal import Portal from twisted.cred import checkers from twisted.cred.checkers import AllowAnonymousAccess, FilePasswordDB from twisted.internet import reactor, ssl from twisted.python import log from twisted.internet.defer import succeed, failure from twisted.internet.protocol import Factory, Protocol from OpenSSL import SSL def opsCall(obj): print "Processing", obj.fObj.name return "Completed" ## EITHER WE USE THE PRE-MADE FTP SHELL, WHICH COULD WORK, OR: ## MAKE AN FTP SHELL TO HANDLE THE REQUESTS FOR FILES ## METHODS NEEDED: CREATE, RENAME, UPDATE, DELETE for files ## METHODS NEEDED: access, list, changeDir, makeDir class MyFTPRealm(FTPRealm): def __init__( self, anonymousRoot, userHome="/home", callback=None ): FTPRealm.__init__( self, anonymousRoot, userHome=userHome ) self.callback = callback def requestAvatar(self, avatarId, mind, *interfaces): print "TRYING TO LOG IN" for iface in interfaces: if iface is IFTPShell: if avatarId is checkers.ANONYMOUS: avatar = FTPShell(self.anonymousRoot) ###Test Directory so we can get a folder to monitor avatar.makeDirectory("test") else: avatar = FTPShell(self.getHomeDirectory(avatarId)) return (IFTPShell, avatar, getattr(avatar, 'logout', lambda: None)) raise NotImplementedError("Only IFTPShell interface is supported by this realm") def verifyCallback(connection, x509, errnum, errdepth, ok): if not ok: print 'invalid cert from subject:', x509.get_subject() return False else: print "Certs are fine" return True if __name__ == "__main__": p = Portal(MyFTPRealm('./'),[AllowAnonymousAccess(), FilePasswordDB("pass1.dat")]) #p = Portal(MyFTPRealm('/no_anon_access/', userHome="/tmp/", callback=opsCall),[FilePasswordDB("pass.dat"), ]) f = ftp.FTPFactory(p) myContextFactory = ssl.DefaultOpenSSLContextFactory( 'keys/server.key', 'keys/server.crt' ) ctx = myContextFactory.getContext() ctx.set_verify( SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, verifyCallback ) ctx.load_verify_locations("keys/ca.pem") f.welcomeMessage = "CS3240 Team 4 Project" log.startLogging(sys.stdout) reactor.listenSSL(21, f, myContextFactory) #reactor.listenTCP(21, f) reactor.run()
Test -- Encrypted File Transfer Server
Test -- Encrypted File Transfer Server Test server for SSL based file transfers
Python
apache-2.0
mrahman1122/Team4CS3240
Test -- Encrypted File Transfer Server Test server for SSL based file transfers
__author__ = 'masudurrahman' import sys from twisted.protocols import ftp from twisted.protocols.ftp import FTPFactory, FTPAnonymousShell, FTPRealm, FTP, FTPShell, IFTPShell from twisted.cred.portal import Portal from twisted.cred import checkers from twisted.cred.checkers import AllowAnonymousAccess, FilePasswordDB from twisted.internet import reactor, ssl from twisted.python import log from twisted.internet.defer import succeed, failure from twisted.internet.protocol import Factory, Protocol from OpenSSL import SSL def opsCall(obj): print "Processing", obj.fObj.name return "Completed" ## EITHER WE USE THE PRE-MADE FTP SHELL, WHICH COULD WORK, OR: ## MAKE AN FTP SHELL TO HANDLE THE REQUESTS FOR FILES ## METHODS NEEDED: CREATE, RENAME, UPDATE, DELETE for files ## METHODS NEEDED: access, list, changeDir, makeDir class MyFTPRealm(FTPRealm): def __init__( self, anonymousRoot, userHome="/home", callback=None ): FTPRealm.__init__( self, anonymousRoot, userHome=userHome ) self.callback = callback def requestAvatar(self, avatarId, mind, *interfaces): print "TRYING TO LOG IN" for iface in interfaces: if iface is IFTPShell: if avatarId is checkers.ANONYMOUS: avatar = FTPShell(self.anonymousRoot) ###Test Directory so we can get a folder to monitor avatar.makeDirectory("test") else: avatar = FTPShell(self.getHomeDirectory(avatarId)) return (IFTPShell, avatar, getattr(avatar, 'logout', lambda: None)) raise NotImplementedError("Only IFTPShell interface is supported by this realm") def verifyCallback(connection, x509, errnum, errdepth, ok): if not ok: print 'invalid cert from subject:', x509.get_subject() return False else: print "Certs are fine" return True if __name__ == "__main__": p = Portal(MyFTPRealm('./'),[AllowAnonymousAccess(), FilePasswordDB("pass1.dat")]) #p = Portal(MyFTPRealm('/no_anon_access/', userHome="/tmp/", callback=opsCall),[FilePasswordDB("pass.dat"), ]) f = ftp.FTPFactory(p) myContextFactory = ssl.DefaultOpenSSLContextFactory( 'keys/server.key', 'keys/server.crt' ) ctx = myContextFactory.getContext() ctx.set_verify( SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, verifyCallback ) ctx.load_verify_locations("keys/ca.pem") f.welcomeMessage = "CS3240 Team 4 Project" log.startLogging(sys.stdout) reactor.listenSSL(21, f, myContextFactory) #reactor.listenTCP(21, f) reactor.run()
<commit_before><commit_msg>Test -- Encrypted File Transfer Server Test server for SSL based file transfers<commit_after>
__author__ = 'masudurrahman' import sys from twisted.protocols import ftp from twisted.protocols.ftp import FTPFactory, FTPAnonymousShell, FTPRealm, FTP, FTPShell, IFTPShell from twisted.cred.portal import Portal from twisted.cred import checkers from twisted.cred.checkers import AllowAnonymousAccess, FilePasswordDB from twisted.internet import reactor, ssl from twisted.python import log from twisted.internet.defer import succeed, failure from twisted.internet.protocol import Factory, Protocol from OpenSSL import SSL def opsCall(obj): print "Processing", obj.fObj.name return "Completed" ## EITHER WE USE THE PRE-MADE FTP SHELL, WHICH COULD WORK, OR: ## MAKE AN FTP SHELL TO HANDLE THE REQUESTS FOR FILES ## METHODS NEEDED: CREATE, RENAME, UPDATE, DELETE for files ## METHODS NEEDED: access, list, changeDir, makeDir class MyFTPRealm(FTPRealm): def __init__( self, anonymousRoot, userHome="/home", callback=None ): FTPRealm.__init__( self, anonymousRoot, userHome=userHome ) self.callback = callback def requestAvatar(self, avatarId, mind, *interfaces): print "TRYING TO LOG IN" for iface in interfaces: if iface is IFTPShell: if avatarId is checkers.ANONYMOUS: avatar = FTPShell(self.anonymousRoot) ###Test Directory so we can get a folder to monitor avatar.makeDirectory("test") else: avatar = FTPShell(self.getHomeDirectory(avatarId)) return (IFTPShell, avatar, getattr(avatar, 'logout', lambda: None)) raise NotImplementedError("Only IFTPShell interface is supported by this realm") def verifyCallback(connection, x509, errnum, errdepth, ok): if not ok: print 'invalid cert from subject:', x509.get_subject() return False else: print "Certs are fine" return True if __name__ == "__main__": p = Portal(MyFTPRealm('./'),[AllowAnonymousAccess(), FilePasswordDB("pass1.dat")]) #p = Portal(MyFTPRealm('/no_anon_access/', userHome="/tmp/", callback=opsCall),[FilePasswordDB("pass.dat"), ]) f = ftp.FTPFactory(p) myContextFactory = ssl.DefaultOpenSSLContextFactory( 'keys/server.key', 'keys/server.crt' ) ctx = myContextFactory.getContext() ctx.set_verify( SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, verifyCallback ) ctx.load_verify_locations("keys/ca.pem") f.welcomeMessage = "CS3240 Team 4 Project" log.startLogging(sys.stdout) reactor.listenSSL(21, f, myContextFactory) #reactor.listenTCP(21, f) reactor.run()
Test -- Encrypted File Transfer Server Test server for SSL based file transfers__author__ = 'masudurrahman' import sys from twisted.protocols import ftp from twisted.protocols.ftp import FTPFactory, FTPAnonymousShell, FTPRealm, FTP, FTPShell, IFTPShell from twisted.cred.portal import Portal from twisted.cred import checkers from twisted.cred.checkers import AllowAnonymousAccess, FilePasswordDB from twisted.internet import reactor, ssl from twisted.python import log from twisted.internet.defer import succeed, failure from twisted.internet.protocol import Factory, Protocol from OpenSSL import SSL def opsCall(obj): print "Processing", obj.fObj.name return "Completed" ## EITHER WE USE THE PRE-MADE FTP SHELL, WHICH COULD WORK, OR: ## MAKE AN FTP SHELL TO HANDLE THE REQUESTS FOR FILES ## METHODS NEEDED: CREATE, RENAME, UPDATE, DELETE for files ## METHODS NEEDED: access, list, changeDir, makeDir class MyFTPRealm(FTPRealm): def __init__( self, anonymousRoot, userHome="/home", callback=None ): FTPRealm.__init__( self, anonymousRoot, userHome=userHome ) self.callback = callback def requestAvatar(self, avatarId, mind, *interfaces): print "TRYING TO LOG IN" for iface in interfaces: if iface is IFTPShell: if avatarId is checkers.ANONYMOUS: avatar = FTPShell(self.anonymousRoot) ###Test Directory so we can get a folder to monitor avatar.makeDirectory("test") else: avatar = FTPShell(self.getHomeDirectory(avatarId)) return (IFTPShell, avatar, getattr(avatar, 'logout', lambda: None)) raise NotImplementedError("Only IFTPShell interface is supported by this realm") def verifyCallback(connection, x509, errnum, errdepth, ok): if not ok: print 'invalid cert from subject:', x509.get_subject() return False else: print "Certs are fine" return True if __name__ == "__main__": p = Portal(MyFTPRealm('./'),[AllowAnonymousAccess(), FilePasswordDB("pass1.dat")]) #p = Portal(MyFTPRealm('/no_anon_access/', userHome="/tmp/", callback=opsCall),[FilePasswordDB("pass.dat"), ]) f = ftp.FTPFactory(p) myContextFactory = ssl.DefaultOpenSSLContextFactory( 'keys/server.key', 'keys/server.crt' ) ctx = myContextFactory.getContext() ctx.set_verify( SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, verifyCallback ) ctx.load_verify_locations("keys/ca.pem") f.welcomeMessage = "CS3240 Team 4 Project" log.startLogging(sys.stdout) reactor.listenSSL(21, f, myContextFactory) #reactor.listenTCP(21, f) reactor.run()
<commit_before><commit_msg>Test -- Encrypted File Transfer Server Test server for SSL based file transfers<commit_after>__author__ = 'masudurrahman' import sys from twisted.protocols import ftp from twisted.protocols.ftp import FTPFactory, FTPAnonymousShell, FTPRealm, FTP, FTPShell, IFTPShell from twisted.cred.portal import Portal from twisted.cred import checkers from twisted.cred.checkers import AllowAnonymousAccess, FilePasswordDB from twisted.internet import reactor, ssl from twisted.python import log from twisted.internet.defer import succeed, failure from twisted.internet.protocol import Factory, Protocol from OpenSSL import SSL def opsCall(obj): print "Processing", obj.fObj.name return "Completed" ## EITHER WE USE THE PRE-MADE FTP SHELL, WHICH COULD WORK, OR: ## MAKE AN FTP SHELL TO HANDLE THE REQUESTS FOR FILES ## METHODS NEEDED: CREATE, RENAME, UPDATE, DELETE for files ## METHODS NEEDED: access, list, changeDir, makeDir class MyFTPRealm(FTPRealm): def __init__( self, anonymousRoot, userHome="/home", callback=None ): FTPRealm.__init__( self, anonymousRoot, userHome=userHome ) self.callback = callback def requestAvatar(self, avatarId, mind, *interfaces): print "TRYING TO LOG IN" for iface in interfaces: if iface is IFTPShell: if avatarId is checkers.ANONYMOUS: avatar = FTPShell(self.anonymousRoot) ###Test Directory so we can get a folder to monitor avatar.makeDirectory("test") else: avatar = FTPShell(self.getHomeDirectory(avatarId)) return (IFTPShell, avatar, getattr(avatar, 'logout', lambda: None)) raise NotImplementedError("Only IFTPShell interface is supported by this realm") def verifyCallback(connection, x509, errnum, errdepth, ok): if not ok: print 'invalid cert from subject:', x509.get_subject() return False else: print "Certs are fine" return True if __name__ == "__main__": p = Portal(MyFTPRealm('./'),[AllowAnonymousAccess(), FilePasswordDB("pass1.dat")]) #p = Portal(MyFTPRealm('/no_anon_access/', userHome="/tmp/", callback=opsCall),[FilePasswordDB("pass.dat"), ]) f = ftp.FTPFactory(p) myContextFactory = ssl.DefaultOpenSSLContextFactory( 'keys/server.key', 'keys/server.crt' ) ctx = myContextFactory.getContext() ctx.set_verify( SSL.VERIFY_PEER | SSL.VERIFY_FAIL_IF_NO_PEER_CERT, verifyCallback ) ctx.load_verify_locations("keys/ca.pem") f.welcomeMessage = "CS3240 Team 4 Project" log.startLogging(sys.stdout) reactor.listenSSL(21, f, myContextFactory) #reactor.listenTCP(21, f) reactor.run()
29f8cc38a03f773726cfb1bce83eb3aaa34607fe
samples/tina/genPhilos.py
samples/tina/genPhilos.py
#!/usr/bin/env python import sys ##################################################### one_philo_places = \ """ pl P%(id)dThink (1) pl P%(id)dHasLeft (0) pl P%(id)dHasRight (0) pl P%(id)dEat (0) pl P%(id)dFork (1) """ ##################################################### one_philo_arcs = \ """ # Internal evolution of philosopher %(id)d tr P%(id)dTakeLeft1 P%(id)dThink P%(id)dFork -> P%(id)dHasLeft tr P%(id)dTakeRight1 P%(id)dThink P%(di)dFork -> P%(id)dHasRight tr P%(id)dTakeRight2 P%(id)dHasLeft P%(di)dFork -> P%(id)dEat tr P%(id)dTakeLeft2 P%(id)dHasRight P%(id)dFork -> P%(id)dEat tr P%(id)dGoThink P%(id)dEat -> P%(id)dThink P%(id)dFork P%(di)dFork """ ##################################################### nb_philos = 2 if len(sys.argv) > 1: nb_philos = int(sys.argv[1]) f = open("philos" + str(nb_philos) + ".net", "w" ) nodes = [] arcs = [] for p in range(nb_philos): nodes += [one_philo_places % {"id":p}] di = (p-1) % (nb_philos) arcs += [one_philo_arcs % {"id":p , "di":di}] f.write("net philos\n") for n in nodes: f.write(n) for a in arcs: f.write(a)
Add a TINA philosophers (Python) generator.
Add a TINA philosophers (Python) generator.
Python
bsd-2-clause
ahamez/caesar.sdd
Add a TINA philosophers (Python) generator.
#!/usr/bin/env python import sys ##################################################### one_philo_places = \ """ pl P%(id)dThink (1) pl P%(id)dHasLeft (0) pl P%(id)dHasRight (0) pl P%(id)dEat (0) pl P%(id)dFork (1) """ ##################################################### one_philo_arcs = \ """ # Internal evolution of philosopher %(id)d tr P%(id)dTakeLeft1 P%(id)dThink P%(id)dFork -> P%(id)dHasLeft tr P%(id)dTakeRight1 P%(id)dThink P%(di)dFork -> P%(id)dHasRight tr P%(id)dTakeRight2 P%(id)dHasLeft P%(di)dFork -> P%(id)dEat tr P%(id)dTakeLeft2 P%(id)dHasRight P%(id)dFork -> P%(id)dEat tr P%(id)dGoThink P%(id)dEat -> P%(id)dThink P%(id)dFork P%(di)dFork """ ##################################################### nb_philos = 2 if len(sys.argv) > 1: nb_philos = int(sys.argv[1]) f = open("philos" + str(nb_philos) + ".net", "w" ) nodes = [] arcs = [] for p in range(nb_philos): nodes += [one_philo_places % {"id":p}] di = (p-1) % (nb_philos) arcs += [one_philo_arcs % {"id":p , "di":di}] f.write("net philos\n") for n in nodes: f.write(n) for a in arcs: f.write(a)
<commit_before><commit_msg>Add a TINA philosophers (Python) generator.<commit_after>
#!/usr/bin/env python import sys ##################################################### one_philo_places = \ """ pl P%(id)dThink (1) pl P%(id)dHasLeft (0) pl P%(id)dHasRight (0) pl P%(id)dEat (0) pl P%(id)dFork (1) """ ##################################################### one_philo_arcs = \ """ # Internal evolution of philosopher %(id)d tr P%(id)dTakeLeft1 P%(id)dThink P%(id)dFork -> P%(id)dHasLeft tr P%(id)dTakeRight1 P%(id)dThink P%(di)dFork -> P%(id)dHasRight tr P%(id)dTakeRight2 P%(id)dHasLeft P%(di)dFork -> P%(id)dEat tr P%(id)dTakeLeft2 P%(id)dHasRight P%(id)dFork -> P%(id)dEat tr P%(id)dGoThink P%(id)dEat -> P%(id)dThink P%(id)dFork P%(di)dFork """ ##################################################### nb_philos = 2 if len(sys.argv) > 1: nb_philos = int(sys.argv[1]) f = open("philos" + str(nb_philos) + ".net", "w" ) nodes = [] arcs = [] for p in range(nb_philos): nodes += [one_philo_places % {"id":p}] di = (p-1) % (nb_philos) arcs += [one_philo_arcs % {"id":p , "di":di}] f.write("net philos\n") for n in nodes: f.write(n) for a in arcs: f.write(a)
Add a TINA philosophers (Python) generator.#!/usr/bin/env python import sys ##################################################### one_philo_places = \ """ pl P%(id)dThink (1) pl P%(id)dHasLeft (0) pl P%(id)dHasRight (0) pl P%(id)dEat (0) pl P%(id)dFork (1) """ ##################################################### one_philo_arcs = \ """ # Internal evolution of philosopher %(id)d tr P%(id)dTakeLeft1 P%(id)dThink P%(id)dFork -> P%(id)dHasLeft tr P%(id)dTakeRight1 P%(id)dThink P%(di)dFork -> P%(id)dHasRight tr P%(id)dTakeRight2 P%(id)dHasLeft P%(di)dFork -> P%(id)dEat tr P%(id)dTakeLeft2 P%(id)dHasRight P%(id)dFork -> P%(id)dEat tr P%(id)dGoThink P%(id)dEat -> P%(id)dThink P%(id)dFork P%(di)dFork """ ##################################################### nb_philos = 2 if len(sys.argv) > 1: nb_philos = int(sys.argv[1]) f = open("philos" + str(nb_philos) + ".net", "w" ) nodes = [] arcs = [] for p in range(nb_philos): nodes += [one_philo_places % {"id":p}] di = (p-1) % (nb_philos) arcs += [one_philo_arcs % {"id":p , "di":di}] f.write("net philos\n") for n in nodes: f.write(n) for a in arcs: f.write(a)
<commit_before><commit_msg>Add a TINA philosophers (Python) generator.<commit_after>#!/usr/bin/env python import sys ##################################################### one_philo_places = \ """ pl P%(id)dThink (1) pl P%(id)dHasLeft (0) pl P%(id)dHasRight (0) pl P%(id)dEat (0) pl P%(id)dFork (1) """ ##################################################### one_philo_arcs = \ """ # Internal evolution of philosopher %(id)d tr P%(id)dTakeLeft1 P%(id)dThink P%(id)dFork -> P%(id)dHasLeft tr P%(id)dTakeRight1 P%(id)dThink P%(di)dFork -> P%(id)dHasRight tr P%(id)dTakeRight2 P%(id)dHasLeft P%(di)dFork -> P%(id)dEat tr P%(id)dTakeLeft2 P%(id)dHasRight P%(id)dFork -> P%(id)dEat tr P%(id)dGoThink P%(id)dEat -> P%(id)dThink P%(id)dFork P%(di)dFork """ ##################################################### nb_philos = 2 if len(sys.argv) > 1: nb_philos = int(sys.argv[1]) f = open("philos" + str(nb_philos) + ".net", "w" ) nodes = [] arcs = [] for p in range(nb_philos): nodes += [one_philo_places % {"id":p}] di = (p-1) % (nb_philos) arcs += [one_philo_arcs % {"id":p , "di":di}] f.write("net philos\n") for n in nodes: f.write(n) for a in arcs: f.write(a)
02f7c8e6f374f2d570fb6256612393018b2d3064
learntools/computer_vision/ex2.py
learntools/computer_vision/ex2.py
from learntools.core import * import tensorflow as tf class Q1(CodingProblem): _var = 'kernel' _solution = CS(""" # This is just one possibility. kernel = tf.constant([ [-2, -1, 0], [-1, 1, 1], [0, 1, 2], ]) """) def check(self, kernel): assert (isinstance(kernel, tf.Tensor)) assert ((len(kernel.shape) == 2), ("Your kernel needs to have have a shape with only two dimensions, " + "but you defined a kernel with shape {}, which has {} dimensions. " + "You should have only one level of nesting in your brackets, like " + "`[[1, 2], [3, 4]].` See the kernel in the tutorial for a guide.")) class Q2(CodingProblem): _var = 'image_filter' _hint = "" _solution = CS(""" image_filter = tf.nn.conv2d( input=image, filters=kernel, strides=1, padding='SAME', ) """) def check(self, image_filter): pass # TODO: Q2 check class Q3(CodingProblem): _var = 'image_detect' _hint = "" _solution = CS(""" image_detect = tf.nn.relu(image_filter) """) def check(self, image_detect): pass # TODO: Q3 check class Q4A(CodingProblem): _hint = "" _solution = "" def check(self): pass class Q4B(ThoughtExperiment): _solution = "In the tutorial, we talked about how the pattern of positive numbers will tell you the kind of features the kernel will extract. This kernel has a vertical column of 1's, and so we would expect it to return features of vertical lines." class Q4C(CodingProblem): _hint = "" _solution = "" def check(self): pass Q4 = MultipartProblem(Q4A, Q4B, Q4C) qvars = bind_exercises(globals(), [ Q1, Q2, Q3, Q4, ], var_format='q_{n}', ) __all__ = list(qvars)
Add checking code for exercise 2
Add checking code for exercise 2
Python
apache-2.0
Kaggle/learntools,Kaggle/learntools
Add checking code for exercise 2
from learntools.core import * import tensorflow as tf class Q1(CodingProblem): _var = 'kernel' _solution = CS(""" # This is just one possibility. kernel = tf.constant([ [-2, -1, 0], [-1, 1, 1], [0, 1, 2], ]) """) def check(self, kernel): assert (isinstance(kernel, tf.Tensor)) assert ((len(kernel.shape) == 2), ("Your kernel needs to have have a shape with only two dimensions, " + "but you defined a kernel with shape {}, which has {} dimensions. " + "You should have only one level of nesting in your brackets, like " + "`[[1, 2], [3, 4]].` See the kernel in the tutorial for a guide.")) class Q2(CodingProblem): _var = 'image_filter' _hint = "" _solution = CS(""" image_filter = tf.nn.conv2d( input=image, filters=kernel, strides=1, padding='SAME', ) """) def check(self, image_filter): pass # TODO: Q2 check class Q3(CodingProblem): _var = 'image_detect' _hint = "" _solution = CS(""" image_detect = tf.nn.relu(image_filter) """) def check(self, image_detect): pass # TODO: Q3 check class Q4A(CodingProblem): _hint = "" _solution = "" def check(self): pass class Q4B(ThoughtExperiment): _solution = "In the tutorial, we talked about how the pattern of positive numbers will tell you the kind of features the kernel will extract. This kernel has a vertical column of 1's, and so we would expect it to return features of vertical lines." class Q4C(CodingProblem): _hint = "" _solution = "" def check(self): pass Q4 = MultipartProblem(Q4A, Q4B, Q4C) qvars = bind_exercises(globals(), [ Q1, Q2, Q3, Q4, ], var_format='q_{n}', ) __all__ = list(qvars)
<commit_before><commit_msg>Add checking code for exercise 2<commit_after>
from learntools.core import * import tensorflow as tf class Q1(CodingProblem): _var = 'kernel' _solution = CS(""" # This is just one possibility. kernel = tf.constant([ [-2, -1, 0], [-1, 1, 1], [0, 1, 2], ]) """) def check(self, kernel): assert (isinstance(kernel, tf.Tensor)) assert ((len(kernel.shape) == 2), ("Your kernel needs to have have a shape with only two dimensions, " + "but you defined a kernel with shape {}, which has {} dimensions. " + "You should have only one level of nesting in your brackets, like " + "`[[1, 2], [3, 4]].` See the kernel in the tutorial for a guide.")) class Q2(CodingProblem): _var = 'image_filter' _hint = "" _solution = CS(""" image_filter = tf.nn.conv2d( input=image, filters=kernel, strides=1, padding='SAME', ) """) def check(self, image_filter): pass # TODO: Q2 check class Q3(CodingProblem): _var = 'image_detect' _hint = "" _solution = CS(""" image_detect = tf.nn.relu(image_filter) """) def check(self, image_detect): pass # TODO: Q3 check class Q4A(CodingProblem): _hint = "" _solution = "" def check(self): pass class Q4B(ThoughtExperiment): _solution = "In the tutorial, we talked about how the pattern of positive numbers will tell you the kind of features the kernel will extract. This kernel has a vertical column of 1's, and so we would expect it to return features of vertical lines." class Q4C(CodingProblem): _hint = "" _solution = "" def check(self): pass Q4 = MultipartProblem(Q4A, Q4B, Q4C) qvars = bind_exercises(globals(), [ Q1, Q2, Q3, Q4, ], var_format='q_{n}', ) __all__ = list(qvars)
Add checking code for exercise 2from learntools.core import * import tensorflow as tf class Q1(CodingProblem): _var = 'kernel' _solution = CS(""" # This is just one possibility. kernel = tf.constant([ [-2, -1, 0], [-1, 1, 1], [0, 1, 2], ]) """) def check(self, kernel): assert (isinstance(kernel, tf.Tensor)) assert ((len(kernel.shape) == 2), ("Your kernel needs to have have a shape with only two dimensions, " + "but you defined a kernel with shape {}, which has {} dimensions. " + "You should have only one level of nesting in your brackets, like " + "`[[1, 2], [3, 4]].` See the kernel in the tutorial for a guide.")) class Q2(CodingProblem): _var = 'image_filter' _hint = "" _solution = CS(""" image_filter = tf.nn.conv2d( input=image, filters=kernel, strides=1, padding='SAME', ) """) def check(self, image_filter): pass # TODO: Q2 check class Q3(CodingProblem): _var = 'image_detect' _hint = "" _solution = CS(""" image_detect = tf.nn.relu(image_filter) """) def check(self, image_detect): pass # TODO: Q3 check class Q4A(CodingProblem): _hint = "" _solution = "" def check(self): pass class Q4B(ThoughtExperiment): _solution = "In the tutorial, we talked about how the pattern of positive numbers will tell you the kind of features the kernel will extract. This kernel has a vertical column of 1's, and so we would expect it to return features of vertical lines." class Q4C(CodingProblem): _hint = "" _solution = "" def check(self): pass Q4 = MultipartProblem(Q4A, Q4B, Q4C) qvars = bind_exercises(globals(), [ Q1, Q2, Q3, Q4, ], var_format='q_{n}', ) __all__ = list(qvars)
<commit_before><commit_msg>Add checking code for exercise 2<commit_after>from learntools.core import * import tensorflow as tf class Q1(CodingProblem): _var = 'kernel' _solution = CS(""" # This is just one possibility. kernel = tf.constant([ [-2, -1, 0], [-1, 1, 1], [0, 1, 2], ]) """) def check(self, kernel): assert (isinstance(kernel, tf.Tensor)) assert ((len(kernel.shape) == 2), ("Your kernel needs to have have a shape with only two dimensions, " + "but you defined a kernel with shape {}, which has {} dimensions. " + "You should have only one level of nesting in your brackets, like " + "`[[1, 2], [3, 4]].` See the kernel in the tutorial for a guide.")) class Q2(CodingProblem): _var = 'image_filter' _hint = "" _solution = CS(""" image_filter = tf.nn.conv2d( input=image, filters=kernel, strides=1, padding='SAME', ) """) def check(self, image_filter): pass # TODO: Q2 check class Q3(CodingProblem): _var = 'image_detect' _hint = "" _solution = CS(""" image_detect = tf.nn.relu(image_filter) """) def check(self, image_detect): pass # TODO: Q3 check class Q4A(CodingProblem): _hint = "" _solution = "" def check(self): pass class Q4B(ThoughtExperiment): _solution = "In the tutorial, we talked about how the pattern of positive numbers will tell you the kind of features the kernel will extract. This kernel has a vertical column of 1's, and so we would expect it to return features of vertical lines." class Q4C(CodingProblem): _hint = "" _solution = "" def check(self): pass Q4 = MultipartProblem(Q4A, Q4B, Q4C) qvars = bind_exercises(globals(), [ Q1, Q2, Q3, Q4, ], var_format='q_{n}', ) __all__ = list(qvars)
9456944a66ded9bff986bdeda1cd8367d3a1ee37
soundbridge-write.py
soundbridge-write.py
#!/usr/bin/python # # $Id$ """ Write some text on a Roku soundbridge, using the telnet interface. """ import telnetlib import syslog from optparse import OptionParser parser = OptionParser() parser.add_option("-H", "--hostname", dest="hostname", default="soundbridge") parser.add_option("-t", "--text", dest="text", default="use the --text option to set the text to display") (options, args) = parser.parse_args() port = 4444 syslog.openlog("soundbridge-write") tn = telnetlib.Telnet(options.hostname, port) tn.read_until("SoundBridge> ") tn.write("sketch\n") tn.read_until("sketch> ") tn.write("font 10\n") tn.read_until("sketch> ") syslog.syslog("Writing '" + options.text + "' on the soundbridge at hostname '" + options.hostname + "'") tn.write("text 0 0 \"" + options.text + "\"\n") while 1 == 1: pass tn.close()
Add script to write text to a Roku Soundbridge.
Add script to write text to a Roku Soundbridge.
Python
mit
andrewferrier/misc-scripts,andrewferrier/misc-scripts,jbreitbart/dropbox-cleanup
Add script to write text to a Roku Soundbridge.
#!/usr/bin/python # # $Id$ """ Write some text on a Roku soundbridge, using the telnet interface. """ import telnetlib import syslog from optparse import OptionParser parser = OptionParser() parser.add_option("-H", "--hostname", dest="hostname", default="soundbridge") parser.add_option("-t", "--text", dest="text", default="use the --text option to set the text to display") (options, args) = parser.parse_args() port = 4444 syslog.openlog("soundbridge-write") tn = telnetlib.Telnet(options.hostname, port) tn.read_until("SoundBridge> ") tn.write("sketch\n") tn.read_until("sketch> ") tn.write("font 10\n") tn.read_until("sketch> ") syslog.syslog("Writing '" + options.text + "' on the soundbridge at hostname '" + options.hostname + "'") tn.write("text 0 0 \"" + options.text + "\"\n") while 1 == 1: pass tn.close()
<commit_before><commit_msg>Add script to write text to a Roku Soundbridge.<commit_after>
#!/usr/bin/python # # $Id$ """ Write some text on a Roku soundbridge, using the telnet interface. """ import telnetlib import syslog from optparse import OptionParser parser = OptionParser() parser.add_option("-H", "--hostname", dest="hostname", default="soundbridge") parser.add_option("-t", "--text", dest="text", default="use the --text option to set the text to display") (options, args) = parser.parse_args() port = 4444 syslog.openlog("soundbridge-write") tn = telnetlib.Telnet(options.hostname, port) tn.read_until("SoundBridge> ") tn.write("sketch\n") tn.read_until("sketch> ") tn.write("font 10\n") tn.read_until("sketch> ") syslog.syslog("Writing '" + options.text + "' on the soundbridge at hostname '" + options.hostname + "'") tn.write("text 0 0 \"" + options.text + "\"\n") while 1 == 1: pass tn.close()
Add script to write text to a Roku Soundbridge.#!/usr/bin/python # # $Id$ """ Write some text on a Roku soundbridge, using the telnet interface. """ import telnetlib import syslog from optparse import OptionParser parser = OptionParser() parser.add_option("-H", "--hostname", dest="hostname", default="soundbridge") parser.add_option("-t", "--text", dest="text", default="use the --text option to set the text to display") (options, args) = parser.parse_args() port = 4444 syslog.openlog("soundbridge-write") tn = telnetlib.Telnet(options.hostname, port) tn.read_until("SoundBridge> ") tn.write("sketch\n") tn.read_until("sketch> ") tn.write("font 10\n") tn.read_until("sketch> ") syslog.syslog("Writing '" + options.text + "' on the soundbridge at hostname '" + options.hostname + "'") tn.write("text 0 0 \"" + options.text + "\"\n") while 1 == 1: pass tn.close()
<commit_before><commit_msg>Add script to write text to a Roku Soundbridge.<commit_after>#!/usr/bin/python # # $Id$ """ Write some text on a Roku soundbridge, using the telnet interface. """ import telnetlib import syslog from optparse import OptionParser parser = OptionParser() parser.add_option("-H", "--hostname", dest="hostname", default="soundbridge") parser.add_option("-t", "--text", dest="text", default="use the --text option to set the text to display") (options, args) = parser.parse_args() port = 4444 syslog.openlog("soundbridge-write") tn = telnetlib.Telnet(options.hostname, port) tn.read_until("SoundBridge> ") tn.write("sketch\n") tn.read_until("sketch> ") tn.write("font 10\n") tn.read_until("sketch> ") syslog.syslog("Writing '" + options.text + "' on the soundbridge at hostname '" + options.hostname + "'") tn.write("text 0 0 \"" + options.text + "\"\n") while 1 == 1: pass tn.close()
d3d1e9eb32af7a38f99fc602e2e9e897460dfbec
fellowms/migrations/0022_fellow_user.py
fellowms/migrations/0022_fellow_user.py
# -*- coding: utf-8 -*- # Generated by Django 1.9.6 on 2016-06-02 16:22 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('fellowms', '0021_blog_status'), ] operations = [ migrations.AddField( model_name='fellow', name='user', field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ]
Create migration for user field on fellow
Create migration for user field on fellow
Python
bsd-3-clause
softwaresaved/fat,softwaresaved/fat,softwaresaved/fat,softwaresaved/fat
Create migration for user field on fellow
# -*- coding: utf-8 -*- # Generated by Django 1.9.6 on 2016-06-02 16:22 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('fellowms', '0021_blog_status'), ] operations = [ migrations.AddField( model_name='fellow', name='user', field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ]
<commit_before><commit_msg>Create migration for user field on fellow<commit_after>
# -*- coding: utf-8 -*- # Generated by Django 1.9.6 on 2016-06-02 16:22 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('fellowms', '0021_blog_status'), ] operations = [ migrations.AddField( model_name='fellow', name='user', field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ]
Create migration for user field on fellow# -*- coding: utf-8 -*- # Generated by Django 1.9.6 on 2016-06-02 16:22 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('fellowms', '0021_blog_status'), ] operations = [ migrations.AddField( model_name='fellow', name='user', field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ]
<commit_before><commit_msg>Create migration for user field on fellow<commit_after># -*- coding: utf-8 -*- # Generated by Django 1.9.6 on 2016-06-02 16:22 from __future__ import unicode_literals from django.conf import settings from django.db import migrations, models import django.db.models.deletion class Migration(migrations.Migration): dependencies = [ migrations.swappable_dependency(settings.AUTH_USER_MODEL), ('fellowms', '0021_blog_status'), ] operations = [ migrations.AddField( model_name='fellow', name='user', field=models.OneToOneField(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL), ), ]
35f2cea484e2080e0b64fbcb995ef1adbc2d65dd
read_serial.py
read_serial.py
__author__ = 'miahi' ## Logger for serial APC Smart UPS import serial import csv import time import datetime PORT = 'COM2' BAUDRATE = 2400 SLEEP_SECONDS = 3 class APCSerial(object): def __init__(self, port, baudrate=2400): # todo: check that port exists & init errors self.serial = serial.Serial(port, baudrate, timeout=1) self.serial.write('Y') mode = self.serial.readline() # todo: test init in Smart mode (UPS returns 'SM') def read_power(self): return self._read_number('P') def read_batt_voltage(self): return self._read_number('B') def read_temperature(self): return self._read_number('C') def read_frequency(self): return self._read_number('F') def read_line_voltage(self): return self._read_number('L') def read_max_line_voltage(self): return self._read_number('M') def read_min_line_voltage(self): return self._read_number('N') def read_output_voltage(self): return self._read_number('O') def read_batt_level(self): return self._read_number('f') def read_runtime(self): self.serial.write('j') response = self.serial.readline() return int(float(response.rstrip().rstrip(':'))) def _read_number(self, command): self.serial.write(command) response = self.serial.readline() return float(response.rstrip()) def main(): apcserial = APCSerial(PORT, BAUDRATE) filename = 'apc_log_' + time.strftime("%Y-%m-%d_%H%M%S", time.gmtime()) + '.csv' with open(filename, 'a+b', buffering=1) as csvfile: outwriter = csv.writer(csvfile, delimiter=',') outwriter.writerow(['Time', 'Power[%]', 'BattLevel[%]', 'BattVoltage[V]', 'LineVoltage[V]', 'MaxLineVoltage[V]', 'MinLineVoltage[V]', 'OutputVoltage[V]', 'Frequency[Hz]', 'EstimatedRuntime[min]', 'Temperature[C]']) while True: outwriter.writerow([datetime.datetime.now(), apcserial.read_power(), apcserial.read_batt_level(), apcserial.read_batt_voltage(), apcserial.read_line_voltage(), apcserial.read_max_line_voltage(), apcserial.read_min_line_voltage(), apcserial.read_output_voltage(), apcserial.read_frequency(), apcserial.read_runtime(), apcserial.read_temperature()]) csvfile.flush() time.sleep(SLEEP_SECONDS) if __name__ == '__main__': main()
Read from serial, write to CSV
Read from serial, write to CSV
Python
mit
miahi/python.apcserial
Read from serial, write to CSV
__author__ = 'miahi' ## Logger for serial APC Smart UPS import serial import csv import time import datetime PORT = 'COM2' BAUDRATE = 2400 SLEEP_SECONDS = 3 class APCSerial(object): def __init__(self, port, baudrate=2400): # todo: check that port exists & init errors self.serial = serial.Serial(port, baudrate, timeout=1) self.serial.write('Y') mode = self.serial.readline() # todo: test init in Smart mode (UPS returns 'SM') def read_power(self): return self._read_number('P') def read_batt_voltage(self): return self._read_number('B') def read_temperature(self): return self._read_number('C') def read_frequency(self): return self._read_number('F') def read_line_voltage(self): return self._read_number('L') def read_max_line_voltage(self): return self._read_number('M') def read_min_line_voltage(self): return self._read_number('N') def read_output_voltage(self): return self._read_number('O') def read_batt_level(self): return self._read_number('f') def read_runtime(self): self.serial.write('j') response = self.serial.readline() return int(float(response.rstrip().rstrip(':'))) def _read_number(self, command): self.serial.write(command) response = self.serial.readline() return float(response.rstrip()) def main(): apcserial = APCSerial(PORT, BAUDRATE) filename = 'apc_log_' + time.strftime("%Y-%m-%d_%H%M%S", time.gmtime()) + '.csv' with open(filename, 'a+b', buffering=1) as csvfile: outwriter = csv.writer(csvfile, delimiter=',') outwriter.writerow(['Time', 'Power[%]', 'BattLevel[%]', 'BattVoltage[V]', 'LineVoltage[V]', 'MaxLineVoltage[V]', 'MinLineVoltage[V]', 'OutputVoltage[V]', 'Frequency[Hz]', 'EstimatedRuntime[min]', 'Temperature[C]']) while True: outwriter.writerow([datetime.datetime.now(), apcserial.read_power(), apcserial.read_batt_level(), apcserial.read_batt_voltage(), apcserial.read_line_voltage(), apcserial.read_max_line_voltage(), apcserial.read_min_line_voltage(), apcserial.read_output_voltage(), apcserial.read_frequency(), apcserial.read_runtime(), apcserial.read_temperature()]) csvfile.flush() time.sleep(SLEEP_SECONDS) if __name__ == '__main__': main()
<commit_before><commit_msg>Read from serial, write to CSV<commit_after>
__author__ = 'miahi' ## Logger for serial APC Smart UPS import serial import csv import time import datetime PORT = 'COM2' BAUDRATE = 2400 SLEEP_SECONDS = 3 class APCSerial(object): def __init__(self, port, baudrate=2400): # todo: check that port exists & init errors self.serial = serial.Serial(port, baudrate, timeout=1) self.serial.write('Y') mode = self.serial.readline() # todo: test init in Smart mode (UPS returns 'SM') def read_power(self): return self._read_number('P') def read_batt_voltage(self): return self._read_number('B') def read_temperature(self): return self._read_number('C') def read_frequency(self): return self._read_number('F') def read_line_voltage(self): return self._read_number('L') def read_max_line_voltage(self): return self._read_number('M') def read_min_line_voltage(self): return self._read_number('N') def read_output_voltage(self): return self._read_number('O') def read_batt_level(self): return self._read_number('f') def read_runtime(self): self.serial.write('j') response = self.serial.readline() return int(float(response.rstrip().rstrip(':'))) def _read_number(self, command): self.serial.write(command) response = self.serial.readline() return float(response.rstrip()) def main(): apcserial = APCSerial(PORT, BAUDRATE) filename = 'apc_log_' + time.strftime("%Y-%m-%d_%H%M%S", time.gmtime()) + '.csv' with open(filename, 'a+b', buffering=1) as csvfile: outwriter = csv.writer(csvfile, delimiter=',') outwriter.writerow(['Time', 'Power[%]', 'BattLevel[%]', 'BattVoltage[V]', 'LineVoltage[V]', 'MaxLineVoltage[V]', 'MinLineVoltage[V]', 'OutputVoltage[V]', 'Frequency[Hz]', 'EstimatedRuntime[min]', 'Temperature[C]']) while True: outwriter.writerow([datetime.datetime.now(), apcserial.read_power(), apcserial.read_batt_level(), apcserial.read_batt_voltage(), apcserial.read_line_voltage(), apcserial.read_max_line_voltage(), apcserial.read_min_line_voltage(), apcserial.read_output_voltage(), apcserial.read_frequency(), apcserial.read_runtime(), apcserial.read_temperature()]) csvfile.flush() time.sleep(SLEEP_SECONDS) if __name__ == '__main__': main()
Read from serial, write to CSV__author__ = 'miahi' ## Logger for serial APC Smart UPS import serial import csv import time import datetime PORT = 'COM2' BAUDRATE = 2400 SLEEP_SECONDS = 3 class APCSerial(object): def __init__(self, port, baudrate=2400): # todo: check that port exists & init errors self.serial = serial.Serial(port, baudrate, timeout=1) self.serial.write('Y') mode = self.serial.readline() # todo: test init in Smart mode (UPS returns 'SM') def read_power(self): return self._read_number('P') def read_batt_voltage(self): return self._read_number('B') def read_temperature(self): return self._read_number('C') def read_frequency(self): return self._read_number('F') def read_line_voltage(self): return self._read_number('L') def read_max_line_voltage(self): return self._read_number('M') def read_min_line_voltage(self): return self._read_number('N') def read_output_voltage(self): return self._read_number('O') def read_batt_level(self): return self._read_number('f') def read_runtime(self): self.serial.write('j') response = self.serial.readline() return int(float(response.rstrip().rstrip(':'))) def _read_number(self, command): self.serial.write(command) response = self.serial.readline() return float(response.rstrip()) def main(): apcserial = APCSerial(PORT, BAUDRATE) filename = 'apc_log_' + time.strftime("%Y-%m-%d_%H%M%S", time.gmtime()) + '.csv' with open(filename, 'a+b', buffering=1) as csvfile: outwriter = csv.writer(csvfile, delimiter=',') outwriter.writerow(['Time', 'Power[%]', 'BattLevel[%]', 'BattVoltage[V]', 'LineVoltage[V]', 'MaxLineVoltage[V]', 'MinLineVoltage[V]', 'OutputVoltage[V]', 'Frequency[Hz]', 'EstimatedRuntime[min]', 'Temperature[C]']) while True: outwriter.writerow([datetime.datetime.now(), apcserial.read_power(), apcserial.read_batt_level(), apcserial.read_batt_voltage(), apcserial.read_line_voltage(), apcserial.read_max_line_voltage(), apcserial.read_min_line_voltage(), apcserial.read_output_voltage(), apcserial.read_frequency(), apcserial.read_runtime(), apcserial.read_temperature()]) csvfile.flush() time.sleep(SLEEP_SECONDS) if __name__ == '__main__': main()
<commit_before><commit_msg>Read from serial, write to CSV<commit_after>__author__ = 'miahi' ## Logger for serial APC Smart UPS import serial import csv import time import datetime PORT = 'COM2' BAUDRATE = 2400 SLEEP_SECONDS = 3 class APCSerial(object): def __init__(self, port, baudrate=2400): # todo: check that port exists & init errors self.serial = serial.Serial(port, baudrate, timeout=1) self.serial.write('Y') mode = self.serial.readline() # todo: test init in Smart mode (UPS returns 'SM') def read_power(self): return self._read_number('P') def read_batt_voltage(self): return self._read_number('B') def read_temperature(self): return self._read_number('C') def read_frequency(self): return self._read_number('F') def read_line_voltage(self): return self._read_number('L') def read_max_line_voltage(self): return self._read_number('M') def read_min_line_voltage(self): return self._read_number('N') def read_output_voltage(self): return self._read_number('O') def read_batt_level(self): return self._read_number('f') def read_runtime(self): self.serial.write('j') response = self.serial.readline() return int(float(response.rstrip().rstrip(':'))) def _read_number(self, command): self.serial.write(command) response = self.serial.readline() return float(response.rstrip()) def main(): apcserial = APCSerial(PORT, BAUDRATE) filename = 'apc_log_' + time.strftime("%Y-%m-%d_%H%M%S", time.gmtime()) + '.csv' with open(filename, 'a+b', buffering=1) as csvfile: outwriter = csv.writer(csvfile, delimiter=',') outwriter.writerow(['Time', 'Power[%]', 'BattLevel[%]', 'BattVoltage[V]', 'LineVoltage[V]', 'MaxLineVoltage[V]', 'MinLineVoltage[V]', 'OutputVoltage[V]', 'Frequency[Hz]', 'EstimatedRuntime[min]', 'Temperature[C]']) while True: outwriter.writerow([datetime.datetime.now(), apcserial.read_power(), apcserial.read_batt_level(), apcserial.read_batt_voltage(), apcserial.read_line_voltage(), apcserial.read_max_line_voltage(), apcserial.read_min_line_voltage(), apcserial.read_output_voltage(), apcserial.read_frequency(), apcserial.read_runtime(), apcserial.read_temperature()]) csvfile.flush() time.sleep(SLEEP_SECONDS) if __name__ == '__main__': main()
7b35909083325ba6a22119b0f7a1cd352503cd93
examples/concept_full_syntax.py
examples/concept_full_syntax.py
"""Concept example of `Dependency Injector`.""" import sqlite3 from dependency_injector import catalogs from dependency_injector import providers from dependency_injector import injections class UsersService(object): """Users service, that has dependency on database.""" def __init__(self, db): """Initializer.""" self.db = db class AuthService(object): """Auth service, that has dependencies on users service and database.""" def __init__(self, db, users_service): """Initializer.""" self.db = db self.users_service = users_service class Services(catalogs.DeclarativeCatalog): """Catalog of service providers.""" database = providers.Singleton(sqlite3.connect, injections.Arg(':memory:')) """:type: providers.Provider -> sqlite3.Connection""" users = providers.Factory(UsersService, injections.KwArg('db', database)) """:type: providers.Provider -> UsersService""" auth = providers.Factory(AuthService, injections.KwArg('db', database), injections.KwArg('users_service', users)) """:type: providers.Provider -> AuthService""" # Retrieving catalog providers: users_service = Services.users() auth_service = Services.auth() # Making some asserts: assert users_service.db is auth_service.db is Services.database() assert isinstance(auth_service.users_service, UsersService) assert users_service is not Services.users() assert auth_service is not Services.auth() # Making some "inline" injections: @injections.inject(users_service=Services.users) @injections.inject(auth_service=Services.auth) @injections.inject(database=Services.database) def example(users_service, auth_service, database): """Example callback.""" assert users_service.db is auth_service.db assert auth_service.db is database assert database is Services.database() # Making a call of decorated callback: example()
Add new one concept example that uses full syntax
Add new one concept example that uses full syntax
Python
bsd-3-clause
ets-labs/python-dependency-injector,rmk135/objects,ets-labs/dependency_injector,rmk135/dependency_injector
Add new one concept example that uses full syntax
"""Concept example of `Dependency Injector`.""" import sqlite3 from dependency_injector import catalogs from dependency_injector import providers from dependency_injector import injections class UsersService(object): """Users service, that has dependency on database.""" def __init__(self, db): """Initializer.""" self.db = db class AuthService(object): """Auth service, that has dependencies on users service and database.""" def __init__(self, db, users_service): """Initializer.""" self.db = db self.users_service = users_service class Services(catalogs.DeclarativeCatalog): """Catalog of service providers.""" database = providers.Singleton(sqlite3.connect, injections.Arg(':memory:')) """:type: providers.Provider -> sqlite3.Connection""" users = providers.Factory(UsersService, injections.KwArg('db', database)) """:type: providers.Provider -> UsersService""" auth = providers.Factory(AuthService, injections.KwArg('db', database), injections.KwArg('users_service', users)) """:type: providers.Provider -> AuthService""" # Retrieving catalog providers: users_service = Services.users() auth_service = Services.auth() # Making some asserts: assert users_service.db is auth_service.db is Services.database() assert isinstance(auth_service.users_service, UsersService) assert users_service is not Services.users() assert auth_service is not Services.auth() # Making some "inline" injections: @injections.inject(users_service=Services.users) @injections.inject(auth_service=Services.auth) @injections.inject(database=Services.database) def example(users_service, auth_service, database): """Example callback.""" assert users_service.db is auth_service.db assert auth_service.db is database assert database is Services.database() # Making a call of decorated callback: example()
<commit_before><commit_msg>Add new one concept example that uses full syntax<commit_after>
"""Concept example of `Dependency Injector`.""" import sqlite3 from dependency_injector import catalogs from dependency_injector import providers from dependency_injector import injections class UsersService(object): """Users service, that has dependency on database.""" def __init__(self, db): """Initializer.""" self.db = db class AuthService(object): """Auth service, that has dependencies on users service and database.""" def __init__(self, db, users_service): """Initializer.""" self.db = db self.users_service = users_service class Services(catalogs.DeclarativeCatalog): """Catalog of service providers.""" database = providers.Singleton(sqlite3.connect, injections.Arg(':memory:')) """:type: providers.Provider -> sqlite3.Connection""" users = providers.Factory(UsersService, injections.KwArg('db', database)) """:type: providers.Provider -> UsersService""" auth = providers.Factory(AuthService, injections.KwArg('db', database), injections.KwArg('users_service', users)) """:type: providers.Provider -> AuthService""" # Retrieving catalog providers: users_service = Services.users() auth_service = Services.auth() # Making some asserts: assert users_service.db is auth_service.db is Services.database() assert isinstance(auth_service.users_service, UsersService) assert users_service is not Services.users() assert auth_service is not Services.auth() # Making some "inline" injections: @injections.inject(users_service=Services.users) @injections.inject(auth_service=Services.auth) @injections.inject(database=Services.database) def example(users_service, auth_service, database): """Example callback.""" assert users_service.db is auth_service.db assert auth_service.db is database assert database is Services.database() # Making a call of decorated callback: example()
Add new one concept example that uses full syntax"""Concept example of `Dependency Injector`.""" import sqlite3 from dependency_injector import catalogs from dependency_injector import providers from dependency_injector import injections class UsersService(object): """Users service, that has dependency on database.""" def __init__(self, db): """Initializer.""" self.db = db class AuthService(object): """Auth service, that has dependencies on users service and database.""" def __init__(self, db, users_service): """Initializer.""" self.db = db self.users_service = users_service class Services(catalogs.DeclarativeCatalog): """Catalog of service providers.""" database = providers.Singleton(sqlite3.connect, injections.Arg(':memory:')) """:type: providers.Provider -> sqlite3.Connection""" users = providers.Factory(UsersService, injections.KwArg('db', database)) """:type: providers.Provider -> UsersService""" auth = providers.Factory(AuthService, injections.KwArg('db', database), injections.KwArg('users_service', users)) """:type: providers.Provider -> AuthService""" # Retrieving catalog providers: users_service = Services.users() auth_service = Services.auth() # Making some asserts: assert users_service.db is auth_service.db is Services.database() assert isinstance(auth_service.users_service, UsersService) assert users_service is not Services.users() assert auth_service is not Services.auth() # Making some "inline" injections: @injections.inject(users_service=Services.users) @injections.inject(auth_service=Services.auth) @injections.inject(database=Services.database) def example(users_service, auth_service, database): """Example callback.""" assert users_service.db is auth_service.db assert auth_service.db is database assert database is Services.database() # Making a call of decorated callback: example()
<commit_before><commit_msg>Add new one concept example that uses full syntax<commit_after>"""Concept example of `Dependency Injector`.""" import sqlite3 from dependency_injector import catalogs from dependency_injector import providers from dependency_injector import injections class UsersService(object): """Users service, that has dependency on database.""" def __init__(self, db): """Initializer.""" self.db = db class AuthService(object): """Auth service, that has dependencies on users service and database.""" def __init__(self, db, users_service): """Initializer.""" self.db = db self.users_service = users_service class Services(catalogs.DeclarativeCatalog): """Catalog of service providers.""" database = providers.Singleton(sqlite3.connect, injections.Arg(':memory:')) """:type: providers.Provider -> sqlite3.Connection""" users = providers.Factory(UsersService, injections.KwArg('db', database)) """:type: providers.Provider -> UsersService""" auth = providers.Factory(AuthService, injections.KwArg('db', database), injections.KwArg('users_service', users)) """:type: providers.Provider -> AuthService""" # Retrieving catalog providers: users_service = Services.users() auth_service = Services.auth() # Making some asserts: assert users_service.db is auth_service.db is Services.database() assert isinstance(auth_service.users_service, UsersService) assert users_service is not Services.users() assert auth_service is not Services.auth() # Making some "inline" injections: @injections.inject(users_service=Services.users) @injections.inject(auth_service=Services.auth) @injections.inject(database=Services.database) def example(users_service, auth_service, database): """Example callback.""" assert users_service.db is auth_service.db assert auth_service.db is database assert database is Services.database() # Making a call of decorated callback: example()
8905993c0daa140b10cb04dca1e7bed7b813ea7a
imagedownloader/libs/console.py
imagedownloader/libs/console.py
import sys import pyttsx import aspects from datetime import datetime engine = pyttsx.init() def show(*objs): begin = '' if '\r' in objs[0] or '\b' in objs[0] else '\n' sys.stdout.write(begin) for part in objs: sys.stdout.write(str(part)) sys.stdout.flush() def say(speech): #NOT engine.startLoop() show(speech) engine.say(speech) engine.runAndWait() progress = ['/','-','\\','|'] def show_progress(i): show('\b \b', progress[i % len(progress)]) def show_times(*args): begin = datetime.now() result = yield aspects.proceed(*args) end = datetime.now() say("\t[time consumed: %.2f seconds]\n" % (end - begin).total_seconds()) yield aspects.return_stop(result)
import sys import pyttsx import aspects from datetime import datetime engine = pyttsx.init() def show(*objs): begin = '' if '\r' in objs[0] or '\b' in objs[0] else '\n' sys.stdout.write(begin) for part in objs: sys.stdout.write(str(part)) sys.stdout.flush() def say(speech): #NOT engine.startLoop() show(speech) engine.say(speech) engine.runAndWait() progress = ['/','-','\\','|'] def show_progress(i): show('\b \b', progress[i % len(progress)]) def show_times(*args): begin = datetime.utcnow().replace(tzinfo=pytz.UTC) result = yield aspects.proceed(*args) end = datetime.utcnow().replace(tzinfo=pytz.UTC) say("\t[time consumed: %.2f seconds]\n" % (end - begin).total_seconds()) yield aspects.return_stop(result)
Add UTC timezone to datetimes in the libs folder.
Add UTC timezone to datetimes in the libs folder.
Python
mit
ahMarrone/solar_radiation_model,scottlittle/solar_radiation_model,gersolar/solar_radiation_model
import sys import pyttsx import aspects from datetime import datetime engine = pyttsx.init() def show(*objs): begin = '' if '\r' in objs[0] or '\b' in objs[0] else '\n' sys.stdout.write(begin) for part in objs: sys.stdout.write(str(part)) sys.stdout.flush() def say(speech): #NOT engine.startLoop() show(speech) engine.say(speech) engine.runAndWait() progress = ['/','-','\\','|'] def show_progress(i): show('\b \b', progress[i % len(progress)]) def show_times(*args): begin = datetime.now() result = yield aspects.proceed(*args) end = datetime.now() say("\t[time consumed: %.2f seconds]\n" % (end - begin).total_seconds()) yield aspects.return_stop(result) Add UTC timezone to datetimes in the libs folder.
import sys
import pyttsx
import aspects
import pytz
from datetime import datetime

engine = pyttsx.init()

def show(*objs):
    begin = '' if '\r' in objs[0] or '\b' in objs[0] else '\n'
    sys.stdout.write(begin)
    for part in objs:
        sys.stdout.write(str(part))
    sys.stdout.flush()

def say(speech):
    #NOT engine.startLoop()
    show(speech)
    engine.say(speech)
    engine.runAndWait()

progress = ['/','-','\\','|']
def show_progress(i):
    show('\b \b', progress[i % len(progress)])

def show_times(*args):
    begin = datetime.utcnow().replace(tzinfo=pytz.UTC)
    result = yield aspects.proceed(*args)
    end = datetime.utcnow().replace(tzinfo=pytz.UTC)
    say("\t[time consumed: %.2f seconds]\n" % (end - begin).total_seconds())
    yield aspects.return_stop(result)
<commit_before>import sys import pyttsx import aspects from datetime import datetime engine = pyttsx.init() def show(*objs): begin = '' if '\r' in objs[0] or '\b' in objs[0] else '\n' sys.stdout.write(begin) for part in objs: sys.stdout.write(str(part)) sys.stdout.flush() def say(speech): #NOT engine.startLoop() show(speech) engine.say(speech) engine.runAndWait() progress = ['/','-','\\','|'] def show_progress(i): show('\b \b', progress[i % len(progress)]) def show_times(*args): begin = datetime.now() result = yield aspects.proceed(*args) end = datetime.now() say("\t[time consumed: %.2f seconds]\n" % (end - begin).total_seconds()) yield aspects.return_stop(result) <commit_msg>Add UTC timezone to datetimes in the libs folder.<commit_after>
import sys
import pyttsx
import aspects
import pytz
from datetime import datetime

engine = pyttsx.init()

def show(*objs):
    begin = '' if '\r' in objs[0] or '\b' in objs[0] else '\n'
    sys.stdout.write(begin)
    for part in objs:
        sys.stdout.write(str(part))
    sys.stdout.flush()

def say(speech):
    #NOT engine.startLoop()
    show(speech)
    engine.say(speech)
    engine.runAndWait()

progress = ['/','-','\\','|']
def show_progress(i):
    show('\b \b', progress[i % len(progress)])

def show_times(*args):
    begin = datetime.utcnow().replace(tzinfo=pytz.UTC)
    result = yield aspects.proceed(*args)
    end = datetime.utcnow().replace(tzinfo=pytz.UTC)
    say("\t[time consumed: %.2f seconds]\n" % (end - begin).total_seconds())
    yield aspects.return_stop(result)
import sys
import pyttsx
import aspects
from datetime import datetime

engine = pyttsx.init()

def show(*objs):
    begin = '' if '\r' in objs[0] or '\b' in objs[0] else '\n'
    sys.stdout.write(begin)
    for part in objs:
        sys.stdout.write(str(part))
    sys.stdout.flush()

def say(speech):
    #NOT engine.startLoop()
    show(speech)
    engine.say(speech)
    engine.runAndWait()

progress = ['/','-','\\','|']
def show_progress(i):
    show('\b \b', progress[i % len(progress)])

def show_times(*args):
    begin = datetime.now()
    result = yield aspects.proceed(*args)
    end = datetime.now()
    say("\t[time consumed: %.2f seconds]\n" % (end - begin).total_seconds())
    yield aspects.return_stop(result)
Add UTC timezone to datetimes in the libs folder.import sys
import pyttsx
import aspects
import pytz
from datetime import datetime

engine = pyttsx.init()

def show(*objs):
    begin = '' if '\r' in objs[0] or '\b' in objs[0] else '\n'
    sys.stdout.write(begin)
    for part in objs:
        sys.stdout.write(str(part))
    sys.stdout.flush()

def say(speech):
    #NOT engine.startLoop()
    show(speech)
    engine.say(speech)
    engine.runAndWait()

progress = ['/','-','\\','|']
def show_progress(i):
    show('\b \b', progress[i % len(progress)])

def show_times(*args):
    begin = datetime.utcnow().replace(tzinfo=pytz.UTC)
    result = yield aspects.proceed(*args)
    end = datetime.utcnow().replace(tzinfo=pytz.UTC)
    say("\t[time consumed: %.2f seconds]\n" % (end - begin).total_seconds())
    yield aspects.return_stop(result)
<commit_before>import sys
import pyttsx
import aspects
from datetime import datetime

engine = pyttsx.init()

def show(*objs):
    begin = '' if '\r' in objs[0] or '\b' in objs[0] else '\n'
    sys.stdout.write(begin)
    for part in objs:
        sys.stdout.write(str(part))
    sys.stdout.flush()

def say(speech):
    #NOT engine.startLoop()
    show(speech)
    engine.say(speech)
    engine.runAndWait()

progress = ['/','-','\\','|']
def show_progress(i):
    show('\b \b', progress[i % len(progress)])

def show_times(*args):
    begin = datetime.now()
    result = yield aspects.proceed(*args)
    end = datetime.now()
    say("\t[time consumed: %.2f seconds]\n" % (end - begin).total_seconds())
    yield aspects.return_stop(result)
<commit_msg>Add UTC timezone to datetimes in the libs folder.<commit_after>import sys
import pyttsx
import aspects
import pytz
from datetime import datetime

engine = pyttsx.init()

def show(*objs):
    begin = '' if '\r' in objs[0] or '\b' in objs[0] else '\n'
    sys.stdout.write(begin)
    for part in objs:
        sys.stdout.write(str(part))
    sys.stdout.flush()

def say(speech):
    #NOT engine.startLoop()
    show(speech)
    engine.say(speech)
    engine.runAndWait()

progress = ['/','-','\\','|']
def show_progress(i):
    show('\b \b', progress[i % len(progress)])

def show_times(*args):
    begin = datetime.utcnow().replace(tzinfo=pytz.UTC)
    result = yield aspects.proceed(*args)
    end = datetime.utcnow().replace(tzinfo=pytz.UTC)
    say("\t[time consumed: %.2f seconds]\n" % (end - begin).total_seconds())
    yield aspects.return_stop(result)
3f0ef7d273eaecad8f5b278e762a056bf17ffdb8
test/tools/lldb-mi/TestMiSyntax.py
test/tools/lldb-mi/TestMiSyntax.py
""" Test that the lldb-mi driver understands MI command syntax. """ import os import unittest2 import lldb from lldbtest import * class MiSyntaxTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) myexe = "a.out" @classmethod def classCleanup(cls): """Cleanup the test byproducts.""" try: os.remove("child_send.txt") os.remove("child_read.txt") os.remove(cls.myexe) except: pass @lldbmi_test def test_lldbmi_tokens(self): """Test that 'lldb-mi --interpreter' echos command tokens.""" import pexpect self.buildDefault() # The default lldb-mi prompt (seriously?!). prompt = "(gdb)" # So that the child gets torn down after the test. self.child = pexpect.spawn('%s --interpreter' % (self.lldbMiExec)) child = self.child child.setecho(True) # Turn on logging for input/output to/from the child. with open('child_send.txt', 'w') as f_send: with open('child_read.txt', 'w') as f_read: child.logfile_send = f_send child.logfile_read = f_read child.sendline("000-file-exec-and-symbols " + self.myexe) child.expect("000\^done") child.sendline("100000001-break-insert -f a_MyFunction") child.expect("100000001\^done,bkpt={number=\"1\"") child.sendline("2-exec-run") child.sendline("") # FIXME: lldb-mi hangs here, so extra return is needed child.expect("2\^running") child.expect("\*stopped,reason=\"breakpoint-hit\"") child.sendline("0000000000000000000003-exec-continue") child.expect("0000000000000000000003\^running") child.expect("\*stopped,reason=\"exited-normally\"") child.expect_exact(prompt) child.sendline("quit") # Now that the necessary logging is done, restore logfile to None to # stop further logging. child.logfile_send = None child.logfile_read = None with open('child_send.txt', 'r') as fs: if self.TraceOn(): print "\n\nContents of child_send.txt:" print fs.read() with open('child_read.txt', 'r') as fr: from_child = fr.read() if self.TraceOn(): print "\n\nContents of child_read.txt:" print from_child self.expect(from_child, exe=False, substrs = ["breakpoint-hit"]) if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
Add test for MI tokens. This file tests the sequence of digits that can come before an MI command.
Add test for MI tokens. This file tests the sequence of digits that can come before an MI command. Patch from dawn@burble.org. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@222873 91177308-0d34-0410-b5e6-96231b3b80d8
Python
apache-2.0
llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,llvm-mirror/lldb,apple/swift-lldb
Add test for MI tokens. This file tests the sequence of digits that can come before an MI command. Patch from dawn@burble.org. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@222873 91177308-0d34-0410-b5e6-96231b3b80d8
""" Test that the lldb-mi driver understands MI command syntax. """ import os import unittest2 import lldb from lldbtest import * class MiSyntaxTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) myexe = "a.out" @classmethod def classCleanup(cls): """Cleanup the test byproducts.""" try: os.remove("child_send.txt") os.remove("child_read.txt") os.remove(cls.myexe) except: pass @lldbmi_test def test_lldbmi_tokens(self): """Test that 'lldb-mi --interpreter' echos command tokens.""" import pexpect self.buildDefault() # The default lldb-mi prompt (seriously?!). prompt = "(gdb)" # So that the child gets torn down after the test. self.child = pexpect.spawn('%s --interpreter' % (self.lldbMiExec)) child = self.child child.setecho(True) # Turn on logging for input/output to/from the child. with open('child_send.txt', 'w') as f_send: with open('child_read.txt', 'w') as f_read: child.logfile_send = f_send child.logfile_read = f_read child.sendline("000-file-exec-and-symbols " + self.myexe) child.expect("000\^done") child.sendline("100000001-break-insert -f a_MyFunction") child.expect("100000001\^done,bkpt={number=\"1\"") child.sendline("2-exec-run") child.sendline("") # FIXME: lldb-mi hangs here, so extra return is needed child.expect("2\^running") child.expect("\*stopped,reason=\"breakpoint-hit\"") child.sendline("0000000000000000000003-exec-continue") child.expect("0000000000000000000003\^running") child.expect("\*stopped,reason=\"exited-normally\"") child.expect_exact(prompt) child.sendline("quit") # Now that the necessary logging is done, restore logfile to None to # stop further logging. child.logfile_send = None child.logfile_read = None with open('child_send.txt', 'r') as fs: if self.TraceOn(): print "\n\nContents of child_send.txt:" print fs.read() with open('child_read.txt', 'r') as fr: from_child = fr.read() if self.TraceOn(): print "\n\nContents of child_read.txt:" print from_child self.expect(from_child, exe=False, substrs = ["breakpoint-hit"]) if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
<commit_before><commit_msg>Add test for MI tokens. This file tests the sequence of digits that can come before an MI command. Patch from dawn@burble.org. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@222873 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
""" Test that the lldb-mi driver understands MI command syntax. """ import os import unittest2 import lldb from lldbtest import * class MiSyntaxTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) myexe = "a.out" @classmethod def classCleanup(cls): """Cleanup the test byproducts.""" try: os.remove("child_send.txt") os.remove("child_read.txt") os.remove(cls.myexe) except: pass @lldbmi_test def test_lldbmi_tokens(self): """Test that 'lldb-mi --interpreter' echos command tokens.""" import pexpect self.buildDefault() # The default lldb-mi prompt (seriously?!). prompt = "(gdb)" # So that the child gets torn down after the test. self.child = pexpect.spawn('%s --interpreter' % (self.lldbMiExec)) child = self.child child.setecho(True) # Turn on logging for input/output to/from the child. with open('child_send.txt', 'w') as f_send: with open('child_read.txt', 'w') as f_read: child.logfile_send = f_send child.logfile_read = f_read child.sendline("000-file-exec-and-symbols " + self.myexe) child.expect("000\^done") child.sendline("100000001-break-insert -f a_MyFunction") child.expect("100000001\^done,bkpt={number=\"1\"") child.sendline("2-exec-run") child.sendline("") # FIXME: lldb-mi hangs here, so extra return is needed child.expect("2\^running") child.expect("\*stopped,reason=\"breakpoint-hit\"") child.sendline("0000000000000000000003-exec-continue") child.expect("0000000000000000000003\^running") child.expect("\*stopped,reason=\"exited-normally\"") child.expect_exact(prompt) child.sendline("quit") # Now that the necessary logging is done, restore logfile to None to # stop further logging. child.logfile_send = None child.logfile_read = None with open('child_send.txt', 'r') as fs: if self.TraceOn(): print "\n\nContents of child_send.txt:" print fs.read() with open('child_read.txt', 'r') as fr: from_child = fr.read() if self.TraceOn(): print "\n\nContents of child_read.txt:" print from_child self.expect(from_child, exe=False, substrs = ["breakpoint-hit"]) if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
Add test for MI tokens. This file tests the sequence of digits that can come before an MI command. Patch from dawn@burble.org. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@222873 91177308-0d34-0410-b5e6-96231b3b80d8""" Test that the lldb-mi driver understands MI command syntax. """ import os import unittest2 import lldb from lldbtest import * class MiSyntaxTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) myexe = "a.out" @classmethod def classCleanup(cls): """Cleanup the test byproducts.""" try: os.remove("child_send.txt") os.remove("child_read.txt") os.remove(cls.myexe) except: pass @lldbmi_test def test_lldbmi_tokens(self): """Test that 'lldb-mi --interpreter' echos command tokens.""" import pexpect self.buildDefault() # The default lldb-mi prompt (seriously?!). prompt = "(gdb)" # So that the child gets torn down after the test. self.child = pexpect.spawn('%s --interpreter' % (self.lldbMiExec)) child = self.child child.setecho(True) # Turn on logging for input/output to/from the child. with open('child_send.txt', 'w') as f_send: with open('child_read.txt', 'w') as f_read: child.logfile_send = f_send child.logfile_read = f_read child.sendline("000-file-exec-and-symbols " + self.myexe) child.expect("000\^done") child.sendline("100000001-break-insert -f a_MyFunction") child.expect("100000001\^done,bkpt={number=\"1\"") child.sendline("2-exec-run") child.sendline("") # FIXME: lldb-mi hangs here, so extra return is needed child.expect("2\^running") child.expect("\*stopped,reason=\"breakpoint-hit\"") child.sendline("0000000000000000000003-exec-continue") child.expect("0000000000000000000003\^running") child.expect("\*stopped,reason=\"exited-normally\"") child.expect_exact(prompt) child.sendline("quit") # Now that the necessary logging is done, restore logfile to None to # stop further logging. child.logfile_send = None child.logfile_read = None with open('child_send.txt', 'r') as fs: if self.TraceOn(): print "\n\nContents of child_send.txt:" print fs.read() with open('child_read.txt', 'r') as fr: from_child = fr.read() if self.TraceOn(): print "\n\nContents of child_read.txt:" print from_child self.expect(from_child, exe=False, substrs = ["breakpoint-hit"]) if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
<commit_before><commit_msg>Add test for MI tokens. This file tests the sequence of digits that can come before an MI command. Patch from dawn@burble.org. git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@222873 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>""" Test that the lldb-mi driver understands MI command syntax. """ import os import unittest2 import lldb from lldbtest import * class MiSyntaxTestCase(TestBase): mydir = TestBase.compute_mydir(__file__) myexe = "a.out" @classmethod def classCleanup(cls): """Cleanup the test byproducts.""" try: os.remove("child_send.txt") os.remove("child_read.txt") os.remove(cls.myexe) except: pass @lldbmi_test def test_lldbmi_tokens(self): """Test that 'lldb-mi --interpreter' echos command tokens.""" import pexpect self.buildDefault() # The default lldb-mi prompt (seriously?!). prompt = "(gdb)" # So that the child gets torn down after the test. self.child = pexpect.spawn('%s --interpreter' % (self.lldbMiExec)) child = self.child child.setecho(True) # Turn on logging for input/output to/from the child. with open('child_send.txt', 'w') as f_send: with open('child_read.txt', 'w') as f_read: child.logfile_send = f_send child.logfile_read = f_read child.sendline("000-file-exec-and-symbols " + self.myexe) child.expect("000\^done") child.sendline("100000001-break-insert -f a_MyFunction") child.expect("100000001\^done,bkpt={number=\"1\"") child.sendline("2-exec-run") child.sendline("") # FIXME: lldb-mi hangs here, so extra return is needed child.expect("2\^running") child.expect("\*stopped,reason=\"breakpoint-hit\"") child.sendline("0000000000000000000003-exec-continue") child.expect("0000000000000000000003\^running") child.expect("\*stopped,reason=\"exited-normally\"") child.expect_exact(prompt) child.sendline("quit") # Now that the necessary logging is done, restore logfile to None to # stop further logging. child.logfile_send = None child.logfile_read = None with open('child_send.txt', 'r') as fs: if self.TraceOn(): print "\n\nContents of child_send.txt:" print fs.read() with open('child_read.txt', 'r') as fr: from_child = fr.read() if self.TraceOn(): print "\n\nContents of child_read.txt:" print from_child self.expect(from_child, exe=False, substrs = ["breakpoint-hit"]) if __name__ == '__main__': import atexit lldb.SBDebugger.Initialize() atexit.register(lambda: lldb.SBDebugger.Terminate()) unittest2.main()
0e2dda38e19523ab2c5ac935ee4f9886b2067d5b
platforms/migrations/0008_platform_short_name.py
platforms/migrations/0008_platform_short_name.py
# Generated by Django 2.2.12 on 2021-04-18 00:16 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('platforms', '0007_auto_20180515_2130'), ] operations = [ migrations.AddField( model_name='platform', name='short_name', field=models.CharField(blank=True, max_length=32, null=True), ), ]
Add migration for short name
Add migration for short name
Python
agpl-3.0
lutris/website,lutris/website,lutris/website,lutris/website
Add migration for short name
# Generated by Django 2.2.12 on 2021-04-18 00:16 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('platforms', '0007_auto_20180515_2130'), ] operations = [ migrations.AddField( model_name='platform', name='short_name', field=models.CharField(blank=True, max_length=32, null=True), ), ]
<commit_before><commit_msg>Add migration for short name<commit_after>
# Generated by Django 2.2.12 on 2021-04-18 00:16 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('platforms', '0007_auto_20180515_2130'), ] operations = [ migrations.AddField( model_name='platform', name='short_name', field=models.CharField(blank=True, max_length=32, null=True), ), ]
Add migration for short name# Generated by Django 2.2.12 on 2021-04-18 00:16 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('platforms', '0007_auto_20180515_2130'), ] operations = [ migrations.AddField( model_name='platform', name='short_name', field=models.CharField(blank=True, max_length=32, null=True), ), ]
<commit_before><commit_msg>Add migration for short name<commit_after># Generated by Django 2.2.12 on 2021-04-18 00:16 from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('platforms', '0007_auto_20180515_2130'), ] operations = [ migrations.AddField( model_name='platform', name='short_name', field=models.CharField(blank=True, max_length=32, null=True), ), ]
d2375ebc4908d58c1a168619ed1e1e8eb59ebf25
geotrek/diving/tests/test_models.py
geotrek/diving/tests/test_models.py
from django.test import TestCase from geotrek.common.tests import TranslationResetMixin from geotrek.diving.models import Dive from geotrek.diving.factories import DiveFactory, DivingManagerFactory, PracticeFactory, LevelFactory from mapentity.factories import SuperUserFactory class DiveTest(TranslationResetMixin, TestCase): def test_levels_display(self): """Test if levels_display works""" l1 = LevelFactory.create() l2 = LevelFactory.create() d = DiveFactory() d.levels.add(l1) d.levels.add(l2) self.assertEquals(d.levels_display, "{0}, {1}".format(l1, l2))
Add test model for dives
Add test model for dives
Python
bsd-2-clause
GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,GeotrekCE/Geotrek-admin,makinacorpus/Geotrek,makinacorpus/Geotrek
Add test model for dives
from django.test import TestCase from geotrek.common.tests import TranslationResetMixin from geotrek.diving.models import Dive from geotrek.diving.factories import DiveFactory, DivingManagerFactory, PracticeFactory, LevelFactory from mapentity.factories import SuperUserFactory class DiveTest(TranslationResetMixin, TestCase): def test_levels_display(self): """Test if levels_display works""" l1 = LevelFactory.create() l2 = LevelFactory.create() d = DiveFactory() d.levels.add(l1) d.levels.add(l2) self.assertEquals(d.levels_display, "{0}, {1}".format(l1, l2))
<commit_before><commit_msg>Add test model for dives<commit_after>
from django.test import TestCase from geotrek.common.tests import TranslationResetMixin from geotrek.diving.models import Dive from geotrek.diving.factories import DiveFactory, DivingManagerFactory, PracticeFactory, LevelFactory from mapentity.factories import SuperUserFactory class DiveTest(TranslationResetMixin, TestCase): def test_levels_display(self): """Test if levels_display works""" l1 = LevelFactory.create() l2 = LevelFactory.create() d = DiveFactory() d.levels.add(l1) d.levels.add(l2) self.assertEquals(d.levels_display, "{0}, {1}".format(l1, l2))
Add test model for divesfrom django.test import TestCase from geotrek.common.tests import TranslationResetMixin from geotrek.diving.models import Dive from geotrek.diving.factories import DiveFactory, DivingManagerFactory, PracticeFactory, LevelFactory from mapentity.factories import SuperUserFactory class DiveTest(TranslationResetMixin, TestCase): def test_levels_display(self): """Test if levels_display works""" l1 = LevelFactory.create() l2 = LevelFactory.create() d = DiveFactory() d.levels.add(l1) d.levels.add(l2) self.assertEquals(d.levels_display, "{0}, {1}".format(l1, l2))
<commit_before><commit_msg>Add test model for dives<commit_after>from django.test import TestCase from geotrek.common.tests import TranslationResetMixin from geotrek.diving.models import Dive from geotrek.diving.factories import DiveFactory, DivingManagerFactory, PracticeFactory, LevelFactory from mapentity.factories import SuperUserFactory class DiveTest(TranslationResetMixin, TestCase): def test_levels_display(self): """Test if levels_display works""" l1 = LevelFactory.create() l2 = LevelFactory.create() d = DiveFactory() d.levels.add(l1) d.levels.add(l2) self.assertEquals(d.levels_display, "{0}, {1}".format(l1, l2))
a0990c986635e4079b9c7145497e44fdbde48280
tests/parser/test_ast_integrity.py
tests/parser/test_ast_integrity.py
import pytest from tests.infrastructure.test_utils import parse_local, parse_full from thinglang.compiler.errors import NoExceptionHandlers from thinglang.parser.errors import StructureError IN_THING_DEFINITION = ''' thing Person {} '''.strip() IN_METHOD_DEFINITION = ''' thing Person does something {} ''' COMMON_DISALLOWED = [ 'name', 'else', #'else if something eq something', TODO: reinstate #'handle Exception' TODO: reinstate ] EXAMPLES = COMMON_DISALLOWED + [ 'has text name', 'setup with text name', 'static does something with A container returns B', ] + [ IN_THING_DEFINITION.format(x) for x in [ 'thing Container', 'number n = 5', 'name', 'for number x in numbers' ] + COMMON_DISALLOWED ] + [ IN_METHOD_DEFINITION.format(x) for x in [ 'thing Container', 'setup', 'does gcd with number a, number b' ] + COMMON_DISALLOWED ] @pytest.mark.parametrize('code', EXAMPLES) def test_structural_integrity(code): with pytest.raises(StructureError) as exc: parse_full(code) print(exc.value) def test_secondary_integrity(): with pytest.raises(NoExceptionHandlers): parse_full('try')
Add test for AST integrity
Add test for AST integrity
Python
mit
ytanay/thinglang,ytanay/thinglang,ytanay/thinglang,ytanay/thinglang
Add test for AST integrity
import pytest from tests.infrastructure.test_utils import parse_local, parse_full from thinglang.compiler.errors import NoExceptionHandlers from thinglang.parser.errors import StructureError IN_THING_DEFINITION = ''' thing Person {} '''.strip() IN_METHOD_DEFINITION = ''' thing Person does something {} ''' COMMON_DISALLOWED = [ 'name', 'else', #'else if something eq something', TODO: reinstate #'handle Exception' TODO: reinstate ] EXAMPLES = COMMON_DISALLOWED + [ 'has text name', 'setup with text name', 'static does something with A container returns B', ] + [ IN_THING_DEFINITION.format(x) for x in [ 'thing Container', 'number n = 5', 'name', 'for number x in numbers' ] + COMMON_DISALLOWED ] + [ IN_METHOD_DEFINITION.format(x) for x in [ 'thing Container', 'setup', 'does gcd with number a, number b' ] + COMMON_DISALLOWED ] @pytest.mark.parametrize('code', EXAMPLES) def test_structural_integrity(code): with pytest.raises(StructureError) as exc: parse_full(code) print(exc.value) def test_secondary_integrity(): with pytest.raises(NoExceptionHandlers): parse_full('try')
<commit_before><commit_msg>Add test for AST integrity<commit_after>
import pytest from tests.infrastructure.test_utils import parse_local, parse_full from thinglang.compiler.errors import NoExceptionHandlers from thinglang.parser.errors import StructureError IN_THING_DEFINITION = ''' thing Person {} '''.strip() IN_METHOD_DEFINITION = ''' thing Person does something {} ''' COMMON_DISALLOWED = [ 'name', 'else', #'else if something eq something', TODO: reinstate #'handle Exception' TODO: reinstate ] EXAMPLES = COMMON_DISALLOWED + [ 'has text name', 'setup with text name', 'static does something with A container returns B', ] + [ IN_THING_DEFINITION.format(x) for x in [ 'thing Container', 'number n = 5', 'name', 'for number x in numbers' ] + COMMON_DISALLOWED ] + [ IN_METHOD_DEFINITION.format(x) for x in [ 'thing Container', 'setup', 'does gcd with number a, number b' ] + COMMON_DISALLOWED ] @pytest.mark.parametrize('code', EXAMPLES) def test_structural_integrity(code): with pytest.raises(StructureError) as exc: parse_full(code) print(exc.value) def test_secondary_integrity(): with pytest.raises(NoExceptionHandlers): parse_full('try')
Add test for AST integrityimport pytest from tests.infrastructure.test_utils import parse_local, parse_full from thinglang.compiler.errors import NoExceptionHandlers from thinglang.parser.errors import StructureError IN_THING_DEFINITION = ''' thing Person {} '''.strip() IN_METHOD_DEFINITION = ''' thing Person does something {} ''' COMMON_DISALLOWED = [ 'name', 'else', #'else if something eq something', TODO: reinstate #'handle Exception' TODO: reinstate ] EXAMPLES = COMMON_DISALLOWED + [ 'has text name', 'setup with text name', 'static does something with A container returns B', ] + [ IN_THING_DEFINITION.format(x) for x in [ 'thing Container', 'number n = 5', 'name', 'for number x in numbers' ] + COMMON_DISALLOWED ] + [ IN_METHOD_DEFINITION.format(x) for x in [ 'thing Container', 'setup', 'does gcd with number a, number b' ] + COMMON_DISALLOWED ] @pytest.mark.parametrize('code', EXAMPLES) def test_structural_integrity(code): with pytest.raises(StructureError) as exc: parse_full(code) print(exc.value) def test_secondary_integrity(): with pytest.raises(NoExceptionHandlers): parse_full('try')
<commit_before><commit_msg>Add test for AST integrity<commit_after>import pytest from tests.infrastructure.test_utils import parse_local, parse_full from thinglang.compiler.errors import NoExceptionHandlers from thinglang.parser.errors import StructureError IN_THING_DEFINITION = ''' thing Person {} '''.strip() IN_METHOD_DEFINITION = ''' thing Person does something {} ''' COMMON_DISALLOWED = [ 'name', 'else', #'else if something eq something', TODO: reinstate #'handle Exception' TODO: reinstate ] EXAMPLES = COMMON_DISALLOWED + [ 'has text name', 'setup with text name', 'static does something with A container returns B', ] + [ IN_THING_DEFINITION.format(x) for x in [ 'thing Container', 'number n = 5', 'name', 'for number x in numbers' ] + COMMON_DISALLOWED ] + [ IN_METHOD_DEFINITION.format(x) for x in [ 'thing Container', 'setup', 'does gcd with number a, number b' ] + COMMON_DISALLOWED ] @pytest.mark.parametrize('code', EXAMPLES) def test_structural_integrity(code): with pytest.raises(StructureError) as exc: parse_full(code) print(exc.value) def test_secondary_integrity(): with pytest.raises(NoExceptionHandlers): parse_full('try')
359d5bb5b40cb1a15b9b3ed6fb4710ed7a6e5f89
tests/test_integration/test_user_resource.py
tests/test_integration/test_user_resource.py
import demands from tests.test_integration.helpers import create_user from tests.test_integration.test_case import YolaServiceTestCase class TestYolaUser(YolaServiceTestCase): """Yola: User resource""" @classmethod def setUpClass(cls): super(TestYolaUser, cls).setUpClass() cls.user = cls._create_user() cls.user_id = cls.user['id'] @classmethod def _create_user(cls, **custom_attrs): custom_attrs.setdefault('name', 'John') return create_user(cls.service, **custom_attrs) def test_can_create_user(self): self.assertEqual(self.user['name'], 'John') def test_can_update_user(self): user = self._create_user(name='Original Name') user = self.service.update_user(user['id'], name='New Name') self.assertEqual(user['name'], 'New Name') def test_can_get_user(self): user = self.service.get_user(self.user_id) self.assertEqual(user['name'], 'John') def test_can_list_users(self): response = self.service.list_users(page_size=1) self.assertEqual(len(response['results']), 1) def test_can_request_filtered_user_list(self): response = self.service.list_users(id=self.user_id) self.assertEqual(len(response['results']), 1) self.assertEqual(response['results'][0]['id'], self.user_id) def test_can_delete_user(self): user = self._create_user() self.service.delete_user(user['id']) with self.assertRaises(demands.HTTPServiceError): self.service.get_user(user['id']) def test_can_suspend_and_resume_user(self): self.assertTrue(self.user['active']) self.service.suspend_user(self.user_id) user = self.service.get_user(self.user_id) self.assertFalse(user['active']) self.service.resume_user(self.user_id) user = self.service.get_user(self.user_id) self.assertTrue(user['active']) def test_can_get_sso_create_site_url(self): url = self.service.get_sso_create_site_url(self.user_id, 'example.com') self.assertTrue(url.startswith('http')) def test_can_get_sso_open_site_url(self): # TODO: create site for user # url = self.service.get_sso_open_site_url(self.user_id) # self.assertTrue(url.startswith('http')) pass
Add integration tests for user resource
Add integration tests for user resource
Python
mit
yola/yolapy
Add integration tests for user resource
import demands from tests.test_integration.helpers import create_user from tests.test_integration.test_case import YolaServiceTestCase class TestYolaUser(YolaServiceTestCase): """Yola: User resource""" @classmethod def setUpClass(cls): super(TestYolaUser, cls).setUpClass() cls.user = cls._create_user() cls.user_id = cls.user['id'] @classmethod def _create_user(cls, **custom_attrs): custom_attrs.setdefault('name', 'John') return create_user(cls.service, **custom_attrs) def test_can_create_user(self): self.assertEqual(self.user['name'], 'John') def test_can_update_user(self): user = self._create_user(name='Original Name') user = self.service.update_user(user['id'], name='New Name') self.assertEqual(user['name'], 'New Name') def test_can_get_user(self): user = self.service.get_user(self.user_id) self.assertEqual(user['name'], 'John') def test_can_list_users(self): response = self.service.list_users(page_size=1) self.assertEqual(len(response['results']), 1) def test_can_request_filtered_user_list(self): response = self.service.list_users(id=self.user_id) self.assertEqual(len(response['results']), 1) self.assertEqual(response['results'][0]['id'], self.user_id) def test_can_delete_user(self): user = self._create_user() self.service.delete_user(user['id']) with self.assertRaises(demands.HTTPServiceError): self.service.get_user(user['id']) def test_can_suspend_and_resume_user(self): self.assertTrue(self.user['active']) self.service.suspend_user(self.user_id) user = self.service.get_user(self.user_id) self.assertFalse(user['active']) self.service.resume_user(self.user_id) user = self.service.get_user(self.user_id) self.assertTrue(user['active']) def test_can_get_sso_create_site_url(self): url = self.service.get_sso_create_site_url(self.user_id, 'example.com') self.assertTrue(url.startswith('http')) def test_can_get_sso_open_site_url(self): # TODO: create site for user # url = self.service.get_sso_open_site_url(self.user_id) # self.assertTrue(url.startswith('http')) pass
<commit_before><commit_msg>Add integration tests for user resource<commit_after>
import demands from tests.test_integration.helpers import create_user from tests.test_integration.test_case import YolaServiceTestCase class TestYolaUser(YolaServiceTestCase): """Yola: User resource""" @classmethod def setUpClass(cls): super(TestYolaUser, cls).setUpClass() cls.user = cls._create_user() cls.user_id = cls.user['id'] @classmethod def _create_user(cls, **custom_attrs): custom_attrs.setdefault('name', 'John') return create_user(cls.service, **custom_attrs) def test_can_create_user(self): self.assertEqual(self.user['name'], 'John') def test_can_update_user(self): user = self._create_user(name='Original Name') user = self.service.update_user(user['id'], name='New Name') self.assertEqual(user['name'], 'New Name') def test_can_get_user(self): user = self.service.get_user(self.user_id) self.assertEqual(user['name'], 'John') def test_can_list_users(self): response = self.service.list_users(page_size=1) self.assertEqual(len(response['results']), 1) def test_can_request_filtered_user_list(self): response = self.service.list_users(id=self.user_id) self.assertEqual(len(response['results']), 1) self.assertEqual(response['results'][0]['id'], self.user_id) def test_can_delete_user(self): user = self._create_user() self.service.delete_user(user['id']) with self.assertRaises(demands.HTTPServiceError): self.service.get_user(user['id']) def test_can_suspend_and_resume_user(self): self.assertTrue(self.user['active']) self.service.suspend_user(self.user_id) user = self.service.get_user(self.user_id) self.assertFalse(user['active']) self.service.resume_user(self.user_id) user = self.service.get_user(self.user_id) self.assertTrue(user['active']) def test_can_get_sso_create_site_url(self): url = self.service.get_sso_create_site_url(self.user_id, 'example.com') self.assertTrue(url.startswith('http')) def test_can_get_sso_open_site_url(self): # TODO: create site for user # url = self.service.get_sso_open_site_url(self.user_id) # self.assertTrue(url.startswith('http')) pass
Add integration tests for user resourceimport demands from tests.test_integration.helpers import create_user from tests.test_integration.test_case import YolaServiceTestCase class TestYolaUser(YolaServiceTestCase): """Yola: User resource""" @classmethod def setUpClass(cls): super(TestYolaUser, cls).setUpClass() cls.user = cls._create_user() cls.user_id = cls.user['id'] @classmethod def _create_user(cls, **custom_attrs): custom_attrs.setdefault('name', 'John') return create_user(cls.service, **custom_attrs) def test_can_create_user(self): self.assertEqual(self.user['name'], 'John') def test_can_update_user(self): user = self._create_user(name='Original Name') user = self.service.update_user(user['id'], name='New Name') self.assertEqual(user['name'], 'New Name') def test_can_get_user(self): user = self.service.get_user(self.user_id) self.assertEqual(user['name'], 'John') def test_can_list_users(self): response = self.service.list_users(page_size=1) self.assertEqual(len(response['results']), 1) def test_can_request_filtered_user_list(self): response = self.service.list_users(id=self.user_id) self.assertEqual(len(response['results']), 1) self.assertEqual(response['results'][0]['id'], self.user_id) def test_can_delete_user(self): user = self._create_user() self.service.delete_user(user['id']) with self.assertRaises(demands.HTTPServiceError): self.service.get_user(user['id']) def test_can_suspend_and_resume_user(self): self.assertTrue(self.user['active']) self.service.suspend_user(self.user_id) user = self.service.get_user(self.user_id) self.assertFalse(user['active']) self.service.resume_user(self.user_id) user = self.service.get_user(self.user_id) self.assertTrue(user['active']) def test_can_get_sso_create_site_url(self): url = self.service.get_sso_create_site_url(self.user_id, 'example.com') self.assertTrue(url.startswith('http')) def test_can_get_sso_open_site_url(self): # TODO: create site for user # url = self.service.get_sso_open_site_url(self.user_id) # self.assertTrue(url.startswith('http')) pass
<commit_before><commit_msg>Add integration tests for user resource<commit_after>import demands from tests.test_integration.helpers import create_user from tests.test_integration.test_case import YolaServiceTestCase class TestYolaUser(YolaServiceTestCase): """Yola: User resource""" @classmethod def setUpClass(cls): super(TestYolaUser, cls).setUpClass() cls.user = cls._create_user() cls.user_id = cls.user['id'] @classmethod def _create_user(cls, **custom_attrs): custom_attrs.setdefault('name', 'John') return create_user(cls.service, **custom_attrs) def test_can_create_user(self): self.assertEqual(self.user['name'], 'John') def test_can_update_user(self): user = self._create_user(name='Original Name') user = self.service.update_user(user['id'], name='New Name') self.assertEqual(user['name'], 'New Name') def test_can_get_user(self): user = self.service.get_user(self.user_id) self.assertEqual(user['name'], 'John') def test_can_list_users(self): response = self.service.list_users(page_size=1) self.assertEqual(len(response['results']), 1) def test_can_request_filtered_user_list(self): response = self.service.list_users(id=self.user_id) self.assertEqual(len(response['results']), 1) self.assertEqual(response['results'][0]['id'], self.user_id) def test_can_delete_user(self): user = self._create_user() self.service.delete_user(user['id']) with self.assertRaises(demands.HTTPServiceError): self.service.get_user(user['id']) def test_can_suspend_and_resume_user(self): self.assertTrue(self.user['active']) self.service.suspend_user(self.user_id) user = self.service.get_user(self.user_id) self.assertFalse(user['active']) self.service.resume_user(self.user_id) user = self.service.get_user(self.user_id) self.assertTrue(user['active']) def test_can_get_sso_create_site_url(self): url = self.service.get_sso_create_site_url(self.user_id, 'example.com') self.assertTrue(url.startswith('http')) def test_can_get_sso_open_site_url(self): # TODO: create site for user # url = self.service.get_sso_open_site_url(self.user_id) # self.assertTrue(url.startswith('http')) pass
ab754d5924168d027bfc2eeed7881b9b2e469535
tools/autobuild/build-downloads.py
tools/autobuild/build-downloads.py
#!/usr/bin/env python3 import glob import json import os import sys def main(repo_path, output_path): boards_index = [] board_ids = set() for board_json in glob.glob(os.path.join(repo_path, "ports/*/boards/*/board.json")): # Relative path to the board directory (e.g. "ports/stm32/boards/PYBV11"). board_dir = os.path.dirname(board_json) # Relative path to the port (e.g. "ports/stm32") port_dir = os.path.dirname(os.path.dirname(board_dir)) with open(board_json, "r") as f: blob = json.load(f) # Use "id" if specified, otherwise default to board dir (e.g. "PYBV11"). # We allow boards to override ID for the historical build names. blob["id"] = blob.get("id", os.path.basename(board_dir)) # Check for duplicate board IDs. if blob["id"] in board_ids: print("Duplicate board ID: '{}'".format(blob["id"]), file=sys.stderr) board_ids.add(blob["id"]) # Add in default fields. blob["port"] = os.path.basename(port_dir) blob["build"] = os.path.basename(board_dir) boards_index.append(blob) # Create the board markdown, which is the concatenation of the # default "board.md" file (if exists), as well as any flashing # instructions. board_markdown = os.path.join(board_dir, "board.md") with open(os.path.join(output_path, blob["id"] + ".md"), "w") as f: if os.path.exists(board_markdown): with open(board_markdown, "r") as fin: f.write(fin.read()) if blob["deploy"]: f.write("\n\n## Installation instructions\n") for deploy in blob["deploy"]: with open(os.path.join(board_dir, deploy), "r") as fin: f.write(fin.read()) # Write the full index for the website to load. with open(os.path.join(output_path, "index.json"), "w") as f: json.dump(boards_index, f, indent=4, sort_keys=True) f.write("\n") if __name__ == "__main__": main(sys.argv[1], sys.argv[2])
Add script to generate website board metadata.
tools/autobuild: Add script to generate website board metadata. Signed-off-by: Jim Mussared <e84f5c941266186d0c97dcc873413469b954847e@gmail.com>
Python
mit
adafruit/circuitpython,bvernoux/micropython,adafruit/circuitpython,adafruit/circuitpython,bvernoux/micropython,adafruit/circuitpython,bvernoux/micropython,adafruit/circuitpython,bvernoux/micropython,adafruit/circuitpython,bvernoux/micropython
tools/autobuild: Add script to generate website board metadata. Signed-off-by: Jim Mussared <e84f5c941266186d0c97dcc873413469b954847e@gmail.com>
#!/usr/bin/env python3 import glob import json import os import sys def main(repo_path, output_path): boards_index = [] board_ids = set() for board_json in glob.glob(os.path.join(repo_path, "ports/*/boards/*/board.json")): # Relative path to the board directory (e.g. "ports/stm32/boards/PYBV11"). board_dir = os.path.dirname(board_json) # Relative path to the port (e.g. "ports/stm32") port_dir = os.path.dirname(os.path.dirname(board_dir)) with open(board_json, "r") as f: blob = json.load(f) # Use "id" if specified, otherwise default to board dir (e.g. "PYBV11"). # We allow boards to override ID for the historical build names. blob["id"] = blob.get("id", os.path.basename(board_dir)) # Check for duplicate board IDs. if blob["id"] in board_ids: print("Duplicate board ID: '{}'".format(blob["id"]), file=sys.stderr) board_ids.add(blob["id"]) # Add in default fields. blob["port"] = os.path.basename(port_dir) blob["build"] = os.path.basename(board_dir) boards_index.append(blob) # Create the board markdown, which is the concatenation of the # default "board.md" file (if exists), as well as any flashing # instructions. board_markdown = os.path.join(board_dir, "board.md") with open(os.path.join(output_path, blob["id"] + ".md"), "w") as f: if os.path.exists(board_markdown): with open(board_markdown, "r") as fin: f.write(fin.read()) if blob["deploy"]: f.write("\n\n## Installation instructions\n") for deploy in blob["deploy"]: with open(os.path.join(board_dir, deploy), "r") as fin: f.write(fin.read()) # Write the full index for the website to load. with open(os.path.join(output_path, "index.json"), "w") as f: json.dump(boards_index, f, indent=4, sort_keys=True) f.write("\n") if __name__ == "__main__": main(sys.argv[1], sys.argv[2])
<commit_before><commit_msg>tools/autobuild: Add script to generate website board metadata. Signed-off-by: Jim Mussared <e84f5c941266186d0c97dcc873413469b954847e@gmail.com><commit_after>
#!/usr/bin/env python3 import glob import json import os import sys def main(repo_path, output_path): boards_index = [] board_ids = set() for board_json in glob.glob(os.path.join(repo_path, "ports/*/boards/*/board.json")): # Relative path to the board directory (e.g. "ports/stm32/boards/PYBV11"). board_dir = os.path.dirname(board_json) # Relative path to the port (e.g. "ports/stm32") port_dir = os.path.dirname(os.path.dirname(board_dir)) with open(board_json, "r") as f: blob = json.load(f) # Use "id" if specified, otherwise default to board dir (e.g. "PYBV11"). # We allow boards to override ID for the historical build names. blob["id"] = blob.get("id", os.path.basename(board_dir)) # Check for duplicate board IDs. if blob["id"] in board_ids: print("Duplicate board ID: '{}'".format(blob["id"]), file=sys.stderr) board_ids.add(blob["id"]) # Add in default fields. blob["port"] = os.path.basename(port_dir) blob["build"] = os.path.basename(board_dir) boards_index.append(blob) # Create the board markdown, which is the concatenation of the # default "board.md" file (if exists), as well as any flashing # instructions. board_markdown = os.path.join(board_dir, "board.md") with open(os.path.join(output_path, blob["id"] + ".md"), "w") as f: if os.path.exists(board_markdown): with open(board_markdown, "r") as fin: f.write(fin.read()) if blob["deploy"]: f.write("\n\n## Installation instructions\n") for deploy in blob["deploy"]: with open(os.path.join(board_dir, deploy), "r") as fin: f.write(fin.read()) # Write the full index for the website to load. with open(os.path.join(output_path, "index.json"), "w") as f: json.dump(boards_index, f, indent=4, sort_keys=True) f.write("\n") if __name__ == "__main__": main(sys.argv[1], sys.argv[2])
tools/autobuild: Add script to generate website board metadata. Signed-off-by: Jim Mussared <e84f5c941266186d0c97dcc873413469b954847e@gmail.com>#!/usr/bin/env python3 import glob import json import os import sys def main(repo_path, output_path): boards_index = [] board_ids = set() for board_json in glob.glob(os.path.join(repo_path, "ports/*/boards/*/board.json")): # Relative path to the board directory (e.g. "ports/stm32/boards/PYBV11"). board_dir = os.path.dirname(board_json) # Relative path to the port (e.g. "ports/stm32") port_dir = os.path.dirname(os.path.dirname(board_dir)) with open(board_json, "r") as f: blob = json.load(f) # Use "id" if specified, otherwise default to board dir (e.g. "PYBV11"). # We allow boards to override ID for the historical build names. blob["id"] = blob.get("id", os.path.basename(board_dir)) # Check for duplicate board IDs. if blob["id"] in board_ids: print("Duplicate board ID: '{}'".format(blob["id"]), file=sys.stderr) board_ids.add(blob["id"]) # Add in default fields. blob["port"] = os.path.basename(port_dir) blob["build"] = os.path.basename(board_dir) boards_index.append(blob) # Create the board markdown, which is the concatenation of the # default "board.md" file (if exists), as well as any flashing # instructions. board_markdown = os.path.join(board_dir, "board.md") with open(os.path.join(output_path, blob["id"] + ".md"), "w") as f: if os.path.exists(board_markdown): with open(board_markdown, "r") as fin: f.write(fin.read()) if blob["deploy"]: f.write("\n\n## Installation instructions\n") for deploy in blob["deploy"]: with open(os.path.join(board_dir, deploy), "r") as fin: f.write(fin.read()) # Write the full index for the website to load. with open(os.path.join(output_path, "index.json"), "w") as f: json.dump(boards_index, f, indent=4, sort_keys=True) f.write("\n") if __name__ == "__main__": main(sys.argv[1], sys.argv[2])
<commit_before><commit_msg>tools/autobuild: Add script to generate website board metadata. Signed-off-by: Jim Mussared <e84f5c941266186d0c97dcc873413469b954847e@gmail.com><commit_after>#!/usr/bin/env python3 import glob import json import os import sys def main(repo_path, output_path): boards_index = [] board_ids = set() for board_json in glob.glob(os.path.join(repo_path, "ports/*/boards/*/board.json")): # Relative path to the board directory (e.g. "ports/stm32/boards/PYBV11"). board_dir = os.path.dirname(board_json) # Relative path to the port (e.g. "ports/stm32") port_dir = os.path.dirname(os.path.dirname(board_dir)) with open(board_json, "r") as f: blob = json.load(f) # Use "id" if specified, otherwise default to board dir (e.g. "PYBV11"). # We allow boards to override ID for the historical build names. blob["id"] = blob.get("id", os.path.basename(board_dir)) # Check for duplicate board IDs. if blob["id"] in board_ids: print("Duplicate board ID: '{}'".format(blob["id"]), file=sys.stderr) board_ids.add(blob["id"]) # Add in default fields. blob["port"] = os.path.basename(port_dir) blob["build"] = os.path.basename(board_dir) boards_index.append(blob) # Create the board markdown, which is the concatenation of the # default "board.md" file (if exists), as well as any flashing # instructions. board_markdown = os.path.join(board_dir, "board.md") with open(os.path.join(output_path, blob["id"] + ".md"), "w") as f: if os.path.exists(board_markdown): with open(board_markdown, "r") as fin: f.write(fin.read()) if blob["deploy"]: f.write("\n\n## Installation instructions\n") for deploy in blob["deploy"]: with open(os.path.join(board_dir, deploy), "r") as fin: f.write(fin.read()) # Write the full index for the website to load. with open(os.path.join(output_path, "index.json"), "w") as f: json.dump(boards_index, f, indent=4, sort_keys=True) f.write("\n") if __name__ == "__main__": main(sys.argv[1], sys.argv[2])
a0e3d1c7bc0dfe321323f26db631747188218d1e
test_12.py
test_12.py
from lib.tweet.parseTwitter import retrieveTweetText from lib.querygen.tweets2query import QueryGenerator hashtag = "twitterblades" tweets = retrieveTweetText(hashtag) qgen = QueryGenerator() query_list = qgen.gen_query_list(hashtag, tweets) print("Query list for \"%s\" is " % hashtag) print(query_list)
Test for module 1 and 2.
Test for module 1 and 2.
Python
apache-2.0
mzweilin/HashTag-Understanding,mzweilin/HashTag-Understanding,mzweilin/HashTag-Understanding
Test for module 1 and 2.
from lib.tweet.parseTwitter import retrieveTweetText from lib.querygen.tweets2query import QueryGenerator hashtag = "twitterblades" tweets = retrieveTweetText(hashtag) qgen = QueryGenerator() query_list = qgen.gen_query_list(hashtag, tweets) print("Query list for \"%s\" is " % hashtag) print(query_list)
<commit_before><commit_msg>Test for module 1 and 2.<commit_after>
from lib.tweet.parseTwitter import retrieveTweetText from lib.querygen.tweets2query import QueryGenerator hashtag = "twitterblades" tweets = retrieveTweetText(hashtag) qgen = QueryGenerator() query_list = qgen.gen_query_list(hashtag, tweets) print("Query list for \"%s\" is " % hashtag) print(query_list)
Test for module 1 and 2.from lib.tweet.parseTwitter import retrieveTweetText from lib.querygen.tweets2query import QueryGenerator hashtag = "twitterblades" tweets = retrieveTweetText(hashtag) qgen = QueryGenerator() query_list = qgen.gen_query_list(hashtag, tweets) print("Query list for \"%s\" is " % hashtag) print(query_list)
<commit_before><commit_msg>Test for module 1 and 2.<commit_after>from lib.tweet.parseTwitter import retrieveTweetText from lib.querygen.tweets2query import QueryGenerator hashtag = "twitterblades" tweets = retrieveTweetText(hashtag) qgen = QueryGenerator() query_list = qgen.gen_query_list(hashtag, tweets) print("Query list for \"%s\" is " % hashtag) print(query_list)
7f6c9d7f577424b02ddd08ce3b189d31575a01de
lldbDataFormatters.py
lldbDataFormatters.py
""" Load into LLDB with: script import lldbDataFormatters type synthetic add -x "^llvm::SmallVectorImpl<.+>$" -l lldbDataFormatters.SmallVectorSynthProvider """ # Pretty printer for llvm::SmallVector/llvm::SmallVectorImpl class SmallVectorSynthProvider: def __init__(self, valobj, dict): self.valobj = valobj; self.update() # initialize this provider def num_children(self): begin = self.begin.GetValueAsUnsigned(0) end = self.end.GetValueAsUnsigned(0) return (end - begin)/self.type_size def get_child_index(self, name): try: return int(name.lstrip('[').rstrip(']')) except: return -1; def get_child_at_index(self, index): # Do bounds checking. if index < 0: return None if index >= self.num_children(): return None; offset = index * self.type_size return self.begin.CreateChildAtOffset('['+str(index)+']', offset, self.data_type) def get_type_from_name(self): import re name = self.valobj.GetType().GetName() # This class works with both SmallVectors and SmallVectorImpls. res = re.match("^(llvm::)?SmallVectorImpl<(.+)>$", name) if res: return res.group(2) res = re.match("^(llvm::)?SmallVector<(.+), \d+>$", name) if res: return res.group(2) return None def update(self): self.begin = self.valobj.GetChildMemberWithName('BeginX') self.end = self.valobj.GetChildMemberWithName('EndX') data_type = self.get_type_from_name() # FIXME: this sometimes returns an invalid type. self.data_type = self.valobj.GetTarget().FindFirstType(data_type) self.type_size = self.data_type.GetByteSize()
Add an LLDB data formatter script for llvm::SmallVector, maybe this is helpful to someone else.
Add an LLDB data formatter script for llvm::SmallVector, maybe this is helpful to someone else. This lets lldb give sane output for SmallVectors, e.g. Before: (lldb) p sv (llvm::SmallVector<int, 10>) $0 = { (llvm::SmallVectorImpl<int>) llvm::SmallVectorImpl<int> = { (llvm::SmallVectorTemplateBase<int>) llvm::SmallVectorTemplateBase<int> = { (llvm::SmallVectorTemplateCommon<int>) llvm::SmallVectorTemplateCommon<int> = { (llvm::SmallVectorBase) llvm::SmallVectorBase = { (void *) BeginX = 0x00007fff5fbff960 ... } After: (lldb) p sv (llvm::SmallVector<int, 10>) $0 = { (int) [0] = 42 (int) [1] = 23 ... } The script is still a bit rough so expect crashes for vectors of complex types. Synthetic children are _not_ available in xcode 4.2, newer LLDBs should work though. git-svn-id: a4a6f32337ebd29ad4763b423022f00f68d1c7b7@148308 91177308-0d34-0410-b5e6-96231b3b80d8
Python
bsd-3-clause
lodyagin/bare_cxx,lodyagin/bare_cxx,lodyagin/bare_cxx,lodyagin/bare_cxx,lodyagin/bare_cxx
Add an LLDB data formatter script for llvm::SmallVector, maybe this is helpful to someone else. This lets lldb give sane output for SmallVectors, e.g. Before: (lldb) p sv (llvm::SmallVector<int, 10>) $0 = { (llvm::SmallVectorImpl<int>) llvm::SmallVectorImpl<int> = { (llvm::SmallVectorTemplateBase<int>) llvm::SmallVectorTemplateBase<int> = { (llvm::SmallVectorTemplateCommon<int>) llvm::SmallVectorTemplateCommon<int> = { (llvm::SmallVectorBase) llvm::SmallVectorBase = { (void *) BeginX = 0x00007fff5fbff960 ... } After: (lldb) p sv (llvm::SmallVector<int, 10>) $0 = { (int) [0] = 42 (int) [1] = 23 ... } The script is still a bit rough so expect crashes for vectors of complex types. Synthetic children are _not_ available in xcode 4.2, newer LLDBs should work though. git-svn-id: a4a6f32337ebd29ad4763b423022f00f68d1c7b7@148308 91177308-0d34-0410-b5e6-96231b3b80d8
""" Load into LLDB with: script import lldbDataFormatters type synthetic add -x "^llvm::SmallVectorImpl<.+>$" -l lldbDataFormatters.SmallVectorSynthProvider """ # Pretty printer for llvm::SmallVector/llvm::SmallVectorImpl class SmallVectorSynthProvider: def __init__(self, valobj, dict): self.valobj = valobj; self.update() # initialize this provider def num_children(self): begin = self.begin.GetValueAsUnsigned(0) end = self.end.GetValueAsUnsigned(0) return (end - begin)/self.type_size def get_child_index(self, name): try: return int(name.lstrip('[').rstrip(']')) except: return -1; def get_child_at_index(self, index): # Do bounds checking. if index < 0: return None if index >= self.num_children(): return None; offset = index * self.type_size return self.begin.CreateChildAtOffset('['+str(index)+']', offset, self.data_type) def get_type_from_name(self): import re name = self.valobj.GetType().GetName() # This class works with both SmallVectors and SmallVectorImpls. res = re.match("^(llvm::)?SmallVectorImpl<(.+)>$", name) if res: return res.group(2) res = re.match("^(llvm::)?SmallVector<(.+), \d+>$", name) if res: return res.group(2) return None def update(self): self.begin = self.valobj.GetChildMemberWithName('BeginX') self.end = self.valobj.GetChildMemberWithName('EndX') data_type = self.get_type_from_name() # FIXME: this sometimes returns an invalid type. self.data_type = self.valobj.GetTarget().FindFirstType(data_type) self.type_size = self.data_type.GetByteSize()
<commit_before><commit_msg>Add an LLDB data formatter script for llvm::SmallVector, maybe this is helpful to someone else. This lets lldb give sane output for SmallVectors, e.g. Before: (lldb) p sv (llvm::SmallVector<int, 10>) $0 = { (llvm::SmallVectorImpl<int>) llvm::SmallVectorImpl<int> = { (llvm::SmallVectorTemplateBase<int>) llvm::SmallVectorTemplateBase<int> = { (llvm::SmallVectorTemplateCommon<int>) llvm::SmallVectorTemplateCommon<int> = { (llvm::SmallVectorBase) llvm::SmallVectorBase = { (void *) BeginX = 0x00007fff5fbff960 ... } After: (lldb) p sv (llvm::SmallVector<int, 10>) $0 = { (int) [0] = 42 (int) [1] = 23 ... } The script is still a bit rough so expect crashes for vectors of complex types. Synthetic children are _not_ available in xcode 4.2, newer LLDBs should work though. git-svn-id: a4a6f32337ebd29ad4763b423022f00f68d1c7b7@148308 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
""" Load into LLDB with: script import lldbDataFormatters type synthetic add -x "^llvm::SmallVectorImpl<.+>$" -l lldbDataFormatters.SmallVectorSynthProvider """ # Pretty printer for llvm::SmallVector/llvm::SmallVectorImpl class SmallVectorSynthProvider: def __init__(self, valobj, dict): self.valobj = valobj; self.update() # initialize this provider def num_children(self): begin = self.begin.GetValueAsUnsigned(0) end = self.end.GetValueAsUnsigned(0) return (end - begin)/self.type_size def get_child_index(self, name): try: return int(name.lstrip('[').rstrip(']')) except: return -1; def get_child_at_index(self, index): # Do bounds checking. if index < 0: return None if index >= self.num_children(): return None; offset = index * self.type_size return self.begin.CreateChildAtOffset('['+str(index)+']', offset, self.data_type) def get_type_from_name(self): import re name = self.valobj.GetType().GetName() # This class works with both SmallVectors and SmallVectorImpls. res = re.match("^(llvm::)?SmallVectorImpl<(.+)>$", name) if res: return res.group(2) res = re.match("^(llvm::)?SmallVector<(.+), \d+>$", name) if res: return res.group(2) return None def update(self): self.begin = self.valobj.GetChildMemberWithName('BeginX') self.end = self.valobj.GetChildMemberWithName('EndX') data_type = self.get_type_from_name() # FIXME: this sometimes returns an invalid type. self.data_type = self.valobj.GetTarget().FindFirstType(data_type) self.type_size = self.data_type.GetByteSize()
Add an LLDB data formatter script for llvm::SmallVector, maybe this is helpful to someone else. This lets lldb give sane output for SmallVectors, e.g. Before: (lldb) p sv (llvm::SmallVector<int, 10>) $0 = { (llvm::SmallVectorImpl<int>) llvm::SmallVectorImpl<int> = { (llvm::SmallVectorTemplateBase<int>) llvm::SmallVectorTemplateBase<int> = { (llvm::SmallVectorTemplateCommon<int>) llvm::SmallVectorTemplateCommon<int> = { (llvm::SmallVectorBase) llvm::SmallVectorBase = { (void *) BeginX = 0x00007fff5fbff960 ... } After: (lldb) p sv (llvm::SmallVector<int, 10>) $0 = { (int) [0] = 42 (int) [1] = 23 ... } The script is still a bit rough so expect crashes for vectors of complex types. Synthetic children are _not_ available in xcode 4.2, newer LLDBs should work though. git-svn-id: a4a6f32337ebd29ad4763b423022f00f68d1c7b7@148308 91177308-0d34-0410-b5e6-96231b3b80d8""" Load into LLDB with: script import lldbDataFormatters type synthetic add -x "^llvm::SmallVectorImpl<.+>$" -l lldbDataFormatters.SmallVectorSynthProvider """ # Pretty printer for llvm::SmallVector/llvm::SmallVectorImpl class SmallVectorSynthProvider: def __init__(self, valobj, dict): self.valobj = valobj; self.update() # initialize this provider def num_children(self): begin = self.begin.GetValueAsUnsigned(0) end = self.end.GetValueAsUnsigned(0) return (end - begin)/self.type_size def get_child_index(self, name): try: return int(name.lstrip('[').rstrip(']')) except: return -1; def get_child_at_index(self, index): # Do bounds checking. if index < 0: return None if index >= self.num_children(): return None; offset = index * self.type_size return self.begin.CreateChildAtOffset('['+str(index)+']', offset, self.data_type) def get_type_from_name(self): import re name = self.valobj.GetType().GetName() # This class works with both SmallVectors and SmallVectorImpls. res = re.match("^(llvm::)?SmallVectorImpl<(.+)>$", name) if res: return res.group(2) res = re.match("^(llvm::)?SmallVector<(.+), \d+>$", name) if res: return res.group(2) return None def update(self): self.begin = self.valobj.GetChildMemberWithName('BeginX') self.end = self.valobj.GetChildMemberWithName('EndX') data_type = self.get_type_from_name() # FIXME: this sometimes returns an invalid type. self.data_type = self.valobj.GetTarget().FindFirstType(data_type) self.type_size = self.data_type.GetByteSize()
<commit_before><commit_msg>Add an LLDB data formatter script for llvm::SmallVector, maybe this is helpful to someone else. This lets lldb give sane output for SmallVectors, e.g. Before: (lldb) p sv (llvm::SmallVector<int, 10>) $0 = { (llvm::SmallVectorImpl<int>) llvm::SmallVectorImpl<int> = { (llvm::SmallVectorTemplateBase<int>) llvm::SmallVectorTemplateBase<int> = { (llvm::SmallVectorTemplateCommon<int>) llvm::SmallVectorTemplateCommon<int> = { (llvm::SmallVectorBase) llvm::SmallVectorBase = { (void *) BeginX = 0x00007fff5fbff960 ... } After: (lldb) p sv (llvm::SmallVector<int, 10>) $0 = { (int) [0] = 42 (int) [1] = 23 ... } The script is still a bit rough so expect crashes for vectors of complex types. Synthetic children are _not_ available in xcode 4.2, newer LLDBs should work though. git-svn-id: a4a6f32337ebd29ad4763b423022f00f68d1c7b7@148308 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>""" Load into LLDB with: script import lldbDataFormatters type synthetic add -x "^llvm::SmallVectorImpl<.+>$" -l lldbDataFormatters.SmallVectorSynthProvider """ # Pretty printer for llvm::SmallVector/llvm::SmallVectorImpl class SmallVectorSynthProvider: def __init__(self, valobj, dict): self.valobj = valobj; self.update() # initialize this provider def num_children(self): begin = self.begin.GetValueAsUnsigned(0) end = self.end.GetValueAsUnsigned(0) return (end - begin)/self.type_size def get_child_index(self, name): try: return int(name.lstrip('[').rstrip(']')) except: return -1; def get_child_at_index(self, index): # Do bounds checking. if index < 0: return None if index >= self.num_children(): return None; offset = index * self.type_size return self.begin.CreateChildAtOffset('['+str(index)+']', offset, self.data_type) def get_type_from_name(self): import re name = self.valobj.GetType().GetName() # This class works with both SmallVectors and SmallVectorImpls. res = re.match("^(llvm::)?SmallVectorImpl<(.+)>$", name) if res: return res.group(2) res = re.match("^(llvm::)?SmallVector<(.+), \d+>$", name) if res: return res.group(2) return None def update(self): self.begin = self.valobj.GetChildMemberWithName('BeginX') self.end = self.valobj.GetChildMemberWithName('EndX') data_type = self.get_type_from_name() # FIXME: this sometimes returns an invalid type. self.data_type = self.valobj.GetTarget().FindFirstType(data_type) self.type_size = self.data_type.GetByteSize()
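The formatter above only runs inside LLDB's embedded script interpreter, so it cannot be exercised standalone. A minimal standalone sketch of the bracket-index parsing that get_child_index relies on (plain Python, no LLDB; the function name here is illustrative):

def bracket_index(name):
    # '[3]' -> 3; anything unparsable -> -1, matching the formatter's contract
    try:
        return int(name.lstrip('[').rstrip(']'))
    except ValueError:
        return -1

assert bracket_index('[42]') == 42
assert bracket_index('count') == -1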
1c0a2ebeca404572bef2807dc697c90c1be1a65a
tools/count_lines.py
tools/count_lines.py
import os test_dir = ".." total_lines = 0 for _root, _dirs, _files in os.walk(test_dir): for _file in _files: file_lines = sum(1 for line in open(os.path.join(_root, _file))) total_lines += file_lines print("total lines: %d" % total_lines)
Add a line count script
Add a line count script Signed-off-by: xcgspring <8f4f8d15922e4269158d45cde01dc3497961f40d@126.com>
Python
apache-2.0
xcgspring/AXUI,xcgspring/AXUI,xcgspring/AXUI
Add a line count script Signed-off-by: xcgspring <8f4f8d15922e4269158d45cde01dc3497961f40d@126.com>
import os test_dir = ".." total_lines = 0 for _root, _dirs, _files in os.walk(test_dir): for _file in _files: file_lines = sum(1 for line in open(os.path.join(_root, _file))) total_lines += file_lines print("total lines: %d" % total_lines)
<commit_before><commit_msg>Add a line count script Signed-off-by: xcgspring <8f4f8d15922e4269158d45cde01dc3497961f40d@126.com><commit_after>
import os test_dir = ".." total_lines = 0 for _root, _dirs, _files in os.walk(test_dir): for _file in _files: file_lines = sum(1 for line in open(os.path.join(_root, _file))) total_lines += file_lines print("total lines: %d" % total_lines)
Add a line count script Signed-off-by: xcgspring <8f4f8d15922e4269158d45cde01dc3497961f40d@126.com> import os test_dir = ".." total_lines = 0 for _root, _dirs, _files in os.walk(test_dir): for _file in _files: file_lines = sum(1 for line in open(os.path.join(_root, _file))) total_lines += file_lines print("total lines: %d" % total_lines)
<commit_before><commit_msg>Add a line count script Signed-off-by: xcgspring <8f4f8d15922e4269158d45cde01dc3497961f40d@126.com><commit_after> import os test_dir = ".." total_lines = 0 for _root, _dirs, _files in os.walk(test_dir): for _file in _files: file_lines = sum(1 for line in open(os.path.join(_root, _file))) total_lines += file_lines print("total lines: %d" % total_lines)
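As written, the script above never closes the files it opens and will raise on files that cannot be read as text. A hedged variant of the same os.walk pass (same output format; opening in binary mode and skipping unreadable files are choices made here, not behavior of the original):

import os

def count_lines(root):
    """Sum the line counts of every file under root, skipping unreadable ones."""
    total = 0
    for dirpath, _dirs, files in os.walk(root):
        for name in files:
            try:
                with open(os.path.join(dirpath, name), 'rb') as handle:
                    total += sum(1 for _line in handle)
            except OSError:
                continue
    return total

if __name__ == '__main__':
    print("total lines: %d" % count_lines(".."))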
49f588ba82151d4f2a8c8cbe0123775ef2514cf5
django-server/feel/core/db/update_fixtures.py
django-server/feel/core/db/update_fixtures.py
import subprocess from django.conf import settings MY_APPS = settings.MY_APPS COMMAND_FORMAT = "python manage.py dumpdata {app} > core/fixtures/{app}.json" def update_fixtures(): for app in MY_APPS: command = COMMAND_FORMAT.format(app=app) print(command) subprocess.check_output(command, shell=True) if __name__ == '__main__': update_fixtures()
Add script to update course content.
Fixtures: Add script to update course content.
Python
mit
pixyj/feel,pixyj/feel,pixyj/feel,pixyj/feel,pixyj/feel
Fixtures: Add script to update course content.
import subprocess from django.conf import settings MY_APPS = settings.MY_APPS COMMAND_FORMAT = "python manage.py dumpdata {app} > core/fixtures/{app}.json" def update_fixtures(): for app in MY_APPS: command = COMMAND_FORMAT.format(app=app) print(command) subprocess.check_output(command, shell=True) if __name__ == '__main__': update_fixtures()
<commit_before><commit_msg>Fixtures: Add script to update course content.<commit_after>
import subprocess from django.conf import settings MY_APPS = settings.MY_APPS COMMAND_FORMAT = "python manage.py dumpdata {app} > core/fixtures/{app}.json" def update_fixtures(): for app in MY_APPS: command = COMMAND_FORMAT.format(app=app) print(command) subprocess.check_output(command, shell=True) if __name__ == '__main__': update_fixtures()
Fixtures: Add script to update course content.import subprocess from django.conf import settings MY_APPS = settings.MY_APPS COMMAND_FORMAT = "python manage.py dumpdata {app} > core/fixtures/{app}.json" def update_fixtures(): for app in MY_APPS: command = COMMAND_FORMAT.format(app=app) print(command) subprocess.check_output(command, shell=True) if __name__ == '__main__': update_fixtures()
<commit_before><commit_msg>Fixtures: Add script to update course content.<commit_after>import subprocess from django.conf import settings MY_APPS = settings.MY_APPS COMMAND_FORMAT = "python manage.py dumpdata {app} > core/fixtures/{app}.json" def update_fixtures(): for app in MY_APPS: command = COMMAND_FORMAT.format(app=app) print(command) subprocess.check_output(command, shell=True) if __name__ == '__main__': update_fixtures()
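Shelling out with shell=True and stream redirection works, but dumpdata can also be driven in-process. A sketch assuming a configured Django project (same MY_APPS convention as above; call_command accepts a stdout stream, and django.setup() must already have run):

from django.core.management import call_command

def update_fixtures(apps):
    for app in apps:
        path = "core/fixtures/{app}.json".format(app=app)
        with open(path, "w") as out:
            call_command("dumpdata", app, stdout=out)  # no shell involved
        print(path)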
2d57d39112e3abbc116f4a937c28711fa15ee89a
waffle/migrations/0002_auto_20150721_1437.py
waffle/migrations/0002_auto_20150721_1437.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ('waffle', '0001_initial'), ] operations = [ migrations.AddField( model_name='flag', name='site', field=models.ForeignKey(related_name='waffle_flags', blank=True, to='sites.Site', null=True), ), migrations.AddField( model_name='sample', name='site', field=models.ForeignKey(related_name='waffle_samples', blank=True, to='sites.Site', null=True), ), migrations.AddField( model_name='switch', name='site', field=models.ForeignKey(related_name='waffle_switches', blank=True, to='sites.Site', null=True), ), migrations.AlterField( model_name='flag', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterField( model_name='sample', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterField( model_name='switch', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterUniqueTogether( name='flag', unique_together=set([('name', 'site')]), ), migrations.AlterUniqueTogether( name='sample', unique_together=set([('name', 'site')]), ), migrations.AlterUniqueTogether( name='switch', unique_together=set([('name', 'site')]), ), ]
Add updated migration for our work with 1.7
Add updated migration for our work with 1.7
Python
bsd-3-clause
isotoma/django-waffle,isotoma/django-waffle,isotoma/django-waffle,isotoma/django-waffle
Add updated migration for our work with 1.7
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ('waffle', '0001_initial'), ] operations = [ migrations.AddField( model_name='flag', name='site', field=models.ForeignKey(related_name='waffle_flags', blank=True, to='sites.Site', null=True), ), migrations.AddField( model_name='sample', name='site', field=models.ForeignKey(related_name='waffle_samples', blank=True, to='sites.Site', null=True), ), migrations.AddField( model_name='switch', name='site', field=models.ForeignKey(related_name='waffle_switches', blank=True, to='sites.Site', null=True), ), migrations.AlterField( model_name='flag', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterField( model_name='sample', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterField( model_name='switch', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterUniqueTogether( name='flag', unique_together=set([('name', 'site')]), ), migrations.AlterUniqueTogether( name='sample', unique_together=set([('name', 'site')]), ), migrations.AlterUniqueTogether( name='switch', unique_together=set([('name', 'site')]), ), ]
<commit_before><commit_msg>Add updated migration for our work with 1.7<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ('waffle', '0001_initial'), ] operations = [ migrations.AddField( model_name='flag', name='site', field=models.ForeignKey(related_name='waffle_flags', blank=True, to='sites.Site', null=True), ), migrations.AddField( model_name='sample', name='site', field=models.ForeignKey(related_name='waffle_samples', blank=True, to='sites.Site', null=True), ), migrations.AddField( model_name='switch', name='site', field=models.ForeignKey(related_name='waffle_switches', blank=True, to='sites.Site', null=True), ), migrations.AlterField( model_name='flag', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterField( model_name='sample', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterField( model_name='switch', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterUniqueTogether( name='flag', unique_together=set([('name', 'site')]), ), migrations.AlterUniqueTogether( name='sample', unique_together=set([('name', 'site')]), ), migrations.AlterUniqueTogether( name='switch', unique_together=set([('name', 'site')]), ), ]
Add updated migration for our work with 1.7# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ('waffle', '0001_initial'), ] operations = [ migrations.AddField( model_name='flag', name='site', field=models.ForeignKey(related_name='waffle_flags', blank=True, to='sites.Site', null=True), ), migrations.AddField( model_name='sample', name='site', field=models.ForeignKey(related_name='waffle_samples', blank=True, to='sites.Site', null=True), ), migrations.AddField( model_name='switch', name='site', field=models.ForeignKey(related_name='waffle_switches', blank=True, to='sites.Site', null=True), ), migrations.AlterField( model_name='flag', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterField( model_name='sample', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterField( model_name='switch', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterUniqueTogether( name='flag', unique_together=set([('name', 'site')]), ), migrations.AlterUniqueTogether( name='sample', unique_together=set([('name', 'site')]), ), migrations.AlterUniqueTogether( name='switch', unique_together=set([('name', 'site')]), ), ]
<commit_before><commit_msg>Add updated migration for our work with 1.7<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ('waffle', '0001_initial'), ] operations = [ migrations.AddField( model_name='flag', name='site', field=models.ForeignKey(related_name='waffle_flags', blank=True, to='sites.Site', null=True), ), migrations.AddField( model_name='sample', name='site', field=models.ForeignKey(related_name='waffle_samples', blank=True, to='sites.Site', null=True), ), migrations.AddField( model_name='switch', name='site', field=models.ForeignKey(related_name='waffle_switches', blank=True, to='sites.Site', null=True), ), migrations.AlterField( model_name='flag', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterField( model_name='sample', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterField( model_name='switch', name='name', field=models.CharField(help_text=b'The human/computer readable name.', max_length=100), ), migrations.AlterUniqueTogether( name='flag', unique_together=set([('name', 'site')]), ), migrations.AlterUniqueTogether( name='sample', unique_together=set([('name', 'site')]), ), migrations.AlterUniqueTogether( name='switch', unique_together=set([('name', 'site')]), ), ]
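The unique_together pairs added above mean one row per flag name per site. A toy model of the per-site lookup that enables (plain dicts; the fall-back-to-a-site-less-row rule is an assumption for illustration, not necessarily waffle's actual resolution order):

flags = {}

def set_flag(name, site_id, active):
    flags[(name, site_id)] = active  # one value per (name, site), as in unique_together

def flag_is_active(name, site_id):
    if (name, site_id) in flags:  # a site-specific row wins
        return flags[(name, site_id)]
    return flags.get((name, None), False)  # otherwise try a site-less row

set_flag('new_dashboard', None, False)
set_flag('new_dashboard', 2, True)
assert flag_is_active('new_dashboard', 2) is True
assert flag_is_active('new_dashboard', 1) is False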
359ea0dd38cdc2b48322a3ad21568176916dfede
examsys/migrations/0003_auto_20150315_1936.py
examsys/migrations/0003_auto_20150315_1936.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('examsys', '0002_auto_20150315_1640'), ] operations = [ migrations.RenameModel( old_name='TestToAnswer', new_name='TestToQuestion', ), ]
Change the name of the model.
Change the name of the model.
Python
mit
icyflame/test-taking-platform,icyflame/test-taking-platform
Change the name of the model.
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('examsys', '0002_auto_20150315_1640'), ] operations = [ migrations.RenameModel( old_name='TestToAnswer', new_name='TestToQuestion', ), ]
<commit_before><commit_msg>Change the name of the model.<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('examsys', '0002_auto_20150315_1640'), ] operations = [ migrations.RenameModel( old_name='TestToAnswer', new_name='TestToQuestion', ), ]
Change the name of the model.# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('examsys', '0002_auto_20150315_1640'), ] operations = [ migrations.RenameModel( old_name='TestToAnswer', new_name='TestToQuestion', ), ]
<commit_before><commit_msg>Change the name of the model.<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('examsys', '0002_auto_20150315_1640'), ] operations = [ migrations.RenameModel( old_name='TestToAnswer', new_name='TestToQuestion', ), ]
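On most database backends a RenameModel operation boils down to a table rename. A standalone illustration using sqlite3 (table names follow Django's default app_modelname convention; a real migration also carries along indexes, constraints and foreign keys):

import sqlite3

con = sqlite3.connect(':memory:')
con.execute('CREATE TABLE examsys_testtoanswer (id INTEGER PRIMARY KEY)')
con.execute('ALTER TABLE examsys_testtoanswer RENAME TO examsys_testtoquestion')
names = [row[0] for row in
         con.execute("SELECT name FROM sqlite_master WHERE type='table'")]
assert names == ['examsys_testtoquestion']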
2c5dbf8fb147b9d9494de5f8abff52aeedf1cabf
src/bilor/core/handler.py
src/bilor/core/handler.py
import datetime import logging import sys import traceback import requests from werkzeug.debug.tbtools import get_current_traceback class BilorHandler(logging.Handler): """Logging handler for bilor. Based on raven. """ def __init__(self, host, *args, **kwargs): super(BilorHandler, self).__init__(level=kwargs.get('level', logging.NOTSET)) self.host = host def can_record(self, record): return not ( record.name == 'bilor' or record.name.startswith('bilor.') ) def emit(self, record): try: self.format(record) if not self.can_record(record): print(to_string(record.message), file=sys.stderr) return return self._emit(record) except Exception: if self.client.raise_send_errors: raise print("Top level Bilor exception caught - failed creating log record", file=sys.stderr) print(to_string(record.msg), file=sys.stderr) print(to_string(traceback.format_exc()), file=sys.stderr) def _emit(self, record, **kwargs): data = {} extra = getattr(record, 'data', None) if not isinstance(extra, dict): if extra: extra = {'data': extra} else: extra = {} stack = getattr(record, 'stack', None) if stack is True: stack = get_current_traceback(skip=1) date = datetime.datetime.utcfromtimestamp(record.created) handler_kwargs = { 'params': record.args, } try: handler_kwargs['message'] = six.text_type(record.msg) except UnicodeDecodeError: # Handle binary strings where it should be unicode... handler_kwargs['message'] = repr(record.msg)[1:-1] try: handler_kwargs['formatted'] = six.text_type(record.message) except UnicodeDecodeError: # Handle binary strings where it should be unicode... handler_kwargs['formatted'] = repr(record.message)[1:-1] # If there's no exception being processed, exc_info may be a 3-tuple of None # http://docs.python.org/library/sys.html#sys.exc_info if record.exc_info and all(record.exc_info): # capture the standard message first so that we ensure # the event is recorded as an exception, in addition to having our # message interface attached handler = self.client.get_handler(event_type) data.update(handler.capture(**handler_kwargs)) handler_kwargs = {'exc_info': record.exc_info} data['level'] = record.levelno data['logger'] = record.name if hasattr(record, 'tags'): kwargs['tags'] = record.tags kwargs.update(handler_kwargs) #requests.post()
Add temporary, not yet working client
Add temporary, not yet working client
Python
bsd-3-clause
EnTeQuAk/bilor,EnTeQuAk/bilor,EnTeQuAk/bilor,EnTeQuAk/bilor
Add temporary, not yet working client
import datetime import logging import sys import traceback import requests from werkzeug.debug.tbtools import get_current_traceback class BilorHandler(logging.Handler): """Logging handler for bilor. Based on raven. """ def __init__(self, host, *args, **kwargs): super(BilorHandler, self).__init__(level=kwargs.get('level', logging.NOTSET)) self.host = host def can_record(self, record): return not ( record.name == 'bilor' or record.name.startswith('bilor.') ) def emit(self, record): try: self.format(record) if not self.can_record(record): print(to_string(record.message), file=sys.stderr) return return self._emit(record) except Exception: if self.client.raise_send_errors: raise print("Top level Bilor exception caught - failed creating log record", file=sys.stderr) print(to_string(record.msg), file=sys.stderr) print(to_string(traceback.format_exc()), file=sys.stderr) def _emit(self, record, **kwargs): data = {} extra = getattr(record, 'data', None) if not isinstance(extra, dict): if extra: extra = {'data': extra} else: extra = {} stack = getattr(record, 'stack', None) if stack is True: stack = get_current_traceback(skip=1) date = datetime.datetime.utcfromtimestamp(record.created) handler_kwargs = { 'params': record.args, } try: handler_kwargs['message'] = six.text_type(record.msg) except UnicodeDecodeError: # Handle binary strings where it should be unicode... handler_kwargs['message'] = repr(record.msg)[1:-1] try: handler_kwargs['formatted'] = six.text_type(record.message) except UnicodeDecodeError: # Handle binary strings where it should be unicode... handler_kwargs['formatted'] = repr(record.message)[1:-1] # If there's no exception being processed, exc_info may be a 3-tuple of None # http://docs.python.org/library/sys.html#sys.exc_info if record.exc_info and all(record.exc_info): # capture the standard message first so that we ensure # the event is recorded as an exception, in addition to having our # message interface attached handler = self.client.get_handler(event_type) data.update(handler.capture(**handler_kwargs)) handler_kwargs = {'exc_info': record.exc_info} data['level'] = record.levelno data['logger'] = record.name if hasattr(record, 'tags'): kwargs['tags'] = record.tags kwargs.update(handler_kwargs) #requests.post()
<commit_before><commit_msg>Add temporary, not yet working client<commit_after>
import datetime import logging import sys import traceback import requests from werkzeug.debug.tbtools import get_current_traceback class BilorHandler(logging.Handler): """Logging handler for bilor. Based on raven. """ def __init__(self, host, *args, **kwargs): super(BilorHandler, self).__init__(level=kwargs.get('level', logging.NOTSET)) self.host = host def can_record(self, record): return not ( record.name == 'bilor' or record.name.startswith('bilor.') ) def emit(self, record): try: self.format(record) if not self.can_record(record): print(to_string(record.message), file=sys.stderr) return return self._emit(record) except Exception: if self.client.raise_send_errors: raise print("Top level Bilor exception caught - failed creating log record", file=sys.stderr) print(to_string(record.msg), file=sys.stderr) print(to_string(traceback.format_exc()), file=sys.stderr) def _emit(self, record, **kwargs): data = {} extra = getattr(record, 'data', None) if not isinstance(extra, dict): if extra: extra = {'data': extra} else: extra = {} stack = getattr(record, 'stack', None) if stack is True: stack = get_current_traceback(skip=1) date = datetime.datetime.utcfromtimestamp(record.created) handler_kwargs = { 'params': record.args, } try: handler_kwargs['message'] = six.text_type(record.msg) except UnicodeDecodeError: # Handle binary strings where it should be unicode... handler_kwargs['message'] = repr(record.msg)[1:-1] try: handler_kwargs['formatted'] = six.text_type(record.message) except UnicodeDecodeError: # Handle binary strings where it should be unicode... handler_kwargs['formatted'] = repr(record.message)[1:-1] # If there's no exception being processed, exc_info may be a 3-tuple of None # http://docs.python.org/library/sys.html#sys.exc_info if record.exc_info and all(record.exc_info): # capture the standard message first so that we ensure # the event is recorded as an exception, in addition to having our # message interface attached handler = self.client.get_handler(event_type) data.update(handler.capture(**handler_kwargs)) handler_kwargs = {'exc_info': record.exc_info} data['level'] = record.levelno data['logger'] = record.name if hasattr(record, 'tags'): kwargs['tags'] = record.tags kwargs.update(handler_kwargs) #requests.post()
Add temporary, not yet working clientimport datetime import logging import sys import traceback import requests from werkzeug.debug.tbtools import get_current_traceback class BilorHandler(logging.Handler): """Logging handler for bilor. Based on raven. """ def __init__(self, host, *args, **kwargs): super(BilorHandler, self).__init__(level=kwargs.get('level', logging.NOTSET)) self.host = host def can_record(self, record): return not ( record.name == 'bilor' or record.name.startswith('bilor.') ) def emit(self, record): try: self.format(record) if not self.can_record(record): print(to_string(record.message), file=sys.stderr) return return self._emit(record) except Exception: if self.client.raise_send_errors: raise print("Top level Bilor exception caught - failed creating log record", file=sys.stderr) print(to_string(record.msg), file=sys.stderr) print(to_string(traceback.format_exc()), file=sys.stderr) def _emit(self, record, **kwargs): data = {} extra = getattr(record, 'data', None) if not isinstance(extra, dict): if extra: extra = {'data': extra} else: extra = {} stack = getattr(record, 'stack', None) if stack is True: stack = get_current_traceback(skip=1) date = datetime.datetime.utcfromtimestamp(record.created) handler_kwargs = { 'params': record.args, } try: handler_kwargs['message'] = six.text_type(record.msg) except UnicodeDecodeError: # Handle binary strings where it should be unicode... handler_kwargs['message'] = repr(record.msg)[1:-1] try: handler_kwargs['formatted'] = six.text_type(record.message) except UnicodeDecodeError: # Handle binary strings where it should be unicode... handler_kwargs['formatted'] = repr(record.message)[1:-1] # If there's no exception being processed, exc_info may be a 3-tuple of None # http://docs.python.org/library/sys.html#sys.exc_info if record.exc_info and all(record.exc_info): # capture the standard message first so that we ensure # the event is recorded as an exception, in addition to having our # message interface attached handler = self.client.get_handler(event_type) data.update(handler.capture(**handler_kwargs)) handler_kwargs = {'exc_info': record.exc_info} data['level'] = record.levelno data['logger'] = record.name if hasattr(record, 'tags'): kwargs['tags'] = record.tags kwargs.update(handler_kwargs) #requests.post()
<commit_before><commit_msg>Add temporary, not yet working client<commit_after>import datetime import logging import sys import traceback import requests from werkzeug.debug.tbtools import get_current_traceback class BilorHandler(logging.Handler): """Logging handler for bilor. Based on raven. """ def __init__(self, host, *args, **kwargs): super(BilorHandler, self).__init__(level=kwargs.get('level', logging.NOTSET)) self.host = host def can_record(self, record): return not ( record.name == 'bilor' or record.name.startswith('bilor.') ) def emit(self, record): try: self.format(record) if not self.can_record(record): print(to_string(record.message), file=sys.stderr) return return self._emit(record) except Exception: if self.client.raise_send_errors: raise print("Top level Bilor exception caught - failed creating log record", file=sys.stderr) print(to_string(record.msg), file=sys.stderr) print(to_string(traceback.format_exc()), file=sys.stderr) def _emit(self, record, **kwargs): data = {} extra = getattr(record, 'data', None) if not isinstance(extra, dict): if extra: extra = {'data': extra} else: extra = {} stack = getattr(record, 'stack', None) if stack is True: stack = get_current_traceback(skip=1) date = datetime.datetime.utcfromtimestamp(record.created) handler_kwargs = { 'params': record.args, } try: handler_kwargs['message'] = six.text_type(record.msg) except UnicodeDecodeError: # Handle binary strings where it should be unicode... handler_kwargs['message'] = repr(record.msg)[1:-1] try: handler_kwargs['formatted'] = six.text_type(record.message) except UnicodeDecodeError: # Handle binary strings where it should be unicode... handler_kwargs['formatted'] = repr(record.message)[1:-1] # If there's no exception being processed, exc_info may be a 3-tuple of None # http://docs.python.org/library/sys.html#sys.exc_info if record.exc_info and all(record.exc_info): # capture the standard message first so that we ensure # the event is recorded as an exception, in addition to having our # message interface attached handler = self.client.get_handler(event_type) data.update(handler.capture(**handler_kwargs)) handler_kwargs = {'exc_info': record.exc_info} data['level'] = record.levelno data['logger'] = record.name if hasattr(record, 'tags'): kwargs['tags'] = record.tags kwargs.update(handler_kwargs) #requests.post()
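As the commit message says, the handler above is not yet working: six and to_string are used but never imported or defined, event_type and self.client are referenced but never set, and the final requests.post call is commented out. A minimal self-contained sketch of the same shape (hypothetical payload format, not bilor's; it collects dicts instead of POSTing so it runs offline):

import logging
import traceback

class CollectingHandler(logging.Handler):
    """Builds the payload a real client would POST, and stores it instead."""

    def __init__(self, host):
        super(CollectingHandler, self).__init__()
        self.host = host
        self.sent = []

    def emit(self, record):
        payload = {
            'logger': record.name,
            'level': record.levelno,
            'message': record.getMessage(),
        }
        if record.exc_info and all(record.exc_info):
            payload['traceback'] = ''.join(
                traceback.format_exception(*record.exc_info))
        self.sent.append(payload)  # a live client would POST this to self.host

log = logging.getLogger('handler-demo')
log.addHandler(CollectingHandler('http://example.invalid/events/'))
try:
    1 / 0
except ZeroDivisionError:
    log.exception('division failed')
assert log.handlers[0].sent[0]['level'] == logging.ERROR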
cffb8551169c18f68eb8eaa506a8565b8080dc54
read_ts.py
read_ts.py
#!/usr/bin/env python3 from ts import * import sys pmt_pid = None pes_readers = {} for ts_packet in read_ts(sys.argv[1]): print(ts_packet) if ts_packet.pid == ProgramAssociationTable.PID: pat = ProgramAssociationTable(ts_packet.payload) input() print(pat) programs = list(pat.programs.values()) if len(programs) != 1: raise Exception("PAT has {} programs, but DASH only " "allows 1 program." .format(len(pat.programs))) if pmt_pid is not None and programs[0] != pmt_pid: raise Exception("PAT has new PMT PID. This program has " "not been tested to handled this case.") pmt_pid = programs[0] elif ts_packet.pid == pmt_pid: input() pmt = ProgramMapTable(ts_packet.payload) print(pmt) for pid in pmt.streams: if pid not in pes_readers: pes_readers[pid] = PESReader() elif ts_packet.pid in pes_readers: pes_packet = pes_readers[ts_packet.pid].add_ts_packet(ts_packet) if pes_packet: input() print(pes_packet) input()
Add tool to analyse MPEG-TS packets.
Add tool to analyse MPEG-TS packets.
Python
bsd-2-clause
brendanlong/dash-ts-tools,brendanlong/dash-ts-tools
Add tool to analyse MPEG-TS packets.
#!/usr/bin/env python3 from ts import * import sys pmt_pid = None pes_readers = {} for ts_packet in read_ts(sys.argv[1]): print(ts_packet) if ts_packet.pid == ProgramAssociationTable.PID: pat = ProgramAssociationTable(ts_packet.payload) input() print(pat) programs = list(pat.programs.values()) if len(programs) != 1: raise Exception("PAT has {} programs, but DASH only " "allows 1 program." .format(len(pat.programs))) if pmt_pid is not None and programs[0] != pmt_pid: raise Exception("PAT has new PMT PID. This program has " "not been tested to handled this case.") pmt_pid = programs[0] elif ts_packet.pid == pmt_pid: input() pmt = ProgramMapTable(ts_packet.payload) print(pmt) for pid in pmt.streams: if pid not in pes_readers: pes_readers[pid] = PESReader() elif ts_packet.pid in pes_readers: pes_packet = pes_readers[ts_packet.pid].add_ts_packet(ts_packet) if pes_packet: input() print(pes_packet) input()
<commit_before><commit_msg>Add tool to analyse MPEG-TS packets.<commit_after>
#!/usr/bin/env python3 from ts import * import sys pmt_pid = None pes_readers = {} for ts_packet in read_ts(sys.argv[1]): print(ts_packet) if ts_packet.pid == ProgramAssociationTable.PID: pat = ProgramAssociationTable(ts_packet.payload) input() print(pat) programs = list(pat.programs.values()) if len(programs) != 1: raise Exception("PAT has {} programs, but DASH only " "allows 1 program." .format(len(pat.programs))) if pmt_pid is not None and programs[0] != pmt_pid: raise Exception("PAT has new PMT PID. This program has " "not been tested to handled this case.") pmt_pid = programs[0] elif ts_packet.pid == pmt_pid: input() pmt = ProgramMapTable(ts_packet.payload) print(pmt) for pid in pmt.streams: if pid not in pes_readers: pes_readers[pid] = PESReader() elif ts_packet.pid in pes_readers: pes_packet = pes_readers[ts_packet.pid].add_ts_packet(ts_packet) if pes_packet: input() print(pes_packet) input()
Add tool to analyse MPEG-TS packets.#!/usr/bin/env python3 from ts import * import sys pmt_pid = None pes_readers = {} for ts_packet in read_ts(sys.argv[1]): print(ts_packet) if ts_packet.pid == ProgramAssociationTable.PID: pat = ProgramAssociationTable(ts_packet.payload) input() print(pat) programs = list(pat.programs.values()) if len(programs) != 1: raise Exception("PAT has {} programs, but DASH only " "allows 1 program." .format(len(pat.programs))) if pmt_pid is not None and programs[0] != pmt_pid: raise Exception("PAT has new PMT PID. This program has " "not been tested to handled this case.") pmt_pid = programs[0] elif ts_packet.pid == pmt_pid: input() pmt = ProgramMapTable(ts_packet.payload) print(pmt) for pid in pmt.streams: if pid not in pes_readers: pes_readers[pid] = PESReader() elif ts_packet.pid in pes_readers: pes_packet = pes_readers[ts_packet.pid].add_ts_packet(ts_packet) if pes_packet: input() print(pes_packet) input()
<commit_before><commit_msg>Add tool to analyse MPEG-TS packets.<commit_after>#!/usr/bin/env python3 from ts import * import sys pmt_pid = None pes_readers = {} for ts_packet in read_ts(sys.argv[1]): print(ts_packet) if ts_packet.pid == ProgramAssociationTable.PID: pat = ProgramAssociationTable(ts_packet.payload) input() print(pat) programs = list(pat.programs.values()) if len(programs) != 1: raise Exception("PAT has {} programs, but DASH only " "allows 1 program." .format(len(pat.programs))) if pmt_pid is not None and programs[0] != pmt_pid: raise Exception("PAT has new PMT PID. This program has " "not been tested to handled this case.") pmt_pid = programs[0] elif ts_packet.pid == pmt_pid: input() pmt = ProgramMapTable(ts_packet.payload) print(pmt) for pid in pmt.streams: if pid not in pes_readers: pes_readers[pid] = PESReader() elif ts_packet.pid in pes_readers: pes_packet = pes_readers[ts_packet.pid].add_ts_packet(ts_packet) if pes_packet: input() print(pes_packet) input()
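The ts module doing the heavy lifting here is not shown. For orientation, a standalone parse of the fixed four-byte TS packet header (field layout per ISO/IEC 13818-1; this is what any TS reader must do before the PAT/PMT logic above can start):

def parse_ts_header(packet):
    """Return (pid, payload_unit_start, continuity) from a 188-byte packet."""
    if len(packet) != 188 or packet[0] != 0x47:  # 0x47 is the sync byte
        raise ValueError('not an MPEG-TS packet')
    pusi = bool(packet[1] & 0x40)
    pid = ((packet[1] & 0x1F) << 8) | packet[2]
    continuity = packet[3] & 0x0F
    return pid, pusi, continuity

pat_packet = bytes([0x47, 0x40, 0x00, 0x10] + [0xFF] * 184)
assert parse_ts_header(pat_packet) == (0, True, 0)  # PID 0 carries the PAT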
5d360aaf0d619472026a1099e606043fe4910bcf
studygroups/management/commands/fix_start_end_dates.py
studygroups/management/commands/fix_start_end_dates.py
from django.core.management.base import BaseCommand, CommandError from studygroups.models import StudyGroup class Command(BaseCommand): help = 'Make sure all study groups with meetings have the correct start/end dates' def handle(self, *args, **options): for sg in StudyGroup.objects.active(): if sg.meeting_set.active().count(): start_date = sg.first_meeting().meeting_date end_date = sg.last_meeting().meeting_date if start_date != sg.start_date or end_date != sg.end_date: sg.start_date = start_date sg.end_date = end_date sg.save() print(f'Fixed start/end dates for {sg} starting {start_date} ending {end_date}')
Add task to correctly set start/end dates based on active meetings for past / current learning circles
Add task to correctly set start/end dates based on active meetings for past / current learning circles
Python
mit
p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles
Add task to correctly set start/end dates based on active meetings for past / current learning circles
from django.core.management.base import BaseCommand, CommandError from studygroups.models import StudyGroup class Command(BaseCommand): help = 'Make sure all study groups with meetings have the correct start/end dates' def handle(self, *args, **options): for sg in StudyGroup.objects.active(): if sg.meeting_set.active().count(): start_date = sg.first_meeting().meeting_date end_date = sg.last_meeting().meeting_date if start_date != sg.start_date or end_date != sg.end_date: sg.start_date = start_date sg.end_date = end_date sg.save() print(f'Fixed start/end dates for {sg} starting {start_date} ending {end_date}')
<commit_before><commit_msg>Add task to correctly set start/end dates based on active meetings for past / current learning circles<commit_after>
from django.core.management.base import BaseCommand, CommandError from studygroups.models import StudyGroup class Command(BaseCommand): help = 'Make sure all study groups with meetings have the correct start/end dates' def handle(self, *args, **options): for sg in StudyGroup.objects.active(): if sg.meeting_set.active().count(): start_date = sg.first_meeting().meeting_date end_date = sg.last_meeting().meeting_date if start_date != sg.start_date or end_date != sg.end_date: sg.start_date = start_date sg.end_date = end_date sg.save() print(f'Fixed start/end dates for {sg} starting {start_date} ending {end_date}')
Add task to correctly set start/end dates based on active meetings for past / current learning circlesfrom django.core.management.base import BaseCommand, CommandError from studygroups.models import StudyGroup class Command(BaseCommand): help = 'Make sure all study groups with meetings have the correct start/end dates' def handle(self, *args, **options): for sg in StudyGroup.objects.active(): if sg.meeting_set.active().count(): start_date = sg.first_meeting().meeting_date end_date = sg.last_meeting().meeting_date if start_date != sg.start_date or end_date != sg.end_date: sg.start_date = start_date sg.end_date = end_date sg.save() print(f'Fixed start/end dates for {sg} starting {start_date} ending {end_date}')
<commit_before><commit_msg>Add task to correctly set start/end dates based on active meetings for past / current learning circles<commit_after>from django.core.management.base import BaseCommand, CommandError from studygroups.models import StudyGroup class Command(BaseCommand): help = 'Make sure all study groups with meetings have the correct start/end dates' def handle(self, *args, **options): for sg in StudyGroup.objects.active(): if sg.meeting_set.active().count(): start_date = sg.first_meeting().meeting_date end_date = sg.last_meeting().meeting_date if start_date != sg.start_date or end_date != sg.end_date: sg.start_date = start_date sg.end_date = end_date sg.save() print(f'Fixed start/end dates for {sg} starting {start_date} ending {end_date}')
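The command clamps start_date and end_date to the first and last active meeting. The invariant, reduced to plain dates (first_meeting()/last_meeting() are assumed from their names to return the earliest and latest meeting):

from datetime import date

def clamped_dates(meeting_dates, start, end):
    """Return (start, end) aligned with the meetings, or unchanged if none."""
    if not meeting_dates:
        return start, end
    return min(meeting_dates), max(meeting_dates)

meetings = [date(2020, 1, 8), date(2020, 1, 1), date(2020, 1, 15)]
assert clamped_dates(meetings, date(2019, 12, 1), date(2020, 2, 1)) == \
    (date(2020, 1, 1), date(2020, 1, 15))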
f540f024a99fb99e91b3cb38d88f211416c04d7c
munigeo/migrations/0002_auto_20150608_1607.py
munigeo/migrations/0002_auto_20150608_1607.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('munigeo', '0001_initial'), ] operations = [ migrations.AlterField( model_name='municipality', name='division', field=models.OneToOneField(related_name='muni', to='munigeo.AdministrativeDivision', null=True), ), ]
Add migration to go with 04fcda3.
Add migration to go with 04fcda3.
Python
agpl-3.0
City-of-Helsinki/munigeo
Add migration to go with 04fcda3.
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('munigeo', '0001_initial'), ] operations = [ migrations.AlterField( model_name='municipality', name='division', field=models.OneToOneField(related_name='muni', to='munigeo.AdministrativeDivision', null=True), ), ]
<commit_before><commit_msg>Add migration to go with 04fcda3.<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('munigeo', '0001_initial'), ] operations = [ migrations.AlterField( model_name='municipality', name='division', field=models.OneToOneField(related_name='muni', to='munigeo.AdministrativeDivision', null=True), ), ]
Add migration to go with 04fcda3.# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('munigeo', '0001_initial'), ] operations = [ migrations.AlterField( model_name='municipality', name='division', field=models.OneToOneField(related_name='muni', to='munigeo.AdministrativeDivision', null=True), ), ]
<commit_before><commit_msg>Add migration to go with 04fcda3.<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('munigeo', '0001_initial'), ] operations = [ migrations.AlterField( model_name='municipality', name='division', field=models.OneToOneField(related_name='muni', to='munigeo.AdministrativeDivision', null=True), ), ]
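The altered field is a nullable OneToOneField: at most one Municipality per AdministrativeDivision, possibly none. The uniqueness half of that contract as a toy (note that on Django 2.0 and later the field would also need an explicit on_delete argument):

muni_for_division = {}

def attach_municipality(division_id, muni):
    # OneToOneField semantics: a second municipality on the same division is an error
    if division_id in muni_for_division:
        raise ValueError('division already has a municipality')
    muni_for_division[division_id] = muni

attach_municipality(1, 'Helsinki')
try:
    attach_municipality(1, 'Espoo')
except ValueError:
    pass
assert muni_for_division == {1: 'Helsinki'}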
eb81bf33d26923e5d79d87aa0e4c1db32eaf7c7c
admin/base/migrations/0002_groups.py
admin/base/migrations/0002_groups.py
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations from django.contrib.auth.models import Group import logging logger = logging.getLogger(__file__) def add_groups(*args): group, created = Group.objects.get_or_create(name='nodes_and_users') if created: logger.info('nodes_and_users group created') try: group = Group.objects.get(name='prereg_group') group.name = 'prereg' group.save() logger.info('prereg_group renamed to prereg') except Group.DoesNotExist: group, created = Group.objects.get_or_create(name='prereg') if created: logger.info('prereg group created') class Migration(migrations.Migration): dependencies = [ ('base', '0001_groups'), ] operations = [ migrations.RunPython(add_groups), ]
Add start of a migration to add new groups for OSF Admin
Add start of a migration to add new groups for OSF Admin
Python
apache-2.0
Johnetordoff/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,hmoco/osf.io,TomBaxter/osf.io,Johnetordoff/osf.io,mattclark/osf.io,HalcyonChimera/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,caseyrollins/osf.io,erinspace/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,aaxelb/osf.io,crcresearch/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,erinspace/osf.io,saradbowman/osf.io,aaxelb/osf.io,HalcyonChimera/osf.io,Nesiehr/osf.io,aaxelb/osf.io,leb2dg/osf.io,aaxelb/osf.io,laurenrevere/osf.io,chrisseto/osf.io,icereval/osf.io,chennan47/osf.io,cslzchen/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,chrisseto/osf.io,leb2dg/osf.io,hmoco/osf.io,adlius/osf.io,hmoco/osf.io,felliott/osf.io,sloria/osf.io,chrisseto/osf.io,Nesiehr/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,acshi/osf.io,CenterForOpenScience/osf.io,icereval/osf.io,adlius/osf.io,crcresearch/osf.io,erinspace/osf.io,acshi/osf.io,acshi/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,sloria/osf.io,Nesiehr/osf.io,pattisdr/osf.io,caseyrollins/osf.io,monikagrabowska/osf.io,brianjgeiger/osf.io,crcresearch/osf.io,hmoco/osf.io,pattisdr/osf.io,pattisdr/osf.io,laurenrevere/osf.io,acshi/osf.io,adlius/osf.io,cslzchen/osf.io,cwisecarver/osf.io,baylee-d/osf.io,cslzchen/osf.io,HalcyonChimera/osf.io,caneruguz/osf.io,HalcyonChimera/osf.io,monikagrabowska/osf.io,chennan47/osf.io,cwisecarver/osf.io,mfraezz/osf.io,acshi/osf.io,binoculars/osf.io,caneruguz/osf.io,CenterForOpenScience/osf.io,caneruguz/osf.io,mfraezz/osf.io,chennan47/osf.io,icereval/osf.io,TomBaxter/osf.io,sloria/osf.io,adlius/osf.io,chrisseto/osf.io,laurenrevere/osf.io,felliott/osf.io,Johnetordoff/osf.io,caseyrollins/osf.io,leb2dg/osf.io,binoculars/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,leb2dg/osf.io,felliott/osf.io,mfraezz/osf.io,TomBaxter/osf.io,mattclark/osf.io,binoculars/osf.io,mattclark/osf.io,caneruguz/osf.io
Add start of a migration to add new groups for OSF Admin
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations from django.contrib.auth.models import Group import logging logger = logging.getLogger(__file__) def add_groups(*args): group, created = Group.objects.get_or_create(name='nodes_and_users') if created: logger.info('nodes_and_users group created') try: group = Group.objects.get(name='prereg_group') group.name = 'prereg' group.save() logger.info('prereg_group renamed to prereg') except Group.DoesNotExist: group, created = Group.objects.get_or_create(name='prereg') if created: logger.info('prereg group created') class Migration(migrations.Migration): dependencies = [ ('base', '0001_groups'), ] operations = [ migrations.RunPython(add_groups), ]
<commit_before><commit_msg>Add start of a migration to add new groups for OSF Admin<commit_after>
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations from django.contrib.auth.models import Group import logging logger = logging.getLogger(__file__) def add_groups(*args): group, created = Group.objects.get_or_create(name='nodes_and_users') if created: logger.info('nodes_and_users group created') try: group = Group.objects.get(name='prereg_group') group.name = 'prereg' group.save() logger.info('prereg_group renamed to prereg') except Group.DoesNotExist: group, created = Group.objects.get_or_create(name='prereg') if created: logger.info('prereg group created') class Migration(migrations.Migration): dependencies = [ ('base', '0001_groups'), ] operations = [ migrations.RunPython(add_groups), ]
Add start of a migration to add new groups for OSF Admin# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations from django.contrib.auth.models import Group import logging logger = logging.getLogger(__file__) def add_groups(*args): group, created = Group.objects.get_or_create(name='nodes_and_users') if created: logger.info('nodes_and_users group created') try: group = Group.objects.get(name='prereg_group') group.name = 'prereg' group.save() logger.info('prereg_group renamed to prereg') except Group.DoesNotExist: group, created = Group.objects.get_or_create(name='prereg') if created: logger.info('prereg group created') class Migration(migrations.Migration): dependencies = [ ('base', '0001_groups'), ] operations = [ migrations.RunPython(add_groups), ]
<commit_before><commit_msg>Add start of a migration to add new groups for OSF Admin<commit_after># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations from django.contrib.auth.models import Group import logging logger = logging.getLogger(__file__) def add_groups(*args): group, created = Group.objects.get_or_create(name='nodes_and_users') if created: logger.info('nodes_and_users group created') try: group = Group.objects.get(name='prereg_group') group.name = 'prereg' group.save() logger.info('prereg_group renamed to prereg') except Group.DoesNotExist: group, created = Group.objects.get_or_create(name='prereg') if created: logger.info('prereg group created') class Migration(migrations.Migration): dependencies = [ ('base', '0001_groups'), ] operations = [ migrations.RunPython(add_groups), ]
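The data migration is idempotent: it creates groups only when missing and renames prereg_group if it already exists. Two hedged observations: passing a reverse callable (migrations.RunPython(add_groups, migrations.RunPython.noop)) would make it cleanly reversible, and Django's documentation recommends fetching historical models via the apps argument rather than importing Group directly. The create-or-rename logic itself, reduced to a set:

groups = {'prereg_group'}

def ensure_group(name, old_name=None):
    """Create `name` if absent; if `old_name` exists instead, rename it."""
    if name in groups:
        return 'exists'
    if old_name in groups:
        groups.discard(old_name)
        groups.add(name)
        return 'renamed'
    groups.add(name)
    return 'created'

assert ensure_group('nodes_and_users') == 'created'
assert ensure_group('prereg', old_name='prereg_group') == 'renamed'
assert groups == {'nodes_and_users', 'prereg'}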
4a5e5d07f3510394ac1112727c66ddd122762aa9
test/long_test.py
test/long_test.py
#!/usr/bin/python import sys from os import execv from optparse import OptionParser from cloud_retester import do_test, do_test_cloud, report_cloud, setup_testing_nodes, terminate_testing_nodes long_test_branch = "long-test" no_checkout_arg = "--no-checkout" def exec_self(args): execv("/usr/bin/env", ["python", sys.argv[0]] + args) def build_args_parser(): parser = OptionParser() parser.add_option(no_checkout_arg, dest="no_checkout", action="store_true") return parser def git_checkout(branch): do_test("git fetch -f origin {b}:refs/remotes/origin/{b} && git checkout -f origin/{b}".format(b=branch)) def git_local_changes(): return None (options, args) = build_args_parser().parse_args(sys.argv[1:]) if not(options.no_checkout): if git_local_changes(): print "found local changes, bailing out" sys.exit(1) print "Checking out" git_checkout(long_test_branch) # import pdb; pdb.set_trace() exec_self(sys.argv[1:] + [no_checkout_arg]) else: print "Checked out" # Clean the repo do_test("cd ../src/; make clean")
Check out 'long-test' branch and reexecute self at long test startup.
Check out 'long-test' branch and reexecute self at long test startup.
Python
agpl-3.0
pap/rethinkdb,tempbottle/rethinkdb,alash3al/rethinkdb,yaolinz/rethinkdb,Qinusty/rethinkdb,tempbottle/rethinkdb,wkennington/rethinkdb,wkennington/rethinkdb,sontek/rethinkdb,greyhwndz/rethinkdb,bpradipt/rethinkdb,scripni/rethinkdb,scripni/rethinkdb,eliangidoni/rethinkdb,lenstr/rethinkdb,eliangidoni/rethinkdb,captainpete/rethinkdb,yakovenkodenis/rethinkdb,niieani/rethinkdb,eliangidoni/rethinkdb,captainpete/rethinkdb,losywee/rethinkdb,elkingtonmcb/rethinkdb,bchavez/rethinkdb,jfriedly/rethinkdb,mcanthony/rethinkdb,grandquista/rethinkdb,wkennington/rethinkdb,pap/rethinkdb,rrampage/rethinkdb,spblightadv/rethinkdb,gdi2290/rethinkdb,jesseditson/rethinkdb,matthaywardwebdesign/rethinkdb,ayumilong/rethinkdb,spblightadv/rethinkdb,RubenKelevra/rethinkdb,mcanthony/rethinkdb,captainpete/rethinkdb,niieani/rethinkdb,AtnNn/rethinkdb,catroot/rethinkdb,wujf/rethinkdb,JackieXie168/rethinkdb,alash3al/rethinkdb,niieani/rethinkdb,wojons/rethinkdb,JackieXie168/rethinkdb,jmptrader/rethinkdb,JackieXie168/rethinkdb,wojons/rethinkdb,Qinusty/rethinkdb,jfriedly/rethinkdb,AntouanK/rethinkdb,nviennot/rethinkdb,JackieXie168/rethinkdb,wojons/rethinkdb,mbroadst/rethinkdb,robertjpayne/rethinkdb,eliangidoni/rethinkdb,alash3al/rethinkdb,gavioto/rethinkdb,matthaywardwebdesign/rethinkdb,mbroadst/rethinkdb,AntouanK/rethinkdb,dparnell/rethinkdb,mquandalle/rethinkdb,Qinusty/rethinkdb,sbusso/rethinkdb,ajose01/rethinkdb,mbroadst/rethinkdb,yaolinz/rethinkdb,marshall007/rethinkdb,gavioto/rethinkdb,RubenKelevra/rethinkdb,nviennot/rethinkdb,sbusso/rethinkdb,yakovenkodenis/rethinkdb,niieani/rethinkdb,gavioto/rethinkdb,victorbriz/rethinkdb,jmptrader/rethinkdb,sbusso/rethinkdb,mquandalle/rethinkdb,scripni/rethinkdb,lenstr/rethinkdb,losywee/rethinkdb,gavioto/rethinkdb,4talesa/rethinkdb,scripni/rethinkdb,elkingtonmcb/rethinkdb,AtnNn/rethinkdb,niieani/rethinkdb,elkingtonmcb/rethinkdb,eliangidoni/rethinkdb,AntouanK/rethinkdb,ayumilong/rethinkdb,urandu/rethinkdb,catroot/rethinkdb,matthaywardwebdesign/rethinkdb,catroot/rethinkdb,mcanthony/rethinkdb,jfriedly/rethinkdb,greyhwndz/rethinkdb,nviennot/rethinkdb,sontek/rethinkdb,alash3al/rethinkdb,grandquista/rethinkdb,urandu/rethinkdb,bpradipt/rethinkdb,urandu/rethinkdb,ayumilong/rethinkdb,sontek/rethinkdb,wujf/rethinkdb,gdi2290/rethinkdb,KSanthanam/rethinkdb,sebadiaz/rethinkdb,victorbriz/rethinkdb,marshall007/rethinkdb,tempbottle/rethinkdb,Qinusty/rethinkdb,KSanthanam/rethinkdb,grandquista/rethinkdb,sbusso/rethinkdb,4talesa/rethinkdb,mquandalle/rethinkdb,robertjpayne/rethinkdb,captainpete/rethinkdb,matthaywardwebdesign/rethinkdb,matthaywardwebdesign/rethinkdb,gavioto/rethinkdb,marshall007/rethinkdb,losywee/rethinkdb,KSanthanam/rethinkdb,4talesa/rethinkdb,lenstr/rethinkdb,captainpete/rethinkdb,RubenKelevra/rethinkdb,marshall007/rethinkdb,pap/rethinkdb,lenstr/rethinkdb,victorbriz/rethinkdb,Wilbeibi/rethinkdb,rrampage/rethinkdb,gavioto/rethinkdb,robertjpayne/rethinkdb,niieani/rethinkdb,4talesa/rethinkdb,victorbriz/rethinkdb,grandquista/rethinkdb,rrampage/rethinkdb,tempbottle/rethinkdb,pap/rethinkdb,mbroadst/rethinkdb,sbusso/rethinkdb,tempbottle/rethinkdb,lenstr/rethinkdb,4talesa/rethinkdb,urandu/rethinkdb,scripni/rethinkdb,alash3al/rethinkdb,AtnNn/rethinkdb,ayumilong/rethinkdb,eliangidoni/rethinkdb,marshall007/rethinkdb,RubenKelevra/rethinkdb,tempbottle/rethinkdb,wojons/rethinkdb,scripni/rethinkdb,wojons/rethinkdb,sbusso/rethinkdb,captainpete/rethinkdb,pap/rethinkdb,eliangidoni/rethinkdb,ayumilong/rethinkdb,wujf/rethinkdb,dparnell/rethinkdb,dparnell/rethinkdb,rrampage/rethinkdb,bpradipt/rethinkdb,yaolinz/rethinkdb,AtnNn/rethinkdb,catroot/rethinkdb,ajose01/rethinkdb,JackieXie168/rethinkdb,AntouanK/rethinkdb,spblightadv/rethinkdb,losywee/rethinkdb,ajose01/rethinkdb,dparnell/rethinkdb,AtnNn/rethinkdb,grandquista/rethinkdb,robertjpayne/rethinkdb,spblightadv/rethinkdb,yakovenkodenis/rethinkdb,mquandalle/rethinkdb,ajose01/rethinkdb,sebadiaz/rethinkdb,bchavez/rethinkdb,KSanthanam/rethinkdb,dparnell/rethinkdb,jesseditson/rethinkdb,marshall007/rethinkdb,yakovenkodenis/rethinkdb,elkingtonmcb/rethinkdb,grandquista/rethinkdb,victorbriz/rethinkdb,JackieXie168/rethinkdb,ayumilong/rethinkdb,gdi2290/rethinkdb,sebadiaz/rethinkdb,jfriedly/rethinkdb,bpradipt/rethinkdb,greyhwndz/rethinkdb,spblightadv/rethinkdb,mquandalle/rethinkdb,yaolinz/rethinkdb,Wilbeibi/rethinkdb,jmptrader/rethinkdb,RubenKelevra/rethinkdb,elkingtonmcb/rethinkdb,marshall007/rethinkdb,ajose01/rethinkdb,robertjpayne/rethinkdb,scripni/rethinkdb,wujf/rethinkdb,bpradipt/rethinkdb,bpradipt/rethinkdb,yakovenkodenis/rethinkdb,elkingtonmcb/rethinkdb,catroot/rethinkdb,sebadiaz/rethinkdb,Qinusty/rethinkdb,victorbriz/rethinkdb,sontek/rethinkdb,losywee/rethinkdb,eliangidoni/rethinkdb,lenstr/rethinkdb,jmptrader/rethinkdb,wojons/rethinkdb,nviennot/rethinkdb,dparnell/rethinkdb,pap/rethinkdb,yaolinz/rethinkdb,wojons/rethinkdb,mbroadst/rethinkdb,rrampage/rethinkdb,alash3al/rethinkdb,bchavez/rethinkdb,mcanthony/rethinkdb,jesseditson/rethinkdb,elkingtonmcb/rethinkdb,niieani/rethinkdb,KSanthanam/rethinkdb,jfriedly/rethinkdb,jmptrader/rethinkdb,AtnNn/rethinkdb,eliangidoni/rethinkdb,jfriedly/rethinkdb,wkennington/rethinkdb,mbroadst/rethinkdb,tempbottle/rethinkdb,sebadiaz/rethinkdb,spblightadv/rethinkdb,Wilbeibi/rethinkdb,jesseditson/rethinkdb,jesseditson/rethinkdb,AtnNn/rethinkdb,yakovenkodenis/rethinkdb,grandquista/rethinkdb,alash3al/rethinkdb,matthaywardwebdesign/rethinkdb,mquandalle/rethinkdb,yaolinz/rethinkdb,gdi2290/rethinkdb,dparnell/rethinkdb,mbroadst/rethinkdb,tempbottle/rethinkdb,bchavez/rethinkdb,lenstr/rethinkdb,rrampage/rethinkdb,captainpete/rethinkdb,alash3al/rethinkdb,jfriedly/rethinkdb,dparnell/rethinkdb,bchavez/rethinkdb,KSanthanam/rethinkdb,bchavez/rethinkdb,4talesa/rethinkdb,ayumilong/rethinkdb,robertjpayne/rethinkdb,pap/rethinkdb,greyhwndz/rethinkdb,yakovenkodenis/rethinkdb,losywee/rethinkdb,catroot/rethinkdb,elkingtonmcb/rethinkdb,nviennot/rethinkdb,ayumilong/rethinkdb,mcanthony/rethinkdb,victorbriz/rethinkdb,AntouanK/rethinkdb,mbroadst/rethinkdb,sebadiaz/rethinkdb,gdi2290/rethinkdb,JackieXie168/rethinkdb,urandu/rethinkdb,jmptrader/rethinkdb,Wilbeibi/rethinkdb,pap/rethinkdb,gavioto/rethinkdb,wkennington/rethinkdb,captainpete/rethinkdb,Qinusty/rethinkdb,dparnell/rethinkdb,greyhwndz/rethinkdb,rrampage/rethinkdb,Wilbeibi/rethinkdb,Wilbeibi/rethinkdb,bchavez/rethinkdb,KSanthanam/rethinkdb,bpradipt/rethinkdb,AtnNn/rethinkdb,spblightadv/rethinkdb,bchavez/rethinkdb,JackieXie168/rethinkdb,sbusso/rethinkdb,matthaywardwebdesign/rethinkdb,gavioto/rethinkdb,Qinusty/rethinkdb,sebadiaz/rethinkdb,wojons/rethinkdb,Wilbeibi/rethinkdb,urandu/rethinkdb,AntouanK/rethinkdb,JackieXie168/rethinkdb,mbroadst/rethinkdb,wkennington/rethinkdb,Wilbeibi/rethinkdb,jmptrader/rethinkdb,jesseditson/rethinkdb,nviennot/rethinkdb,robertjpayne/rethinkdb,yakovenkodenis/rethinkdb,sontek/rethinkdb,grandquista/rethinkdb,sontek/rethinkdb,ajose01/rethinkdb
Check out 'long-test' branch and reexecute self at long test startup.
#!/usr/bin/python import sys from os import execv from optparse import OptionParser from cloud_retester import do_test, do_test_cloud, report_cloud, setup_testing_nodes, terminate_testing_nodes long_test_branch = "long-test" no_checkout_arg = "--no-checkout" def exec_self(args): execv("/usr/bin/env", ["python", sys.argv[0]] + args) def build_args_parser(): parser = OptionParser() parser.add_option(no_checkout_arg, dest="no_checkout", action="store_true") return parser def git_checkout(branch): do_test("git fetch -f origin {b}:refs/remotes/origin/{b} && git checkout -f origin/{b}".format(b=branch)) def git_local_changes(): return None (options, args) = build_args_parser().parse_args(sys.argv[1:]) if not(options.no_checkout): if git_local_changes(): print "found local changes, bailing out" sys.exit(1) print "Checking out" git_checkout(long_test_branch) # import pdb; pdb.set_trace() exec_self(sys.argv[1:] + [no_checkout_arg]) else: print "Checked out" # Clean the repo do_test("cd ../src/; make clean")
<commit_before><commit_msg>Check out 'long-test' branch and reexecute self at long test startup.<commit_after>
#!/usr/bin/python import sys from os import execv from optparse import OptionParser from cloud_retester import do_test, do_test_cloud, report_cloud, setup_testing_nodes, terminate_testing_nodes long_test_branch = "long-test" no_checkout_arg = "--no-checkout" def exec_self(args): execv("/usr/bin/env", ["python", sys.argv[0]] + args) def build_args_parser(): parser = OptionParser() parser.add_option(no_checkout_arg, dest="no_checkout", action="store_true") return parser def git_checkout(branch): do_test("git fetch -f origin {b}:refs/remotes/origin/{b} && git checkout -f origin/{b}".format(b=branch)) def git_local_changes(): return None (options, args) = build_args_parser().parse_args(sys.argv[1:]) if not(options.no_checkout): if git_local_changes(): print "found local changes, bailing out" sys.exit(1) print "Checking out" git_checkout(long_test_branch) # import pdb; pdb.set_trace() exec_self(sys.argv[1:] + [no_checkout_arg]) else: print "Checked out" # Clean the repo do_test("cd ../src/; make clean")
Check out 'long-test' branch and reexecute self at long test startup.#!/usr/bin/python import sys from os import execv from optparse import OptionParser from cloud_retester import do_test, do_test_cloud, report_cloud, setup_testing_nodes, terminate_testing_nodes long_test_branch = "long-test" no_checkout_arg = "--no-checkout" def exec_self(args): execv("/usr/bin/env", ["python", sys.argv[0]] + args) def build_args_parser(): parser = OptionParser() parser.add_option(no_checkout_arg, dest="no_checkout", action="store_true") return parser def git_checkout(branch): do_test("git fetch -f origin {b}:refs/remotes/origin/{b} && git checkout -f origin/{b}".format(b=branch)) def git_local_changes(): return None (options, args) = build_args_parser().parse_args(sys.argv[1:]) if not(options.no_checkout): if git_local_changes(): print "found local changes, bailing out" sys.exit(1) print "Checking out" git_checkout(long_test_branch) # import pdb; pdb.set_trace() exec_self(sys.argv[1:] + [no_checkout_arg]) else: print "Checked out" # Clean the repo do_test("cd ../src/; make clean")
<commit_before><commit_msg>Check out 'long-test' branch and reexecute self at long test startup.<commit_after>#!/usr/bin/python import sys from os import execv from optparse import OptionParser from cloud_retester import do_test, do_test_cloud, report_cloud, setup_testing_nodes, terminate_testing_nodes long_test_branch = "long-test" no_checkout_arg = "--no-checkout" def exec_self(args): execv("/usr/bin/env", ["python", sys.argv[0]] + args) def build_args_parser(): parser = OptionParser() parser.add_option(no_checkout_arg, dest="no_checkout", action="store_true") return parser def git_checkout(branch): do_test("git fetch -f origin {b}:refs/remotes/origin/{b} && git checkout -f origin/{b}".format(b=branch)) def git_local_changes(): return None (options, args) = build_args_parser().parse_args(sys.argv[1:]) if not(options.no_checkout): if git_local_changes(): print "found local changes, bailing out" sys.exit(1) print "Checking out" git_checkout(long_test_branch) # import pdb; pdb.set_trace() exec_self(sys.argv[1:] + [no_checkout_arg]) else: print "Checked out" # Clean the repo do_test("cd ../src/; make clean")
697f5fdc260157da86abbf4579cd3f5e04eb3c63
brazilnum/cei.py
brazilnum/cei.py
#!/usr/bin/env python import re import random from operator import mul """ Functions for working with Brazilian CEI identifiers. """ NONDIGIT = re.compile(r'[^0-9]') CEI_WEIGHTS = [7, 4, 1, 8, 5, 2, 1, 6, 3, 7, 4] def clean_cei(cei): """Takes a CEI and turns it into a string of only numbers.""" return NONDIGIT.sub('', str(cei)) def validate_cei(cei): """Check whether CEI is valid.""" cei = clean_cei(cei) # all complete CEI are 12 digits long if len(cei) != 12: return False digits = [int(k) for k in cei] # identifier digits # validate the check digit digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits[:-1])) check = (10 - (sum(divmod(digsum % 100, 10)) % 10)) % 10 return check == digits[-1] def cei_check_digit(cei): """Find check digit needed to make a CEI valid.""" cei = clean_cei(cei) if len(cei) < 11: raise ValueError('CEI must have at least 11 digits: {0}'.format(cei)) digits = [int(k) for k in cei[:11]] # find the check digit digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits)) return (10 - (sum(divmod(digsum % 100, 10)) % 10)) % 10 def format_cei(cei): """Applies typical 00.000.00000/00 formatting to CEI.""" fmt = '{0}.{1}.{2}/{3}' cei = clean_cei(cei) return fmt.format(cei[:2], cei[2:5], cei[5:10], cei[10:]) def pad_cei(cei, validate=True): """Takes a CEI that probably had leading zeros and pads it.""" cei = clean_cei(cei) cei = '%0.012i' % int(cei) if validate and not validate_cei(cei): raise ValueError('Invalid CEI: {0}'.format(cei)) return cei def random_cei(formatted=True): """Create a random, valid CEI identifier.""" stem = random.randint(10000000000, 99999999999) cei = str(stem) + str(cei_check_digit(stem)) if formatted: return format_cei(cei) return cei
Create validator for CEI identifiers
Create validator for CEI identifiers Cadastro Específico do INSS (CEI) is a business identifier for entities that are not required to have a CNPJ.
Python
mit
poliquin/brazilnum
Create validator for CEI identifiers Cadastro Específico do INSS (CEI) is a business identifier for entities that are not required to have a CNPJ.
#!/usr/bin/env python import re import random from operator import mul """ Functions for working with Brazilian CEI identifiers. """ NONDIGIT = re.compile(r'[^0-9]') CEI_WEIGHTS = [7, 4, 1, 8, 5, 2, 1, 6, 3, 7, 4] def clean_cei(cei): """Takes a CEI and turns it into a string of only numbers.""" return NONDIGIT.sub('', str(cei)) def validate_cei(cei): """Check whether CEI is valid.""" cei = clean_cei(cei) # all complete CEI are 12 digits long if len(cei) != 12: return False digits = [int(k) for k in cei] # identifier digits # validate the check digit digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits[:-1])) check = (10 - (sum(divmod(digsum % 100, 10)) % 10)) % 10 return check == digits[-1] def cei_check_digit(cei): """Find check digit needed to make a CEI valid.""" cei = clean_cei(cei) if len(cei) < 11: raise ValueError('CEI must have at least 11 digits: {0}'.format(cei)) digits = [int(k) for k in cei[:11]] # find the check digit digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits)) return (10 - (sum(divmod(digsum % 100, 10)) % 10)) % 10 def format_cei(cei): """Applies typical 00.000.00000/00 formatting to CEI.""" fmt = '{0}.{1}.{2}/{3}' cei = clean_cei(cei) return fmt.format(cei[:2], cei[2:5], cei[5:10], cei[10:]) def pad_cei(cei, validate=True): """Takes a CEI that probably had leading zeros and pads it.""" cei = clean_cei(cei) cei = '%0.012i' % int(cei) if validate and not validate_cei(cei): raise ValueError('Invalid CEI: {0}'.format(cei)) return cei def random_cei(formatted=True): """Create a random, valid CEI identifier.""" stem = random.randint(10000000000, 99999999999) cei = str(stem) + str(cei_check_digit(stem)) if formatted: return format_cei(cei) return cei
<commit_before><commit_msg>Create validator for CEI identifiers Cadastro Específico do INSS (CEI) is a business identifier for entities that are not required to have a CNPJ.<commit_after>
#!/usr/bin/env python import re import random from operator import mul """ Functions for working with Brazilian CEI identifiers. """ NONDIGIT = re.compile(r'[^0-9]') CEI_WEIGHTS = [7, 4, 1, 8, 5, 2, 1, 6, 3, 7, 4] def clean_cei(cei): """Takes a CEI and turns it into a string of only numbers.""" return NONDIGIT.sub('', str(cei)) def validate_cei(cei): """Check whether CEI is valid.""" cei = clean_cei(cei) # all complete CEI are 12 digits long if len(cei) != 12: return False digits = [int(k) for k in cei] # identifier digits # validate the check digit digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits[:-1])) check = (10 - (sum(divmod(digsum % 100, 10)) % 10)) % 10 return check == digits[-1] def cei_check_digit(cei): """Find check digit needed to make a CEI valid.""" cei = clean_cei(cei) if len(cei) < 11: raise ValueError('CEI must have at least 11 digits: {0}'.format(cei)) digits = [int(k) for k in cei[:11]] # find the check digit digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits)) return (10 - (sum(divmod(digsum % 100, 10)) % 10)) % 10 def format_cei(cei): """Applies typical 00.000.00000/00 formatting to CEI.""" fmt = '{0}.{1}.{2}/{3}' cei = clean_cei(cei) return fmt.format(cei[:2], cei[2:5], cei[5:10], cei[10:]) def pad_cei(cei, validate=True): """Takes a CEI that probably had leading zeros and pads it.""" cei = clean_cei(cei) cei = '%0.012i' % int(cei) if validate and not validate_cei(cei): raise ValueError('Invalid CEI: {0}'.format(cei)) return cei def random_cei(formatted=True): """Create a random, valid CEI identifier.""" stem = random.randint(10000000000, 99999999999) cei = str(stem) + str(cei_check_digit(stem)) if formatted: return format_cei(cei) return cei
Create validator for CEI identifiers Cadastro Específico do INSS (CEI) is a business identifier for entities that are not required to have a CNPJ.#!/usr/bin/env python import re import random from operator import mul """ Functions for working with Brazilian CEI identifiers. """ NONDIGIT = re.compile(r'[^0-9]') CEI_WEIGHTS = [7, 4, 1, 8, 5, 2, 1, 6, 3, 7, 4] def clean_cei(cei): """Takes a CEI and turns it into a string of only numbers.""" return NONDIGIT.sub('', str(cei)) def validate_cei(cei): """Check whether CEI is valid.""" cei = clean_cei(cei) # all complete CEI are 12 digits long if len(cei) != 12: return False digits = [int(k) for k in cei] # identifier digits # validate the check digit digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits[:-1])) check = (10 - (sum(divmod(digsum % 100, 10)) % 10)) % 10 return check == digits[-1] def cei_check_digit(cei): """Find check digit needed to make a CEI valid.""" cei = clean_cei(cei) if len(cei) < 11: raise ValueError('CEI must have at least 11 digits: {0}'.format(cei)) digits = [int(k) for k in cei[:11]] # find the check digit digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits)) return (10 - (sum(divmod(digsum % 100, 10)) % 10)) % 10 def format_cei(cei): """Applies typical 00.000.00000/00 formatting to CEI.""" fmt = '{0}.{1}.{2}/{3}' cei = clean_cei(cei) return fmt.format(cei[:2], cei[2:5], cei[5:10], cei[10:]) def pad_cei(cei, validate=True): """Takes a CEI that probably had leading zeros and pads it.""" cei = clean_cei(cei) cei = '%0.012i' % int(cei) if validate and not validate_cei(cei): raise ValueError('Invalid CEI: {0}'.format(cei)) return cei def random_cei(formatted=True): """Create a random, valid CEI identifier.""" stem = random.randint(10000000000, 99999999999) cei = str(stem) + str(cei_check_digit(stem)) if formatted: return format_cei(cei) return cei
<commit_before><commit_msg>Create validator for CEI identifiers Cadastro Específico do INSS (CEI) is a business identifier for entities that are not required to have a CNPJ.<commit_after>#!/usr/bin/env python import re import random from operator import mul """ Functions for working with Brazilian CEI identifiers. """ NONDIGIT = re.compile(r'[^0-9]') CEI_WEIGHTS = [7, 4, 1, 8, 5, 2, 1, 6, 3, 7, 4] def clean_cei(cei): """Takes a CEI and turns it into a string of only numbers.""" return NONDIGIT.sub('', str(cei)) def validate_cei(cei): """Check whether CEI is valid.""" cei = clean_cei(cei) # all complete CEI are 12 digits long if len(cei) != 12: return False digits = [int(k) for k in cei] # identifier digits # validate the check digit digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits[:-1])) check = (10 - (sum(divmod(digsum % 100, 10)) % 10)) % 10 return check == digits[-1] def cei_check_digit(cei): """Find check digit needed to make a CEI valid.""" cei = clean_cei(cei) if len(cei) < 11: raise ValueError('CEI must have at least 11 digits: {0}'.format(cei)) digits = [int(k) for k in cei[:11]] # find the check digit digsum = sum(mul(*k) for k in zip(CEI_WEIGHTS, digits)) return (10 - (sum(divmod(digsum % 100, 10)) % 10)) % 10 def format_cei(cei): """Applies typical 00.000.00000/00 formatting to CEI.""" fmt = '{0}.{1}.{2}/{3}' cei = clean_cei(cei) return fmt.format(cei[:2], cei[2:5], cei[5:10], cei[10:]) def pad_cei(cei, validate=True): """Takes a CEI that probably had leading zeros and pads it.""" cei = clean_cei(cei) cei = '%0.012i' % int(cei) if validate and not validate_cei(cei): raise ValueError('Invalid CEI: {0}'.format(cei)) return cei def random_cei(formatted=True): """Create a random, valid CEI identifier.""" stem = random.randint(10000000000, 99999999999) cei = str(stem) + str(cei_check_digit(stem)) if formatted: return format_cei(cei) return cei
0528ee6f8170eac9bc928ea9d52c59f2cf4f8f26
test_3_amsaves.py
test_3_amsaves.py
#!/usr/bin/env python __author__ = "Eric Allen Youngson" __email__ = "eric@scneco.com" __copyright__ = "Copyright 2015, Succession Ecological Services" __license__ = "GNU Affero (GPLv3)" """ This module provides functions for requesting results from the DeltaMeter Services API * deltameterservices.com * """ import deltamtrsvs as dms_api import amsaves as ams import private as pvt import types as tp import pandas as pd import re from collections import defaultdict headers = pvt.headers properties_url = pvt.properties_url model_url = pvt.model_url comparison_url = pvt.comparison_url audit_url = pvt.audit_url sites = [pvt.FDL, pvt.HJSMS, pvt.Midlesboro] def test_am_saves_audit(): """ Pass the results of get_model_audits function, confirm DataFrame returned in requested results format for the 'America Saves!' program (write contents to .CSV file for review) """ for site in sites: bldgIDs = dms_api.get_property_bldg_IDs(properties_url, site, headers) (json_models, valBldgIDs) = dms_api.get_bldg_models(model_url, bldgIDs, headers) (modelIDs, comparisons) = dms_api.get_model_comparisons(comparison_url, json_models, headers) (refModelIDs, audits) = dms_api.get_model_audits(audit_url, modelIDs, headers) combinedUsageDfs = ams.am_saves_audit(refModelIDs, audits) df = pd.DataFrame(combinedUsageDfs[refModelIDs[0]]) fname = refModelIDs[0]+'-audit.csv' with open(fname, 'wb') as outf: outcsv = df.to_csv(fname) assert isinstance(combinedUsageDfs, defaultdict) assert all(isinstance(combinedUsageDf, pd.DataFrame) for combinedUsageDf in combinedUsageDfs.values())
Split the amsaves tests to focus on the second test function
Split the amsaves tests to focus on the second test function
Python
agpl-3.0
eayoungs/DeltaMtrSvs,eayoungs/DeltaMtrSvs
Split the amsaves tests to focus on the second test function
#!/usr/bin/env python __author__ = "Eric Allen Youngson" __email__ = "eric@scneco.com" __copyright__ = "Copyright 2015, Succession Ecological Services" __license__ = "GNU Affero (GPLv3)" """ This module provides functions for requesting results from the DeltaMeter Services API * deltameterservices.com * """ import deltamtrsvs as dms_api import amsaves as ams import private as pvt import types as tp import pandas as pd import re from collections import defaultdict headers = pvt.headers properties_url = pvt.properties_url model_url = pvt.model_url comparison_url = pvt.comparison_url audit_url = pvt.audit_url sites = [pvt.FDL, pvt.HJSMS, pvt.Midlesboro] def test_am_saves_audit(): """ Pass the results of get_model_audits function, confirm DataFrame returned in requested results format for the 'America Saves!' program (write contents to .CSV file for review) """ for site in sites: bldgIDs = dms_api.get_property_bldg_IDs(properties_url, site, headers) (json_models, valBldgIDs) = dms_api.get_bldg_models(model_url, bldgIDs, headers) (modelIDs, comparisons) = dms_api.get_model_comparisons(comparison_url, json_models, headers) (refModelIDs, audits) = dms_api.get_model_audits(audit_url, modelIDs, headers) combinedUsageDfs = ams.am_saves_audit(refModelIDs, audits) df = pd.DataFrame(combinedUsageDfs[refModelIDs[0]]) fname = refModelIDs[0]+'-audit.csv' with open(fname, 'wb') as outf: outcsv = df.to_csv(fname) assert isinstance(combinedUsageDfs, defaultdict) assert all(isinstance(combinedUsageDf, pd.DataFrame) for combinedUsageDf in combinedUsageDfs.values())
<commit_before><commit_msg>Split the amsaves tests to focus on the second test function<commit_after>
#!/usr/bin/env python __author__ = "Eric Allen Youngson" __email__ = "eric@scneco.com" __copyright__ = "Copyright 2015, Succession Ecological Services" __license__ = "GNU Affero (GPLv3)" """ This module provides functions for requesting results from the DeltaMeter Services API * deltameterservices.com * """ import deltamtrsvs as dms_api import amsaves as ams import private as pvt import types as tp import pandas as pd import re from collections import defaultdict headers = pvt.headers properties_url = pvt.properties_url model_url = pvt.model_url comparison_url = pvt.comparison_url audit_url = pvt.audit_url sites = [pvt.FDL, pvt.HJSMS, pvt.Midlesboro] def test_am_saves_audit(): """ Pass the results of get_model_audits function, confirm DataFrame returned in requested results format for the 'America Saves!' program (write contents to .CSV file for review) """ for site in sites: bldgIDs = dms_api.get_property_bldg_IDs(properties_url, site, headers) (json_models, valBldgIDs) = dms_api.get_bldg_models(model_url, bldgIDs, headers) (modelIDs, comparisons) = dms_api.get_model_comparisons(comparison_url, json_models, headers) (refModelIDs, audits) = dms_api.get_model_audits(audit_url, modelIDs, headers) combinedUsageDfs = ams.am_saves_audit(refModelIDs, audits) df = pd.DataFrame(combinedUsageDfs[refModelIDs[0]]) fname = refModelIDs[0]+'-audit.csv' with open(fname, 'wb') as outf: outcsv = df.to_csv(fname) assert isinstance(combinedUsageDfs, defaultdict) assert all(isinstance(combinedUsageDf, pd.DataFrame) for combinedUsageDf in combinedUsageDfs.values())
Split the amsaves tests to focus on the second test function#!/usr/bin/env python __author__ = "Eric Allen Youngson" __email__ = "eric@scneco.com" __copyright__ = "Copyright 2015, Succession Ecological Services" __license__ = "GNU Affero (GPLv3)" """ This module provides functions for requesting results from the DeltaMeter Services API * deltameterservices.com * """ import deltamtrsvs as dms_api import amsaves as ams import private as pvt import types as tp import pandas as pd import re from collections import defaultdict headers = pvt.headers properties_url = pvt.properties_url model_url = pvt.model_url comparison_url = pvt.comparison_url audit_url = pvt.audit_url sites = [pvt.FDL, pvt.HJSMS, pvt.Midlesboro] def test_am_saves_audit(): """ Pass the results of get_model_audits function, confirm DataFrame returned in requested results format for the 'America Saves!' program (write contents to .CSV file for review) """ for site in sites: bldgIDs = dms_api.get_property_bldg_IDs(properties_url, site, headers) (json_models, valBldgIDs) = dms_api.get_bldg_models(model_url, bldgIDs, headers) (modelIDs, comparisons) = dms_api.get_model_comparisons(comparison_url, json_models, headers) (refModelIDs, audits) = dms_api.get_model_audits(audit_url, modelIDs, headers) combinedUsageDfs = ams.am_saves_audit(refModelIDs, audits) df = pd.DataFrame(combinedUsageDfs[refModelIDs[0]]) fname = refModelIDs[0]+'-audit.csv' with open(fname, 'wb') as outf: outcsv = df.to_csv(fname) assert isinstance(combinedUsageDfs, defaultdict) assert all(isinstance(combinedUsageDf, pd.DataFrame) for combinedUsageDf in combinedUsageDfs.values())
<commit_before><commit_msg>Split the amsaves tests to focus on the second test function<commit_after>#!/usr/bin/env python __author__ = "Eric Allen Youngson" __email__ = "eric@scneco.com" __copyright__ = "Copyright 2015, Succession Ecological Services" __license__ = "GNU Affero (GPLv3)" """ This module provides functions for requesting results from the DeltaMeter Services API * deltameterservices.com * """ import deltamtrsvs as dms_api import amsaves as ams import private as pvt import types as tp import pandas as pd import re from collections import defaultdict headers = pvt.headers properties_url = pvt.properties_url model_url = pvt.model_url comparison_url = pvt.comparison_url audit_url = pvt.audit_url sites = [pvt.FDL, pvt.HJSMS, pvt.Midlesboro] def test_am_saves_audit(): """ Pass the results of get_model_audits function, confirm DataFrame returned in requested results format for the 'America Saves!' program (write contents to .CSV file for review) """ for site in sites: bldgIDs = dms_api.get_property_bldg_IDs(properties_url, site, headers) (json_models, valBldgIDs) = dms_api.get_bldg_models(model_url, bldgIDs, headers) (modelIDs, comparisons) = dms_api.get_model_comparisons(comparison_url, json_models, headers) (refModelIDs, audits) = dms_api.get_model_audits(audit_url, modelIDs, headers) combinedUsageDfs = ams.am_saves_audit(refModelIDs, audits) df = pd.DataFrame(combinedUsageDfs[refModelIDs[0]]) fname = refModelIDs[0]+'-audit.csv' with open(fname, 'wb') as outf: outcsv = df.to_csv(fname) assert isinstance(combinedUsageDfs, defaultdict) assert all(isinstance(combinedUsageDf, pd.DataFrame) for combinedUsageDf in combinedUsageDfs.values())
58e22254e06b8112652b5875c02e69dcf46b8a63
src/rgbd_benchmark_tools/h5_collectSamples.py
src/rgbd_benchmark_tools/h5_collectSamples.py
#!/usr/bin/python # -*- coding: utf-8 -*- """ Created on Thu Sep 17 09:02:31 2015 @author: jesus """ import argparse import numpy as np import h5py if __name__ == '__main__': parser = argparse.ArgumentParser(description=''' This script collects the metrics and results from several samples of an experiment into its parent group. ''') parser.add_argument('h5file', help='HDF5 file in which the metrics are stored in the group eval for each sample') parser.add_argument('group', help='H5 path of the main group containing sample minor groups') parser.add_argument('delta_unit', help='delta_unit of the metrics to collect') args = parser.parse_args() h5f = h5py.File(args.h5file,'a') unit = args.delta_unit # Save the evaluation metric values in the samples' parent group main_group = h5f[args.group] # Check if eval group already exists in the main group if 'eval' in main_group: print "Removing existing eval group in " + main_group.name del main_group['eval'] numOfSamples = len(main_group) # Create new eval group in the main group samples = main_group.keys() eval_group = main_group.require_group('eval/'+args.delta_unit) names = ['rmse','median','mean','max'] for name in names: # Preallocate arrays t_arr = np.empty(numOfSamples) r_arr = np.empty(numOfSamples) # Store metrics in sample in an array for i, sample in enumerate(samples): t_arr[i] = main_group[sample+'/eval/'+unit+'/t_'+name][()] r_arr[i] = main_group[sample+'/eval/'+unit+'/r_'+name][()] # Check if dataset already exists in the group if 't_'+name in eval_group: print "Removing existing trans dataset in " + eval_group.name del eval_group['t_'+name] if 'r_'+name in eval_group: print "Removing existing rot dataset in " + eval_group.name del eval_group['r_'+name] # Save as a new dataset in the main group eval_group.create_dataset('t_'+name, data=t_arr) eval_group.create_dataset('r_'+name, data=r_arr)
Add script to collect metric results in a single dataset for a main group
Add script to collect metric results in a single dataset for a main group
Python
bsd-2-clause
jesusbriales/rgbd_benchmark_tools
Add script to collect metric results in a single dataset for a main group
#!/usr/bin/python # -*- coding: utf-8 -*- """ Created on Thu Sep 17 09:02:31 2015 @author: jesus """ import argparse import numpy as np import h5py if __name__ == '__main__': parser = argparse.ArgumentParser(description=''' This script collects the metrics and results from several samples of an experiment into its parent group. ''') parser.add_argument('h5file', help='HDF5 file in which the metrics are stored in the group eval for each sample') parser.add_argument('group', help='H5 path of the main group containing sample minor groups') parser.add_argument('delta_unit', help='delta_unit of the metrics to collect') args = parser.parse_args() h5f = h5py.File(args.h5file,'a') unit = args.delta_unit # Save the evaluation metric values in the samples' parent group main_group = h5f[args.group] # Check if eval group already exists in the main group if 'eval' in main_group: print "Removing existing eval group in " + main_group.name del main_group['eval'] numOfSamples = len(main_group) # Create new eval group in the main group samples = main_group.keys() eval_group = main_group.require_group('eval/'+args.delta_unit) names = ['rmse','median','mean','max'] for name in names: # Preallocate arrays t_arr = np.empty(numOfSamples) r_arr = np.empty(numOfSamples) # Store metrics in sample in an array for i, sample in enumerate(samples): t_arr[i] = main_group[sample+'/eval/'+unit+'/t_'+name][()] r_arr[i] = main_group[sample+'/eval/'+unit+'/r_'+name][()] # Check if dataset already exists in the group if 't_'+name in eval_group: print "Removing existing trans dataset in " + eval_group.name del eval_group['t_'+name] if 'r_'+name in eval_group: print "Removing existing rot dataset in " + eval_group.name del eval_group['r_'+name] # Save as a new dataset in the main group eval_group.create_dataset('t_'+name, data=t_arr) eval_group.create_dataset('r_'+name, data=r_arr)
<commit_before><commit_msg>Add script to collect metric results in a single dataset for a main group<commit_after>
#!/usr/bin/python # -*- coding: utf-8 -*- """ Created on Thu Sep 17 09:02:31 2015 @author: jesus """ import argparse import numpy as np import h5py if __name__ == '__main__': parser = argparse.ArgumentParser(description=''' This script collects the metrics and results from several samples of an experiment into its parent group. ''') parser.add_argument('h5file', help='HDF5 file in which the metrics are stored in the group eval for each sample') parser.add_argument('group', help='H5 path of the main group containing sample minor groups') parser.add_argument('delta_unit', help='delta_unit of the metrics to collect') args = parser.parse_args() h5f = h5py.File(args.h5file,'a') unit = args.delta_unit # Save the evaluation metric values in the samples' parent group main_group = h5f[args.group] # Check if eval group already exists in the main group if 'eval' in main_group: print "Removing existing eval group in " + main_group.name del main_group['eval'] numOfSamples = len(main_group) # Create new eval group in the main group samples = main_group.keys() eval_group = main_group.require_group('eval/'+args.delta_unit) names = ['rmse','median','mean','max'] for name in names: # Preallocate arrays t_arr = np.empty(numOfSamples) r_arr = np.empty(numOfSamples) # Store metrics in sample in an array for i, sample in enumerate(samples): t_arr[i] = main_group[sample+'/eval/'+unit+'/t_'+name][()] r_arr[i] = main_group[sample+'/eval/'+unit+'/r_'+name][()] # Check if dataset already exists in the group if 't_'+name in eval_group: print "Removing existing trans dataset in " + eval_group.name del eval_group['t_'+name] if 'r_'+name in eval_group: print "Removing existing rot dataset in " + eval_group.name del eval_group['r_'+name] # Save as a new dataset in the main group eval_group.create_dataset('t_'+name, data=t_arr) eval_group.create_dataset('r_'+name, data=r_arr)
Add script to collect metric results in a single dataset for a main group#!/usr/bin/python # -*- coding: utf-8 -*- """ Created on Thu Sep 17 09:02:31 2015 @author: jesus """ import argparse import numpy as np import h5py if __name__ == '__main__': parser = argparse.ArgumentParser(description=''' This script collects the metrics and results from several samples of an experiment into its parent group. ''') parser.add_argument('h5file', help='HDF5 file in which the metrics are stored in the group eval for each sample') parser.add_argument('group', help='H5 path of the main group containing sample minor groups') parser.add_argument('delta_unit', help='delta_unit of the metrics to collect') args = parser.parse_args() h5f = h5py.File(args.h5file,'a') unit = args.delta_unit # Save the evaluation metric values in the samples' parent group main_group = h5f[args.group] # Check if eval group already exists in the main group if 'eval' in main_group: print "Removing existing eval group in " + main_group.name del main_group['eval'] numOfSamples = len(main_group) # Create new eval group in the main group samples = main_group.keys() eval_group = main_group.require_group('eval/'+args.delta_unit) names = ['rmse','median','mean','max'] for name in names: # Preallocate arrays t_arr = np.empty(numOfSamples) r_arr = np.empty(numOfSamples) # Store metrics in sample in an array for i, sample in enumerate(samples): t_arr[i] = main_group[sample+'/eval/'+unit+'/t_'+name][()] r_arr[i] = main_group[sample+'/eval/'+unit+'/r_'+name][()] # Check if dataset already exists in the group if 't_'+name in eval_group: print "Removing existing trans dataset in " + eval_group.name del eval_group['t_'+name] if 'r_'+name in eval_group: print "Removing existing rot dataset in " + eval_group.name del eval_group['r_'+name] # Save as a new dataset in the main group eval_group.create_dataset('t_'+name, data=t_arr) eval_group.create_dataset('r_'+name, data=r_arr)
<commit_before><commit_msg>Add script to collect metric results in a single dataset for a main group<commit_after>#!/usr/bin/python # -*- coding: utf-8 -*- """ Created on Thu Sep 17 09:02:31 2015 @author: jesus """ import argparse import numpy as np import h5py if __name__ == '__main__': parser = argparse.ArgumentParser(description=''' This script collects the metrics and results from several samples of an experiment into its parent group. ''') parser.add_argument('h5file', help='HDF5 file in which the metrics are stored in the group eval for each sample') parser.add_argument('group', help='H5 path of the main group containing sample minor groups') parser.add_argument('delta_unit', help='delta_unit of the metrics to collect') args = parser.parse_args() h5f = h5py.File(args.h5file,'a') unit = args.delta_unit # Save the evaluation metric values in the samples' parent group main_group = h5f[args.group] # Check if eval group already exists in the main group if 'eval' in main_group: print "Removing existing eval group in " + main_group.name del main_group['eval'] numOfSamples = len(main_group) # Create new eval group in the main group samples = main_group.keys() eval_group = main_group.require_group('eval/'+args.delta_unit) names = ['rmse','median','mean','max'] for name in names: # Preallocate arrays t_arr = np.empty(numOfSamples) r_arr = np.empty(numOfSamples) # Store metrics in sample in an array for i, sample in enumerate(samples): t_arr[i] = main_group[sample+'/eval/'+unit+'/t_'+name][()] r_arr[i] = main_group[sample+'/eval/'+unit+'/r_'+name][()] # Check if dataset already exists in the group if 't_'+name in eval_group: print "Removing existing trans dataset in " + eval_group.name del eval_group['t_'+name] if 'r_'+name in eval_group: print "Removing existing rot dataset in " + eval_group.name del eval_group['r_'+name] # Save as a new dataset in the main group eval_group.create_dataset('t_'+name, data=t_arr) eval_group.create_dataset('r_'+name, data=r_arr)
e582644d41f3422128dfc4d45c290ac51361b4aa
pyatv/auth/hap_session.py
pyatv/auth/hap_session.py
"""Cryptograhpy routines used by HAP.""" from typing import Optional from pyatv.support.chacha20 import Chacha20Cipher class HAPSession: """Manages cryptography for a HAP session according to IP in specification. The HAP specification mandates that data is encrypted/decrypted in blocks of 1024 bytes. This class takes care of that. It is designed to be transparent until encryption is enabled, i.e. data is just passed through in case it has not yet been enabled. """ FRAME_LENGTH = 1024 # As specified by HAP, section 5.2.2 (Release R1) AUTH_TAG_LENGTH = 16 def __init__( self, ) -> None: """Initialize a new HAPSession instance.""" self.chacha20: Optional[Chacha20Cipher] = None def enable(self, output_key: bytes, input_key: bytes) -> None: """Enable encryption with specified keys.""" self.chacha20 = Chacha20Cipher(output_key, input_key) def decrypt(self, data: bytes) -> bytes: """Decrypt incoming data.""" if self.chacha20 is None: return data output = b"" while data: length = data[0:2] block_length = ( int.from_bytes(length, byteorder="little") + self.AUTH_TAG_LENGTH ) block = data[2 : 2 + block_length] output += self.chacha20.decrypt(block, aad=length) data = data[2 + block_length :] return output def encrypt(self, data: bytes) -> bytes: """Encrypt outgoing data.""" if self.chacha20 is None: return data output = b"" while data: frame = data[0 : self.FRAME_LENGTH] data = data[self.FRAME_LENGTH :] length = int.to_bytes(len(frame), 2, byteorder="little") frame = self.chacha20.encrypt(frame, aad=length) output += length + frame return output
Add HAPSession used for encryption
auth: Add HAPSession used for encryption Relates to #1255
Python
mit
postlund/pyatv,postlund/pyatv
auth: Add HAPSession used for encryption Relates to #1255
"""Cryptograhpy routines used by HAP.""" from typing import Optional from pyatv.support.chacha20 import Chacha20Cipher class HAPSession: """Manages cryptography for a HAP session according to IP in specification. The HAP specification mandates that data is encrypted/decrypted in blocks of 1024 bytes. This class takes care of that. It is designed to be transparent until encryption is enabled, i.e. data is just passed through in case it has not yet been enabled. """ FRAME_LENGTH = 1024 # As specified by HAP, section 5.2.2 (Release R1) AUTH_TAG_LENGTH = 16 def __init__( self, ) -> None: """Initialize a new HAPSession instance.""" self.chacha20: Optional[Chacha20Cipher] = None def enable(self, output_key: bytes, input_key: bytes) -> None: """Enable encryption with specified keys.""" self.chacha20 = Chacha20Cipher(output_key, input_key) def decrypt(self, data: bytes) -> bytes: """Decrypt incoming data.""" if self.chacha20 is None: return data output = b"" while data: length = data[0:2] block_length = ( int.from_bytes(length, byteorder="little") + self.AUTH_TAG_LENGTH ) block = data[2 : 2 + block_length] output += self.chacha20.decrypt(block, aad=length) data = data[2 + block_length :] return output def encrypt(self, data: bytes) -> bytes: """Encrypt outgoing data.""" if self.chacha20 is None: return data output = b"" while data: frame = data[0 : self.FRAME_LENGTH] data = data[self.FRAME_LENGTH :] length = int.to_bytes(len(frame), 2, byteorder="little") frame = self.chacha20.encrypt(frame, aad=length) output += length + frame return output
<commit_before><commit_msg>auth: Add HAPSession used for encryption Relates to #1255<commit_after>
"""Cryptograhpy routines used by HAP.""" from typing import Optional from pyatv.support.chacha20 import Chacha20Cipher class HAPSession: """Manages cryptography for a HAP session according to IP in specification. The HAP specification mandates that data is encrypted/decrypted in blocks of 1024 bytes. This class takes care of that. It is designed to be transparent until encryption is enabled, i.e. data is just passed through in case it has not yet been enabled. """ FRAME_LENGTH = 1024 # As specified by HAP, section 5.2.2 (Release R1) AUTH_TAG_LENGTH = 16 def __init__( self, ) -> None: """Initialize a new HAPSession instance.""" self.chacha20: Optional[Chacha20Cipher] = None def enable(self, output_key: bytes, input_key: bytes) -> None: """Enable encryption with specified keys.""" self.chacha20 = Chacha20Cipher(output_key, input_key) def decrypt(self, data: bytes) -> bytes: """Decrypt incoming data.""" if self.chacha20 is None: return data output = b"" while data: length = data[0:2] block_length = ( int.from_bytes(length, byteorder="little") + self.AUTH_TAG_LENGTH ) block = data[2 : 2 + block_length] output += self.chacha20.decrypt(block, aad=length) data = data[2 + block_length :] return output def encrypt(self, data: bytes) -> bytes: """Encrypt outgoing data.""" if self.chacha20 is None: return data output = b"" while data: frame = data[0 : self.FRAME_LENGTH] data = data[self.FRAME_LENGTH :] length = int.to_bytes(len(frame), 2, byteorder="little") frame = self.chacha20.encrypt(frame, aad=length) output += length + frame return output
auth: Add HAPSession used for encryption Relates to #1255"""Cryptography routines used by HAP.""" from typing import Optional from pyatv.support.chacha20 import Chacha20Cipher class HAPSession: """Manages cryptography for a HAP session according to the IP transport in the HAP specification. The HAP specification mandates that data is encrypted/decrypted in blocks of 1024 bytes. This class takes care of that. It is designed to be transparent until encryption is enabled, i.e. data is just passed through in case it has not yet been enabled. """ FRAME_LENGTH = 1024 # As specified by HAP, section 5.2.2 (Release R1) AUTH_TAG_LENGTH = 16 def __init__( self, ) -> None: """Initialize a new HAPSession instance.""" self.chacha20: Optional[Chacha20Cipher] = None def enable(self, output_key: bytes, input_key: bytes) -> None: """Enable encryption with specified keys.""" self.chacha20 = Chacha20Cipher(output_key, input_key) def decrypt(self, data: bytes) -> bytes: """Decrypt incoming data.""" if self.chacha20 is None: return data output = b"" while data: length = data[0:2] block_length = ( int.from_bytes(length, byteorder="little") + self.AUTH_TAG_LENGTH ) block = data[2 : 2 + block_length] output += self.chacha20.decrypt(block, aad=length) data = data[2 + block_length :] return output def encrypt(self, data: bytes) -> bytes: """Encrypt outgoing data.""" if self.chacha20 is None: return data output = b"" while data: frame = data[0 : self.FRAME_LENGTH] data = data[self.FRAME_LENGTH :] length = int.to_bytes(len(frame), 2, byteorder="little") frame = self.chacha20.encrypt(frame, aad=length) output += length + frame return output
<commit_before><commit_msg>auth: Add HAPSession used for encryption Relates to #1255<commit_after>"""Cryptography routines used by HAP.""" from typing import Optional from pyatv.support.chacha20 import Chacha20Cipher class HAPSession: """Manages cryptography for a HAP session according to the IP transport in the HAP specification. The HAP specification mandates that data is encrypted/decrypted in blocks of 1024 bytes. This class takes care of that. It is designed to be transparent until encryption is enabled, i.e. data is just passed through in case it has not yet been enabled. """ FRAME_LENGTH = 1024 # As specified by HAP, section 5.2.2 (Release R1) AUTH_TAG_LENGTH = 16 def __init__( self, ) -> None: """Initialize a new HAPSession instance.""" self.chacha20: Optional[Chacha20Cipher] = None def enable(self, output_key: bytes, input_key: bytes) -> None: """Enable encryption with specified keys.""" self.chacha20 = Chacha20Cipher(output_key, input_key) def decrypt(self, data: bytes) -> bytes: """Decrypt incoming data.""" if self.chacha20 is None: return data output = b"" while data: length = data[0:2] block_length = ( int.from_bytes(length, byteorder="little") + self.AUTH_TAG_LENGTH ) block = data[2 : 2 + block_length] output += self.chacha20.decrypt(block, aad=length) data = data[2 + block_length :] return output def encrypt(self, data: bytes) -> bytes: """Encrypt outgoing data.""" if self.chacha20 is None: return data output = b"" while data: frame = data[0 : self.FRAME_LENGTH] data = data[self.FRAME_LENGTH :] length = int.to_bytes(len(frame), 2, byteorder="little") frame = self.chacha20.encrypt(frame, aad=length) output += length + frame return output
555dcd9ca022f7479415a69dd5cffee46366e055
scripts/showlog.py
scripts/showlog.py
#!/usr/bin/python3 import sys import argparse from kafka import KafkaConsumer parser = argparse.ArgumentParser(description='Zoe Kafka log viewer') parser.add_argument('kafka_address', help='Address of the Kafka broker') parser.add_argument('--list-logs', action='store_true', help='List all the available service logs') parser.add_argument('--topic', help='Service name to fetch and monitor for activity') args = parser.parse_args() consumer = KafkaConsumer(bootstrap_servers=args.kafka_address) if args.list_logs: for topic in consumer.topics(): if topic[0] != '_': print(topic) sys.exit(0) consumer.subscribe(pattern=args.topic) consumer.poll(1) consumer.seek_to_beginning() try: for msg in consumer: print(msg.value.decode('utf-8')) except KeyboardInterrupt: print('showlog exiting...')
Add a script to retrieve logs from kafka
Add a script to retrieve logs from kafka
Python
apache-2.0
DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe,DistributedSystemsGroup/zoe
Add a script to retrieve logs from kafka
#!/usr/bin/python3 import sys import argparse from kafka import KafkaConsumer parser = argparse.ArgumentParser(description='Zoe Kafka log viewer') parser.add_argument('kafka_address', help='Address of the Kafka broker') parser.add_argument('--list-logs', action='store_true', help='List all the available service logs') parser.add_argument('--topic', help='Service name to fetch and monitor for activity') args = parser.parse_args() consumer = KafkaConsumer(bootstrap_servers=args.kafka_address) if args.list_logs: for topic in consumer.topics(): if topic[0] != '_': print(topic) sys.exit(0) consumer.subscribe(pattern=args.topic) consumer.poll(1) consumer.seek_to_beginning() try: for msg in consumer: print(msg.value.decode('utf-8')) except KeyboardInterrupt: print('showlog exiting...')
<commit_before><commit_msg>Add a script to retrieve logs from kafka<commit_after>
#!/usr/bin/python3 import sys import argparse from kafka import KafkaConsumer parser = argparse.ArgumentParser(description='Zoe Kafka log viewer') parser.add_argument('kafka_address', help='Address of the Kafka broker') parser.add_argument('--list-logs', action='store_true', help='List all the available service logs') parser.add_argument('--topic', help='Service name to fetch and monitor for activity') args = parser.parse_args() consumer = KafkaConsumer(bootstrap_servers=args.kafka_address) if args.list_logs: for topic in consumer.topics(): if topic[0] != '_': print(topic) sys.exit(0) consumer.subscribe(pattern=args.topic) consumer.poll(1) consumer.seek_to_beginning() try: for msg in consumer: print(msg.value.decode('utf-8')) except KeyboardInterrupt: print('showlog exiting...')
Add a script to retrieve logs from kafka#!/usr/bin/python3 import sys import argparse from kafka import KafkaConsumer parser = argparse.ArgumentParser(description='Zoe Kafka log viewer') parser.add_argument('kafka_address', help='Address of the Kafka broker') parser.add_argument('--list-logs', action='store_true', help='List all the available service logs') parser.add_argument('--topic', help='Service name to fetch and monitor for activity') args = parser.parse_args() consumer = KafkaConsumer(bootstrap_servers=args.kafka_address) if args.list_logs: for topic in consumer.topics(): if topic[0] != '_': print(topic) sys.exit(0) consumer.subscribe(pattern=args.topic) consumer.poll(1) consumer.seek_to_beginning() try: for msg in consumer: print(msg.value.decode('utf-8')) except KeyboardInterrupt: print('showlog exiting...')
<commit_before><commit_msg>Add a script to retrieve logs from kafka<commit_after>#!/usr/bin/python3 import sys import argparse from kafka import KafkaConsumer parser = argparse.ArgumentParser(description='Zoe Kafka log viewer') parser.add_argument('kafka_address', help='Address of the Kafka broker') parser.add_argument('--list-logs', action='store_true', help='List all the available service logs') parser.add_argument('--topic', help='Service name to fetch and monitor for activity') args = parser.parse_args() consumer = KafkaConsumer(bootstrap_servers=args.kafka_address) if args.list_logs: for topic in consumer.topics(): if topic[0] != '_': print(topic) sys.exit(0) consumer.subscribe(pattern=args.topic) consumer.poll(1) consumer.seek_to_beginning() try: for msg in consumer: print(msg.value.decode('utf-8')) except KeyboardInterrupt: print('showlog exiting...')
02a8745ddc0e9618f79e144a92258ff8ac3f35aa
bluebottle/utils/staticfiles_finders.py
bluebottle/utils/staticfiles_finders.py
from django.utils._os import safe_join import os from django.conf import settings from django.contrib.staticfiles.finders import FileSystemFinder from bluebottle.clients.models import Client class TenantStaticFilesFinder(FileSystemFinder): def find(self, path, all=False): """ Looks for files in the client static directories. static/assets/greatbarier/images/logo.jpg will translate to MULTITENANT_DIR/greatbarier/static/images/logo.jpg """ matches = [] tenants = Client.objects.all() tenant_dir = getattr(settings, 'MULTITENANT_DIR', None) if not tenant_dir: return matches for tenant in tenants: if "{0}/".format(tenant.client_name) in path: tenant_path = path.replace('{0}/'.format(tenant.client_name), '{0}/static/'.format(tenant.client_name)) print tenant_path local_path = safe_join(tenant_dir, tenant_path) print local_path if os.path.exists(local_path): return local_path return
Add tenant static files finder
Add tenant static files finder
Python
bsd-3-clause
jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,jfterpstra/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
Add tenant static files finder
from django.utils._os import safe_join import os from django.conf import settings from django.contrib.staticfiles.finders import FileSystemFinder from bluebottle.clients.models import Client class TenantStaticFilesFinder(FileSystemFinder): def find(self, path, all=False): """ Looks for files in the client static directories. static/assets/greatbarier/images/logo.jpg will translate to MULTITENANT_DIR/greatbarier/static/images/logo.jpg """ matches = [] tenants = Client.objects.all() tenant_dir = getattr(settings, 'MULTITENANT_DIR', None) if not tenant_dir: return matches for tenant in tenants: if "{0}/".format(tenant.client_name) in path: tenant_path = path.replace('{0}/'.format(tenant.client_name), '{0}/static/'.format(tenant.client_name)) print tenant_path local_path = safe_join(tenant_dir, tenant_path) print local_path if os.path.exists(local_path): return local_path return
<commit_before><commit_msg>Add tenant static files finder<commit_after>
from django.utils._os import safe_join import os from django.conf import settings from django.contrib.staticfiles.finders import FileSystemFinder from bluebottle.clients.models import Client class TenantStaticFilesFinder(FileSystemFinder): def find(self, path, all=False): """ Looks for files in the client static directories. static/assets/greatbarier/images/logo.jpg will translate to MULTITENANT_DIR/greatbarier/static/images/logo.jpg """ matches = [] tenants = Client.objects.all() tenant_dir = getattr(settings, 'MULTITENANT_DIR', None) if not tenant_dir: return matches for tenant in tenants: if "{0}/".format(tenant.client_name) in path: tenant_path = path.replace('{0}/'.format(tenant.client_name), '{0}/static/'.format(tenant.client_name)) print tenant_path local_path = safe_join(tenant_dir, tenant_path) print local_path if os.path.exists(local_path): return local_path return
Add tenant static files finderfrom django.utils._os import safe_join import os from django.conf import settings from django.contrib.staticfiles.finders import FileSystemFinder from bluebottle.clients.models import Client class TenantStaticFilesFinder(FileSystemFinder): def find(self, path, all=False): """ Looks for files in the client static directories. static/assets/greatbarier/images/logo.jpg will translate to MULTITENANT_DIR/greatbarier/static/images/logo.jpg """ matches = [] tenants = Client.objects.all() tenant_dir = getattr(settings, 'MULTITENANT_DIR', None) if not tenant_dir: return matches for tenant in tenants: if "{0}/".format(tenant.client_name) in path: tenant_path = path.replace('{0}/'.format(tenant.client_name), '{0}/static/'.format(tenant.client_name)) print tenant_path local_path = safe_join(tenant_dir, tenant_path) print local_path if os.path.exists(local_path): return local_path return
<commit_before><commit_msg>Add tenant static files finder<commit_after>from django.utils._os import safe_join import os from django.conf import settings from django.contrib.staticfiles.finders import FileSystemFinder from bluebottle.clients.models import Client class TenantStaticFilesFinder(FileSystemFinder): def find(self, path, all=False): """ Looks for files in the client static directories. static/assets/greatbarier/images/logo.jpg will translate to MULTITENANT_DIR/greatbarier/static/images/logo.jpg """ matches = [] tenants = Client.objects.all() tenant_dir = getattr(settings, 'MULTITENANT_DIR', None) if not tenant_dir: return matches for tenant in tenants: if "{0}/".format(tenant.client_name) in path: tenant_path = path.replace('{0}/'.format(tenant.client_name), '{0}/static/'.format(tenant.client_name)) print tenant_path local_path = safe_join(tenant_dir, tenant_path) print local_path if os.path.exists(local_path): return local_path return
dda477cb554d000f6a534848725629d20158aba7
main.py
main.py
#!/usr/bin/env python3 import sys import os.path import re import linecache import subprocess import urllib.request import zipfile if len(sys.argv) != 2: print ('''Invalid usage : Usage : ghost-update /path/to/your/ghost ''') sys.exit(1) ghostPath = sys.argv[1] packageJson = os.path.join(ghostPath, 'package.json') if os.path.exists(ghostPath) and os.path.isdir(ghostPath): if os.path.exists(packageJson): pattern = re.compile('"name": "ghost"') line = linecache.getline(packageJson, 2) if re.search(pattern, line) != None: # It's a ghost instance # Download ghost latest archive localName, headers = urllib.request.urlretrieve('https://ghost.org/zip/ghost-latest.zip') zip_file = zipfile.ZipFile(localName) zip_file.extractall('/tmp/ghost') else: print ("This is not a ghost instance") sys.exit(1) else: print ("Invalid path") sys.exit(1)
Check if it's a ghost instance and download latest ghost release
Check if it's a ghost instance and download latest ghost release
Python
mpl-2.0
MatonAnthony/ghost-update
Check if it's a ghost instance and download latest ghost release
#!/usr/bin/env python3 import sys import os.path import re import linecache import subprocess import urllib.request import zipfile if len(sys.argv) != 2: print ('''Invalid usage : Usage : ghost-update /path/to/your/ghost ''') sys.exit(1) ghostPath = sys.argv[1] packageJson = os.path.join(ghostPath, 'package.json') if os.path.exists(ghostPath) and os.path.isdir(ghostPath): if os.path.exists(packageJson): pattern = re.compile('"name": "ghost"') line = linecache.getline(packageJson, 2) if re.search(pattern, line) != None: # It's a ghost instance # Download ghost latest archive localName, headers = urllib.request.urlretrieve('https://ghost.org/zip/ghost-latest.zip') zip_file = zipfile.ZipFile(localName) zip_file.extractall('/tmp/ghost') else: print ("This is not a ghost instance") sys.exit(1) else: print ("Invalid path") sys.exit(1)
<commit_before><commit_msg>Check if it's a ghost instance and download latest ghost release<commit_after>
#!/usr/bin/env python3 import sys import os.path import re import linecache import subprocess import urllib.request import zipfile if len(sys.argv) != 2: print ('''Invalid usage : Usage : ghost-update /path/to/your/ghost ''') sys.exit(1) ghostPath = sys.argv[1] packageJson = os.path.join(ghostPath, 'package.json') if os.path.exists(ghostPath) and os.path.isdir(ghostPath): if os.path.exists(packageJson): pattern = re.compile('"name": "ghost"') line = linecache.getline(packageJson, 2) if re.search(pattern, line) != None: # It's a ghost instance # Download ghost latest archive localName, headers = urllib.request.urlretrieve('https://ghost.org/zip/ghost-latest.zip') zip_file = zipfile.ZipFile(localName) zip_file.extractall('/tmp/ghost') else: print ("This is not a ghost instance") sys.exit(1) else: print ("Invalid path") sys.exit(1)
Check if it's a ghost instance and download latest ghost release#!/usr/bin/env python3 import sys import os.path import re import linecache import subprocess import urllib.request import zipfile if len(sys.argv) != 2: print ('''Invalid usage : Usage : ghost-update /path/to/your/ghost ''') sys.exit(1) ghostPath = sys.argv[1] packageJson = os.path.join(ghostPath, 'package.json') if os.path.exists(ghostPath) and os.path.isdir(ghostPath): if os.path.exists(packageJson): pattern = re.compile('"name": "ghost"') line = linecache.getline(packageJson, 2) if re.search(pattern, line) != None: # It's a ghost instance # Download ghost latest archive localName, headers = urllib.request.urlretrieve('https://ghost.org/zip/ghost-latest.zip') zip_file = zipfile.ZipFile(localName) zip_file.extractall('/tmp/ghost') else: print ("This is not a ghost instance") sys.exit(1) else: print ("Invalid path") sys.exit(1)
<commit_before><commit_msg>Check if it's a ghost instance and download latest ghost release<commit_after>#!/usr/bin/env python3 import sys import os.path import re import linecache import subprocess import urllib.request import zipfile if len(sys.argv) != 2: print ('''Invalid usage : Usage : ghost-update /path/to/your/ghost ''') sys.exit(1) ghostPath = sys.argv[1] packageJson = os.path.join(ghostPath, 'package.json') if os.path.exists(ghostPath) and os.path.isdir(ghostPath): if os.path.exists(packageJson): pattern = re.compile('"name": "ghost"') line = linecache.getline(packageJson, 2) if re.search(pattern, line) != None: # It's a ghost instance # Download ghost latest archive localName, headers = urllib.request.urlretrieve('https://ghost.org/zip/ghost-latest.zip') zip_file = zipfile.ZipFile(localName) zip_file.extractall('/tmp/ghost') else: print ("This is not a ghost instance") sys.exit(1) else: print ("Invalid path") sys.exit(1)
b05649bb6c99195542569c7d378991eba2187d58
tsort_3.py
tsort_3.py
def merge(input_array, beg, mid, end): """Merge procedure for merge sort algorithm""" listA = input_array[beg:mid+1] listB = input_array[mid+1:end+1] current_position = beg while len(listA)>0 and len(listB)>0: if(listA[0] < listB[0]): input_array[current_position] = listA[0] listA.pop(0) else: input_array[current_position] = listB[0] listB.pop(0) current_position += 1 input_array[current_position:end+1] = listA + listB pass def msort(input_array, beg, end): """Main mergesort procedure""" # Boundary condition - subarray of size 1 is already sorted if beg==end: return mid = int((beg+end)/2) msort(input_array, beg, mid) msort(input_array, mid+1, end) merge(input_array, beg, mid, end) # stub - merge the 2 halves together def mergesort(input_array): """Wrapper for actual merge sort procedure""" array_length = len(input_array) msort(input_array, 0, array_length - 1) number_of_input_values = int(raw_input()) current_input_position = 1 number_list=[] for current_input_position in range(1, number_of_input_values+1): current_input = int(raw_input()) number_list.append(current_input) mergesort(number_list) for current_number in number_list: print current_number
Add alternate tsort solution - mergesort, with slightly modified merge procedure
Add alternate tsort solution - mergesort, with slightly modified merge procedure
Python
mit
sandy-8925/codechef
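The "slightly modified" merge above pops from the fronts of two temporary lists and splices whatever remains back into the array with one slice assignment. A minimal sanity check of the routines, separate from the raw_input-driven driver — hypothetical, not part of the commit (Python 2, matching the record):

    data = [5, 2, 9, 1, 5, 6]
    mergesort(data)            # sorts in place via msort()/merge()
    assert data == [1, 2, 5, 5, 6, 9]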
1ebed09254aa5601956af9038a3f6cb8591913c8
buildlet/task/cachedtask.py
buildlet/task/cachedtask.py
""" Tasks cached on data store. """ from .base import BaseTask class BaseCachedTask(BaseTask): datastore = None """ Data store instance. Child class **must** set this attribute. """ def is_finished(self): current = self.get_taskhash() cached = self.get_cached_taskhash() return current is not None and \ cached is not None and \ current == cached def get_taskvalue(self): """ Return a hash-able object which can identify this task. Note that this value should not depend on the result of the :meth:`run` function. However, this value should provide enough information to get the same result. """ raise NotImplementedError def get_taskhash(self): if not self.is_parent_cacheable(): return None parent_hashes = self.get_parent_hashes() if any(h is None for h in parent_hashes): return None taskvalue = self.get_taskvalue() # TODO: This relies on that `hash` returns the same value for every # run. Does it hold always? Find out! return hash((taskvalue, parent_hashes)) def get_parent_hashes(self): return map(BaseCachedTask.get_cached_taskhash, self.get_parents()) def is_parent_cacheable(self): return all(isinstance(p, BaseCachedTask) for p in self.get_parents()) def get_taskhashstore(self): return self.datastore.get_metastore().get_filestore('taskhash') def get_cached_taskhash(self): store = self.get_taskhashstore() if not store.exists(): return None with store.open() as f: return int(f.read()) def set_cached_taskhash(self): taskhash = self.get_taskhash() with self.get_taskhashstore().open('w') as f: f.write(str(taskhash))
Add conceptual implementation of BaseCachedTask
Add conceptual implementation of BaseCachedTask
Python
bsd-3-clause
tkf/buildlet
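BaseCachedTask is abstract: a concrete task supplies get_taskvalue() plus whatever run()/get_parents() the BaseTask contract expects. A minimal sketch of a subclass — the class name, path field, and wiring are hypothetical, assuming the surrounding buildlet API:

    class CountLines(BaseCachedTask):       # hypothetical example task

        def __init__(self, datastore, path):
            self.datastore = datastore      # required by BaseCachedTask
            self.path = path

        def get_taskvalue(self):
            # Identifies the task; must not depend on run()'s result.
            return ('CountLines', self.path)

        def run(self):
            with open(self.path) as f:
                self.result = len(f.readlines())
            self.set_cached_taskhash()      # so is_finished() succeeds next time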
9c0a4529c15cdc5f32b1d4718995092dfb9cb263
migrations/versions/30_rm_unique_constraint.py
migrations/versions/30_rm_unique_constraint.py
from alembic import op


revision = '30_rm_unique_constraint'
down_revision = '20_initialise_data'


def upgrade():
    op.drop_constraint("users_name_key", "users")


def downgrade():
    op.create_unique_constraint("users_name_key", "users", ["name"])
Remove unique constraint on users.name
Remove unique constraint on users.name
Python
mit
alphagov/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,alphagov/notifications-admin,gov-cjwaszczuk/notifications-admin,gov-cjwaszczuk/notifications-admin
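For context, this revision pair is what Alembic executes on upgrade and downgrade. A sketch of driving it programmatically — the alembic.ini path is an assumption:

    from alembic.config import Config
    from alembic import command

    cfg = Config('alembic.ini')                      # path is an assumption
    command.upgrade(cfg, '30_rm_unique_constraint')  # runs upgrade(): drops users_name_key
    command.downgrade(cfg, '20_initialise_data')     # runs downgrade(): restores the unique key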
33e23230315f7a922e50264948c42b2c68116cc2
numba/tests/issues/test_potential_gcc_error.py
numba/tests/issues/test_potential_gcc_error.py
# This tests a potential GCC 4.1.2 miscompile of LLVM.
# The problem is observed as an error in the greedy register allocation pass,
# which resulted in a segfault.
# No such problem in GCC 4.4.6.

from numba import *
import numpy as np


@jit(uint8[:,:](f8, f8, f8, f8, uint8[:,:], int32))
def create_fractal(min_x, max_x, min_y, max_y, image, iters):
    return image
Add tests for the possibly gcc miscompile error of LLVM in the mandel example.
Add tests for the possibly gcc miscompile error of LLVM in the mandel example.
Python
bsd-2-clause
numba/numba,gmarkall/numba,pombredanne/numba,pitrou/numba,pitrou/numba,IntelLabs/numba,numba/numba,ssarangi/numba,gmarkall/numba,stuartarchibald/numba,jriehl/numba,stefanseefeld/numba,numba/numba,pitrou/numba,GaZ3ll3/numba,stonebig/numba,pitrou/numba,shiquanwang/numba,stefanseefeld/numba,stuartarchibald/numba,stonebig/numba,GaZ3ll3/numba,GaZ3ll3/numba,ssarangi/numba,stefanseefeld/numba,stonebig/numba,shiquanwang/numba,pitrou/numba,jriehl/numba,numba/numba,ssarangi/numba,gdementen/numba,gdementen/numba,numba/numba,stonebig/numba,sklam/numba,sklam/numba,stefanseefeld/numba,ssarangi/numba,IntelLabs/numba,GaZ3ll3/numba,pombredanne/numba,stuartarchibald/numba,jriehl/numba,stonebig/numba,gmarkall/numba,seibert/numba,cpcloud/numba,sklam/numba,stefanseefeld/numba,gdementen/numba,pombredanne/numba,stuartarchibald/numba,seibert/numba,jriehl/numba,seibert/numba,cpcloud/numba,IntelLabs/numba,ssarangi/numba,stuartarchibald/numba,cpcloud/numba,cpcloud/numba,gmarkall/numba,cpcloud/numba,sklam/numba,IntelLabs/numba,pombredanne/numba,gdementen/numba,seibert/numba,pombredanne/numba,IntelLabs/numba,GaZ3ll3/numba,seibert/numba,sklam/numba,jriehl/numba,gmarkall/numba,shiquanwang/numba,gdementen/numba
cc003f9bf3dfe2d8f4eba8f123fb23be1d9a393c
tests/GrammarCopyTest.py
tests/GrammarCopyTest.py
#!/usr/bin/env python
"""
:Author Patrik Valkovic
:Created 16.08.2017 19:16
:Licence GNUv3
Part of grammpy

"""

from unittest import main, TestCase
from grammpy import *


class GrammarCopyTest(TestCase):
    pass


if __name__ == '__main__':
    main()
Add file for tests of grammar copy
Add file for tests of grammar copy
Python
mit
PatrikValkovic/grammpy
6d67213b80350fe63e46ea2a18688f4a5a3f0d81
spacy/tests/regression/test_issue850.py
spacy/tests/regression/test_issue850.py
'''
Test Matcher matches with '*' operator and Boolean flag
'''
from __future__ import unicode_literals
import pytest

from ...matcher import Matcher
from ...vocab import Vocab
from ...attrs import LOWER
from ...tokens import Doc


@pytest.mark.xfail
def test_issue850():
    matcher = Matcher(Vocab())
    IS_ANY_TOKEN = matcher.vocab.add_flag(lambda x: True)
    matcher.add_pattern(
        "FarAway",
        [
            {LOWER: "bob"},
            {'OP': '*', IS_ANY_TOKEN: True},
            {LOWER: 'frank'}
        ])
    doc = Doc(matcher.vocab, words=['bob', 'and', 'and', 'cat', 'frank'])
    match = matcher(doc)
    assert len(match) == 1
    start, end, label, ent_id = match
    assert start == 0
    assert end == 4
Add test for 850: Matcher fails on zero-or-more.
Add test for 850: Matcher fails on zero-or-more.
Python
mit
Gregory-Howard/spaCy,honnibal/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,recognai/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,explosion/spaCy,raphael0202/spaCy,aikramer2/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,spacy-io/spaCy,spacy-io/spaCy,spacy-io/spaCy,honnibal/spaCy,recognai/spaCy,aikramer2/spaCy,explosion/spaCy,raphael0202/spaCy,aikramer2/spaCy,Gregory-Howard/spaCy,raphael0202/spaCy,explosion/spaCy,spacy-io/spaCy,spacy-io/spaCy,raphael0202/spaCy,explosion/spaCy,explosion/spaCy,oroszgy/spaCy.hu,honnibal/spaCy,Gregory-Howard/spaCy,oroszgy/spaCy.hu,recognai/spaCy,oroszgy/spaCy.hu,recognai/spaCy,raphael0202/spaCy,oroszgy/spaCy.hu,raphael0202/spaCy,recognai/spaCy,spacy-io/spaCy,aikramer2/spaCy,honnibal/spaCy,recognai/spaCy,explosion/spaCy
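One note on the test body: matcher(doc) returns a list of match tuples, so the bare unpacking at the end would raise even if the '*' operator worked. Presumably the intended final lines were something like the following (hypothetical; the test is marked xfail either way):

    start, end, label, ent_id = match[0]
    assert start == 0
    assert end == 4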
25f1e9d2b28c98dd8ccfeaab73179fc003deb6f5
tools/csmith-gen-many.py
tools/csmith-gen-many.py
#!/usr/bin/python
# Copyright (c) 2013 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# This code uses csmith to generate a large amount of .c files, change the entry
# point of each from main to entry_N and then create a new main.c file that
# would be linked to all of them. csmith needs to be built in CSMITH_HOME.
# This is used to generate large executables from csmith.
#

from __future__ import print_function
import os, fileinput, re, sys

CSMITH_HOME = os.environ['CSMITH_HOME']
CSMITH_EXE = CSMITH_HOME + '/src/csmith'
DEFAULT_N = 100

def shellrun(s):
    print(s)
    os.system(s)

def main(args):
    if len(args) > 0:
        N = int(args[0])
    else:
        N = DEFAULT_N
    n = 0
    while n < N:
        cobjname = '/tmp/entry_obj_{}.c'.format(n)
        entryname = 'entry_{}'.format(n)
        shellrun(CSMITH_EXE + ' --no-argc --max-funcs 40 --no-volatiles' +
                 ' --no-structs > ' + cobjname)
        # Retry if the file is too small
        if os.path.getsize(cobjname) < 400 * 1000:
            print('Redoing...')
        else:
            for line in fileinput.input(cobjname, inplace=True):
                fixed_line = re.sub('int main', 'int ' + entryname, line)
                sys.stdout.write(fixed_line)
            n += 1
    print('Creating main file')
    with open('/tmp/main.c', 'w') as of:
        for n in range(N):
            of.write('int entry_{}(void);\n'.format(n))
        of.write('int main(void) {\n')
        for n in range(N):
            of.write('  entry_{}();\n'.format(n))
        of.write('  return 0;\n}\n')

if __name__ == '__main__':
    main(sys.argv[1:])
Add a script for driving csmith to generate large compilation inputs.
Add a script for driving csmith to generate large compilation inputs. BUG=None R=dschuff@chromium.org Review URL: https://codereview.chromium.org/91043004 git-svn-id: 721b910a23eff8a86f00c8fd261a7587cddf18f8@12466 fcba33aa-ac0c-11dd-b9e7-8d5594d729c2
Python
bsd-3-clause
sbc100/native_client,sbc100/native_client,sbc100/native_client,sbc100/native_client,sbc100/native_client,sbc100/native_client
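The script stops after writing /tmp/main.c; the compile-and-link step it alludes to ("would be linked to all of them") is left to the caller. A hedged sketch of that step — the compiler choice and the csmith runtime include path are assumptions:

    import os, subprocess

    N = 100                                           # must match the generation run
    sources = ['/tmp/main.c'] + ['/tmp/entry_obj_{}.c'.format(n) for n in range(N)]
    subprocess.check_call(['gcc', '-O2',
                           '-I', os.environ['CSMITH_HOME'] + '/runtime',
                           '-o', '/tmp/csmith_big'] + sources)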
09a51971d1350fcdaa0d863c3b15d142302a7516
scripts/request_capabilities.py
scripts/request_capabilities.py
from bluebottle.clients.models import Client
from bluebottle.clients.utils import LocalTenant

from bluebottle.funding_stripe.utils import stripe
from bluebottle.funding_stripe.models import StripePayoutAccount


def run(*args):
    for client in Client.objects.all():
        with LocalTenant(client):
            for account in StripePayoutAccount.objects.filter(
                account_id__isnull=False
            ):
                try:
                    stripe.Account.modify(
                        account.account_id,
                        requested_capabilities=["legacy_payments", "transfers"],
                    )
                    print account.account.capabilities
                except Exception, e:
                    print e
Add script that requests new capabilities
Add script that requests new capabilities
Python
bsd-3-clause
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
4719167b6ee1c6dd276218c84b15b9cba4fefc2e
config/pox_sync.py
config/pox_sync.py
from config.experiment_config_lib import ControllerConfig
from sts.control_flow.fuzzer import Fuzzer
from sts.input_traces.input_logger import InputLogger
from sts.simulation_state import SimulationConfig
from sts.invariant_checker import InvariantChecker
from sts.topology import MeshTopology, BufferedPatchPanel
from sts.happensbefore.hb_logger import HappensBeforeLogger
from sts.control_flow.interactive import Interactive

# Use POX as our controller
start_cmd = ('''./pox.py --verbose --no-cli sts.syncproto.pox_syncer --blocking=False '''
             '''forwarding.l2_learning '''
             '''sts.util.socket_mux.pox_monkeypatcher '''
             '''openflow.of_01 --address=/tmp/sts_socket_pipe''')
controllers = [ControllerConfig(start_cmd, address="/tmp/sts_socket_pipe", cwd="pox",
                                sync="tcp:localhost:18899")]

topology_class = MeshTopology
topology_params = "num_switches=2"

results_dir = "experiments/pox_sync"

simulation_config = SimulationConfig(controller_configs=controllers,
                                     topology_class=topology_class,
                                     topology_params=topology_params,
                                     multiplex_sockets=True,
                                     violation_persistence_threshold=None,
                                     kill_controllers_on_exit=True,
                                     hb_logger_class=HappensBeforeLogger,
                                     hb_logger_params=results_dir)

# Manual, interactive mode
control_flow = Interactive(simulation_config, input_logger=InputLogger())
Add example for using sync protocol in pox
Add example for using sync protocol in pox
Python
apache-2.0
jmiserez/sts,jmiserez/sts
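A config module like this is normally loaded by the STS simulator entry point (e.g. ./simulator.py -c config/pox_sync.py) rather than executed directly. A hypothetical direct driver, with the method name assumed from STS conventions:

    if __name__ == '__main__':
        # STS normally imports the config and kicks off the control flow
        # itself; simulate() is an assumption about that entry point.
        control_flow.simulate()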