| column | stats |
|---|---|
| commit | stringlengths 40–40 |
| old_file | stringlengths 4–118 |
| new_file | stringlengths 4–118 |
| old_contents | stringlengths 0–2.94k |
| new_contents | stringlengths 1–4.43k |
| subject | stringlengths 15–444 |
| message | stringlengths 16–3.45k |
| lang | stringclasses 1 value |
| license | stringclasses 13 values |
| repos | stringlengths 5–43.2k |
| prompt | stringlengths 17–4.58k |
| response | stringlengths 1–4.43k |
| prompt_tagged | stringlengths 58–4.62k |
| response_tagged | stringlengths 1–4.43k |
| text | stringlengths 132–7.29k |
| text_tagged | stringlengths 173–7.33k |
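The schema above matches a Hugging Face `datasets`-style preview (per-column string-length ranges and class counts). As a minimal, hedged sketch of how a dataset with this schema could be loaded and inspected (the dataset ID `your-org/commit-code-pairs` is a placeholder, not taken from this page):

```python
# Minimal sketch: load a dataset with the schema above and peek at one row.
# ASSUMPTION: "your-org/commit-code-pairs" is a placeholder dataset ID,
# not the real identifier for this dataset.
from datasets import load_dataset

ds = load_dataset("your-org/commit-code-pairs", split="train")
print(ds.column_names)            # commit, old_file, new_file, ..., text_tagged
row = ds[0]
print(row["subject"])             # one-line commit subject
print(row["new_contents"][:200])  # start of the post-commit file contents
```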
1a73bdd12bbe504e832d18959f588c2f71d02031
|
lib_common/src/d1_common/typing.py
|
lib_common/src/d1_common/typing.py
|
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for type annotations in d1_common.
Types for DataONE objects are provided along with all the contents of Python's standard
``typing`` module.
"""
# noinspection PyUnresolvedReferences
from typing import *
D1Client = NewType("D1Client", Any)
AsyncD1Client = NewType("AsyncD1Client", Any)
Checksum = NewType("Checksum", Any)
|
Add placeholder for adding type hinting using the new syntax in Python.
|
Add placeholder for adding type hinting using the new syntax in Python.
|
Python
|
apache-2.0
|
DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python,DataONEorg/d1_python
|
Add placeholder for adding type hinting using the new syntax in Python.
|
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for type annotations in d1_common.
Types for DataONE objects are provided along with all the contents of Python's standard
``typing`` module.
"""
# noinspection PyUnresolvedReferences
from typing import *
D1Client = NewType("D1Client", Any)
AsyncD1Client = NewType("AsyncD1Client", Any)
Checksum = NewType("Checksum", Any)
|
<commit_before><commit_msg>Add placeholder for adding type hinting using the new syntax in Python.<commit_after>
|
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for type annotations in d1_common.
Types for DataONE objects are provided along with all the contents of Python's standard
``typing`` module.
"""
# noinspection PyUnresolvedReferences
from typing import *
D1Client = NewType("D1Client", Any)
AsyncD1Client = NewType("AsyncD1Client", Any)
Checksum = NewType("Checksum", Any)
|
Add placeholder for adding type hinting using the new syntax in Python.# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for type annotations in d1_common.
Types for DataONE objects are provided along with all the contents of Python's standard
``typing`` module.
"""
# noinspection PyUnresolvedReferences
from typing import *
D1Client = NewType("D1Client", Any)
AsyncD1Client = NewType("AsyncD1Client", Any)
Checksum = NewType("Checksum", Any)
|
<commit_before><commit_msg>Add placeholder for adding type hinting using the new syntax in Python.<commit_after># This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for type annotations in d1_common.
Types for DataONE objects are provided along with all the contents of Python's standard
``typing`` module.
"""
# noinspection PyUnresolvedReferences
from typing import *
D1Client = NewType("D1Client", Any)
AsyncD1Client = NewType("AsyncD1Client", Any)
Checksum = NewType("Checksum", Any)
|
|
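The first row also shows how the derived columns relate to the raw ones: `prompt` mirrors `message`, `response` mirrors `new_contents`, and the `text`/`text_tagged` columns concatenate prompt and response with no separator (which is why subject lines fuse directly into code, e.g. `...in Python.# This work was created...`). A hypothetical sketch of that assembly, inferred from the row above rather than from any documentation:

```python
# Hypothetical reconstruction of the tagged columns, inferred from row 1.
# The control tokens and the separator-free concatenation are assumptions
# based on the visible row, not on official documentation.
def build_tagged(old_contents: str, message: str, new_contents: str):
    prompt_tagged = f"<commit_before>{old_contents}<commit_msg>{message}<commit_after>"
    response_tagged = new_contents                 # response is the post-commit file
    text_tagged = prompt_tagged + response_tagged  # concatenated with no separator
    return prompt_tagged, response_tagged, text_tagged

p, r, t = build_tagged("", "Add placeholder for adding type hinting using the new syntax in Python.",
                       "# This work was created by participants in the DataONE project ...")
print(t[:100])
```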
0525df595e2252f7165271b62cb52dd0d8eff651
|
awx/main/tests/unit/notifications/test_rocketchat.py
|
awx/main/tests/unit/notifications/test_rocketchat.py
|
import mock
import pytest
from django.core.mail.message import EmailMessage
import awx.main.notifications.rocketchat_backend as rocketchat_backend


def test_send_messages():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend()
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_username():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_username='testuser')
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"username": "testuser", "text": "test subject"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_icon_url():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_icon_url='http://example.com')
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject", "icon_url": "http://example.com"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_no_verify_ssl():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_no_verify_ssl=True)
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject"}', verify=False)
        assert sent_messages == 1
|
Add unit test for rocket.chat notifications
|
Add unit test for rocket.chat notifications
Signed-off-by: Jeandre Le Roux <5c0729d9afb0a0db4c3e6949c77c15e4446fbca3@theblazehen.com>
|
Python
|
apache-2.0
|
wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,wwitzel3/awx
|
Add unit test for rocket.chat notifications
Signed-off-by: Jeandre Le Roux <5c0729d9afb0a0db4c3e6949c77c15e4446fbca3@theblazehen.com>
|
import mock
import pytest
from django.core.mail.message import EmailMessage
import awx.main.notifications.rocketchat_backend as rocketchat_backend


def test_send_messages():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend()
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_username():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_username='testuser')
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"username": "testuser", "text": "test subject"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_icon_url():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_icon_url='http://example.com')
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject", "icon_url": "http://example.com"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_no_verify_ssl():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_no_verify_ssl=True)
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject"}', verify=False)
        assert sent_messages == 1
|
<commit_before><commit_msg>Add unit test for rocket.chat notifications
Signed-off-by: Jeandre Le Roux <5c0729d9afb0a0db4c3e6949c77c15e4446fbca3@theblazehen.com><commit_after>
|
import mock
import pytest
from django.core.mail.message import EmailMessage
import awx.main.notifications.rocketchat_backend as rocketchat_backend


def test_send_messages():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend()
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_username():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_username='testuser')
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"username": "testuser", "text": "test subject"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_icon_url():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_icon_url='http://example.com')
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject", "icon_url": "http://example.com"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_no_verify_ssl():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_no_verify_ssl=True)
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject"}', verify=False)
        assert sent_messages == 1
|
Add unit test for rocket.chat notifications
Signed-off-by: Jeandre Le Roux <5c0729d9afb0a0db4c3e6949c77c15e4446fbca3@theblazehen.com>import mock
import pytest
from django.core.mail.message import EmailMessage
import awx.main.notifications.rocketchat_backend as rocketchat_backend


def test_send_messages():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend()
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_username():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_username='testuser')
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"username": "testuser", "text": "test subject"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_icon_url():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_icon_url='http://example.com')
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject", "icon_url": "http://example.com"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_no_verify_ssl():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_no_verify_ssl=True)
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject"}', verify=False)
        assert sent_messages == 1
|
<commit_before><commit_msg>Add unit test for rocket.chat notifications
Signed-off-by: Jeandre Le Roux <5c0729d9afb0a0db4c3e6949c77c15e4446fbca3@theblazehen.com><commit_after>import mock
import pytest
from django.core.mail.message import EmailMessage
import awx.main.notifications.rocketchat_backend as rocketchat_backend


def test_send_messages():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend()
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_username():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_username='testuser')
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"username": "testuser", "text": "test subject"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_icon_url():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_icon_url='http://example.com')
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject", "icon_url": "http://example.com"}', verify=True)
        assert sent_messages == 1


def test_send_messages_with_no_verify_ssl():
    with mock.patch('awx.main.notifications.rocketchat_backend.requests') as requests_mock:
        backend = rocketchat_backend.RocketChatBackend(rocketchat_no_verify_ssl=True)
        message = EmailMessage('test subject', 'test body', [], ['http://example.com', ])
        sent_messages = backend.send_messages([message, ])
        requests_mock.post.assert_called_once_with('http://example.com', data='{"text": "test subject"}', verify=False)
        assert sent_messages == 1
|
|
766cc675c52488e658a0884f0e0b5c8b044731b2
|
lib/bridgedb/test/test_Storage.py
|
lib/bridgedb/test/test_Storage.py
|
#!/usr/bin/env python
"""Unittests for the :mod:`bridgedb.Storage` module."""

from twisted.python import log
from twisted.trial import unittest
import bridgedb.Storage as Storage
from twisted.internet import reactor
from twisted.internet.threads import deferToThread
import os
import threading
from time import sleep


class DatabaseTest(unittest.TestCase):
    def setUp(self):
        self.dbfname = 'test-bridgedb.sqlite'
        Storage.setDBFilename(self.dbfname)

    def tearDown(self):
        if os.path.isfile(self.dbfname):
            os.unlink(self.dbfname)
        Storage.clearGlobalDB()

    def _runAndDie(self, timeout, func):
        with func():
            sleep(timeout)

    def _cb_assertTrue(self, result):
        self.assertTrue(result)

    def _cb_assertFalse(self, result):
        self.assertFalse(result)

    def _eb_Failure(self, failure):
        self.fail(failure)

    def test_getDB_FalseWhenLocked(self):
        Storage._LOCK = threading.Lock()
        Storage._LOCK.acquire()
        self.assertFalse(Storage._LOCK.acquire(False))

    def test_getDB_AcquireLock(self):
        Storage.initializeDBLock()
        with Storage.getDB() as db:
            self.assertIsInstance(db, Storage.Database)
            self.assertTrue(Storage.dbIsLocked())
            self.assertEqual(db, Storage._OPENED_DB)

    def test_getDB_ConcurrencyLock(self):
        timeout = 1
        d1 = deferToThread(self._runAndDie, timeout, Storage.getDB)
        d1.addCallback(self._cb_assertFalse)
        d1.addErrback(self._eb_Failure)
        d2 = deferToThread(Storage.getDB, False)
        d2.addCallback(self._cb_assertFalse)
        d2.addErrback(self._eb_Failure)
        d2.addCallback(self._cb_assertTrue, Storage.getDB(False))
|
Add unit tests for bridgedb.Storage
|
Add unit tests for bridgedb.Storage
|
Python
|
bsd-3-clause
|
mmaker/bridgedb,mmaker/bridgedb,pagea/bridgedb,pagea/bridgedb
|
Add unit tests for bridgedb.Storage
|
#!/usr/bin/env python
"""Unittests for the :mod:`bridgedb.Storage` module."""

from twisted.python import log
from twisted.trial import unittest
import bridgedb.Storage as Storage
from twisted.internet import reactor
from twisted.internet.threads import deferToThread
import os
import threading
from time import sleep


class DatabaseTest(unittest.TestCase):
    def setUp(self):
        self.dbfname = 'test-bridgedb.sqlite'
        Storage.setDBFilename(self.dbfname)

    def tearDown(self):
        if os.path.isfile(self.dbfname):
            os.unlink(self.dbfname)
        Storage.clearGlobalDB()

    def _runAndDie(self, timeout, func):
        with func():
            sleep(timeout)

    def _cb_assertTrue(self, result):
        self.assertTrue(result)

    def _cb_assertFalse(self, result):
        self.assertFalse(result)

    def _eb_Failure(self, failure):
        self.fail(failure)

    def test_getDB_FalseWhenLocked(self):
        Storage._LOCK = threading.Lock()
        Storage._LOCK.acquire()
        self.assertFalse(Storage._LOCK.acquire(False))

    def test_getDB_AcquireLock(self):
        Storage.initializeDBLock()
        with Storage.getDB() as db:
            self.assertIsInstance(db, Storage.Database)
            self.assertTrue(Storage.dbIsLocked())
            self.assertEqual(db, Storage._OPENED_DB)

    def test_getDB_ConcurrencyLock(self):
        timeout = 1
        d1 = deferToThread(self._runAndDie, timeout, Storage.getDB)
        d1.addCallback(self._cb_assertFalse)
        d1.addErrback(self._eb_Failure)
        d2 = deferToThread(Storage.getDB, False)
        d2.addCallback(self._cb_assertFalse)
        d2.addErrback(self._eb_Failure)
        d2.addCallback(self._cb_assertTrue, Storage.getDB(False))
|
<commit_before><commit_msg>Add unit tests for bridgedb.Storage<commit_after>
|
#!/usr/bin/env python
"""Unittests for the :mod:`bridgedb.Storage` module."""

from twisted.python import log
from twisted.trial import unittest
import bridgedb.Storage as Storage
from twisted.internet import reactor
from twisted.internet.threads import deferToThread
import os
import threading
from time import sleep


class DatabaseTest(unittest.TestCase):
    def setUp(self):
        self.dbfname = 'test-bridgedb.sqlite'
        Storage.setDBFilename(self.dbfname)

    def tearDown(self):
        if os.path.isfile(self.dbfname):
            os.unlink(self.dbfname)
        Storage.clearGlobalDB()

    def _runAndDie(self, timeout, func):
        with func():
            sleep(timeout)

    def _cb_assertTrue(self, result):
        self.assertTrue(result)

    def _cb_assertFalse(self, result):
        self.assertFalse(result)

    def _eb_Failure(self, failure):
        self.fail(failure)

    def test_getDB_FalseWhenLocked(self):
        Storage._LOCK = threading.Lock()
        Storage._LOCK.acquire()
        self.assertFalse(Storage._LOCK.acquire(False))

    def test_getDB_AcquireLock(self):
        Storage.initializeDBLock()
        with Storage.getDB() as db:
            self.assertIsInstance(db, Storage.Database)
            self.assertTrue(Storage.dbIsLocked())
            self.assertEqual(db, Storage._OPENED_DB)

    def test_getDB_ConcurrencyLock(self):
        timeout = 1
        d1 = deferToThread(self._runAndDie, timeout, Storage.getDB)
        d1.addCallback(self._cb_assertFalse)
        d1.addErrback(self._eb_Failure)
        d2 = deferToThread(Storage.getDB, False)
        d2.addCallback(self._cb_assertFalse)
        d2.addErrback(self._eb_Failure)
        d2.addCallback(self._cb_assertTrue, Storage.getDB(False))
|
Add unit tests for bridgedb.Storage#!/usr/bin/env python
"""Unittests for the :mod:`bridgedb.Storage` module."""
from twisted.python import log
from twisted.trial import unittest
import bridgedb.Storage as Storage
from twisted.internet import reactor
from twisted.internet.threads import deferToThread
import os
import threading
from time import sleep
class DatabaseTest(unittest.TestCase):
def setUp(self):
self.dbfname = 'test-bridgedb.sqlite'
Storage.setDBFilename(self.dbfname)
def tearDown(self):
if os.path.isfile(self.dbfname):
os.unlink(self.dbfname)
Storage.clearGlobalDB()
def _runAndDie(self, timeout, func):
with func():
sleep(timeout)
def _cb_assertTrue(self, result):
self.assertTrue(result)
def _cb_assertFalse(self, result):
self.assertFalse(result)
def _eb_Failure(self, failure):
self.fail(failure)
def test_getDB_FalseWhenLocked(self):
Storage._LOCK = threading.Lock()
Storage._LOCK.acquire()
self.assertFalse(Storage._LOCK.acquire(False))
def test_getDB_AcquireLock(self):
Storage.initializeDBLock()
with Storage.getDB() as db:
self.assertIsInstance(db, Storage.Database)
self.assertTrue(Storage.dbIsLocked())
self.assertEqual(db, Storage._OPENED_DB)
def test_getDB_ConcurrencyLock(self):
timeout = 1
d1 = deferToThread(self._runAndDie, timeout, Storage.getDB)
d1.addCallback(self._cb_assertFalse)
d1.addErrback(self._eb_Failure)
d2 = deferToThread(Storage.getDB, False)
d2.addCallback(self._cb_assertFalse)
d2.addErrback(self._eb_Failure)
d2.addCallback(self._cb_assertTrue, Storage.getDB(False))
|
<commit_before><commit_msg>Add unit tests for bridgedb.Storage<commit_after>#!/usr/bin/env python
"""Unittests for the :mod:`bridgedb.Storage` module."""
from twisted.python import log
from twisted.trial import unittest
import bridgedb.Storage as Storage
from twisted.internet import reactor
from twisted.internet.threads import deferToThread
import os
import threading
from time import sleep
class DatabaseTest(unittest.TestCase):
def setUp(self):
self.dbfname = 'test-bridgedb.sqlite'
Storage.setDBFilename(self.dbfname)
def tearDown(self):
if os.path.isfile(self.dbfname):
os.unlink(self.dbfname)
Storage.clearGlobalDB()
def _runAndDie(self, timeout, func):
with func():
sleep(timeout)
def _cb_assertTrue(self, result):
self.assertTrue(result)
def _cb_assertFalse(self, result):
self.assertFalse(result)
def _eb_Failure(self, failure):
self.fail(failure)
def test_getDB_FalseWhenLocked(self):
Storage._LOCK = threading.Lock()
Storage._LOCK.acquire()
self.assertFalse(Storage._LOCK.acquire(False))
def test_getDB_AcquireLock(self):
Storage.initializeDBLock()
with Storage.getDB() as db:
self.assertIsInstance(db, Storage.Database)
self.assertTrue(Storage.dbIsLocked())
self.assertEqual(db, Storage._OPENED_DB)
def test_getDB_ConcurrencyLock(self):
timeout = 1
d1 = deferToThread(self._runAndDie, timeout, Storage.getDB)
d1.addCallback(self._cb_assertFalse)
d1.addErrback(self._eb_Failure)
d2 = deferToThread(Storage.getDB, False)
d2.addCallback(self._cb_assertFalse)
d2.addErrback(self._eb_Failure)
d2.addCallback(self._cb_assertTrue, Storage.getDB(False))
|
|
84b31bb02746dec1667cc93a189c6e1c40ffac28
|
studygroups/migrations/0015_auto_20150430_0126.py
|
studygroups/migrations/0015_auto_20150430_0126.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('studygroups', '0014_application_accepted_at'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='studygroupsignup',
            unique_together=set([]),
        ),
    ]
|
Remove unique requirement for study group applications
|
Remove unique requirement for study group applications
|
Python
|
mit
|
p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles,p2pu/learning-circles
|
Remove unique requirement for study group applications
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('studygroups', '0014_application_accepted_at'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='studygroupsignup',
            unique_together=set([]),
        ),
    ]
|
<commit_before><commit_msg>Remove unique requirement for study group applications<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('studygroups', '0014_application_accepted_at'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='studygroupsignup',
            unique_together=set([]),
        ),
    ]
|
Remove unique requirement for study group applications# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('studygroups', '0014_application_accepted_at'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='studygroupsignup',
            unique_together=set([]),
        ),
    ]
|
<commit_before><commit_msg>Remove unique requirement for study group applications<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations


class Migration(migrations.Migration):

    dependencies = [
        ('studygroups', '0014_application_accepted_at'),
    ]

    operations = [
        migrations.AlterUniqueTogether(
            name='studygroupsignup',
            unique_together=set([]),
        ),
    ]
|
|
183faa3b13d933a201f909465f8fcd432d616e04
|
tests/basics/class_inherit_mul.py
|
tests/basics/class_inherit_mul.py
|
class A:
    def __init__(self, x):
        print('A init', x)
        self.x = x

    def f(self):
        print(self.x)

    def f2(self):
        print(self.x)


class B:
    def __init__(self, x):
        print('B init', x)
        self.x = x

    def f(self):
        print(self.x)

    def f3(self):
        print(self.x)


class Sub(A, B):
    def __init__(self):
        A.__init__(self, 1)
        B.__init__(self, 2)
        print('Sub init')

    def g(self):
        print(self.x)


o = Sub()
print(o.x)
o.f()
o.f2()
o.f3()
|
Add testcase for multiple inheritance.
|
tests: Add testcase for multiple inheritance.
|
Python
|
mit
|
martinribelotta/micropython,vitiral/micropython,micropython/micropython-esp32,torwag/micropython,rubencabrera/micropython,trezor/micropython,puuu/micropython,neilh10/micropython,xyb/micropython,chrisdearman/micropython,dinau/micropython,slzatz/micropython,praemdonck/micropython,ruffy91/micropython,ChuckM/micropython,henriknelson/micropython,SHA2017-badge/micropython-esp32,drrk/micropython,adamkh/micropython,suda/micropython,matthewelse/micropython,redbear/micropython,xyb/micropython,galenhz/micropython,mhoffma/micropython,dmazzella/micropython,mpalomer/micropython,blmorris/micropython,kerneltask/micropython,blazewicz/micropython,SungEun-Steve-Kim/test-mp,blmorris/micropython,vitiral/micropython,ahotam/micropython,feilongfl/micropython,lowRISC/micropython,paul-xxx/micropython,Peetz0r/micropython-esp32,orionrobots/micropython,ryannathans/micropython,ahotam/micropython,ruffy91/micropython,drrk/micropython,misterdanb/micropython,Peetz0r/micropython-esp32,adafruit/micropython,stonegithubs/micropython,mhoffma/micropython,omtinez/micropython,kerneltask/micropython,ganshun666/micropython,AriZuu/micropython,xhat/micropython,redbear/micropython,jimkmc/micropython,PappaPeppar/micropython,chrisdearman/micropython,Vogtinator/micropython,aethaniel/micropython,SungEun-Steve-Kim/test-mp,torwag/micropython,selste/micropython,mpalomer/micropython,redbear/micropython,hiway/micropython,praemdonck/micropython,tobbad/micropython,danicampora/micropython,jmarcelino/pycom-micropython,kerneltask/micropython,omtinez/micropython,aitjcize/micropython,micropython/micropython-esp32,AriZuu/micropython,jmarcelino/pycom-micropython,aitjcize/micropython,oopy/micropython,slzatz/micropython,dxxb/micropython,misterdanb/micropython,jlillest/micropython,cwyark/micropython,utopiaprince/micropython,pozetroninc/micropython,TDAbboud/micropython,tdautc19841202/micropython,MrSurly/micropython-esp32,blmorris/micropython,xhat/micropython,ceramos/micropython,rubencabrera/micropython,stonegithubs/micropython,vriera/micropython,alex-march/micropython,kerneltask/micropython,neilh10/micropython,firstval/micropython,micropython/micropython-esp32,EcmaXp/micropython,ceramos/micropython,supergis/micropython,ryannathans/micropython,danicampora/micropython,turbinenreiter/micropython,neilh10/micropython,trezor/micropython,trezor/micropython,drrk/micropython,mianos/micropython,puuu/micropython,henriknelson/micropython,xyb/micropython,mgyenik/micropython,MrSurly/micropython,supergis/micropython,lbattraw/micropython,KISSMonX/micropython,orionrobots/micropython,AriZuu/micropython,adafruit/circuitpython,alex-robbins/micropython,infinnovation/micropython,swegener/micropython,skybird6672/micropython,feilongfl/micropython,danicampora/micropython,ChuckM/micropython,ahotam/micropython,misterdanb/micropython,toolmacher/micropython,MrSurly/micropython-esp32,heisewangluo/micropython,xyb/micropython,warner83/micropython,dinau/micropython,tobbad/micropython,tobbad/micropython,pramasoul/micropython,vitiral/micropython,alex-march/micropython,utopiaprince/micropython,methoxid/micropystat,ChuckM/micropython,ganshun666/micropython,slzatz/micropython,cnoviello/micropython,selste/micropython,HenrikSolver/micropython,dxxb/micropython,ChuckM/micropython,KISSMonX/micropython,SHA2017-badge/micropython-esp32,xyb/micropython,cwyark/micropython,firstval/micropython,vriera/micropython,martinribelotta/micropython,lbattraw/micropython,deshipu/micropython,ernesto-g/micropython,turbinenreiter/micropython,redbear/micropython,kostyll/micropython,EcmaXp/micropython,lbattraw/micropython
,ernesto-g/micropython,adafruit/circuitpython,paul-xxx/micropython,ruffy91/micropython,emfcamp/micropython,adafruit/micropython,puuu/micropython,xhat/micropython,neilh10/micropython,alex-robbins/micropython,cloudformdesign/micropython,noahchense/micropython,deshipu/micropython,SHA2017-badge/micropython-esp32,swegener/micropython,lowRISC/micropython,cloudformdesign/micropython,oopy/micropython,paul-xxx/micropython,Timmenem/micropython,galenhz/micropython,ruffy91/micropython,pozetroninc/micropython,suda/micropython,pfalcon/micropython,SHA2017-badge/micropython-esp32,adafruit/circuitpython,mianos/micropython,cloudformdesign/micropython,KISSMonX/micropython,pramasoul/micropython,cloudformdesign/micropython,noahwilliamsson/micropython,deshipu/micropython,adamkh/micropython,lowRISC/micropython,utopiaprince/micropython,adafruit/circuitpython,warner83/micropython,turbinenreiter/micropython,martinribelotta/micropython,noahchense/micropython,ernesto-g/micropython,TDAbboud/micropython,oopy/micropython,KISSMonX/micropython,omtinez/micropython,MrSurly/micropython,dhylands/micropython,dmazzella/micropython,feilongfl/micropython,aethaniel/micropython,hosaka/micropython,mpalomer/micropython,heisewangluo/micropython,SungEun-Steve-Kim/test-mp,torwag/micropython,mgyenik/micropython,mhoffma/micropython,blazewicz/micropython,methoxid/micropystat,trezor/micropython,pramasoul/micropython,dxxb/micropython,trezor/micropython,adamkh/micropython,toolmacher/micropython,oopy/micropython,puuu/micropython,selste/micropython,ericsnowcurrently/micropython,methoxid/micropystat,adafruit/micropython,jimkmc/micropython,aitjcize/micropython,xuxiaoxin/micropython,martinribelotta/micropython,MrSurly/micropython-esp32,ceramos/micropython,ryannathans/micropython,aitjcize/micropython,tuc-osg/micropython,adafruit/circuitpython,feilongfl/micropython,alex-robbins/micropython,ganshun666/micropython,alex-robbins/micropython,KISSMonX/micropython,TDAbboud/micropython,jimkmc/micropython,henriknelson/micropython,redbear/micropython,matthewelse/micropython,martinribelotta/micropython,cwyark/micropython,vriera/micropython,bvernoux/micropython,deshipu/micropython,Peetz0r/micropython-esp32,bvernoux/micropython,ericsnowcurrently/micropython,dinau/micropython,blmorris/micropython,henriknelson/micropython,tralamazza/micropython,xhat/micropython,mpalomer/micropython,noahchense/micropython,heisewangluo/micropython,mhoffma/micropython,oopy/micropython,pfalcon/micropython,vriera/micropython,Timmenem/micropython,skybird6672/micropython,suda/micropython,firstval/micropython,lbattraw/micropython,tuc-osg/micropython,Peetz0r/micropython-esp32,cloudformdesign/micropython,MrSurly/micropython,pfalcon/micropython,tralamazza/micropython,HenrikSolver/micropython,Vogtinator/micropython,vitiral/micropython,noahwilliamsson/micropython,aethaniel/micropython,rubencabrera/micropython,ceramos/micropython,emfcamp/micropython,bvernoux/micropython,jlillest/micropython,turbinenreiter/micropython,pfalcon/micropython,drrk/micropython,ganshun666/micropython,danicampora/micropython,TDAbboud/micropython,adafruit/micropython,Timmenem/micropython,aethaniel/micropython,supergis/micropython,pozetroninc/micropython,dinau/micropython,tuc-osg/micropython,galenhz/micropython,cwyark/micropython,blazewicz/micropython,ericsnowcurrently/micropython,PappaPeppar/micropython,EcmaXp/micropython,kostyll/micropython,jmarcelino/pycom-micropython,toolmacher/micropython,HenrikSolver/micropython,aethaniel/micropython,dhylands/micropython,supergis/micropython,ahotam/micropython,lowRISC/micropython,tra
lamazza/micropython,PappaPeppar/micropython,swegener/micropython,praemdonck/micropython,pozetroninc/micropython,noahwilliamsson/micropython,slzatz/micropython,ganshun666/micropython,orionrobots/micropython,warner83/micropython,hiway/micropython,Vogtinator/micropython,matthewelse/micropython,tdautc19841202/micropython,ericsnowcurrently/micropython,rubencabrera/micropython,omtinez/micropython,tralamazza/micropython,noahchense/micropython,Vogtinator/micropython,jimkmc/micropython,xuxiaoxin/micropython,alex-march/micropython,tdautc19841202/micropython,cnoviello/micropython,galenhz/micropython,feilongfl/micropython,dxxb/micropython,ernesto-g/micropython,matthewelse/micropython,jlillest/micropython,utopiaprince/micropython,pramasoul/micropython,matthewelse/micropython,omtinez/micropython,jlillest/micropython,henriknelson/micropython,PappaPeppar/micropython,vriera/micropython,dhylands/micropython,puuu/micropython,orionrobots/micropython,mianos/micropython,cnoviello/micropython,ruffy91/micropython,ChuckM/micropython,mgyenik/micropython,neilh10/micropython,EcmaXp/micropython,kerneltask/micropython,danicampora/micropython,stonegithubs/micropython,suda/micropython,xuxiaoxin/micropython,warner83/micropython,pfalcon/micropython,rubencabrera/micropython,hosaka/micropython,jimkmc/micropython,jmarcelino/pycom-micropython,dxxb/micropython,lbattraw/micropython,dhylands/micropython,noahwilliamsson/micropython,infinnovation/micropython,mpalomer/micropython,SHA2017-badge/micropython-esp32,Peetz0r/micropython-esp32,PappaPeppar/micropython,drrk/micropython,stonegithubs/micropython,mhoffma/micropython,pramasoul/micropython,hosaka/micropython,xuxiaoxin/micropython,MrSurly/micropython,swegener/micropython,HenrikSolver/micropython,Timmenem/micropython,HenrikSolver/micropython,ahotam/micropython,galenhz/micropython,mgyenik/micropython,pozetroninc/micropython,lowRISC/micropython,skybird6672/micropython,blazewicz/micropython,skybird6672/micropython,selste/micropython,micropython/micropython-esp32,slzatz/micropython,cnoviello/micropython,Timmenem/micropython,misterdanb/micropython,MrSurly/micropython-esp32,dmazzella/micropython,MrSurly/micropython,mianos/micropython,tobbad/micropython,infinnovation/micropython,ryannathans/micropython,torwag/micropython,ryannathans/micropython,emfcamp/micropython,tdautc19841202/micropython,warner83/micropython,supergis/micropython,adamkh/micropython,swegener/micropython,adafruit/micropython,TDAbboud/micropython,methoxid/micropystat,bvernoux/micropython,Vogtinator/micropython,AriZuu/micropython,adafruit/circuitpython,deshipu/micropython,hosaka/micropython,hiway/micropython,vitiral/micropython,micropython/micropython-esp32,SungEun-Steve-Kim/test-mp,infinnovation/micropython,mianos/micropython,suda/micropython,MrSurly/micropython-esp32,hiway/micropython,kostyll/micropython,firstval/micropython,emfcamp/micropython,emfcamp/micropython,torwag/micropython,praemdonck/micropython,infinnovation/micropython,adamkh/micropython,dhylands/micropython,xhat/micropython,noahchense/micropython,ceramos/micropython,mgyenik/micropython,tdautc19841202/micropython,EcmaXp/micropython,paul-xxx/micropython,alex-march/micropython,dmazzella/micropython,hiway/micropython,skybird6672/micropython,praemdonck/micropython,matthewelse/micropython,heisewangluo/micropython,orionrobots/micropython,cnoviello/micropython,utopiaprince/micropython,toolmacher/micropython,tobbad/micropython,paul-xxx/micropython,stonegithubs/micropython,blazewicz/micropython,jmarcelino/pycom-micropython,bvernoux/micropython,chrisdearman/micropython,c
wyark/micropython,chrisdearman/micropython,noahwilliamsson/micropython,firstval/micropython,heisewangluo/micropython,ernesto-g/micropython,kostyll/micropython,SungEun-Steve-Kim/test-mp,tuc-osg/micropython,alex-robbins/micropython,selste/micropython,dinau/micropython,toolmacher/micropython,kostyll/micropython,tuc-osg/micropython,alex-march/micropython,ericsnowcurrently/micropython,blmorris/micropython,methoxid/micropystat,xuxiaoxin/micropython,misterdanb/micropython,chrisdearman/micropython,AriZuu/micropython,hosaka/micropython,jlillest/micropython,turbinenreiter/micropython
|
tests: Add testcase for multiple inheritance.
|
class A:
    def __init__(self, x):
        print('A init', x)
        self.x = x

    def f(self):
        print(self.x)

    def f2(self):
        print(self.x)


class B:
    def __init__(self, x):
        print('B init', x)
        self.x = x

    def f(self):
        print(self.x)

    def f3(self):
        print(self.x)


class Sub(A, B):
    def __init__(self):
        A.__init__(self, 1)
        B.__init__(self, 2)
        print('Sub init')

    def g(self):
        print(self.x)


o = Sub()
print(o.x)
o.f()
o.f2()
o.f3()
|
<commit_before><commit_msg>tests: Add testcase for multiple inheritance.<commit_after>
|
class A:
    def __init__(self, x):
        print('A init', x)
        self.x = x

    def f(self):
        print(self.x)

    def f2(self):
        print(self.x)


class B:
    def __init__(self, x):
        print('B init', x)
        self.x = x

    def f(self):
        print(self.x)

    def f3(self):
        print(self.x)


class Sub(A, B):
    def __init__(self):
        A.__init__(self, 1)
        B.__init__(self, 2)
        print('Sub init')

    def g(self):
        print(self.x)


o = Sub()
print(o.x)
o.f()
o.f2()
o.f3()
|
tests: Add testcase for multiple inheritance.class A:
    def __init__(self, x):
        print('A init', x)
        self.x = x

    def f(self):
        print(self.x)

    def f2(self):
        print(self.x)


class B:
    def __init__(self, x):
        print('B init', x)
        self.x = x

    def f(self):
        print(self.x)

    def f3(self):
        print(self.x)


class Sub(A, B):
    def __init__(self):
        A.__init__(self, 1)
        B.__init__(self, 2)
        print('Sub init')

    def g(self):
        print(self.x)


o = Sub()
print(o.x)
o.f()
o.f2()
o.f3()
|
<commit_before><commit_msg>tests: Add testcase for multiple inheritance.<commit_after>class A:
    def __init__(self, x):
        print('A init', x)
        self.x = x

    def f(self):
        print(self.x)

    def f2(self):
        print(self.x)


class B:
    def __init__(self, x):
        print('B init', x)
        self.x = x

    def f(self):
        print(self.x)

    def f3(self):
        print(self.x)


class Sub(A, B):
    def __init__(self):
        A.__init__(self, 1)
        B.__init__(self, 2)
        print('Sub init')

    def g(self):
        print(self.x)


o = Sub()
print(o.x)
o.f()
o.f2()
o.f3()
|
|
94de257a82a54ed5cb6a4d8c15107e889814b07f
|
examples/calculations/Gradient.py
|
examples/calculations/Gradient.py
|
# Copyright (c) 2015-2018 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Gradient
========

Use `metpy.calc.gradient`.

This example demonstrates the various ways that MetPy's gradient function
can be utilized.
"""
import numpy as np

import metpy.calc as mpcalc
from metpy.units import units

###########################################
# Create some test data to use for our example
data = np.array([[23, 24, 23],
                 [25, 26, 25],
                 [27, 28, 27],
                 [24, 25, 24]]) * units.degC

# Create an array of x position data (the coordinates of our temperature data)
x = np.array([[1, 2, 3],
              [1, 2, 3],
              [1, 2, 3],
              [1, 2, 3]]) * units.kilometer
y = np.array([[1, 1, 1],
              [2, 2, 2],
              [3, 3, 3],
              [4, 4, 4]]) * units.kilometer

###########################################
# Calculate the gradient using the coordinates of the data
grad = mpcalc.gradient(data, x=(y, x))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])

###########################################
# It's also possible that we do not have the position of data points, but know
# that they are evenly spaced. We can then specify a scalar delta value for each
# axes.
x_delta = 2 * units.km
y_delta = 1 * units.km
grad = mpcalc.gradient(data, deltas=(y_delta, x_delta))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])

###########################################
# Finally, the deltas can be arrays for unevenly spaced data.
x_deltas = np.array([[2, 3],
                     [1, 3],
                     [2, 3],
                     [1, 2]]) * units.kilometer
y_deltas = np.array([[2, 3, 1],
                     [1, 3, 2],
                     [2, 3, 1]]) * units.kilometer
grad = mpcalc.gradient(data, deltas=(y_deltas, x_deltas))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])
|
Add example of using gradient.
|
Add example of using gradient.
|
Python
|
bsd-3-clause
|
dopplershift/MetPy,ahaberlie/MetPy,Unidata/MetPy,jrleeman/MetPy,ShawnMurd/MetPy,ahaberlie/MetPy,Unidata/MetPy,jrleeman/MetPy,dopplershift/MetPy
|
Add example of using gradient.
|
# Copyright (c) 2015-2018 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Gradient
========

Use `metpy.calc.gradient`.

This example demonstrates the various ways that MetPy's gradient function
can be utilized.
"""
import numpy as np

import metpy.calc as mpcalc
from metpy.units import units

###########################################
# Create some test data to use for our example
data = np.array([[23, 24, 23],
                 [25, 26, 25],
                 [27, 28, 27],
                 [24, 25, 24]]) * units.degC

# Create an array of x position data (the coordinates of our temperature data)
x = np.array([[1, 2, 3],
              [1, 2, 3],
              [1, 2, 3],
              [1, 2, 3]]) * units.kilometer
y = np.array([[1, 1, 1],
              [2, 2, 2],
              [3, 3, 3],
              [4, 4, 4]]) * units.kilometer

###########################################
# Calculate the gradient using the coordinates of the data
grad = mpcalc.gradient(data, x=(y, x))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])

###########################################
# It's also possible that we do not have the position of data points, but know
# that they are evenly spaced. We can then specify a scalar delta value for each
# axes.
x_delta = 2 * units.km
y_delta = 1 * units.km
grad = mpcalc.gradient(data, deltas=(y_delta, x_delta))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])

###########################################
# Finally, the deltas can be arrays for unevenly spaced data.
x_deltas = np.array([[2, 3],
                     [1, 3],
                     [2, 3],
                     [1, 2]]) * units.kilometer
y_deltas = np.array([[2, 3, 1],
                     [1, 3, 2],
                     [2, 3, 1]]) * units.kilometer
grad = mpcalc.gradient(data, deltas=(y_deltas, x_deltas))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])
|
<commit_before><commit_msg>Add example of using gradient.<commit_after>
|
# Copyright (c) 2015-2018 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Gradient
========

Use `metpy.calc.gradient`.

This example demonstrates the various ways that MetPy's gradient function
can be utilized.
"""
import numpy as np

import metpy.calc as mpcalc
from metpy.units import units

###########################################
# Create some test data to use for our example
data = np.array([[23, 24, 23],
                 [25, 26, 25],
                 [27, 28, 27],
                 [24, 25, 24]]) * units.degC

# Create an array of x position data (the coordinates of our temperature data)
x = np.array([[1, 2, 3],
              [1, 2, 3],
              [1, 2, 3],
              [1, 2, 3]]) * units.kilometer
y = np.array([[1, 1, 1],
              [2, 2, 2],
              [3, 3, 3],
              [4, 4, 4]]) * units.kilometer

###########################################
# Calculate the gradient using the coordinates of the data
grad = mpcalc.gradient(data, x=(y, x))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])

###########################################
# It's also possible that we do not have the position of data points, but know
# that they are evenly spaced. We can then specify a scalar delta value for each
# axes.
x_delta = 2 * units.km
y_delta = 1 * units.km
grad = mpcalc.gradient(data, deltas=(y_delta, x_delta))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])

###########################################
# Finally, the deltas can be arrays for unevenly spaced data.
x_deltas = np.array([[2, 3],
                     [1, 3],
                     [2, 3],
                     [1, 2]]) * units.kilometer
y_deltas = np.array([[2, 3, 1],
                     [1, 3, 2],
                     [2, 3, 1]]) * units.kilometer
grad = mpcalc.gradient(data, deltas=(y_deltas, x_deltas))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])
|
Add example of using gradient.# Copyright (c) 2015-2018 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Gradient
========

Use `metpy.calc.gradient`.

This example demonstrates the various ways that MetPy's gradient function
can be utilized.
"""
import numpy as np

import metpy.calc as mpcalc
from metpy.units import units

###########################################
# Create some test data to use for our example
data = np.array([[23, 24, 23],
                 [25, 26, 25],
                 [27, 28, 27],
                 [24, 25, 24]]) * units.degC

# Create an array of x position data (the coordinates of our temperature data)
x = np.array([[1, 2, 3],
              [1, 2, 3],
              [1, 2, 3],
              [1, 2, 3]]) * units.kilometer
y = np.array([[1, 1, 1],
              [2, 2, 2],
              [3, 3, 3],
              [4, 4, 4]]) * units.kilometer

###########################################
# Calculate the gradient using the coordinates of the data
grad = mpcalc.gradient(data, x=(y, x))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])

###########################################
# It's also possible that we do not have the position of data points, but know
# that they are evenly spaced. We can then specify a scalar delta value for each
# axes.
x_delta = 2 * units.km
y_delta = 1 * units.km
grad = mpcalc.gradient(data, deltas=(y_delta, x_delta))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])

###########################################
# Finally, the deltas can be arrays for unevenly spaced data.
x_deltas = np.array([[2, 3],
                     [1, 3],
                     [2, 3],
                     [1, 2]]) * units.kilometer
y_deltas = np.array([[2, 3, 1],
                     [1, 3, 2],
                     [2, 3, 1]]) * units.kilometer
grad = mpcalc.gradient(data, deltas=(y_deltas, x_deltas))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])
|
<commit_before><commit_msg>Add example of using gradient.<commit_after># Copyright (c) 2015-2018 MetPy Developers.
# Distributed under the terms of the BSD 3-Clause License.
# SPDX-License-Identifier: BSD-3-Clause
"""
Gradient
========

Use `metpy.calc.gradient`.

This example demonstrates the various ways that MetPy's gradient function
can be utilized.
"""
import numpy as np

import metpy.calc as mpcalc
from metpy.units import units

###########################################
# Create some test data to use for our example
data = np.array([[23, 24, 23],
                 [25, 26, 25],
                 [27, 28, 27],
                 [24, 25, 24]]) * units.degC

# Create an array of x position data (the coordinates of our temperature data)
x = np.array([[1, 2, 3],
              [1, 2, 3],
              [1, 2, 3],
              [1, 2, 3]]) * units.kilometer
y = np.array([[1, 1, 1],
              [2, 2, 2],
              [3, 3, 3],
              [4, 4, 4]]) * units.kilometer

###########################################
# Calculate the gradient using the coordinates of the data
grad = mpcalc.gradient(data, x=(y, x))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])

###########################################
# It's also possible that we do not have the position of data points, but know
# that they are evenly spaced. We can then specify a scalar delta value for each
# axes.
x_delta = 2 * units.km
y_delta = 1 * units.km
grad = mpcalc.gradient(data, deltas=(y_delta, x_delta))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])

###########################################
# Finally, the deltas can be arrays for unevenly spaced data.
x_deltas = np.array([[2, 3],
                     [1, 3],
                     [2, 3],
                     [1, 2]]) * units.kilometer
y_deltas = np.array([[2, 3, 1],
                     [1, 3, 2],
                     [2, 3, 1]]) * units.kilometer
grad = mpcalc.gradient(data, deltas=(y_deltas, x_deltas))
print('Gradient in y direction: ', grad[0])
print('Gradient in x direction: ', grad[1])
|
|
e8f1f4290b7eb2630cecf7e18da0f3ed2cad58d8
|
autobuilder/slack.py
|
autobuilder/slack.py
|
import json

from twisted.internet import defer
from twisted.python import log

from buildbot.process.results import CANCELLED, EXCEPTION, FAILURE, SUCCESS, WARNINGS
from buildbot.reporters.notifier import NotifierBase
from buildbot.util import httpclientservice

COLORS = {
    CANCELLED: 'warning',
    EXCEPTION: 'warning',
    FAILURE: 'danger',
    SUCCESS: 'good',
    WARNINGS: 'warning'
}


class SlackNotifier(NotifierBase):
    def checkConfig(self, hook,
                    mode=("failing", "passing", "warnings"),
                    tags=None, builders=None,
                    buildSetSummary=False, messageFormatter=None,
                    subject="Buildbot %(result)s in %(title)s on %(builder)s",
                    schedulers=None, branches=None,
                    colors=None, base_url='https://hooks.slack.com/services',
                    watchedWorkers=None, messageFormatterMissingWorker=None):
        super(SlackNotifier, self).checkConfig(mode, tags, builders,
                                               buildSetSummary, messageFormatter,
                                               subject, False, False,
                                               schedulers,
                                               branches, watchedWorkers)
        httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__)

    @defer.inlineCallbacks
    def reconfigService(self, hook,
                        mode=("failing", "passing", "warnings"),
                        tags=None, builders=None,
                        buildSetSummary=False, messageFormatter=None,
                        subject="Buildbot %(result)s in %(title)s on %(builder)s",
                        schedulers=None, branches=None,
                        colors=None, base_url='https://hooks.slack.com/services',
                        watchedWorkers=None, messageFormatterMissingWorker=None):
        super(SlackNotifier, self).reconfigService(mode, tags, builders,
                                                   buildSetSummary, messageFormatter,
                                                   subject, False, False,
                                                   schedulers, branches,
                                                   watchedWorkers, messageFormatterMissingWorker)
        self.hook = hook
        self.colors = colors if colors is not None else COLORS
        self._http = yield httpclientservice.HTTPClientService.getService(
            self.master, base_url)

    @defer.inlineCallbacks
    def sendMessage(self, body, subject=None, type='plain', builderName=None,
                    results=None, builds=None, users=None, patches=None,
                    logs=None, worker=None):
        msgtext = "%s\n%s" % (subject, body)
        msg = {'attachments': [{'color': self.colors.get(results, 'warning'), 'text': msgtext}]}
        response = yield self._http.post(self.hook, json=msg)
        if response.code != 200:
            log.msg("POST response code %s: %s" % (response.code, response.content))
|
Add Slack notifier supporting buildset summaries
|
Add Slack notifier supporting buildset summaries
|
Python
|
mit
|
madisongh/autobuilder
|
Add Slack notifier supporting buildset summaries
|
import json

from twisted.internet import defer
from twisted.python import log

from buildbot.process.results import CANCELLED, EXCEPTION, FAILURE, SUCCESS, WARNINGS
from buildbot.reporters.notifier import NotifierBase
from buildbot.util import httpclientservice

COLORS = {
    CANCELLED: 'warning',
    EXCEPTION: 'warning',
    FAILURE: 'danger',
    SUCCESS: 'good',
    WARNINGS: 'warning'
}


class SlackNotifier(NotifierBase):
    def checkConfig(self, hook,
                    mode=("failing", "passing", "warnings"),
                    tags=None, builders=None,
                    buildSetSummary=False, messageFormatter=None,
                    subject="Buildbot %(result)s in %(title)s on %(builder)s",
                    schedulers=None, branches=None,
                    colors=None, base_url='https://hooks.slack.com/services',
                    watchedWorkers=None, messageFormatterMissingWorker=None):
        super(SlackNotifier, self).checkConfig(mode, tags, builders,
                                               buildSetSummary, messageFormatter,
                                               subject, False, False,
                                               schedulers,
                                               branches, watchedWorkers)
        httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__)

    @defer.inlineCallbacks
    def reconfigService(self, hook,
                        mode=("failing", "passing", "warnings"),
                        tags=None, builders=None,
                        buildSetSummary=False, messageFormatter=None,
                        subject="Buildbot %(result)s in %(title)s on %(builder)s",
                        schedulers=None, branches=None,
                        colors=None, base_url='https://hooks.slack.com/services',
                        watchedWorkers=None, messageFormatterMissingWorker=None):
        super(SlackNotifier, self).reconfigService(mode, tags, builders,
                                                   buildSetSummary, messageFormatter,
                                                   subject, False, False,
                                                   schedulers, branches,
                                                   watchedWorkers, messageFormatterMissingWorker)
        self.hook = hook
        self.colors = colors if colors is not None else COLORS
        self._http = yield httpclientservice.HTTPClientService.getService(
            self.master, base_url)

    @defer.inlineCallbacks
    def sendMessage(self, body, subject=None, type='plain', builderName=None,
                    results=None, builds=None, users=None, patches=None,
                    logs=None, worker=None):
        msgtext = "%s\n%s" % (subject, body)
        msg = {'attachments': [{'color': self.colors.get(results, 'warning'), 'text': msgtext}]}
        response = yield self._http.post(self.hook, json=msg)
        if response.code != 200:
            log.msg("POST response code %s: %s" % (response.code, response.content))
|
<commit_before><commit_msg>Add Slack notifier supporting buildset summaries<commit_after>
|
import json

from twisted.internet import defer
from twisted.python import log

from buildbot.process.results import CANCELLED, EXCEPTION, FAILURE, SUCCESS, WARNINGS
from buildbot.reporters.notifier import NotifierBase
from buildbot.util import httpclientservice

COLORS = {
    CANCELLED: 'warning',
    EXCEPTION: 'warning',
    FAILURE: 'danger',
    SUCCESS: 'good',
    WARNINGS: 'warning'
}


class SlackNotifier(NotifierBase):
    def checkConfig(self, hook,
                    mode=("failing", "passing", "warnings"),
                    tags=None, builders=None,
                    buildSetSummary=False, messageFormatter=None,
                    subject="Buildbot %(result)s in %(title)s on %(builder)s",
                    schedulers=None, branches=None,
                    colors=None, base_url='https://hooks.slack.com/services',
                    watchedWorkers=None, messageFormatterMissingWorker=None):
        super(SlackNotifier, self).checkConfig(mode, tags, builders,
                                               buildSetSummary, messageFormatter,
                                               subject, False, False,
                                               schedulers,
                                               branches, watchedWorkers)
        httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__)

    @defer.inlineCallbacks
    def reconfigService(self, hook,
                        mode=("failing", "passing", "warnings"),
                        tags=None, builders=None,
                        buildSetSummary=False, messageFormatter=None,
                        subject="Buildbot %(result)s in %(title)s on %(builder)s",
                        schedulers=None, branches=None,
                        colors=None, base_url='https://hooks.slack.com/services',
                        watchedWorkers=None, messageFormatterMissingWorker=None):
        super(SlackNotifier, self).reconfigService(mode, tags, builders,
                                                   buildSetSummary, messageFormatter,
                                                   subject, False, False,
                                                   schedulers, branches,
                                                   watchedWorkers, messageFormatterMissingWorker)
        self.hook = hook
        self.colors = colors if colors is not None else COLORS
        self._http = yield httpclientservice.HTTPClientService.getService(
            self.master, base_url)

    @defer.inlineCallbacks
    def sendMessage(self, body, subject=None, type='plain', builderName=None,
                    results=None, builds=None, users=None, patches=None,
                    logs=None, worker=None):
        msgtext = "%s\n%s" % (subject, body)
        msg = {'attachments': [{'color': self.colors.get(results, 'warning'), 'text': msgtext}]}
        response = yield self._http.post(self.hook, json=msg)
        if response.code != 200:
            log.msg("POST response code %s: %s" % (response.code, response.content))
|
Add Slack notifier supporting buildset summariesimport json
from twisted.internet import defer
from twisted.python import log
from buildbot.process.results import CANCELLED, EXCEPTION, FAILURE, SUCCESS, WARNINGS
from buildbot.reporters.notifier import NotifierBase
from buildbot.util import httpclientservice
COLORS = {
CANCELLED: 'warning',
EXCEPTION: 'warning',
FAILURE: 'danger',
SUCCESS: 'good',
WARNINGS: 'warning'
}
class SlackNotifier(NotifierBase):
def checkConfig(self, hook,
mode=("failing", "passing", "warnings"),
tags=None, builders=None,
buildSetSummary=False, messageFormatter=None,
subject="Buildbot %(result)s in %(title)s on %(builder)s",
schedulers=None, branches=None,
colors=None, base_url='https://hooks.slack.com/services',
watchedWorkers=None, messageFormatterMissingWorker=None):
super(SlackNotifier, self).checkConfig(mode, tags, builders,
buildSetSummary, messageFormatter,
subject, False, False,
schedulers,
branches, watchedWorkers)
httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__)
@defer.inlineCallbacks
def reconfigService(self, hook,
mode=("failing", "passing", "warnings"),
tags=None, builders=None,
buildSetSummary=False, messageFormatter=None,
subject="Buildbot %(result)s in %(title)s on %(builder)s",
schedulers=None, branches=None,
colors=None, base_url='https://hooks.slack.com/services',
watchedWorkers=None, messageFormatterMissingWorker=None):
super(SlackNotifier, self).reconfigService(mode, tags, builders,
buildSetSummary, messageFormatter,
subject, False, False,
schedulers, branches,
watchedWorkers, messageFormatterMissingWorker)
self.hook = hook
self.colors = colors if colors is not None else COLORS
self._http = yield httpclientservice.HTTPClientService.getService(
self.master, base_url)
@defer.inlineCallbacks
def sendMessage(self, body, subject=None, type='plain', builderName=None,
results=None, builds=None, users=None, patches=None,
logs=None, worker=None):
msgtext = "%s\n%s" % (subject, body)
msg = {'attachments': [{'color': self.colors.get(results, 'warning'), 'text': msgtext}]}
response = yield self._http.post(self.hook, json=msg)
if response.code != 200:
log.msg("POST response code %s: %s" % (response.code, response.content))
|
<commit_before><commit_msg>Add Slack notifier supporting buildset summaries<commit_after>import json
from twisted.internet import defer
from twisted.python import log
from buildbot.process.results import CANCELLED, EXCEPTION, FAILURE, SUCCESS, WARNINGS
from buildbot.reporters.notifier import NotifierBase
from buildbot.util import httpclientservice
COLORS = {
CANCELLED: 'warning',
EXCEPTION: 'warning',
FAILURE: 'danger',
SUCCESS: 'good',
WARNINGS: 'warning'
}
class SlackNotifier(NotifierBase):
def checkConfig(self, hook,
mode=("failing", "passing", "warnings"),
tags=None, builders=None,
buildSetSummary=False, messageFormatter=None,
subject="Buildbot %(result)s in %(title)s on %(builder)s",
schedulers=None, branches=None,
colors=None, base_url='https://hooks.slack.com/services',
watchedWorkers=None, messageFormatterMissingWorker=None):
super(SlackNotifier, self).checkConfig(mode, tags, builders,
buildSetSummary, messageFormatter,
subject, False, False,
schedulers,
branches, watchedWorkers)
httpclientservice.HTTPClientService.checkAvailable(self.__class__.__name__)
@defer.inlineCallbacks
def reconfigService(self, hook,
mode=("failing", "passing", "warnings"),
tags=None, builders=None,
buildSetSummary=False, messageFormatter=None,
subject="Buildbot %(result)s in %(title)s on %(builder)s",
schedulers=None, branches=None,
colors=None, base_url='https://hooks.slack.com/services',
watchedWorkers=None, messageFormatterMissingWorker=None):
super(SlackNotifier, self).reconfigService(mode, tags, builders,
buildSetSummary, messageFormatter,
subject, False, False,
schedulers, branches,
watchedWorkers, messageFormatterMissingWorker)
self.hook = hook
self.colors = colors if colors is not None else COLORS
self._http = yield httpclientservice.HTTPClientService.getService(
self.master, base_url)
@defer.inlineCallbacks
def sendMessage(self, body, subject=None, type='plain', builderName=None,
results=None, builds=None, users=None, patches=None,
logs=None, worker=None):
msgtext = "%s\n%s" % (subject, body)
msg = {'attachments': [{'color': self.colors.get(results, 'warning'), 'text': msgtext}]}
response = yield self._http.post(self.hook, json=msg)
if response.code != 200:
log.msg("POST response code %s: %s" % (response.code, response.content))
|
|
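A minimal sketch of wiring this reporter into a buildbot master.cfg, assuming the file is importable as `slack_notifier` (hypothetical module name; the hook path is a placeholder):
from slack_notifier import SlackNotifier  # hypothetical import path
# Reporters are registered as services on the master configuration dict.
c['services'].append(SlackNotifier(
    hook='/TXXXXX/BXXXXX/placeholder',  # joined with base_url by HTTPClientService
    buildSetSummary=True,               # one summary message per buildset
))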
5a97b4b6327dee09fa32eff68c8a934e85405853
|
txircd/modules/extra/connlimit.py
|
txircd/modules/extra/connlimit.py
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ConnectionLimit(ModuleData):
implements(IPlugin, IModuleData)
name = "ConnectionLimit"
peerConnections = {}
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [ ("userconnect", 100, self.handleLocalConnect),
("remoteregister", 100, self.handleRemoteConnect),
("quit", 100, self.handleDisconnect),
("remotequit", 100, self.handleDisconnect) ]
def handleLocalConnect(self, user, *params):
ip = user.ip
if self.addToConnections(ip) and self.peerConnections[ip] > self.ircd.config.getWithDefault("connlimit_globmax", 3):
user.disconnect("No more connections allowed from your IP ({})".format(ip))
return None
return True
def handleRemoteConnect(self, user, *params):
self.addToConnections(user.ip)
def handleDisconnect(self, user, *params):
ip = user.ip
if ip in self.peerConnections:
self.peerConnections[ip] -= 1
if self.peerConnections[ip] < 1:
del self.peerConnections[ip]
def addToConnections(self, ip):
if ip in self.ircd.config.getWithDefault("connlimit_whitelist", []):
return False
if ip in self.peerConnections:
self.peerConnections[ip] += 1
else:
self.peerConnections[ip] = 1
return True
def load(self):
for user in self.ircd.users:
self.addToConnections(user.ip)
connLimit = ConnectionLimit()
|
Implement the connection limit module
|
Implement the connection limit module
|
Python
|
bsd-3-clause
|
Heufneutje/txircd,ElementalAlchemist/txircd
|
Implement the connection limit module
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ConnectionLimit(ModuleData):
implements(IPlugin, IModuleData)
name = "ConnectionLimit"
peerConnections = {}
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [ ("userconnect", 100, self.handleLocalConnect),
("remoteregister", 100, self.handleRemoteConnect),
("quit", 100, self.handleDisconnect),
("remotequit", 100, self.handleDisconnect) ]
def handleLocalConnect(self, user, *params):
ip = user.ip
if self.addToConnections(ip) and self.peerConnections[ip] > self.ircd.config.getWithDefault("connlimit_globmax", 3):
user.disconnect("No more connections allowed from your IP ({})".format(ip))
return None
return True
def handleRemoteConnect(self, user, *params):
self.addToConnections(user.ip)
def handleDisconnect(self, user, *params):
ip = user.ip
if ip in self.peerConnections:
self.peerConnections[ip] -= 1
if self.peerConnections[ip] < 1:
del self.peerConnections[ip]
def addToConnections(self, ip):
if ip in self.ircd.config.getWithDefault("connlimit_whitelist", []):
return False
if ip in self.peerConnections:
self.peerConnections[ip] += 1
else:
self.peerConnections[ip] = 1
return True
def load(self):
for user in self.ircd.users:
self.addToConnections(user.ip)
connLimit = ConnectionLimit()
|
<commit_before><commit_msg>Implement the connection limit module<commit_after>
|
from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ConnectionLimit(ModuleData):
implements(IPlugin, IModuleData)
name = "ConnectionLimit"
peerConnections = {}
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [ ("userconnect", 100, self.handleLocalConnect),
("remoteregister", 100, self.handleRemoteConnect),
("quit", 100, self.handleDisconnect),
("remotequit", 100, self.handleDisconnect) ]
def handleLocalConnect(self, user, *params):
ip = user.ip
if self.addToConnections(ip) and self.peerConnections[ip] > self.ircd.config.getWithDefault("connlimit_globmax", 3):
user.disconnect("No more connections allowed from your IP ({})".format(ip))
return None
return True
def handleRemoteConnect(self, user, *params):
self.addToConnections(user.ip)
def handleDisconnect(self, user, *params):
ip = user.ip
if ip in self.peerConnections:
self.peerConnections[ip] -= 1
if self.peerConnections[ip] < 1:
del self.peerConnections[ip]
def addToConnections(self, ip):
if ip in self.ircd.config.getWithDefault("connlimit_whitelist", []):
return False
if ip in self.peerConnections:
self.peerConnections[ip] += 1
else:
self.peerConnections[ip] = 1
return True
def load(self):
for user in self.ircd.users:
self.addToConnections(user.ip)
connLimit = ConnectionLimit()
|
Implement the connection limit modulefrom twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ConnectionLimit(ModuleData):
implements(IPlugin, IModuleData)
name = "ConnectionLimit"
peerConnections = {}
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [ ("userconnect", 100, self.handleLocalConnect),
("remoteregister", 100, self.handleRemoteConnect),
("quit", 100, self.handleDisconnect),
("remotequit", 100, self.handleDisconnect) ]
def handleLocalConnect(self, user, *params):
ip = user.ip
if self.addToConnections(ip) and self.peerConnections[ip] > self.ircd.config.getWithDefault("connlimit_globmax", 3):
user.disconnect("No more connections allowed from your IP ({})".format(ip))
return None
return True
def handleRemoteConnect(self, user, *params):
self.addToConnections(user.ip)
def handleDisconnect(self, user, *params):
ip = user.ip
if ip in self.peerConnections:
self.peerConnections[ip] -= 1
if self.peerConnections[ip] < 1:
del self.peerConnections[ip]
def addToConnections(self, ip):
if ip in self.ircd.config.getWithDefault("connlimit_whitelist", []):
return False
if ip in self.peerConnections:
self.peerConnections[ip] += 1
else:
self.peerConnections[ip] = 1
return True
def load(self):
for user in self.ircd.users:
self.addToConnections(user.ip)
connLimit = ConnectionLimit()
|
<commit_before><commit_msg>Implement the connection limit module<commit_after>from twisted.plugin import IPlugin
from txircd.module_interface import IModuleData, ModuleData
from zope.interface import implements
class ConnectionLimit(ModuleData):
implements(IPlugin, IModuleData)
name = "ConnectionLimit"
peerConnections = {}
def hookIRCd(self, ircd):
self.ircd = ircd
def actions(self):
return [ ("userconnect", 100, self.handleLocalConnect),
("remoteregister", 100, self.handleRemoteConnect),
("quit", 100, self.handleDisconnect),
("remotequit", 100, self.handleDisconnect) ]
def handleLocalConnect(self, user, *params):
ip = user.ip
if self.addToConnections(ip) and self.peerConnections[ip] > self.ircd.config.getWithDefault("connlimit_globmax", 3):
user.disconnect("No more connections allowed from your IP ({})".format(ip))
return None
return True
def handleRemoteConnect(self, user, *params):
self.addToConnections(user.ip)
def handleDisconnect(self, user, *params):
ip = user.ip
if ip in self.peerConnections:
self.peerConnections[ip] -= 1
if self.peerConnections[ip] < 1:
del self.peerConnections[ip]
def addToConnections(self, ip):
if ip in self.ircd.config.getWithDefault("connlimit_whitelist", []):
return False
if ip in self.peerConnections:
self.peerConnections[ip] += 1
else:
self.peerConnections[ip] = 1
return True
def load(self):
for user in self.ircd.users:
self.addToConnections(user.ip)
connLimit = ConnectionLimit()
|
|
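A self-contained sketch of the per-IP counting logic the module relies on (names simplified for illustration):
# Stand-alone model of addToConnections: whitelisted IPs are never counted,
# every other IP increments a per-IP counter.
peer_connections = {}
def add_to_connections(ip, whitelist=('127.0.0.1',)):
    if ip in whitelist:
        return False
    peer_connections[ip] = peer_connections.get(ip, 0) + 1
    return True
for ip in ('10.0.0.1', '10.0.0.1', '127.0.0.1'):
    add_to_connections(ip)
print(peer_connections)  # {'10.0.0.1': 2}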
c3009dfd74f61813950160d9a4a826613db75249
|
dynd/tests/test_type_pattern_match.py
|
dynd/tests/test_type_pattern_match.py
|
import sys
import unittest
from dynd import nd, ndt
class TestTypePatternMatch(unittest.TestCase):
def test_simple(self):
self.assertTrue(ndt.int32.matches(ndt.int32))
self.assertTrue(ndt.int16.matches('T'))
self.assertTrue(ndt.int16.matches('... * T'))
self.assertTrue(ndt.int16.matches('A... * T'))
self.assertTrue(ndt.type('strided * var * int').matches('M * A... * N * T'))
self.assertFalse(ndt.type('strided * int').matches('M * A... * N * T'))
def test_tuple(self):
pat = ndt.type('(T, ?T, 3 * T, A... * S)')
self.assertTrue(ndt.type('(int, ?int, 3 * int, real)').matches(pat))
self.assertTrue(ndt.type('(string, ?string, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, ?int, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, string, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, ?string, 4 * string, 10 * complex)').matches(pat))
|
Add some python tests of the type pattern matching
|
Add some python tests of the type pattern matching
|
Python
|
bsd-2-clause
|
insertinterestingnamehere/dynd-python,izaid/dynd-python,izaid/dynd-python,michaelpacer/dynd-python,mwiebe/dynd-python,pombredanne/dynd-python,insertinterestingnamehere/dynd-python,ContinuumIO/dynd-python,izaid/dynd-python,cpcloud/dynd-python,izaid/dynd-python,aterrel/dynd-python,aterrel/dynd-python,aterrel/dynd-python,mwiebe/dynd-python,pombredanne/dynd-python,aterrel/dynd-python,cpcloud/dynd-python,mwiebe/dynd-python,pombredanne/dynd-python,pombredanne/dynd-python,ContinuumIO/dynd-python,cpcloud/dynd-python,michaelpacer/dynd-python,michaelpacer/dynd-python,insertinterestingnamehere/dynd-python,ContinuumIO/dynd-python,insertinterestingnamehere/dynd-python,michaelpacer/dynd-python,ContinuumIO/dynd-python,cpcloud/dynd-python,mwiebe/dynd-python
|
Add some python tests of the type pattern matching
|
import sys
import unittest
from dynd import nd, ndt
class TestTypePatternMatch(unittest.TestCase):
def test_simple(self):
self.assertTrue(ndt.int32.matches(ndt.int32))
self.assertTrue(ndt.int16.matches('T'))
self.assertTrue(ndt.int16.matches('... * T'))
self.assertTrue(ndt.int16.matches('A... * T'))
self.assertTrue(ndt.type('strided * var * int').matches('M * A... * N * T'))
self.assertFalse(ndt.type('strided * int').matches('M * A... * N * T'))
def test_tuple(self):
pat = ndt.type('(T, ?T, 3 * T, A... * S)')
self.assertTrue(ndt.type('(int, ?int, 3 * int, real)').matches(pat))
self.assertTrue(ndt.type('(string, ?string, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, ?int, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, string, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, ?string, 4 * string, 10 * complex)').matches(pat))
|
<commit_before><commit_msg>Add some python tests of the type pattern matching<commit_after>
|
import sys
import unittest
from dynd import nd, ndt
class TestTypePatternMatch(unittest.TestCase):
def test_simple(self):
self.assertTrue(ndt.int32.matches(ndt.int32))
self.assertTrue(ndt.int16.matches('T'))
self.assertTrue(ndt.int16.matches('... * T'))
self.assertTrue(ndt.int16.matches('A... * T'))
self.assertTrue(ndt.type('strided * var * int').matches('M * A... * N * T'))
self.assertFalse(ndt.type('strided * int').matches('M * A... * N * T'))
def test_tuple(self):
pat = ndt.type('(T, ?T, 3 * T, A... * S)')
self.assertTrue(ndt.type('(int, ?int, 3 * int, real)').matches(pat))
self.assertTrue(ndt.type('(string, ?string, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, ?int, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, string, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, ?string, 4 * string, 10 * complex)').matches(pat))
|
Add some python tests of the type pattern matchingimport sys
import unittest
from dynd import nd, ndt
class TestTypePatternMatch(unittest.TestCase):
def test_simple(self):
self.assertTrue(ndt.int32.matches(ndt.int32))
self.assertTrue(ndt.int16.matches('T'))
self.assertTrue(ndt.int16.matches('... * T'))
self.assertTrue(ndt.int16.matches('A... * T'))
self.assertTrue(ndt.type('strided * var * int').matches('M * A... * N * T'))
self.assertFalse(ndt.type('strided * int').matches('M * A... * N * T'))
def test_tuple(self):
pat = ndt.type('(T, ?T, 3 * T, A... * S)')
self.assertTrue(ndt.type('(int, ?int, 3 * int, real)').matches(pat))
self.assertTrue(ndt.type('(string, ?string, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, ?int, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, string, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, ?string, 4 * string, 10 * complex)').matches(pat))
|
<commit_before><commit_msg>Add some python tests of the type pattern matching<commit_after>import sys
import unittest
from dynd import nd, ndt
class TestTypePatternMatch(unittest.TestCase):
def test_simple(self):
self.assertTrue(ndt.int32.matches(ndt.int32))
self.assertTrue(ndt.int16.matches('T'))
self.assertTrue(ndt.int16.matches('... * T'))
self.assertTrue(ndt.int16.matches('A... * T'))
self.assertTrue(ndt.type('strided * var * int').matches('M * A... * N * T'))
self.assertFalse(ndt.type('strided * int').matches('M * A... * N * T'))
def test_tuple(self):
pat = ndt.type('(T, ?T, 3 * T, A... * S)')
self.assertTrue(ndt.type('(int, ?int, 3 * int, real)').matches(pat))
self.assertTrue(ndt.type('(string, ?string, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, ?int, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, string, 3 * string, 10 * complex)').matches(pat))
self.assertFalse(ndt.type('(string, ?string, 4 * string, 10 * complex)').matches(pat))
|
|
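The tests run under the standard unittest runner, assuming the dynd package is installed; a minimal sketch:
# Equivalent to: python -m unittest dynd.tests.test_type_pattern_match
import unittest
suite = unittest.defaultTestLoader.loadTestsFromName('dynd.tests.test_type_pattern_match')
unittest.TextTestRunner(verbosity=2).run(suite)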
71a8209f5b68a8edc663e876b369fd891a79a863
|
lucid/misc/convert_matplotlib.py
|
lucid/misc/convert_matplotlib.py
|
import io
import numpy as np
from PIL import Image
def matplotlib_to_numpy(plt):
"""Convert a matplotlib plot to a numpy array represent it as an image.
Inputs:
plot - matplotlib plot
Returns:
A numpy array with shape [W, H, 3], representing RGB values between 0 and 1.
"""
f = io.BytesIO()
plt.savefig(f, format="png")
f.seek(0)
arr = np.array(Image.open(f)).copy()
f.close()
return arr/255.
|
Add matplotlib plot -> numpy image util
|
Add matplotlib plot -> numpy image util
|
Python
|
apache-2.0
|
tensorflow/lucid,tensorflow/lucid,tensorflow/lucid,tensorflow/lucid
|
Add matplotlib plot -> numpy image util
|
import io
import numpy as np
from PIL import Image
def matplotlib_to_numpy(plt):
"""Convert a matplotlib plot to a numpy array represent it as an image.
Inputs:
plot - matplotlib plot
Returns:
A numpy array with shape [W, H, 3], representing RGB values between 0 and 1.
"""
f = io.BytesIO()
plt.savefig(f, format="png")
f.seek(0)
arr = np.array(Image.open(f)).copy()
f.close()
return arr/255.
|
<commit_before><commit_msg>Add matplotlib plot -> numpy image util<commit_after>
|
import io
import numpy as np
from PIL import Image
def matplotlib_to_numpy(plt):
"""Convert a matplotlib plot to a numpy array represent it as an image.
Inputs:
plot - matplotlib plot
Returns:
A numpy array with shape [W, H, 3], representing RGB values between 0 and 1.
"""
f = io.BytesIO()
plt.savefig(f, format="png")
f.seek(0)
arr = np.array(Image.open(f)).copy()
f.close()
return arr/255.
|
Add matplotlib plot -> numpy image utilimport io
import numpy as np
from PIL import Image
def matplotlib_to_numpy(plt):
"""Convert a matplotlib plot to a numpy array represent it as an image.
Inputs:
plot - matplotlib plot
Returns:
A numpy array with shape [W, H, 3], representing RGB values between 0 and 1.
"""
f = io.BytesIO()
plt.savefig(f, format="png")
f.seek(0)
arr = np.array(Image.open(f)).copy()
f.close()
return arr/255.
|
<commit_before><commit_msg>Add matplotlib plot -> numpy image util<commit_after>import io
import numpy as np
from PIL import Image
def matplotlib_to_numpy(plt):
"""Convert a matplotlib plot to a numpy array represent it as an image.
Inputs:
plot - matplotlib plot
Returns:
A numpy array with shape [W, H, 3], representing RGB values between 0 and 1.
"""
f = io.BytesIO()
plt.savefig(f, format="png")
f.seek(0)
arr = np.array(Image.open(f)).copy()
f.close()
return arr/255.
|
|
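A minimal usage sketch, assuming lucid is installed (the Agg backend keeps the example display-free):
import matplotlib
matplotlib.use('Agg')  # headless backend, no display required
import matplotlib.pyplot as plt
from lucid.misc.convert_matplotlib import matplotlib_to_numpy
plt.plot([0, 1, 2], [0, 1, 4])
arr = matplotlib_to_numpy(plt)
print(arr.shape, arr.min(), arr.max())  # e.g. (480, 640, 4), values in [0, 1]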
32a55869f9b7c1493e00db3f5fc622f5f73dced0
|
mecode/devices/efd_pico_pulse.py
|
mecode/devices/efd_pico_pulse.py
|
##############################################################################
#
# For EFD PICO Touch/Pulse controller and jetter
#
##############################################################################
import serial
# Constants
EOT = '\r'
ACK = '<3'
class EFDPicoPulse(object):
def __init__(self, comport='/dev/ttyUSB0'):
self.comport = comport
self.connect()
def connect(self):
self.s = serial.Serial(self.comport,
baudrate=115200,
parity='N',
stopbits=1,
bytesize=8,
timeout=2,
write_timeout=2)
def disconnect(self):
self.s.close()
def send(self, command):
msg = command + EOT
self.s.write(msg)
self.s.read_until(ACK)
    def set_valve_mode(self, mode):
        pass  # stub: command not implemented in this initial version
    def set_dispense_count(self, count):
        pass  # stub: command not implemented in this initial version
    def get_valve_status(self):
        self.send('rdr1')
    def cycle_valve(self):
        self.send('1cycl')
        self.send('0cycl')
    def set_heater_mode(self, mode):
        pass  # stub: command not implemented in this initial version
    def set_heater_temp(self, temp):
        pass  # stub: command not implemented in this initial version
def get_heater_status(self):
self.send('rhtr')
def get_valve_info(self):
self.send('info')
def get_alarm_hist(self):
self.send('ralr')
def reset_alarm(self):
self.send('arst')
|
Add initial picoPulse to devices
|
Add initial picoPulse to devices
|
Python
|
mit
|
razeh/mecode,jminardi/mecode
|
Add initial picoPulse to devices
|
##############################################################################
#
# For EFD PICO Touch/Pulse controller and jetter
#
##############################################################################
import serial
# Constants
EOT = '\r'
ACK = '<3'
class EFDPicoPulse(object):
def __init__(self, comport='/dev/ttyUSB0'):
self.comport = comport
self.connect()
def connect(self):
self.s = serial.Serial(self.comport,
baudrate=115200,
parity='N',
stopbits=1,
bytesize=8,
timeout=2,
write_timeout=2)
def disconnect(self):
self.s.close()
def send(self, command):
msg = command + EOT
self.s.write(msg)
self.s.read_until(ACK)
    def set_valve_mode(self, mode):
        pass  # stub: command not implemented in this initial version
    def set_dispense_count(self, count):
        pass  # stub: command not implemented in this initial version
    def get_valve_status(self):
        self.send('rdr1')
    def cycle_valve(self):
        self.send('1cycl')
        self.send('0cycl')
    def set_heater_mode(self, mode):
        pass  # stub: command not implemented in this initial version
    def set_heater_temp(self, temp):
        pass  # stub: command not implemented in this initial version
def get_heater_status(self):
self.send('rhtr')
def get_valve_info(self):
self.send('info')
def get_alarm_hist(self):
self.send('ralr')
def reset_alarm(self):
self.send('arst')
|
<commit_before><commit_msg>Add initial picoPulse to devices<commit_after>
|
##############################################################################
#
# For EFD PICO Touch/Pulse controller and jetter
#
##############################################################################
import serial
# Constants
EOT = '\r'
ACK = '<3'
class EFDPicoPulse(object):
def __init__(self, comport='/dev/ttyUSB0'):
self.comport = comport
self.connect()
def connect(self):
self.s = serial.Serial(self.comport,
baudrate=115200,
parity='N',
stopbits=1,
bytesize=8,
timeout=2,
write_timeout=2)
def disconnect(self):
self.s.close()
def send(self, command):
msg = command + EOT
self.s.write(msg)
self.s.read_until(ACK)
    def set_valve_mode(self, mode):
        pass  # stub: command not implemented in this initial version
    def set_dispense_count(self, count):
        pass  # stub: command not implemented in this initial version
    def get_valve_status(self):
        self.send('rdr1')
    def cycle_valve(self):
        self.send('1cycl')
        self.send('0cycl')
    def set_heater_mode(self, mode):
        pass  # stub: command not implemented in this initial version
    def set_heater_temp(self, temp):
        pass  # stub: command not implemented in this initial version
def get_heater_status(self):
self.send('rhtr')
def get_valve_info(self):
self.send('info')
def get_alarm_hist(self):
self.send('ralr')
def reset_alarm(self):
self.send('arst')
|
Add initial picoPulse to devices##############################################################################
#
# For EFD PICO Touch/Pulse controller and jetter
#
##############################################################################
import serial
# Constants
EOT = '\r'
ACK = '<3'
class EFDPicoPulse(object):
def __init__(self, comport='/dev/ttyUSB0'):
self.comport = comport
self.connect()
def connect(self):
self.s = serial.Serial(self.comport,
baudrate=115200,
parity='N',
stopbits=1,
bytesize=8,
timeout=2,
write_timeout=2)
def disconnect(self):
self.s.close()
def send(self, command):
msg = command + EOT
self.s.write(msg)
self.s.read_until(ACK)
    def set_valve_mode(self, mode):
        pass  # stub: command not implemented in this initial version
    def set_dispense_count(self, count):
        pass  # stub: command not implemented in this initial version
    def get_valve_status(self):
        self.send('rdr1')
    def cycle_valve(self):
        self.send('1cycl')
        self.send('0cycl')
    def set_heater_mode(self, mode):
        pass  # stub: command not implemented in this initial version
    def set_heater_temp(self, temp):
        pass  # stub: command not implemented in this initial version
def get_heater_status(self):
self.send('rhtr')
def get_valve_info(self):
self.send('info')
def get_alarm_hist(self):
self.send('ralr')
def reset_alarm(self):
self.send('arst')
|
<commit_before><commit_msg>Add initial picoPulse to devices<commit_after>##############################################################################
#
# For EFD PICO Touch/Pulse controller and jetter
#
##############################################################################
import serial
# Constants
EOT = '\r'
ACK = '<3'
class EFDPicoPulse(object):
def __init__(self, comport='/dev/ttyUSB0'):
self.comport = comport
self.connect()
def connect(self):
self.s = serial.Serial(self.comport,
baudrate=115200,
parity='N',
stopbits=1,
bytesize=8,
timeout=2,
write_timeout=2)
def disconnect(self):
self.s.close()
def send(self, command):
msg = command + EOT
self.s.write(msg)
self.s.read_until(ACK)
    def set_valve_mode(self, mode):
        pass  # stub: command not implemented in this initial version
    def set_dispense_count(self, count):
        pass  # stub: command not implemented in this initial version
    def get_valve_status(self):
        self.send('rdr1')
    def cycle_valve(self):
        self.send('1cycl')
        self.send('0cycl')
    def set_heater_mode(self, mode):
        pass  # stub: command not implemented in this initial version
    def set_heater_temp(self, temp):
        pass  # stub: command not implemented in this initial version
def get_heater_status(self):
self.send('rhtr')
def get_valve_info(self):
self.send('info')
def get_alarm_hist(self):
self.send('ralr')
def reset_alarm(self):
self.send('arst')
|
|
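A minimal usage sketch, assuming pyserial is installed and the controller is attached (the port path is an assumption):
from mecode.devices.efd_pico_pulse import EFDPicoPulse
valve = EFDPicoPulse(comport='/dev/ttyUSB0')  # opens the serial link on init
valve.cycle_valve()       # sends '1cycl' then '0cycl', waiting for '<3' acks
valve.get_valve_status()  # sends 'rdr1'
valve.disconnect()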
58b1a723af7ba161398dbb6b77096c2974ec311f
|
pyli.py
|
pyli.py
|
import parser
import token
import symbol
import sys
from pprint import pprint
tree = parser.st2tuple(parser.suite(sys.argv[1]))
def convert_readable(tree):
return tuple(((token.tok_name[i]
if token.tok_name.get(i) else
symbol.sym_name[i])
if isinstance(i, int) else
(i if isinstance(i, str) else convert_readable(i)))
for i in tree)
read_tree = convert_readable(tree)
pprint(read_tree)
|
Add routine to make the parse tree amenable
|
Add routine to make the parse tree amenable
|
Python
|
mit
|
thenoviceoof/pyli
|
Add routine to make the parse tree amenable
|
import parser
import token
import symbol
import sys
from pprint import pprint
tree = parser.st2tuple(parser.suite(sys.argv[1]))
def convert_readable(tree):
return tuple(((token.tok_name[i]
if token.tok_name.get(i) else
symbol.sym_name[i])
if isinstance(i, int) else
(i if isinstance(i, str) else convert_readable(i)))
for i in tree)
read_tree = convert_readable(tree)
pprint(read_tree)
|
<commit_before><commit_msg>Add routine to make the parse tree amenable<commit_after>
|
import parser
import token
import symbol
import sys
from pprint import pprint
tree = parser.st2tuple(parser.suite(sys.argv[1]))
def convert_readable(tree):
return tuple(((token.tok_name[i]
if token.tok_name.get(i) else
symbol.sym_name[i])
if isinstance(i, int) else
(i if isinstance(i, str) else convert_readable(i)))
for i in tree)
read_tree = convert_readable(tree)
pprint(read_tree)
|
Add routine to make the parse tree amenableimport parser
import token
import symbol
import sys
from pprint import pprint
tree = parser.st2tuple(parser.suite(sys.argv[1]))
def convert_readable(tree):
return tuple(((token.tok_name[i]
if token.tok_name.get(i) else
symbol.sym_name[i])
if isinstance(i, int) else
(i if isinstance(i, str) else convert_readable(i)))
for i in tree)
read_tree = convert_readable(tree)
pprint(read_tree)
|
<commit_before><commit_msg>Add routine to make the parse tree amenable<commit_after>import parser
import token
import symbol
import sys
from pprint import pprint
tree = parser.st2tuple(parser.suite(sys.argv[1]))
def convert_readable(tree):
return tuple(((token.tok_name[i]
if token.tok_name.get(i) else
symbol.sym_name[i])
if isinstance(i, int) else
(i if isinstance(i, str) else convert_readable(i)))
for i in tree)
read_tree = convert_readable(tree)
pprint(read_tree)
|
|
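The script takes a snippet of Python source as its first argument; the same conversion as a self-contained sketch (Python 2 era: the `parser` module was removed in Python 3.10):
import parser, token, symbol
tree = parser.st2tuple(parser.suite('x = 1 + 2'))
def readable(t):
    # Map integer token/symbol codes to their readable names, recursively.
    return tuple((token.tok_name.get(i) or symbol.sym_name[i]) if isinstance(i, int)
                 else (i if isinstance(i, str) else readable(i))
                 for i in t)
print(readable(tree))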
46ae9c191abca889eeb601542eb1ea33224b3b94
|
scripts/monitoring/cron-send-node-taints-status.py
|
scripts/monitoring/cron-send-node-taints-status.py
|
#!/usr/bin/env python
""" Node taints check for OpenShift V3 """
# Adding the ignore because it does not like the naming of the script
# to be different than the class name
# pylint: disable=invalid-name
# pylint: disable=wrong-import-position
# pylint: disable=broad-except
# pylint: disable=line-too-long
import argparse
import time
import logging
logging.basicConfig(
format='%(asctime)s - %(relativeCreated)6d - %(levelname)-8s - %(message)s',
)
logger = logging.getLogger()
logger.setLevel(logging.WARN)
# Our jenkins server does not include these rpms.
# In the future we might move this to a container where these
# libs might exist
#pylint: disable=import-error
from openshift_tools.monitoring.ocutil import OCUtil
from openshift_tools.monitoring.metric_sender import MetricSender
ocutil = OCUtil()
def runOCcmd_yaml(cmd, base_cmd='oc'):
""" log commands through ocutil """
logger.info(base_cmd + " " + cmd)
ocy_time = time.time()
ocy_result = ocutil.run_user_cmd_yaml(cmd, base_cmd=base_cmd, )
logger.info("oc command took %s seconds", str(time.time() - ocy_time))
return ocy_result
def parse_args():
""" parse the args from the cli """
parser = argparse.ArgumentParser(description='Check all the nodes taints Status')
parser.add_argument('-v', '--verbose', action='count', default=0,
help='verbosity level, specify multiple')
args = parser.parse_args()
if args.verbose > 0:
logger.setLevel(logging.INFO)
if args.verbose > 1:
logger.setLevel(logging.DEBUG)
return args
def check_taint_status():
"""get all the info of all node """
result_status = 0
node_info = runOCcmd_yaml("get node ")
for item in node_info['items']:
logger.info("Checking node: %s", item['metadata']['name'])
if "taints" in item['spec']:
taints = item['spec']['taints']
for taint in taints:
result_status = result_status + 1
logger.warn("Node: %s, have unexpected taint: %s=%s:%s", item['metadata']['name'], taint['key'], taint['value'], taint['effect'])
return result_status
def main():
""" check all the node taints see if any node have problem """
args = parse_args()
logger.debug("args: ")
logger.debug(args)
taint_status = check_taint_status()
#send the value to zabbix
mts = MetricSender(verbose=args.verbose)
mts.add_metric({'openshift.nodes.taint.status': taint_status})
mts.send_metrics()
if __name__ == "__main__":
main()
|
Add monitor script for taints
|
Add monitor script for taints
|
Python
|
apache-2.0
|
blrm/openshift-tools,blrm/openshift-tools,blrm/openshift-tools,blrm/openshift-tools,blrm/openshift-tools,blrm/openshift-tools
|
Add monitor script for taints
|
#!/usr/bin/env python
""" Node taints check for OpenShift V3 """
# Adding the ignore because it does not like the naming of the script
# to be different than the class name
# pylint: disable=invalid-name
# pylint: disable=wrong-import-position
# pylint: disable=broad-except
# pylint: disable=line-too-long
import argparse
import time
import logging
logging.basicConfig(
format='%(asctime)s - %(relativeCreated)6d - %(levelname)-8s - %(message)s',
)
logger = logging.getLogger()
logger.setLevel(logging.WARN)
# Our jenkins server does not include these rpms.
# In the future we might move this to a container where these
# libs might exist
#pylint: disable=import-error
from openshift_tools.monitoring.ocutil import OCUtil
from openshift_tools.monitoring.metric_sender import MetricSender
ocutil = OCUtil()
def runOCcmd_yaml(cmd, base_cmd='oc'):
""" log commands through ocutil """
logger.info(base_cmd + " " + cmd)
ocy_time = time.time()
ocy_result = ocutil.run_user_cmd_yaml(cmd, base_cmd=base_cmd, )
logger.info("oc command took %s seconds", str(time.time() - ocy_time))
return ocy_result
def parse_args():
""" parse the args from the cli """
parser = argparse.ArgumentParser(description='Check all the nodes taints Status')
parser.add_argument('-v', '--verbose', action='count', default=0,
help='verbosity level, specify multiple')
args = parser.parse_args()
if args.verbose > 0:
logger.setLevel(logging.INFO)
if args.verbose > 1:
logger.setLevel(logging.DEBUG)
return args
def check_taint_status():
"""get all the info of all node """
result_status = 0
node_info = runOCcmd_yaml("get node ")
for item in node_info['items']:
logger.info("Checking node: %s", item['metadata']['name'])
if "taints" in item['spec']:
taints = item['spec']['taints']
for taint in taints:
result_status = result_status + 1
logger.warn("Node: %s, have unexpected taint: %s=%s:%s", item['metadata']['name'], taint['key'], taint['value'], taint['effect'])
return result_status
def main():
""" check all the node taints see if any node have problem """
args = parse_args()
logger.debug("args: ")
logger.debug(args)
taint_status = check_taint_status()
#send the value to zabbix
mts = MetricSender(verbose=args.verbose)
mts.add_metric({'openshift.nodes.taint.status': taint_status})
mts.send_metrics()
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add monitor script for taints<commit_after>
|
#!/usr/bin/env python
""" Node taints check for OpenShift V3 """
# Adding the ignore because it does not like the naming of the script
# to be different than the class name
# pylint: disable=invalid-name
# pylint: disable=wrong-import-position
# pylint: disable=broad-except
# pylint: disable=line-too-long
import argparse
import time
import logging
logging.basicConfig(
format='%(asctime)s - %(relativeCreated)6d - %(levelname)-8s - %(message)s',
)
logger = logging.getLogger()
logger.setLevel(logging.WARN)
# Our jenkins server does not include these rpms.
# In the future we might move this to a container where these
# libs might exist
#pylint: disable=import-error
from openshift_tools.monitoring.ocutil import OCUtil
from openshift_tools.monitoring.metric_sender import MetricSender
ocutil = OCUtil()
def runOCcmd_yaml(cmd, base_cmd='oc'):
""" log commands through ocutil """
logger.info(base_cmd + " " + cmd)
ocy_time = time.time()
ocy_result = ocutil.run_user_cmd_yaml(cmd, base_cmd=base_cmd, )
logger.info("oc command took %s seconds", str(time.time() - ocy_time))
return ocy_result
def parse_args():
""" parse the args from the cli """
parser = argparse.ArgumentParser(description='Check all the nodes taints Status')
parser.add_argument('-v', '--verbose', action='count', default=0,
help='verbosity level, specify multiple')
args = parser.parse_args()
if args.verbose > 0:
logger.setLevel(logging.INFO)
if args.verbose > 1:
logger.setLevel(logging.DEBUG)
return args
def check_taint_status():
"""get all the info of all node """
result_status = 0
node_info = runOCcmd_yaml("get node ")
for item in node_info['items']:
logger.info("Checking node: %s", item['metadata']['name'])
if "taints" in item['spec']:
taints = item['spec']['taints']
for taint in taints:
result_status = result_status + 1
logger.warn("Node: %s, have unexpected taint: %s=%s:%s", item['metadata']['name'], taint['key'], taint['value'], taint['effect'])
return result_status
def main():
""" check all the node taints see if any node have problem """
args = parse_args()
logger.debug("args: ")
logger.debug(args)
taint_status = check_taint_status()
#send the value to zabbix
mts = MetricSender(verbose=args.verbose)
mts.add_metric({'openshift.nodes.taint.status': taint_status})
mts.send_metrics()
if __name__ == "__main__":
main()
|
Add monitor script for taints#!/usr/bin/env python
""" Node taints check for OpenShift V3 """
# Adding the ignore because it does not like the naming of the script
# to be different than the class name
# pylint: disable=invalid-name
# pylint: disable=wrong-import-position
# pylint: disable=broad-except
# pylint: disable=line-too-long
import argparse
import time
import logging
logging.basicConfig(
format='%(asctime)s - %(relativeCreated)6d - %(levelname)-8s - %(message)s',
)
logger = logging.getLogger()
logger.setLevel(logging.WARN)
# Our jenkins server does not include these rpms.
# In the future we might move this to a container where these
# libs might exist
#pylint: disable=import-error
from openshift_tools.monitoring.ocutil import OCUtil
from openshift_tools.monitoring.metric_sender import MetricSender
ocutil = OCUtil()
def runOCcmd_yaml(cmd, base_cmd='oc'):
""" log commands through ocutil """
logger.info(base_cmd + " " + cmd)
ocy_time = time.time()
ocy_result = ocutil.run_user_cmd_yaml(cmd, base_cmd=base_cmd, )
logger.info("oc command took %s seconds", str(time.time() - ocy_time))
return ocy_result
def parse_args():
""" parse the args from the cli """
parser = argparse.ArgumentParser(description='Check all the nodes taints Status')
parser.add_argument('-v', '--verbose', action='count', default=0,
help='verbosity level, specify multiple')
args = parser.parse_args()
if args.verbose > 0:
logger.setLevel(logging.INFO)
if args.verbose > 1:
logger.setLevel(logging.DEBUG)
return args
def check_taint_status():
"""get all the info of all node """
result_status = 0
node_info = runOCcmd_yaml("get node ")
for item in node_info['items']:
logger.info("Checking node: %s", item['metadata']['name'])
if "taints" in item['spec']:
taints = item['spec']['taints']
for taint in taints:
result_status = result_status + 1
logger.warn("Node: %s, have unexpected taint: %s=%s:%s", item['metadata']['name'], taint['key'], taint['value'], taint['effect'])
return result_status
def main():
""" check all the node taints see if any node have problem """
args = parse_args()
logger.debug("args: ")
logger.debug(args)
taint_status = check_taint_status()
#send the value to zabbix
mts = MetricSender(verbose=args.verbose)
mts.add_metric({'openshift.nodes.taint.status': taint_status})
mts.send_metrics()
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add monitor script for taints<commit_after>#!/usr/bin/env python
""" Node taints check for OpenShift V3 """
# Adding the ignore because it does not like the naming of the script
# to be different than the class name
# pylint: disable=invalid-name
# pylint: disable=wrong-import-position
# pylint: disable=broad-except
# pylint: disable=line-too-long
import argparse
import time
import logging
logging.basicConfig(
format='%(asctime)s - %(relativeCreated)6d - %(levelname)-8s - %(message)s',
)
logger = logging.getLogger()
logger.setLevel(logging.WARN)
# Our jenkins server does not include these rpms.
# In the future we might move this to a container where these
# libs might exist
#pylint: disable=import-error
from openshift_tools.monitoring.ocutil import OCUtil
from openshift_tools.monitoring.metric_sender import MetricSender
ocutil = OCUtil()
def runOCcmd_yaml(cmd, base_cmd='oc'):
""" log commands through ocutil """
logger.info(base_cmd + " " + cmd)
ocy_time = time.time()
ocy_result = ocutil.run_user_cmd_yaml(cmd, base_cmd=base_cmd, )
logger.info("oc command took %s seconds", str(time.time() - ocy_time))
return ocy_result
def parse_args():
""" parse the args from the cli """
parser = argparse.ArgumentParser(description='Check all the nodes taints Status')
parser.add_argument('-v', '--verbose', action='count', default=0,
help='verbosity level, specify multiple')
args = parser.parse_args()
if args.verbose > 0:
logger.setLevel(logging.INFO)
if args.verbose > 1:
logger.setLevel(logging.DEBUG)
return args
def check_taint_status():
"""get all the info of all node """
result_status = 0
node_info = runOCcmd_yaml("get node ")
for item in node_info['items']:
logger.info("Checking node: %s", item['metadata']['name'])
if "taints" in item['spec']:
taints = item['spec']['taints']
for taint in taints:
result_status = result_status + 1
logger.warn("Node: %s, have unexpected taint: %s=%s:%s", item['metadata']['name'], taint['key'], taint['value'], taint['effect'])
return result_status
def main():
""" check all the node taints see if any node have problem """
args = parse_args()
logger.debug("args: ")
logger.debug(args)
taint_status = check_taint_status()
#send the value to zabbix
mts = MetricSender(verbose=args.verbose)
mts.add_metric({'openshift.nodes.taint.status': taint_status})
mts.send_metrics()
if __name__ == "__main__":
main()
|
|
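A hand-made stand-in for the `oc get node` YAML shows what the check counts (sketch, not real cluster output):
node_info = {'items': [
    {'metadata': {'name': 'node-1'},
     'spec': {'taints': [{'key': 'dedicated', 'value': 'infra', 'effect': 'NoSchedule'}]}},
    {'metadata': {'name': 'node-2'}, 'spec': {}},
]}
tainted = sum(len(n['spec'].get('taints', [])) for n in node_info['items'])
print(tainted)  # 1 -> sent to zabbix as openshift.nodes.taint.status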
3627c5ce2d35e647f359476e784b067506de44e4
|
django_auth_policy/management/commands/unlock_all.py
|
django_auth_policy/management/commands/unlock_all.py
|
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django_auth_policy.models import LoginAttempt
class Command(BaseCommand):
help = "Remove all locks on usernames and IP addresses"
def handle(self, *args, **options):
c = LoginAttempt.objects.filter(lockout=True).update(lockout=False)
print u'Unlocked {0} login attempts.'.format(c)
|
Add management command which unlocks all lockout commands
|
Add management command which unlocks all lockout commands
|
Python
|
bsd-3-clause
|
mcella/django-auth-policy,mcella/django-auth-policy,Dreamsolution/django-auth-policy,Dreamsolution/django-auth-policy
|
Add management command which unlocks all lockout commands
|
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django_auth_policy.models import LoginAttempt
class Command(BaseCommand):
help = "Remove all locks on usernames and IP addresses"
def handle(self, *args, **options):
c = LoginAttempt.objects.filter(lockout=True).update(lockout=False)
print u'Unlocked {0} login attempts.'.format(c)
|
<commit_before><commit_msg>Add management command which unlocks all lockout commands<commit_after>
|
from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django_auth_policy.models import LoginAttempt
class Command(BaseCommand):
help = "Remove all locks on usernames and IP addresses"
def handle(self, *args, **options):
c = LoginAttempt.objects.filter(lockout=True).update(lockout=False)
print u'Unlocked {0} login attempts.'.format(c)
|
Add management command which unlocks all lockout commandsfrom django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django_auth_policy.models import LoginAttempt
class Command(BaseCommand):
help = "Remove all locks on usernames and IP addresses"
def handle(self, *args, **options):
c = LoginAttempt.objects.filter(lockout=True).update(lockout=False)
print u'Unlocked {0} login attempts.'.format(c)
|
<commit_before><commit_msg>Add management command which unlocks all lockout commands<commit_after>from django.core.management.base import BaseCommand, CommandError
from django.conf import settings
from django_auth_policy.models import LoginAttempt
class Command(BaseCommand):
help = "Remove all locks on usernames and IP addresses"
def handle(self, *args, **options):
c = LoginAttempt.objects.filter(lockout=True).update(lockout=False)
print u'Unlocked {0} login attempts.'.format(c)
|
|
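As a Django management command this runs via manage.py (`python manage.py unlock_all`), or programmatically; a sketch:
from django.core.management import call_command
call_command('unlock_all')  # prints 'Unlocked N login attempts.'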
795dfe1df9ca3e0ada86de450e5f7cf2c80fe4e2
|
tests/test_context_processors.py
|
tests/test_context_processors.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from responsive.conf import settings
from responsive.context_processors import device
from responsive.utils import Device
class ContextProcessorsTest(TestCase):
def setUp(self):
# Every test needs access to the request factory.
self.factory = RequestFactory()
def test_context_processor_raises_improperlyconfigured_error(self):
# with responsive middleware not included in MIDDLEWARE_CLASSES
        # this should raise an ImproperlyConfigured error
request = self.factory.get('/')
self.assertRaises(ImproperlyConfigured, device, request)
@override_settings(MIDDLEWARE_CLASSES=('responsive.middleware.ResponsiveMiddleware', ))
def test_context_processor_returns_device_object(self):
request = self.factory.get('/')
context = device(request)
self.assertIsInstance(context[settings.RESPONSIVE_VARIABLE_NAME], Device)
|
Add tests for responsive context_processors
|
Add tests for responsive context_processors
|
Python
|
bsd-3-clause
|
mishbahr/django-responsive2,mishbahr/django-responsive2
|
Add tests for responsive context_processors
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from responsive.conf import settings
from responsive.context_processors import device
from responsive.utils import Device
class ContextProcessorsTest(TestCase):
def setUp(self):
# Every test needs access to the request factory.
self.factory = RequestFactory()
def test_context_processor_raises_improperlyconfigured_error(self):
# with responsive middleware not included in MIDDLEWARE_CLASSES
        # this should raise an ImproperlyConfigured error
request = self.factory.get('/')
self.assertRaises(ImproperlyConfigured, device, request)
@override_settings(MIDDLEWARE_CLASSES=('responsive.middleware.ResponsiveMiddleware', ))
def test_context_processor_returns_device_object(self):
request = self.factory.get('/')
context = device(request)
self.assertIsInstance(context[settings.RESPONSIVE_VARIABLE_NAME], Device)
|
<commit_before><commit_msg>Add tests for responsive context_processors<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from responsive.conf import settings
from responsive.context_processors import device
from responsive.utils import Device
class ContextProcessorsTest(TestCase):
def setUp(self):
# Every test needs access to the request factory.
self.factory = RequestFactory()
def test_context_processor_raises_improperlyconfigured_error(self):
# with responsive middleware not included in MIDDLEWARE_CLASSES
        # this should raise an ImproperlyConfigured error
request = self.factory.get('/')
self.assertRaises(ImproperlyConfigured, device, request)
@override_settings(MIDDLEWARE_CLASSES=('responsive.middleware.ResponsiveMiddleware', ))
def test_context_processor_returns_device_object(self):
request = self.factory.get('/')
context = device(request)
self.assertIsInstance(context[settings.RESPONSIVE_VARIABLE_NAME], Device)
|
Add tests for responsive context_processors#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from responsive.conf import settings
from responsive.context_processors import device
from responsive.utils import Device
class ContextProcessorsTest(TestCase):
def setUp(self):
# Every test needs access to the request factory.
self.factory = RequestFactory()
def test_context_processor_raises_improperlyconfigured_error(self):
# with responsive middleware not included in MIDDLEWARE_CLASSES
        # this should raise an ImproperlyConfigured error
request = self.factory.get('/')
self.assertRaises(ImproperlyConfigured, device, request)
@override_settings(MIDDLEWARE_CLASSES=('responsive.middleware.ResponsiveMiddleware', ))
def test_context_processor_returns_device_object(self):
request = self.factory.get('/')
context = device(request)
self.assertIsInstance(context[settings.RESPONSIVE_VARIABLE_NAME], Device)
|
<commit_before><commit_msg>Add tests for responsive context_processors<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from django.core.exceptions import ImproperlyConfigured
from django.test import TestCase
from django.test.client import RequestFactory
from django.test.utils import override_settings
from responsive.conf import settings
from responsive.context_processors import device
from responsive.utils import Device
class ContextProcessorsTest(TestCase):
def setUp(self):
# Every test needs access to the request factory.
self.factory = RequestFactory()
def test_context_processor_raises_improperlyconfigured_error(self):
# with responsive middleware not included in MIDDLEWARE_CLASSES
        # this should raise an ImproperlyConfigured error
request = self.factory.get('/')
self.assertRaises(ImproperlyConfigured, device, request)
@override_settings(MIDDLEWARE_CLASSES=('responsive.middleware.ResponsiveMiddleware', ))
def test_context_processor_returns_device_object(self):
request = self.factory.get('/')
context = device(request)
self.assertIsInstance(context[settings.RESPONSIVE_VARIABLE_NAME], Device)
|
|
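A note on how the suite is driven (sketch; the settings module name is an assumption):
#   DJANGO_SETTINGS_MODULE=tests.settings python -m django test tests.test_context_processors
# override_settings swaps MIDDLEWARE_CLASSES only for the decorated test, which is
# why the first test sees ImproperlyConfigured while the second gets a Device object.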
3ef65ebffe960e205abe3efd36e5a5690a3ee947
|
arcutils/exc.py
|
arcutils/exc.py
|
import warnings
class ARCUtilsDeprecationWarning(DeprecationWarning):
@classmethod
def warn(cls, message, stacklevel=2):
warnings.warn(message, cls, stacklevel)
warnings.simplefilter('default', ARCUtilsDeprecationWarning)
|
Add a mechanism for deprecating things
|
Add a mechanism for deprecating things
|
Python
|
mit
|
PSU-OIT-ARC/django-arcutils,wylee/django-arcutils,wylee/django-arcutils,PSU-OIT-ARC/django-arcutils
|
Add a mechanism for deprecating things
|
import warnings
class ARCUtilsDeprecationWarning(DeprecationWarning):
@classmethod
def warn(cls, message, stacklevel=2):
warnings.warn(message, cls, stacklevel)
warnings.simplefilter('default', ARCUtilsDeprecationWarning)
|
<commit_before><commit_msg>Add a mechanism for deprecating things<commit_after>
|
import warnings
class ARCUtilsDeprecationWarning(DeprecationWarning):
@classmethod
def warn(cls, message, stacklevel=2):
warnings.warn(message, cls, stacklevel)
warnings.simplefilter('default', ARCUtilsDeprecationWarning)
|
Add a mechanism for deprecating thingsimport warnings
class ARCUtilsDeprecationWarning(DeprecationWarning):
@classmethod
def warn(cls, message, stacklevel=2):
warnings.warn(message, cls, stacklevel)
warnings.simplefilter('default', ARCUtilsDeprecationWarning)
|
<commit_before><commit_msg>Add a mechanism for deprecating things<commit_after>import warnings
class ARCUtilsDeprecationWarning(DeprecationWarning):
@classmethod
def warn(cls, message, stacklevel=2):
warnings.warn(message, cls, stacklevel)
warnings.simplefilter('default', ARCUtilsDeprecationWarning)
|
|
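A usage sketch: callers deprecate a helper by warning at call time; the 'default' filter makes the warning print once per call site even though DeprecationWarning is normally silenced:
from arcutils.exc import ARCUtilsDeprecationWarning
def old_helper():
    ARCUtilsDeprecationWarning.warn('old_helper() is deprecated; use new_helper() instead')
    # ... existing behaviour unchanged ...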
c11e88c6baa95bcbf72c3a5e09e203a5b74c3bc8
|
python/bin/python/pyqt4_symlinks.py
|
python/bin/python/pyqt4_symlinks.py
|
#!/usr/bin/env python
import os
import sys
from builtins import FileExistsError
try:
venv = os.environ['VIRTUAL_ENV']
except KeyError:
print('Virtual environment is not detected.')
raise
python_venv_version = sys.version_info[:2]
site_version = str(python_venv_version[0]) + '.' + str(python_venv_version[1])
if python_venv_version in [(3, 5), (3, 6)]:
dist_version = '3'
sip = 'sip.cpython-35m-x86_64-linux-gnu.so'
elif python_venv_version in [(2, 7)]:
dist_version = '2.7'
sip = 'sip.x86_64-linux-gnu.so'
else:
error_message = ' '.join(['Python', site_version, 'is not supported.'])
sys.exit(error_message)
usr_lib = '/usr/lib'
site_packages_dir = os.path.join(venv, 'lib', 'python'+site_version, 'site-packages')
dist_packages_dir = os.path.join(usr_lib, 'python'+dist_version, 'dist-packages')
pyqt4 = 'PyQt4'
pyqt4_site = os.path.join(site_packages_dir, pyqt4)
sip_site = os.path.join(site_packages_dir, sip)
pyqt4_dist = os.path.join(dist_packages_dir, pyqt4)
sip_dist = os.path.join(dist_packages_dir, sip)
args = [[pyqt4_dist, pyqt4_site], [sip_dist, sip_site]]
for arg in args:
try:
os.symlink(*arg)
except FileExistsError:
continue
|
Add Python code for making symlinks for PyQt4 and SIP into a virtualenv
|
Add Python code for making symlinks for PyQt4 and SIP into a virtualenv
|
Python
|
mit
|
edwinksl/dotfiles,edwinksl/dotfiles
|
Add Python code for making symlinks for PyQt4 and SIP into a virtualenv
|
#!/usr/bin/env python
import os
import sys
from builtins import FileExistsError
try:
venv = os.environ['VIRTUAL_ENV']
except KeyError:
print('Virtual environment is not detected.')
raise
python_venv_version = sys.version_info[:2]
site_version = str(python_venv_version[0]) + '.' + str(python_venv_version[1])
if python_venv_version in [(3, 5), (3, 6)]:
dist_version = '3'
sip = 'sip.cpython-35m-x86_64-linux-gnu.so'
elif python_venv_version in [(2, 7)]:
dist_version = '2.7'
sip = 'sip.x86_64-linux-gnu.so'
else:
error_message = ' '.join(['Python', site_version, 'is not supported.'])
sys.exit(error_message)
usr_lib = '/usr/lib'
site_packages_dir = os.path.join(venv, 'lib', 'python'+site_version, 'site-packages')
dist_packages_dir = os.path.join(usr_lib, 'python'+dist_version, 'dist-packages')
pyqt4 = 'PyQt4'
pyqt4_site = os.path.join(site_packages_dir, pyqt4)
sip_site = os.path.join(site_packages_dir, sip)
pyqt4_dist = os.path.join(dist_packages_dir, pyqt4)
sip_dist = os.path.join(dist_packages_dir, sip)
args = [[pyqt4_dist, pyqt4_site], [sip_dist, sip_site]]
for arg in args:
try:
os.symlink(*arg)
except FileExistsError:
continue
|
<commit_before><commit_msg>Add Python code for making symlinks for PyQt4 and SIP into a virtualenv<commit_after>
|
#!/usr/bin/env python
import os
import sys
from builtins import FileExistsError
try:
venv = os.environ['VIRTUAL_ENV']
except KeyError:
print('Virtual environment is not detected.')
raise
python_venv_version = sys.version_info[:2]
site_version = str(python_venv_version[0]) + '.' + str(python_venv_version[1])
if python_venv_version in [(3, 5), (3, 6)]:
dist_version = '3'
sip = 'sip.cpython-35m-x86_64-linux-gnu.so'
elif python_venv_version in [(2, 7)]:
dist_version = '2.7'
sip = 'sip.x86_64-linux-gnu.so'
else:
error_message = ' '.join(['Python', site_version, 'is not supported.'])
sys.exit(error_message)
usr_lib = '/usr/lib'
site_packages_dir = os.path.join(venv, 'lib', 'python'+site_version, 'site-packages')
dist_packages_dir = os.path.join(usr_lib, 'python'+dist_version, 'dist-packages')
pyqt4 = 'PyQt4'
pyqt4_site = os.path.join(site_packages_dir, pyqt4)
sip_site = os.path.join(site_packages_dir, sip)
pyqt4_dist = os.path.join(dist_packages_dir, pyqt4)
sip_dist = os.path.join(dist_packages_dir, sip)
args = [[pyqt4_dist, pyqt4_site], [sip_dist, sip_site]]
for arg in args:
try:
os.symlink(*arg)
except FileExistsError:
continue
|
Add Python code for making symlinks for PyQt4 and SIP into a virtualenv#!/usr/bin/env python
import os
import sys
from builtins import FileExistsError
try:
venv = os.environ['VIRTUAL_ENV']
except KeyError:
print('Virtual environment is not detected.')
raise
python_venv_version = sys.version_info[:2]
site_version = str(python_venv_version[0]) + '.' + str(python_venv_version[1])
if python_venv_version in [(3, 5), (3, 6)]:
dist_version = '3'
sip = 'sip.cpython-35m-x86_64-linux-gnu.so'
elif python_venv_version in [(2, 7)]:
dist_version = '2.7'
sip = 'sip.x86_64-linux-gnu.so'
else:
error_message = ' '.join(['Python', site_version, 'is not supported.'])
sys.exit(error_message)
usr_lib = '/usr/lib'
site_packages_dir = os.path.join(venv, 'lib', 'python'+site_version, 'site-packages')
dist_packages_dir = os.path.join(usr_lib, 'python'+dist_version, 'dist-packages')
pyqt4 = 'PyQt4'
pyqt4_site = os.path.join(site_packages_dir, pyqt4)
sip_site = os.path.join(site_packages_dir, sip)
pyqt4_dist = os.path.join(dist_packages_dir, pyqt4)
sip_dist = os.path.join(dist_packages_dir, sip)
args = [[pyqt4_dist, pyqt4_site], [sip_dist, sip_site]]
for arg in args:
try:
os.symlink(*arg)
except FileExistsError:
continue
|
<commit_before><commit_msg>Add Python code for making symlinks for PyQt4 and SIP into a virtualenv<commit_after>#!/usr/bin/env python
import os
import sys
from builtins import FileExistsError
try:
venv = os.environ['VIRTUAL_ENV']
except KeyError:
print('Virtual environment is not detected.')
raise
python_venv_version = sys.version_info[:2]
site_version = str(python_venv_version[0]) + '.' + str(python_venv_version[1])
if python_venv_version in [(3, 5), (3, 6)]:
dist_version = '3'
sip = 'sip.cpython-35m-x86_64-linux-gnu.so'
elif python_venv_version in [(2, 7)]:
dist_version = '2.7'
sip = 'sip.x86_64-linux-gnu.so'
else:
error_message = ' '.join(['Python', site_version, 'is not supported.'])
sys.exit(error_message)
usr_lib = '/usr/lib'
site_packages_dir = os.path.join(venv, 'lib', 'python'+site_version, 'site-packages')
dist_packages_dir = os.path.join(usr_lib, 'python'+dist_version, 'dist-packages')
pyqt4 = 'PyQt4'
pyqt4_site = os.path.join(site_packages_dir, pyqt4)
sip_site = os.path.join(site_packages_dir, sip)
pyqt4_dist = os.path.join(dist_packages_dir, pyqt4)
sip_dist = os.path.join(dist_packages_dir, sip)
args = [[pyqt4_dist, pyqt4_site], [sip_dist, sip_site]]
for arg in args:
try:
os.symlink(*arg)
except FileExistsError:
continue
|
|
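A minimal sketch for sanity-checking the links the script above creates, assuming the same virtualenv layout (the names here are illustrative, not part of the commit):

import os
import sys

venv = os.environ['VIRTUAL_ENV']
site_version = '%d.%d' % sys.version_info[:2]
site_packages = os.path.join(venv, 'lib', 'python' + site_version, 'site-packages')
# The sip filename varies by interpreter build, so only PyQt4 is checked here.
link = os.path.join(site_packages, 'PyQt4')
assert os.path.islink(link), link + ' was not created'
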
d0934c5530b2deaffa6a0f76ef997861412442b2
|
setup.py
|
setup.py
|
from setuptools import setup, find_packages
with open('README.md', 'r') as f:
README = f.read()
setup(name='apigpio',
version='0.0.1',
description='asyncio-based python client for pigpiod',
long_description=README,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers"
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Hardware :: Hardware Drivers",
"Topic :: Software Development :: Libraries :: Python Modules",
],
author='Pierre Rust',
author_email='pierre.rust@gmail.com',
url='https://github.com/PierreRust/apigpio',
keywords=['gpio', 'pigpio', 'asyncio', 'raspberry'],
packages=find_packages()
)
|
Add basic Python packaging info.
|
Add basic Python packaging info.
|
Python
|
mit
|
PierreRust/apigpio
|
Add basic Python packaging info.
|
from setuptools import setup, find_packages
with open('README.md', 'r') as f:
README = f.read()
setup(name='apigpio',
version='0.0.1',
description='asyncio-based python client for pigpiod',
long_description=README,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers"
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Hardware :: Hardware Drivers",
"Topic :: Software Development :: Libraries :: Python Modules",
],
author='Pierre Rust',
author_email='pierre.rust@gmail.com',
url='https://github.com/PierreRust/apigpio',
keywords=['gpio', 'pigpio', 'asyncio', 'raspberry'],
packages=find_packages()
)
|
<commit_before><commit_msg>Add basic Python packaging info.<commit_after>
|
from setuptools import setup, find_packages
with open('README.md', 'r') as f:
README = f.read()
setup(name='apigpio',
version='0.0.1',
description='asyncio-based python client for pigpiod',
long_description=README,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers"
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Hardware :: Hardware Drivers",
"Topic :: Software Development :: Libraries :: Python Modules",
],
author='Pierre Rust',
author_email='pierre.rust@gmail.com',
url='https://github.com/PierreRust/apigpio',
keywords=['gpio', 'pigpio', 'asyncio', 'raspberry'],
packages=find_packages()
)
|
Add basic Python packaging info.from setuptools import setup, find_packages
with open('README.md', 'r') as f:
README = f.read()
setup(name='apigpio',
version='0.0.1',
description='asyncio-based python client for pigpiod',
long_description=README,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers"
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Hardware :: Hardware Drivers",
"Topic :: Software Development :: Libraries :: Python Modules",
],
author='Pierre Rust',
author_email='pierre.rust@gmail.com',
url='https://github.com/PierreRust/apigpio',
keywords=['gpio', 'pigpio', 'asyncio', 'raspberry'],
packages=find_packages()
)
|
<commit_before><commit_msg>Add basic Python packaging info.<commit_after>from setuptools import setup, find_packages
with open('README.md', 'r') as f:
README = f.read()
setup(name='apigpio',
version='0.0.1',
description='asyncio-based python client for pigpiod',
long_description=README,
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers"
"License :: OSI Approved :: Apache Software License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.4",
"Topic :: System :: Hardware :: Hardware Drivers",
"Topic :: Software Development :: Libraries :: Python Modules",
],
author='Pierre Rust',
author_email='pierre.rust@gmail.com',
url='https://github.com/PierreRust/apigpio',
keywords=['gpio', 'pigpio', 'asyncio', 'raspberry'],
packages=find_packages()
)
|
|
048c992ca7c9ebabab9a9f60d9432b99111e573e
|
stats.py
|
stats.py
|
import re
import sqlite3
def open_cloudfront_log_db():
db = sqlite3.connect('cf.db')
return db
def format_size(sz):
abbrevs = (
(1<<50, 'PB'),
(1<<40, 'TB'),
(1<<30, 'GB'),
(1<<20, 'MB'),
(1<<10, 'KB'),
(1, 'B')
)
for factor, suffix in abbrevs:
if sz >= factor:
break
return '%.2f %s' % (float(sz) / factor, suffix)
def __main__():
cloudfront_db = open_cloudfront_log_db()
cur = cloudfront_db.cursor()
cur.execute('select * from logs')
total_bytes = sum([int(row[1]) for row in cur])
print format_size(total_bytes)
if __name__ == "__main__":
__main__()
|
Print total byte count from db
|
Print total byte count from db
|
Python
|
bsd-2-clause
|
hortont424/cfwatch
|
Print total byte count from db
|
import re
import sqlite3
def open_cloudfront_log_db():
db = sqlite3.connect('cf.db')
return db
def format_size(sz):
abbrevs = (
(1<<50, 'PB'),
(1<<40, 'TB'),
(1<<30, 'GB'),
(1<<20, 'MB'),
(1<<10, 'KB'),
(1, 'B')
)
for factor, suffix in abbrevs:
if sz >= factor:
break
return '%.2f %s' % (float(sz) / factor, suffix)
def __main__():
cloudfront_db = open_cloudfront_log_db()
cur = cloudfront_db.cursor()
cur.execute('select * from logs')
total_bytes = sum([int(row[1]) for row in cur])
print format_size(total_bytes)
if __name__ == "__main__":
__main__()
|
<commit_before><commit_msg>Print total byte count from db<commit_after>
|
import re
import sqlite3
def open_cloudfront_log_db():
db = sqlite3.connect('cf.db')
return db
def format_size(sz):
abbrevs = (
(1<<50, 'PB'),
(1<<40, 'TB'),
(1<<30, 'GB'),
(1<<20, 'MB'),
(1<<10, 'KB'),
(1, 'B')
)
for factor, suffix in abbrevs:
if sz >= factor:
break
return '%.2f %s' % (float(sz) / factor, suffix)
def __main__():
cloudfront_db = open_cloudfront_log_db()
cur = cloudfront_db.cursor()
cur.execute('select * from logs')
total_bytes = sum([int(row[1]) for row in cur])
print format_size(total_bytes)
if __name__ == "__main__":
__main__()
|
Print total byte count from dbimport re
import sqlite3
def open_cloudfront_log_db():
db = sqlite3.connect('cf.db')
return db
def format_size(sz):
abbrevs = (
(1<<50, 'PB'),
(1<<40, 'TB'),
(1<<30, 'GB'),
(1<<20, 'MB'),
(1<<10, 'KB'),
(1, 'B')
)
for factor, suffix in abbrevs:
if sz >= factor:
break
return '%.2f %s' % (float(sz) / factor, suffix)
def __main__():
cloudfront_db = open_cloudfront_log_db()
cur = cloudfront_db.cursor()
cur.execute('select * from logs')
total_bytes = sum([int(row[1]) for row in cur])
print format_size(total_bytes)
if __name__ == "__main__":
__main__()
|
<commit_before><commit_msg>Print total byte count from db<commit_after>import re
import sqlite3
def open_cloudfront_log_db():
db = sqlite3.connect('cf.db')
return db
def format_size(sz):
abbrevs = (
(1<<50, 'PB'),
(1<<40, 'TB'),
(1<<30, 'GB'),
(1<<20, 'MB'),
(1<<10, 'KB'),
(1, 'B')
)
for factor, suffix in abbrevs:
if sz >= factor:
break
return '%.2f %s' % (float(sz) / factor, suffix)
def __main__():
cloudfront_db = open_cloudfront_log_db()
cur = cloudfront_db.cursor()
cur.execute('select * from logs')
total_bytes = sum([int(row[1]) for row in cur])
print format_size(total_bytes)
if __name__ == "__main__":
__main__()
|
|
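The script above assumes an existing cf.db with a `logs` table whose second column holds byte counts; a hedged sketch of a compatible schema (the column names are guesses, since the script only reads row[1]):

import sqlite3

db = sqlite3.connect('cf.db')
# Column names are assumptions; stats.py treats row[1] as a byte count.
db.execute('CREATE TABLE IF NOT EXISTS logs (request TEXT, bytes INTEGER)')
db.execute('INSERT INTO logs VALUES (?, ?)', ('GET /index.html', 2048))
db.commit()
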
92e3464a08fd7c8b38dbae9c3ea15f9f6f2b64ac
|
ReSId.py
|
ReSId.py
|
"""
The GLEAM REsidual Source IDentifier program
Created by:
Robin Cook
March 24 2016
Modifications by:
Robin Cook
"""
# Imports
import sys
import os
import numpy as np
import math
# Other Imports
import scipy
import astropy
from optparse import OptionParser
usage = "usage: %prog [options] filename.fits"
parser = OptionParser(usage=usage)
parser.add_option("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE")
parser.add_option("-q", "--quiet",
action="store_false", dest="verbose", default=True,
help="don't print status messages to stdout")
(options, args) = parser.parse_args()
|
Include initial option parser framework
|
Include initial option parser framework
|
Python
|
mit
|
AstroRobin/GLEAM-RESID
|
Include initial option parser framework
|
"""
The GLEAM REsidual Source IDentifier program
Created by:
Robin Cook
March 24 2016
Modifications by:
Robin Cook
"""
# Imports
import sys
import os
import numpy as np
import math
# Other Imports
import scipy
import astropy
from optparse import OptionParser
usage = "usage: %prog [options] filename.fits"
parser = OptionParser(usage=usage)
parser.add_option("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE")
parser.add_option("-q", "--quiet",
action="store_false", dest="verbose", default=True,
help="don't print status messages to stdout")
(options, args) = parser.parse_args()
|
<commit_before><commit_msg>Include initial option parser framework<commit_after>
|
"""
The GLEAM REsidual Source IDentifier program
Created by:
Robin Cook
March 24 2016
Modifications by:
Robin Cook
"""
# Imports
import sys
import os
import numpy as np
import math
# Other Imports
import scipy
import astropy
from optparse import OptionParser
usage = "usage: %prog [options] filename.fits"
parser = OptionParser(usage=usage)
parser.add_option("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE")
parser.add_option("-q", "--quiet",
action="store_false", dest="verbose", default=True,
help="don't print status messages to stdout")
(options, args) = parser.parse_args()
|
Include initial option parser framework"""
The GLEAM REsidual Source IDentifier program
Created by:
Robin Cook
March 24 2016
Modifications by:
Robin Cook
"""
# Imports
import sys
import os
import numpy as np
import math
# Other Imports
import scipy
import astropy
from optparse import OptionParser
usage = "usage: %prog [options] filename.fits"
parser = OptionParser(usage=usage)
parser.add_option("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE")
parser.add_option("-q", "--quiet",
action="store_false", dest="verbose", default=True,
help="don't print status messages to stdout")
(options, args) = parser.parse_args()
|
<commit_before><commit_msg>Include initial option parser framework<commit_after>"""
The GLEAM REsidual Source IDentifier program
Created by:
Robin Cook
March 24 2016
Modifications by:
Robin Cook
"""
# Imports
import sys
import os
import numpy as np
import math
# Other Imports
import scipy
import astropy
from optparse import OptionParser
usage = "usage: %prog [options] filename.fits"
parser = OptionParser(usage=usage)
parser.add_option("-f", "--file", dest="filename",
help="write report to FILE", metavar="FILE")
parser.add_option("-q", "--quiet",
action="store_false", dest="verbose", default=True,
help="don't print status messages to stdout")
(options, args) = parser.parse_args()
|
|
ae95f79ebced40badfbe7dff72ced0c4436567b1
|
context_processors.py
|
context_processors.py
|
from django.conf import settings as sett
def settings(request):
my_dict = {
'SB_ROOT': sett.SB_ROOT,
'SITE_URL': sett.SITE_URL,
'SITE_NAME': sett.SITE_NAME,
'SITE_AUTHOR': sett.SITE_AUTHOR,
'PuSH_URL': sett.PUBSUBHUBBUB_URL,
}
return my_dict
|
Add context processor for adding data from the Django settings file
|
Add context processor for adding data from the Django settings file
|
Python
|
mit
|
kyleconroy/stashboard2
|
Add context processor for adding data from the Django settings file
|
from django.conf import settings as sett
def settings(request):
my_dict = {
'SB_ROOT': sett.SB_ROOT,
'SITE_URL': sett.SITE_URL,
'SITE_NAME': sett.SITE_NAME,
'SITE_AUTHOR': sett.SITE_AUTHOR,
'PuSH_URL': sett.PUBSUBHUBBUB_URL,
}
return my_dict
|
<commit_before><commit_msg>Add context processor for adding data from the Django settings file<commit_after>
|
from django.conf import settings as sett
def settings(request):
my_dict = {
'SB_ROOT': sett.SB_ROOT,
'SITE_URL': sett.SITE_URL,
'SITE_NAME': sett.SITE_NAME,
'SITE_AUTHOR': sett.SITE_AUTHOR,
'PuSH_URL': sett.PUBSUBHUBBUB_URL,
}
return my_dict
|
Add context processor for adding data from the Django settings filefrom django.conf import settings as sett
def settings(request):
my_dict = {
'SB_ROOT': sett.SB_ROOT,
'SITE_URL': sett.SITE_URL,
'SITE_NAME': sett.SITE_NAME,
'SITE_AUTHOR': sett.SITE_AUTHOR,
'PuSH_URL': sett.PUBSUBHUBBUB_URL,
}
return my_dict
|
<commit_before><commit_msg>Add context processor for adding data from the Django settings file<commit_after>from django.conf import settings as sett
def settings(request):
my_dict = {
'SB_ROOT': sett.SB_ROOT,
'SITE_URL': sett.SITE_URL,
'SITE_NAME': sett.SITE_NAME,
'SITE_AUTHOR': sett.SITE_AUTHOR,
'PuSH_URL': sett.PUBSUBHUBBUB_URL,
}
return my_dict
|
|
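For the processor above to run, it must be registered in the Django settings; a minimal sketch assuming a pre-1.8 Django, matching the era of this code (the dotted path is an assumption about where the module lives):

# settings.py (excerpt)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'context_processors.settings',  # assumed dotted path to the function above
)
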
96e853995203aae902416b4ba82707161ac6170c
|
IPython/core/tests/test_prompts.py
|
IPython/core/tests/test_prompts.py
|
"""Tests for prompt generation."""
import unittest
import nose.tools as nt
from IPython.testing import tools as tt, decorators as dec
from IPython.core.prompts import PromptManager
from IPython.testing.globalipapp import get_ipython
ip = get_ipython()
class PromptTests(unittest.TestCase):
def setUp(self):
self.pm = PromptManager(shell=ip, config=ip.config)
def test_multiline_prompt(self):
self.pm.in_template = "[In]\n>>>"
self.pm.render('in')
self.assertEqual(self.pm.width, 3)
self.assertEqual(self.pm.txtwidth, 3)
self.pm.in_template = '[In]\n'
self.pm.render('in')
self.assertEqual(self.pm.width, 0)
self.assertEqual(self.pm.txtwidth, 0)
def test_translate_abbreviations(self):
def do_translate(template):
self.pm.in_template = template
return self.pm.templates['in']
pairs = [(r'%n>', '{color.number}{count}{color.prompt}>'),
(r'\T', '{time}'),
(r'\n', '\n')
]
tt.check_pairs(do_translate, pairs)
def test_render(self):
self.pm.in_template = r'\#>'
self.assertEqual(self.pm.render('in',color=False), '%d>' % ip.execution_count)
|
Add tests for prompt system.
|
Add tests for prompt system.
|
Python
|
bsd-3-clause
|
ipython/ipython,ipython/ipython
|
Add tests for prompt system.
|
"""Tests for prompt generation."""
import unittest
import nose.tools as nt
from IPython.testing import tools as tt, decorators as dec
from IPython.core.prompts import PromptManager
from IPython.testing.globalipapp import get_ipython
ip = get_ipython()
class PromptTests(unittest.TestCase):
def setUp(self):
self.pm = PromptManager(shell=ip, config=ip.config)
def test_multiline_prompt(self):
self.pm.in_template = "[In]\n>>>"
self.pm.render('in')
self.assertEqual(self.pm.width, 3)
self.assertEqual(self.pm.txtwidth, 3)
self.pm.in_template = '[In]\n'
self.pm.render('in')
self.assertEqual(self.pm.width, 0)
self.assertEqual(self.pm.txtwidth, 0)
def test_translate_abbreviations(self):
def do_translate(template):
self.pm.in_template = template
return self.pm.templates['in']
pairs = [(r'%n>', '{color.number}{count}{color.prompt}>'),
(r'\T', '{time}'),
(r'\n', '\n')
]
tt.check_pairs(do_translate, pairs)
def test_render(self):
self.pm.in_template = r'\#>'
self.assertEqual(self.pm.render('in',color=False), '%d>' % ip.execution_count)
|
<commit_before><commit_msg>Add tests for prompt system.<commit_after>
|
"""Tests for prompt generation."""
import unittest
import nose.tools as nt
from IPython.testing import tools as tt, decorators as dec
from IPython.core.prompts import PromptManager
from IPython.testing.globalipapp import get_ipython
ip = get_ipython()
class PromptTests(unittest.TestCase):
def setUp(self):
self.pm = PromptManager(shell=ip, config=ip.config)
def test_multiline_prompt(self):
self.pm.in_template = "[In]\n>>>"
self.pm.render('in')
self.assertEqual(self.pm.width, 3)
self.assertEqual(self.pm.txtwidth, 3)
self.pm.in_template = '[In]\n'
self.pm.render('in')
self.assertEqual(self.pm.width, 0)
self.assertEqual(self.pm.txtwidth, 0)
def test_translate_abbreviations(self):
def do_translate(template):
self.pm.in_template = template
return self.pm.templates['in']
pairs = [(r'%n>', '{color.number}{count}{color.prompt}>'),
(r'\T', '{time}'),
(r'\n', '\n')
]
tt.check_pairs(do_translate, pairs)
def test_render(self):
self.pm.in_template = r'\#>'
self.assertEqual(self.pm.render('in',color=False), '%d>' % ip.execution_count)
|
Add tests for prompt system."""Tests for prompt generation."""
import unittest
import nose.tools as nt
from IPython.testing import tools as tt, decorators as dec
from IPython.core.prompts import PromptManager
from IPython.testing.globalipapp import get_ipython
ip = get_ipython()
class PromptTests(unittest.TestCase):
def setUp(self):
self.pm = PromptManager(shell=ip, config=ip.config)
def test_multiline_prompt(self):
self.pm.in_template = "[In]\n>>>"
self.pm.render('in')
self.assertEqual(self.pm.width, 3)
self.assertEqual(self.pm.txtwidth, 3)
self.pm.in_template = '[In]\n'
self.pm.render('in')
self.assertEqual(self.pm.width, 0)
self.assertEqual(self.pm.txtwidth, 0)
def test_translate_abbreviations(self):
def do_translate(template):
self.pm.in_template = template
return self.pm.templates['in']
pairs = [(r'%n>', '{color.number}{count}{color.prompt}>'),
(r'\T', '{time}'),
(r'\n', '\n')
]
tt.check_pairs(do_translate, pairs)
def test_render(self):
self.pm.in_template = r'\#>'
self.assertEqual(self.pm.render('in',color=False), '%d>' % ip.execution_count)
|
<commit_before><commit_msg>Add tests for prompt system.<commit_after>"""Tests for prompt generation."""
import unittest
import nose.tools as nt
from IPython.testing import tools as tt, decorators as dec
from IPython.core.prompts import PromptManager
from IPython.testing.globalipapp import get_ipython
ip = get_ipython()
class PromptTests(unittest.TestCase):
def setUp(self):
self.pm = PromptManager(shell=ip, config=ip.config)
def test_multiline_prompt(self):
self.pm.in_template = "[In]\n>>>"
self.pm.render('in')
self.assertEqual(self.pm.width, 3)
self.assertEqual(self.pm.txtwidth, 3)
self.pm.in_template = '[In]\n'
self.pm.render('in')
self.assertEqual(self.pm.width, 0)
self.assertEqual(self.pm.txtwidth, 0)
def test_translate_abbreviations(self):
def do_translate(template):
self.pm.in_template = template
return self.pm.templates['in']
pairs = [(r'%n>', '{color.number}{count}{color.prompt}>'),
(r'\T', '{time}'),
(r'\n', '\n')
]
tt.check_pairs(do_translate, pairs)
def test_render(self):
self.pm.in_template = r'\#>'
self.assertEqual(self.pm.render('in',color=False), '%d>' % ip.execution_count)
|
|
2576ad51ffeaecd645d030ca875c82f66ecd5ee8
|
learntools/computer_vision/ex3.py
|
learntools/computer_vision/ex3.py
|
from learntools.core import *
import tensorflow as tf
class Q1(CodingProblem):
_vars = ['image_condense', 'image_detect', 'image_filter', 'image']
_hint = ""
_solution = CS("""
image_filter = tf.nn.conv2d(
input=image,
filters=kernel,
strides=1,
padding='SAME',
)
""")
def check(self, image_condense, image_detect, image_filter, image):
pass
# TODO: check that the shape shrinks and that image_condense is
# max pooled image_detect
class Q2(ThoughtExperiment):
_solution = ""
class Q3(ThoughtExperiment):
_solution = ""
class Q4A(CodingProblem):
_hint = ""
_solution = ""
def check(self):
pass
class Q4B(CodingProblem):
_hint = ""
_solution = ""
def check(self):
pass
class Q4C(ThoughtExperiment):
_solution = ""
Q4 = MultipartProblem(Q4A, Q4B, Q4C)
qvars = bind_exercises(globals(), [
Q1, Q2, Q3, Q4,
],
var_format='q_{n}',
)
__all__ = list(qvars)
|
Add exercise 3 checking code
|
Add exercise 3 checking code
|
Python
|
apache-2.0
|
Kaggle/learntools,Kaggle/learntools
|
Add exercise 3 checking code
|
from learntools.core import *
import tensorflow as tf
class Q1(CodingProblem):
_vars = ['image_condense', 'image_detect', 'image_filter', 'image']
_hint = ""
_solution = CS("""
image_filter = tf.nn.conv2d(
input=image,
filters=kernel,
strides=1,
padding='SAME',
)
""")
def check(self, image_condense, image_detect, image_filter, image):
pass
# TODO: check that the shape shrinks and that image_condense is
# max pooled image_detect
class Q2(ThoughtExperiment):
_solution = ""
class Q3(ThoughtExperiment):
_solution = ""
class Q4A(CodingProblem):
_hint = ""
_solution = ""
def check(self):
pass
class Q4B(CodingProblem):
_hint = ""
_solution = ""
def check(self):
pass
class Q4C(ThoughtExperiment):
_solution = ""
Q4 = MultipartProblem(Q4A, Q4B, Q4C)
qvars = bind_exercises(globals(), [
Q1, Q2, Q3, Q4,
],
var_format='q_{n}',
)
__all__ = list(qvars)
|
<commit_before><commit_msg>Add exercise 3 checking code<commit_after>
|
from learntools.core import *
import tensorflow as tf
class Q1(CodingProblem):
_vars = ['image_condense', 'image_detect', 'image_filter', 'image']
_hint = ""
_solution = CS("""
image_filter = tf.nn.conv2d(
input=image,
filters=kernel,
strides=1,
padding='SAME',
)
""")
def check(self, image_condense, image_detect, image_filter, image):
pass
# TODO: check that the shape shrinks and that image_condense is
# max pooled image_detect
class Q2(ThoughtExperiment):
_solution = ""
class Q3(ThoughtExperiment):
_solution = ""
class Q4A(CodingProblem):
_hint = ""
_solution = ""
def check(self):
pass
class Q4B(CodingProblem):
_hint = ""
_solution = ""
def check(self):
pass
class Q4C(ThoughtExperiment):
_solution = ""
Q4 = MultipartProblem(Q4A, Q4B, Q4C)
qvars = bind_exercises(globals(), [
Q1, Q2, Q3, Q4,
],
var_format='q_{n}',
)
__all__ = list(qvars)
|
Add exercise 3 checking codefrom learntools.core import *
import tensorflow as tf
class Q1(CodingProblem):
_vars = ['image_condense', 'image_detect', 'image_filter', 'image']
_hint = ""
_solution = CS("""
image_filter = tf.nn.conv2d(
input=image,
filters=kernel,
strides=1,
padding='SAME',
)
""")
def check(self, image_condense, image_detect, image_filter, image):
pass
# TODO: check that the shape shrinks and that image_condense is
# max pooled image_detect
class Q2(ThoughtExperiment):
_solution = ""
class Q3(ThoughtExperiment):
_solution = ""
class Q4A(CodingProblem):
_hint = ""
_solution = ""
def check(self):
pass
class Q4B(CodingProblem):
_hint = ""
_solution = ""
def check(self):
pass
class Q4C(ThoughtExperiment):
_solution = ""
Q4 = MultipartProblem(Q4A, Q4B, Q4C)
qvars = bind_exercises(globals(), [
Q1, Q2, Q3, Q4,
],
var_format='q_{n}',
)
__all__ = list(qvars)
|
<commit_before><commit_msg>Add exercise 3 checking code<commit_after>from learntools.core import *
import tensorflow as tf
class Q1(CodingProblem):
_vars = ['image_condense', 'image_detect', 'image_filter', 'image']
_hint = ""
_solution = CS("""
image_filter = tf.nn.conv2d(
input=image,
filters=kernel,
strides=1,
padding='SAME',
)
""")
def check(self, image_condense, image_detect, image_filter, image):
pass
# TODO: check that the shape shrinks and that image_condense is
# max pooled image_detect
class Q2(ThoughtExperiment):
_solution = ""
class Q3(ThoughtExperiment):
_solution = ""
class Q4A(CodingProblem):
_hint = ""
_solution = ""
def check(self):
pass
class Q4B(CodingProblem):
_hint = ""
_solution = ""
def check(self):
pass
class Q4C(ThoughtExperiment):
_solution = ""
Q4 = MultipartProblem(Q4A, Q4B, Q4C)
qvars = bind_exercises(globals(), [
Q1, Q2, Q3, Q4,
],
var_format='q_{n}',
)
__all__ = list(qvars)
|
|
2e18b548bb13a274e94f4d3e36a631f9aa3e79a4
|
taskflow/tests/unit/test_utils.py
|
taskflow/tests/unit/test_utils.py
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import unittest
from taskflow import utils
class UtilTest(unittest.TestCase):
def test_rollback_accum(self):
context = {}
def caller(token, e):
context[token] = True
accum = utils.RollbackAccumulator()
def blowup():
for i in range(0, 10):
accum.add(functools.partial(caller, i))
self.assertEquals(0, len(context))
raise Exception
# Test manual triggering
self.assertEquals(0, len(accum))
self.assertRaises(Exception, blowup)
self.assertEquals(10, len(accum))
self.assertEquals(0, len(context))
accum.rollback(Exception())
self.assertEquals(10, len(context))
# Test context manager triggering
context = {}
accum.reset()
self.assertEquals(0, len(accum))
try:
with accum:
blowup()
except Exception:
pass
self.assertEquals(10, len(accum))
self.assertEquals(10, len(context))
|
Add a basic rollback accumulator test.
|
Add a basic rollback accumulator test.
|
Python
|
apache-2.0
|
jessicalucci/TaskManagement,openstack/taskflow,pombredanne/taskflow-1,citrix-openstack-build/taskflow,junneyang/taskflow,varunarya10/taskflow,openstack/taskflow,jessicalucci/TaskManagement,jimbobhickville/taskflow,citrix-openstack-build/taskflow,varunarya10/taskflow,junneyang/taskflow,pombredanne/taskflow-1,jimbobhickville/taskflow
|
Add a basic rollback accumulator test.
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import unittest
from taskflow import utils
class UtilTest(unittest.TestCase):
def test_rollback_accum(self):
context = {}
def caller(token, e):
context[token] = True
accum = utils.RollbackAccumulator()
def blowup():
for i in range(0, 10):
accum.add(functools.partial(caller, i))
self.assertEquals(0, len(context))
raise Exception
# Test manual triggering
self.assertEquals(0, len(accum))
self.assertRaises(Exception, blowup)
self.assertEquals(10, len(accum))
self.assertEquals(0, len(context))
accum.rollback(Exception())
self.assertEquals(10, len(context))
# Test context manager triggering
context = {}
accum.reset()
self.assertEquals(0, len(accum))
try:
with accum:
blowup()
except Exception:
pass
self.assertEquals(10, len(accum))
self.assertEquals(10, len(context))
|
<commit_before><commit_msg>Add a basic rollback accumulator test.<commit_after>
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import unittest
from taskflow import utils
class UtilTest(unittest.TestCase):
def test_rollback_accum(self):
context = {}
def caller(token, e):
context[token] = True
accum = utils.RollbackAccumulator()
def blowup():
for i in range(0, 10):
accum.add(functools.partial(caller, i))
self.assertEquals(0, len(context))
raise Exception
# Test manual triggering
self.assertEquals(0, len(accum))
self.assertRaises(Exception, blowup)
self.assertEquals(10, len(accum))
self.assertEquals(0, len(context))
accum.rollback(Exception())
self.assertEquals(10, len(context))
# Test context manager triggering
context = {}
accum.reset()
self.assertEquals(0, len(accum))
try:
with accum:
blowup()
except Exception:
pass
self.assertEquals(10, len(accum))
self.assertEquals(10, len(context))
|
Add a basic rollback accumulator test.# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import unittest
from taskflow import utils
class UtilTest(unittest.TestCase):
def test_rollback_accum(self):
context = {}
def caller(token, e):
context[token] = True
accum = utils.RollbackAccumulator()
def blowup():
for i in range(0, 10):
accum.add(functools.partial(caller, i))
self.assertEquals(0, len(context))
raise Exception
# Test manual triggering
self.assertEquals(0, len(accum))
self.assertRaises(Exception, blowup)
self.assertEquals(10, len(accum))
self.assertEquals(0, len(context))
accum.rollback(Exception())
self.assertEquals(10, len(context))
# Test context manager triggering
context = {}
accum.reset()
self.assertEquals(0, len(accum))
try:
with accum:
blowup()
except Exception:
pass
self.assertEquals(10, len(accum))
self.assertEquals(10, len(context))
|
<commit_before><commit_msg>Add a basic rollback accumulator test.<commit_after># -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import unittest
from taskflow import utils
class UtilTest(unittest.TestCase):
def test_rollback_accum(self):
context = {}
def caller(token, e):
context[token] = True
accum = utils.RollbackAccumulator()
def blowup():
for i in range(0, 10):
accum.add(functools.partial(caller, i))
self.assertEquals(0, len(context))
raise Exception
# Test manual triggering
self.assertEquals(0, len(accum))
self.assertRaises(Exception, blowup)
self.assertEquals(10, len(accum))
self.assertEquals(0, len(context))
accum.rollback(Exception())
self.assertEquals(10, len(context))
# Test context manager triggering
context = {}
accum.reset()
self.assertEquals(0, len(accum))
try:
with accum:
blowup()
except Exception:
pass
self.assertEquals(10, len(accum))
self.assertEquals(10, len(context))
|
|
324cafc24bae3c9ecb0b06ad8701e2b4c40c75c2
|
migration/versions/014_Add_terms_column.py
|
migration/versions/014_Add_terms_column.py
|
from sqlalchemy import Boolean, Column, MetaData, Table
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
account = Table('account', meta, autoload=True)
termsc = Column('terms', Boolean, default=False)
termsc.create(account, populate_default=True)
def downgrade(migrate_engine):
meta.bind = migrate_engine
account = Table('account', meta, autoload=True)
account.c.terms.drop()
|
Add migration for terms column
|
Add migration for terms column
|
Python
|
agpl-3.0
|
johnjohndoe/spendb,CivicVision/datahub,pudo/spendb,johnjohndoe/spendb,USStateDept/FPA_Core,openspending/spendb,pudo/spendb,pudo/spendb,nathanhilbert/FPA_Core,nathanhilbert/FPA_Core,USStateDept/FPA_Core,openspending/spendb,spendb/spendb,nathanhilbert/FPA_Core,openspending/spendb,CivicVision/datahub,CivicVision/datahub,johnjohndoe/spendb,spendb/spendb,USStateDept/FPA_Core,spendb/spendb
|
Add migration for terms column
|
from sqlalchemy import Boolean, Column, MetaData, Table
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
account = Table('account', meta, autoload=True)
termsc = Column('terms', Boolean, default=False)
termsc.create(account, populate_default=True)
def downgrade(migrate_engine):
meta.bind = migrate_engine
account = Table('account', meta, autoload=True)
account.c.terms.drop()
|
<commit_before><commit_msg>Add migration for terms column<commit_after>
|
from sqlalchemy import Boolean, Column, MetaData, Table
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
account = Table('account', meta, autoload=True)
termsc = Column('terms', Boolean, default=False)
termsc.create(account, populate_default=True)
def downgrade(migrate_engine):
meta.bind = migrate_engine
account = Table('account', meta, autoload=True)
account.c.terms.drop()
|
Add migration for terms columnfrom sqlalchemy import Boolean, Column, MetaData, Table
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
account = Table('account', meta, autoload=True)
termsc = Column('terms', Boolean, default=False)
termsc.create(account, populate_default=True)
def downgrade(migrate_engine):
meta.bind = migrate_engine
account = Table('account', meta, autoload=True)
account.c.terms.drop()
|
<commit_before><commit_msg>Add migration for terms column<commit_after>from sqlalchemy import Boolean, Column, MetaData, Table
meta = MetaData()
def upgrade(migrate_engine):
meta.bind = migrate_engine
account = Table('account', meta, autoload=True)
termsc = Column('terms', Boolean, default=False)
termsc.create(account, populate_default=True)
def downgrade(migrate_engine):
meta.bind = migrate_engine
account = Table('account', meta, autoload=True)
account.c.terms.drop()
|
|
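The upgrade/downgrade hooks above are normally driven by sqlalchemy-migrate; a minimal sketch of invoking them directly against a scratch database (the URL is illustrative, and an `account` table must already exist because the migration autoloads it):

from sqlalchemy import create_engine

engine = create_engine('sqlite:///scratch.db')  # assumed URL
upgrade(engine)    # adds the boolean `terms` column, defaulting to False
downgrade(engine)  # drops the column again
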
bdadf9b67b6f35566d7ff874522aeaf4ee519e24
|
tests/test_configure_alt_name.py
|
tests/test_configure_alt_name.py
|
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase
#todo: merge into test_series
def age_series(**kwargs):
from flexget.plugins.filter.series import Release
from flexget.manager import Session
import datetime
session = Session()
session.query(Release).update({'first_seen': datetime.datetime.now() - datetime.timedelta(**kwargs)})
session.commit()
class TestImportSeries(FlexGetBase):
__yaml__ = """
tasks:
test_import_altnames:
configure_series:
from:
mock:
- {title: 'the show', alternate_name: 'le show'}
mock:
- title: le show s03e03
"""
def test_timeframe_max(self):
"""Tests configure_series as well as timeframe with max_quality."""
self.execute_task('test_import_altnames')
entry = self.task.find_entry(title='le show s03e03')
assert entry.accepted, 'entry matching series alternate name should have been accepted.'
assert entry['series_name'] == 'the show', 'entry series should be set to the main name'
|
Test for series configure loading alternate name
|
Test for series configure loading alternate name
|
Python
|
mit
|
jawilson/Flexget,poulpito/Flexget,dsemi/Flexget,tarzasai/Flexget,X-dark/Flexget,jawilson/Flexget,antivirtel/Flexget,antivirtel/Flexget,vfrc2/Flexget,Flexget/Flexget,thalamus/Flexget,LynxyssCZ/Flexget,ianstalk/Flexget,spencerjanssen/Flexget,patsissons/Flexget,ratoaq2/Flexget,malkavi/Flexget,xfouloux/Flexget,tvcsantos/Flexget,jawilson/Flexget,X-dark/Flexget,v17al/Flexget,Pretagonist/Flexget,Danfocus/Flexget,gazpachoking/Flexget,sean797/Flexget,poulpito/Flexget,tobinjt/Flexget,Danfocus/Flexget,grrr2/Flexget,drwyrm/Flexget,offbyone/Flexget,poulpito/Flexget,tarzasai/Flexget,grrr2/Flexget,tvcsantos/Flexget,camon/Flexget,dsemi/Flexget,lildadou/Flexget,oxc/Flexget,crawln45/Flexget,ZefQ/Flexget,Flexget/Flexget,gazpachoking/Flexget,ZefQ/Flexget,oxc/Flexget,malkavi/Flexget,voriux/Flexget,v17al/Flexget,ratoaq2/Flexget,thalamus/Flexget,qvazzler/Flexget,qvazzler/Flexget,voriux/Flexget,lildadou/Flexget,offbyone/Flexget,JorisDeRieck/Flexget,crawln45/Flexget,ratoaq2/Flexget,Pretagonist/Flexget,thalamus/Flexget,drwyrm/Flexget,qk4l/Flexget,sean797/Flexget,JorisDeRieck/Flexget,crawln45/Flexget,spencerjanssen/Flexget,patsissons/Flexget,vfrc2/Flexget,oxc/Flexget,v17al/Flexget,ianstalk/Flexget,drwyrm/Flexget,qk4l/Flexget,tsnoam/Flexget,JorisDeRieck/Flexget,crawln45/Flexget,qvazzler/Flexget,malkavi/Flexget,camon/Flexget,Flexget/Flexget,antivirtel/Flexget,ibrahimkarahan/Flexget,xfouloux/Flexget,X-dark/Flexget,dsemi/Flexget,cvium/Flexget,spencerjanssen/Flexget,jacobmetrick/Flexget,OmgOhnoes/Flexget,Danfocus/Flexget,ibrahimkarahan/Flexget,vfrc2/Flexget,jacobmetrick/Flexget,JorisDeRieck/Flexget,xfouloux/Flexget,cvium/Flexget,ianstalk/Flexget,tobinjt/Flexget,jacobmetrick/Flexget,tobinjt/Flexget,tsnoam/Flexget,ibrahimkarahan/Flexget,tobinjt/Flexget,sean797/Flexget,Danfocus/Flexget,tarzasai/Flexget,cvium/Flexget,jawilson/Flexget,OmgOhnoes/Flexget,LynxyssCZ/Flexget,malkavi/Flexget,LynxyssCZ/Flexget,qk4l/Flexget,offbyone/Flexget,lildadou/Flexget,patsissons/Flexget,Pretagonist/Flexget,LynxyssCZ/Flexget,OmgOhnoes/Flexget,grrr2/Flexget,tsnoam/Flexget,Flexget/Flexget,ZefQ/Flexget
|
Test for series configure loading alternate name
|
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase
#todo: merge into test_series
def age_series(**kwargs):
from flexget.plugins.filter.series import Release
from flexget.manager import Session
import datetime
session = Session()
session.query(Release).update({'first_seen': datetime.datetime.now() - datetime.timedelta(**kwargs)})
session.commit()
class TestImportSeries(FlexGetBase):
__yaml__ = """
tasks:
test_import_altnames:
configure_series:
from:
mock:
- {title: 'the show', alternate_name: 'le show'}
mock:
- title: le show s03e03
"""
def test_timeframe_max(self):
"""Tests configure_series as well as timeframe with max_quality."""
self.execute_task('test_import_altnames')
entry = self.task.find_entry(title='le show s03e03')
assert entry.accepted, 'entry matching series alternate name should have been accepted.'
assert entry['series_name'] == 'the show', 'entry series should be set to the main name'
|
<commit_before><commit_msg>Test for series configure loading alternate name<commit_after>
|
from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase
#todo: merge into test_series
def age_series(**kwargs):
from flexget.plugins.filter.series import Release
from flexget.manager import Session
import datetime
session = Session()
session.query(Release).update({'first_seen': datetime.datetime.now() - datetime.timedelta(**kwargs)})
session.commit()
class TestImportSeries(FlexGetBase):
__yaml__ = """
tasks:
test_import_altnames:
configure_series:
from:
mock:
- {title: 'the show', alternate_name: 'le show'}
mock:
- title: le show s03e03
"""
def test_timeframe_max(self):
"""Tests configure_series as well as timeframe with max_quality."""
self.execute_task('test_import_altnames')
entry = self.task.find_entry(title='le show s03e03')
assert entry.accepted, 'entry matching series alternate name should have been accepted.'
assert entry['series_name'] == 'the show', 'entry series should be set to the main name'
|
Test for series configure loading alternate namefrom __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase
#todo: merge into test_series
def age_series(**kwargs):
from flexget.plugins.filter.series import Release
from flexget.manager import Session
import datetime
session = Session()
session.query(Release).update({'first_seen': datetime.datetime.now() - datetime.timedelta(**kwargs)})
session.commit()
class TestImportSeries(FlexGetBase):
__yaml__ = """
tasks:
test_import_altnames:
configure_series:
from:
mock:
- {title: 'the show', alternate_name: 'le show'}
mock:
- title: le show s03e03
"""
def test_timeframe_max(self):
"""Tests configure_series as well as timeframe with max_quality."""
self.execute_task('test_import_altnames')
entry = self.task.find_entry(title='le show s03e03')
assert entry.accepted, 'entry matching series alternate name should have been accepted.'
assert entry['series_name'] == 'the show', 'entry series should be set to the main name'
|
<commit_before><commit_msg>Test for series configure loading alternate name<commit_after>from __future__ import unicode_literals, division, absolute_import
from tests import FlexGetBase
#todo: merge into test_series
def age_series(**kwargs):
from flexget.plugins.filter.series import Release
from flexget.manager import Session
import datetime
session = Session()
session.query(Release).update({'first_seen': datetime.datetime.now() - datetime.timedelta(**kwargs)})
session.commit()
class TestImportSeries(FlexGetBase):
__yaml__ = """
tasks:
test_import_altnames:
configure_series:
from:
mock:
- {title: 'the show', alternate_name: 'le show'}
mock:
- title: le show s03e03
"""
def test_timeframe_max(self):
"""Tests configure_series as well as timeframe with max_quality."""
self.execute_task('test_import_altnames')
entry = self.task.find_entry(title='le show s03e03')
assert entry.accepted, 'entry matching series alternate name should have been accepted.'
assert entry['series_name'] == 'the show', 'entry series should be set to the main name'
|
|
50bd84ed86c924fa5b020dd1200ccbfcd95aaf1d
|
tests/test_requests.py
|
tests/test_requests.py
|
# -*- coding: utf-8 -*-
"""
Add comment here
~~~~~~~~~~~~~~~~
    Add description here
:copyright: (c) 2014 Shinya Ohyanagi, All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import os
import shutil
import requests
from unittest import TestCase
from unittest import skip
from autodoc import autodoc
from autodoc._compat import iteritems
root_path = os.path.dirname(os.path.abspath(__file__))
var_path = os.path.join(root_path, 'var')
os.environ['PYAUTODOC'] = '1'
class TestRequestsResponse(TestCase):
def setUp(self):
self.client = requests
self.root_path = root_path
if os.path.exists(var_path):
shutil.rmtree(var_path)
os.mkdir(var_path)
# TODO Add mock
@skip('Skip')
def test_parse_response(self):
""" Should parse requests response. """
params = {'message': 'foo'}
headers = {'content-type': 'application/json'}
res = self.client.post('http://localhost:5000', data=params,
headers=headers)
autodoc.parse('POST /', res)
var = {
'describe': 'POST /',
'describe_separators': '======',
'target_url': 'http://localhost:5000/',
'status_code': 200,
'request': 'POST /',
'response_body': '{"response": "create"}',
'response_content_type': 'application/json',
'params': '{\n "message": "foo"\n}'
}
for k, v in iteritems(autodoc.vars[0]):
self.assertEqual(v, var[k])
|
Add requests test, but skip it for now.
|
Add requests test, but skip it for now.
|
Python
|
bsd-3-clause
|
heavenshell/py-autodoc
|
Add requests test, but skip it for now.
|
# -*- coding: utf-8 -*-
"""
Add comment here
~~~~~~~~~~~~~~~~
    Add description here
:copyright: (c) 2014 Shinya Ohyanagi, All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import os
import shutil
import requests
from unittest import TestCase
from unittest import skip
from autodoc import autodoc
from autodoc._compat import iteritems
root_path = os.path.dirname(os.path.abspath(__file__))
var_path = os.path.join(root_path, 'var')
os.environ['PYAUTODOC'] = '1'
class TestRequestsResponse(TestCase):
def setUp(self):
self.client = requests
self.root_path = root_path
if os.path.exists(var_path):
shutil.rmtree(var_path)
os.mkdir(var_path)
# TODO Add mock
@skip('Skip')
def test_parse_response(self):
""" Should parse requests response. """
params = {'message': 'foo'}
headers = {'content-type': 'application/json'}
res = self.client.post('http://localhost:5000', data=params,
headers=headers)
autodoc.parse('POST /', res)
var = {
'describe': 'POST /',
'describe_separators': '======',
'target_url': 'http://localhost:5000/',
'status_code': 200,
'request': 'POST /',
'response_body': '{"response": "create"}',
'response_content_type': 'application/json',
'params': '{\n "message": "foo"\n}'
}
for k, v in iteritems(autodoc.vars[0]):
self.assertEqual(v, var[k])
|
<commit_before><commit_msg>Add requests test, but skip it for now.<commit_after>
|
# -*- coding: utf-8 -*-
"""
Add comment here
~~~~~~~~~~~~~~~~
    Add description here
:copyright: (c) 2014 Shinya Ohyanagi, All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import os
import shutil
import requests
from unittest import TestCase
from unittest import skip
from autodoc import autodoc
from autodoc._compat import iteritems
root_path = os.path.dirname(os.path.abspath(__file__))
var_path = os.path.join(root_path, 'var')
os.environ['PYAUTODOC'] = '1'
class TestRequestsResponse(TestCase):
def setUp(self):
self.client = requests
self.root_path = root_path
if os.path.exists(var_path):
shutil.rmtree(var_path)
os.mkdir(var_path)
# TODO Add mock
@skip('Skip')
def test_parse_response(self):
""" Should parse requests response. """
params = {'message': 'foo'}
headers = {'content-type': 'application/json'}
res = self.client.post('http://localhost:5000', data=params,
headers=headers)
autodoc.parse('POST /', res)
var = {
'describe': 'POST /',
'describe_separators': '======',
'target_url': 'http://localhost:5000/',
'status_code': 200,
'request': 'POST /',
'response_body': '{"response": "create"}',
'response_content_type': 'application/json',
'params': '{\n "message": "foo"\n}'
}
for k, v in iteritems(autodoc.vars[0]):
self.assertEqual(v, var[k])
|
Add requests test, but skip it for now.# -*- coding: utf-8 -*-
"""
Add comment here
~~~~~~~~~~~~~~~~
    Add description here
:copyright: (c) 2014 Shinya Ohyanagi, All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import os
import shutil
import requests
from unittest import TestCase
from unittest import skip
from autodoc import autodoc
from autodoc._compat import iteritems
root_path = os.path.dirname(os.path.abspath(__file__))
var_path = os.path.join(root_path, 'var')
os.environ['PYAUTODOC'] = '1'
class TestRequestsResponse(TestCase):
def setUp(self):
self.client = requests
self.root_path = root_path
if os.path.exists(var_path):
shutil.rmtree(var_path)
os.mkdir(var_path)
# TODO Add mock
@skip('Skip')
def test_parse_response(self):
""" Should parse requests response. """
params = {'message': 'foo'}
headers = {'content-type': 'application/json'}
res = self.client.post('http://localhost:5000', data=params,
headers=headers)
autodoc.parse('POST /', res)
var = {
'describe': 'POST /',
'describe_separators': '======',
'target_url': 'http://localhost:5000/',
'status_code': 200,
'request': 'POST /',
'response_body': '{"response": "create"}',
'response_content_type': 'application/json',
'params': '{\n "message": "foo"\n}'
}
for k, v in iteritems(autodoc.vars[0]):
self.assertEqual(v, var[k])
|
<commit_before><commit_msg>Add requests test, but skip it for now.<commit_after># -*- coding: utf-8 -*-
"""
Add comment here
~~~~~~~~~~~~~~~~
    Add description here
:copyright: (c) 2014 Shinya Ohyanagi, All rights reserved.
:license: BSD, see LICENSE for more details.
"""
import os
import shutil
import requests
from unittest import TestCase
from unittest import skip
from autodoc import autodoc
from autodoc._compat import iteritems
root_path = os.path.dirname(os.path.abspath(__file__))
var_path = os.path.join(root_path, 'var')
os.environ['PYAUTODOC'] = '1'
class TestRequestsResponse(TestCase):
def setUp(self):
self.client = requests
self.root_path = root_path
if os.path.exists(var_path):
shutil.rmtree(var_path)
os.mkdir(var_path)
# TODO Add mock
@skip('Skip')
def test_parse_response(self):
""" Should parse requests response. """
params = {'message': 'foo'}
headers = {'content-type': 'application/json'}
res = self.client.post('http://localhost:5000', data=params,
headers=headers)
autodoc.parse('POST /', res)
var = {
'describe': 'POST /',
'describe_separators': '======',
'target_url': 'http://localhost:5000/',
'status_code': 200,
'request': 'POST /',
'response_body': '{"response": "create"}',
'response_content_type': 'application/json',
'params': '{\n "message": "foo"\n}'
}
for k, v in iteritems(autodoc.vars[0]):
self.assertEqual(v, var[k])
|
|
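The TODO above asks for a mock; one hedged way to unskip the test without a live server is to stub the client before calling autodoc.parse (the attribute names mirror what a requests.Response exposes and are an assumption about what the parser reads):

from unittest import mock

fake_response = mock.Mock(
    status_code=200,
    headers={'content-type': 'application/json'},
    text='{"response": "create"}',
)
stub_client = mock.Mock()
stub_client.post = mock.Mock(return_value=fake_response)  # stands in for requests.post
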
380a86134a62265eb944d717cad002bbc4197be4
|
cjdata/views.py
|
cjdata/views.py
|
from django.views.generic import DetailView, ListView
from django.shortcuts import get_object_or_404
from cjdata.models import Dataset, Category
from cjdata.search.query import sqs
class DatasetDetailView(DetailView):
model = Dataset
slug_field = 'uuid'
slug_url_kwarg = 'uuid'
def get_context_data(self, **kwargs):
context = super(DatasetDetailView, self).get_context_data(**kwargs)
context['more_like_this'] = sqs.more_like_this(self.object)[:100]
return context
class CategoryDatasetsView(ListView):
model = Dataset
paginate_by = 50
def get_queryset(self):
path_arg = self.kwargs.get('path', None)
self.category = get_object_or_404(Category, path=path_arg.replace('-', ' '))
return Dataset.objects.filter(categories__path__iexact=self.category.path)
def get_context_data(self, **kwargs):
context = super(CategoryDatasetsView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
|
from django.views.generic import DetailView, ListView
from django.shortcuts import get_object_or_404
from cjdata.models import Dataset, Category
from cjdata.search.query import sqs
class DatasetDetailView(DetailView):
model = Dataset
slug_field = 'uuid'
slug_url_kwarg = 'uuid'
def get_context_data(self, **kwargs):
context = super(DatasetDetailView, self).get_context_data(**kwargs)
context['more_like_this'] = sqs.more_like_this(self.object)[:100]
return context
class CategoryDatasetsView(ListView):
model = Dataset
paginate_by = 50
def get_queryset(self):
path_arg = self.kwargs.get('path', None)
self.category = get_object_or_404(Category, path__iexact=path_arg.replace('-', ' '))
return Dataset.objects.filter(categories__path=self.category.path)
def get_context_data(self, **kwargs):
context = super(CategoryDatasetsView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
|
Correct category lookup on url kwarg to be case-insensitive.
|
Correct category lookup on url kwarg to be case-insensitive.
|
Python
|
bsd-3-clause
|
dmc2015/hall-of-justice,dmc2015/hall-of-justice,dmc2015/hall-of-justice,sunlightlabs/hall-of-justice,sunlightlabs/hall-of-justice,sunlightlabs/hall-of-justice
|
from django.views.generic import DetailView, ListView
from django.shortcuts import get_object_or_404
from cjdata.models import Dataset, Category
from cjdata.search.query import sqs
class DatasetDetailView(DetailView):
model = Dataset
slug_field = 'uuid'
slug_url_kwarg = 'uuid'
def get_context_data(self, **kwargs):
context = super(DatasetDetailView, self).get_context_data(**kwargs)
context['more_like_this'] = sqs.more_like_this(self.object)[:100]
return context
class CategoryDatasetsView(ListView):
model = Dataset
paginate_by = 50
def get_queryset(self):
path_arg = self.kwargs.get('path', None)
self.category = get_object_or_404(Category, path=path_arg.replace('-', ' '))
return Dataset.objects.filter(categories__path__iexact=self.category.path)
def get_context_data(self, **kwargs):
context = super(CategoryDatasetsView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
Correct category lookup on url kwarg to be case-insensitive.
|
from django.views.generic import DetailView, ListView
from django.shortcuts import get_object_or_404
from cjdata.models import Dataset, Category
from cjdata.search.query import sqs
class DatasetDetailView(DetailView):
model = Dataset
slug_field = 'uuid'
slug_url_kwarg = 'uuid'
def get_context_data(self, **kwargs):
context = super(DatasetDetailView, self).get_context_data(**kwargs)
context['more_like_this'] = sqs.more_like_this(self.object)[:100]
return context
class CategoryDatasetsView(ListView):
model = Dataset
paginate_by = 50
def get_queryset(self):
path_arg = self.kwargs.get('path', None)
self.category = get_object_or_404(Category, path__iexact=path_arg.replace('-', ' '))
return Dataset.objects.filter(categories__path=self.category.path)
def get_context_data(self, **kwargs):
context = super(CategoryDatasetsView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
|
<commit_before>from django.views.generic import DetailView, ListView
from django.shortcuts import get_object_or_404
from cjdata.models import Dataset, Category
from cjdata.search.query import sqs
class DatasetDetailView(DetailView):
model = Dataset
slug_field = 'uuid'
slug_url_kwarg = 'uuid'
def get_context_data(self, **kwargs):
context = super(DatasetDetailView, self).get_context_data(**kwargs)
context['more_like_this'] = sqs.more_like_this(self.object)[:100]
return context
class CategoryDatasetsView(ListView):
model = Dataset
paginate_by = 50
def get_queryset(self):
path_arg = self.kwargs.get('path', None)
self.category = get_object_or_404(Category, path=path_arg.replace('-', ' '))
return Dataset.objects.filter(categories__path__iexact=self.category.path)
def get_context_data(self, **kwargs):
context = super(CategoryDatasetsView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
<commit_msg>Correct category lookup on url kwarg to be case-insensitive.<commit_after>
|
from django.views.generic import DetailView, ListView
from django.shortcuts import get_object_or_404
from cjdata.models import Dataset, Category
from cjdata.search.query import sqs
class DatasetDetailView(DetailView):
model = Dataset
slug_field = 'uuid'
slug_url_kwarg = 'uuid'
def get_context_data(self, **kwargs):
context = super(DatasetDetailView, self).get_context_data(**kwargs)
context['more_like_this'] = sqs.more_like_this(self.object)[:100]
return context
class CategoryDatasetsView(ListView):
model = Dataset
paginate_by = 50
def get_queryset(self):
path_arg = self.kwargs.get('path', None)
self.category = get_object_or_404(Category, path__iexact=path_arg.replace('-', ' '))
return Dataset.objects.filter(categories__path=self.category.path)
def get_context_data(self, **kwargs):
context = super(CategoryDatasetsView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
|
from django.views.generic import DetailView, ListView
from django.shortcuts import get_object_or_404
from cjdata.models import Dataset, Category
from cjdata.search.query import sqs
class DatasetDetailView(DetailView):
model = Dataset
slug_field = 'uuid'
slug_url_kwarg = 'uuid'
def get_context_data(self, **kwargs):
context = super(DatasetDetailView, self).get_context_data(**kwargs)
context['more_like_this'] = sqs.more_like_this(self.object)[:100]
return context
class CategoryDatasetsView(ListView):
model = Dataset
paginate_by = 50
def get_queryset(self):
path_arg = self.kwargs.get('path', None)
self.category = get_object_or_404(Category, path=path_arg.replace('-', ' '))
return Dataset.objects.filter(categories__path__iexact=self.category.path)
def get_context_data(self, **kwargs):
context = super(CategoryDatasetsView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
Correct category lookup on url kwarg to be case-insensitive.from django.views.generic import DetailView, ListView
from django.shortcuts import get_object_or_404
from cjdata.models import Dataset, Category
from cjdata.search.query import sqs
class DatasetDetailView(DetailView):
model = Dataset
slug_field = 'uuid'
slug_url_kwarg = 'uuid'
def get_context_data(self, **kwargs):
context = super(DatasetDetailView, self).get_context_data(**kwargs)
context['more_like_this'] = sqs.more_like_this(self.object)[:100]
return context
class CategoryDatasetsView(ListView):
model = Dataset
paginate_by = 50
def get_queryset(self):
path_arg = self.kwargs.get('path', None)
self.category = get_object_or_404(Category, path__iexact=path_arg.replace('-', ' '))
return Dataset.objects.filter(categories__path=self.category.path)
def get_context_data(self, **kwargs):
context = super(CategoryDatasetsView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
|
<commit_before>from django.views.generic import DetailView, ListView
from django.shortcuts import get_object_or_404
from cjdata.models import Dataset, Category
from cjdata.search.query import sqs
class DatasetDetailView(DetailView):
model = Dataset
slug_field = 'uuid'
slug_url_kwarg = 'uuid'
def get_context_data(self, **kwargs):
context = super(DatasetDetailView, self).get_context_data(**kwargs)
context['more_like_this'] = sqs.more_like_this(self.object)[:100]
return context
class CategoryDatasetsView(ListView):
model = Dataset
paginate_by = 50
def get_queryset(self):
path_arg = self.kwargs.get('path', None)
self.category = get_object_or_404(Category, path=path_arg.replace('-', ' '))
return Dataset.objects.filter(categories__path__iexact=self.category.path)
def get_context_data(self, **kwargs):
context = super(CategoryDatasetsView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
<commit_msg>Correct category lookup on url kwarg to use case insensitive.<commit_after>from django.views.generic import DetailView, ListView
from django.shortcuts import get_object_or_404
from cjdata.models import Dataset, Category
from cjdata.search.query import sqs
class DatasetDetailView(DetailView):
model = Dataset
slug_field = 'uuid'
slug_url_kwarg = 'uuid'
def get_context_data(self, **kwargs):
context = super(DatasetDetailView, self).get_context_data(**kwargs)
context['more_like_this'] = sqs.more_like_this(self.object)[:100]
return context
class CategoryDatasetsView(ListView):
model = Dataset
paginate_by = 50
def get_queryset(self):
path_arg = self.kwargs.get('path', None)
self.category = get_object_or_404(Category, path__iexact=path_arg.replace('-', ' '))
return Dataset.objects.filter(categories__path=self.category.path)
def get_context_data(self, **kwargs):
context = super(CategoryDatasetsView, self).get_context_data(**kwargs)
context['category'] = self.category
return context
|
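A note on the fix recorded above: the case-insensitive match belongs on the Category lookup itself, not on the related-name filter, because the URL kwarg is user-supplied while self.category.path is already canonical. A minimal sketch of the lookup semantics, assuming a plain CharField inside a configured Django project (this Category model is illustrative, not the cjdata one):

from django.db import models

class Category(models.Model):
    path = models.CharField(max_length=255)

def find_category(path_arg):
    # "Public-Safety" from the URL becomes "Public Safety"; the stored row
    # may be "public safety", so an exact match misses it and __iexact hits.
    return Category.objects.get(path__iexact=path_arg.replace('-', ' '))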
1f495c7faa501efe15fb518ebe0dbf94a83e0f8a
|
pijobs/fortunejob.py
|
pijobs/fortunejob.py
|
import subprocess
from pijobs.scrolljob import ScrollJob
class FortuneJob(ScrollJob):
def message(self):
return self.run_cmd('fortune')
    def run_cmd(self, cmd):
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = p.communicate()[0]
return output
|
Add job to display fortunes.
|
Add job to display fortunes.
|
Python
|
mit
|
ollej/piapi,ollej/piapi
|
Add job to display fortunes.
|
import subprocess
from pijobs.scrolljob import ScrollJob
class FortuneJob(ScrollJob):
def message(self):
return self.run_cmd('fortune')
    def run_cmd(self, cmd):
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = p.communicate()[0]
return output
|
<commit_before><commit_msg>Add job to display fortunes.<commit_after>
|
import subprocess
from pijobs.scrolljob import ScrollJob
class FortuneJob(ScrollJob):
def message(self):
return self.run_cmd('fortune')
    def run_cmd(self, cmd):
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = p.communicate()[0]
return output
|
Add job to display fortunes.import subprocess
from pijobs.scrolljob import ScrollJob
class FortuneJob(ScrollJob):
def message(self):
return self.run_cmd('fortune')
    def run_cmd(self, cmd):
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = p.communicate()[0]
return output
|
<commit_before><commit_msg>Add job to display fortunes.<commit_after>import subprocess
from pijobs.scrolljob import ScrollJob
class FortuneJob(ScrollJob):
def message(self):
return self.run_cmd('fortune')
    def run_cmd(self, cmd):
p = subprocess.Popen(cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
output = p.communicate()[0]
return output
|
|
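As a side note on the helper above, the same command capture can be written without a shell. A minimal stand-alone sketch, assuming only that a `fortune` binary is on PATH (the function name is illustrative, not part of the recorded module):

import subprocess

def run_cmd(cmd):
    # Run an argument list directly (no shell=True) and return stdout as text.
    return subprocess.check_output(cmd, stderr=subprocess.STDOUT).decode("utf-8")

if __name__ == "__main__":
    print(run_cmd(["fortune"]))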
6054b634c79d95e5cd2a4ed5c796d8ffcd1ddcc1
|
frigg/builds/migrations/0003_auto_20141029_2158.py
|
frigg/builds/migrations/0003_auto_20141029_2158.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('builds', '0002_auto_20141028_0710'),
]
operations = [
migrations.AlterModelOptions(
name='build',
options={'ordering': ['-id']},
),
]
|
Add migration for ordering update
|
Add migration for ordering update
|
Python
|
mit
|
frigg/frigg-hq,frigg/frigg-hq,frigg/frigg-hq
|
Add migration for ordering update
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('builds', '0002_auto_20141028_0710'),
]
operations = [
migrations.AlterModelOptions(
name='build',
options={'ordering': ['-id']},
),
]
|
<commit_before><commit_msg>Add migration for ordering update<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('builds', '0002_auto_20141028_0710'),
]
operations = [
migrations.AlterModelOptions(
name='build',
options={'ordering': ['-id']},
),
]
|
Add migration for ordering update# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('builds', '0002_auto_20141028_0710'),
]
operations = [
migrations.AlterModelOptions(
name='build',
options={'ordering': ['-id']},
),
]
|
<commit_before><commit_msg>Add migration for ordering update<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('builds', '0002_auto_20141028_0710'),
]
operations = [
migrations.AlterModelOptions(
name='build',
options={'ordering': ['-id']},
),
]
|
|
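The AlterModelOptions operation recorded above changes Django's model state only; it emits no SQL. A sketch of the model-side edit that would make makemigrations generate it, with an assumed illustrative field since the Build model's real fields are not shown in the record:

from django.db import models

class Build(models.Model):
    branch = models.CharField(max_length=100)  # illustrative field

    class Meta:
        ordering = ['-id']  # newest builds first; mirrored by the migration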
f3945fc1227b1650080fc52e63ce3b5ca795aefc
|
tools/visualization.py
|
tools/visualization.py
|
from pathlib import Path
from keras.applications import VGG16
from keras.models import Model
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import AxesGrid
from tools.datasets.urban_tribes import load_images
def plot_filter(image_path, layer_name, output_dir):
base_model = VGG16(weights='imagenet')
x = load_images([image_path])
model = Model(input=base_model.input,
output=base_model.get_layer(layer_name).output)
layer_output = model.predict(x)
fig = plt.figure(figsize=(12, 12))
grid = AxesGrid(fig, 111,
nrows_ncols=(8, 8),
axes_pad=0.0,
share_all=True,
label_mode="L",
cbar_location="top",
cbar_mode="single")
for i in range(64):
im = grid[i].imshow(layer_output[0, :, :, i], interpolation="nearest")
grid.cbar_axes[0].colorbar(im)
for cax in grid.cbar_axes:
cax.toggle_label(False)
for ax in grid.axes_all:
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
output_dir = Path(output_dir)
fig_file = '{}-{}.pdf'.format(Path(image_path).stem, layer_name)
plt.savefig(str(output_dir / fig_file))
|
Add a convenient function to plot filter
|
Add a convenient function to plot filter
|
Python
|
mit
|
xiongliyang219/transfer-learning
|
Add a convenient function to plot filter
|
from pathlib import Path
from keras.applications import VGG16
from keras.models import Model
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import AxesGrid
from tools.datasets.urban_tribes import load_images
def plot_filter(image_path, layer_name, output_dir):
base_model = VGG16(weights='imagenet')
x = load_images([image_path])
model = Model(input=base_model.input,
output=base_model.get_layer(layer_name).output)
layer_output = model.predict(x)
fig = plt.figure(figsize=(12, 12))
grid = AxesGrid(fig, 111,
nrows_ncols=(8, 8),
axes_pad=0.0,
share_all=True,
label_mode="L",
cbar_location="top",
cbar_mode="single")
for i in range(64):
im = grid[i].imshow(layer_output[0, :, :, i], interpolation="nearest")
grid.cbar_axes[0].colorbar(im)
for cax in grid.cbar_axes:
cax.toggle_label(False)
for ax in grid.axes_all:
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
output_dir = Path(output_dir)
fig_file = '{}-{}.pdf'.format(Path(image_path).stem, layer_name)
plt.savefig(str(output_dir / fig_file))
|
<commit_before><commit_msg>Add a convenient function to plot filter<commit_after>
|
from pathlib import Path
from keras.applications import VGG16
from keras.models import Model
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import AxesGrid
from tools.datasets.urban_tribes import load_images
def plot_filter(image_path, layer_name, output_dir):
base_model = VGG16(weights='imagenet')
x = load_images([image_path])
model = Model(input=base_model.input,
output=base_model.get_layer(layer_name).output)
layer_output = model.predict(x)
fig = plt.figure(figsize=(12, 12))
grid = AxesGrid(fig, 111,
nrows_ncols=(8, 8),
axes_pad=0.0,
share_all=True,
label_mode="L",
cbar_location="top",
cbar_mode="single")
for i in range(64):
im = grid[i].imshow(layer_output[0, :, :, i], interpolation="nearest")
grid.cbar_axes[0].colorbar(im)
for cax in grid.cbar_axes:
cax.toggle_label(False)
for ax in grid.axes_all:
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
output_dir = Path(output_dir)
fig_file = '{}-{}.pdf'.format(Path(image_path).stem, layer_name)
plt.savefig(str(output_dir / fig_file))
|
Add a convenient function to plot filterfrom pathlib import Path
from keras.applications import VGG16
from keras.models import Model
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import AxesGrid
from tools.datasets.urban_tribes import load_images
def plot_filter(image_path, layer_name, output_dir):
base_model = VGG16(weights='imagenet')
x = load_images([image_path])
model = Model(input=base_model.input,
output=base_model.get_layer(layer_name).output)
layer_output = model.predict(x)
fig = plt.figure(figsize=(12, 12))
grid = AxesGrid(fig, 111,
nrows_ncols=(8, 8),
axes_pad=0.0,
share_all=True,
label_mode="L",
cbar_location="top",
cbar_mode="single")
for i in range(64):
im = grid[i].imshow(layer_output[0, :, :, i], interpolation="nearest")
grid.cbar_axes[0].colorbar(im)
for cax in grid.cbar_axes:
cax.toggle_label(False)
for ax in grid.axes_all:
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
output_dir = Path(output_dir)
fig_file = '{}-{}.pdf'.format(Path(image_path).stem, layer_name)
plt.savefig(str(output_dir / fig_file))
|
<commit_before><commit_msg>Add a convenient function to plot filter<commit_after>from pathlib import Path
from keras.applications import VGG16
from keras.models import Model
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import AxesGrid
from tools.datasets.urban_tribes import load_images
def plot_filter(image_path, layer_name, output_dir):
base_model = VGG16(weights='imagenet')
x = load_images([image_path])
model = Model(input=base_model.input,
output=base_model.get_layer(layer_name).output)
layer_output = model.predict(x)
fig = plt.figure(figsize=(12, 12))
grid = AxesGrid(fig, 111,
nrows_ncols=(8, 8),
axes_pad=0.0,
share_all=True,
label_mode="L",
cbar_location="top",
cbar_mode="single")
for i in range(64):
im = grid[i].imshow(layer_output[0, :, :, i], interpolation="nearest")
grid.cbar_axes[0].colorbar(im)
for cax in grid.cbar_axes:
cax.toggle_label(False)
for ax in grid.axes_all:
ax.get_xaxis().set_visible(False)
ax.get_yaxis().set_visible(False)
output_dir = Path(output_dir)
fig_file = '{}-{}.pdf'.format(Path(image_path).stem, layer_name)
plt.savefig(str(output_dir / fig_file))
|
|
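The plotting record above tiles 64 feature maps in an 8x8 AxesGrid with one shared colorbar. A self-contained sketch of that layout using random data in place of VGG16 activations, so it runs without Keras (figure size, map shape, and output name are arbitrary choices):

import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.axes_grid1 import AxesGrid

maps = np.random.rand(64, 14, 14)  # stand-in for one layer's activations

fig = plt.figure(figsize=(8, 8))
grid = AxesGrid(fig, 111, nrows_ncols=(8, 8), axes_pad=0.0,
                share_all=True, cbar_location="top", cbar_mode="single")
for i in range(64):
    im = grid[i].imshow(maps[i], interpolation="nearest")
    grid[i].get_xaxis().set_visible(False)
    grid[i].get_yaxis().set_visible(False)
grid.cbar_axes[0].colorbar(im)
plt.savefig("feature-maps.pdf")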
c7621bd5c5e48c8d45ae70836b681b715348d0ba
|
modules/module_oraakkeli.py
|
modules/module_oraakkeli.py
|
import urllib
def command_oraakkeli(bot, user, channel, args):
"""Asks a question from the oracle (http://www.lintukoto.net/viihde/oraakkeli/)"""
if not args: return
args = urllib.quote_plus(args)
answer = getUrl("http://www.lintukoto.net/viihde/oraakkeli/index.php?kysymys=%s&html=0" % args).getContent()
bot.say(channel, "Oraakkeli vastaa: %s" % answer)
|
import urllib
def command_oraakkeli(bot, user, channel, args):
"""Asks a question from the oracle (http://www.lintukoto.net/viihde/oraakkeli/)"""
if not args: return
args = urllib.quote_plus(args)
answer = getUrl("http://www.lintukoto.net/viihde/oraakkeli/index.php?kysymys=%s&html=0" % args).getContent()
answer = unicode(answer)
answer = answer.encode("utf-8")
bot.say(channel, "Oraakkeli vastaa: %s" % answer)
|
Update oracle module for UTF-8
|
Update oracle module for UTF-8
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@99 dda364a1-ef19-0410-af65-756c83048fb2
|
Python
|
bsd-3-clause
|
EArmour/pyfibot,nigeljonez/newpyfibot,huqa/pyfibot,aapa/pyfibot,EArmour/pyfibot,rnyberg/pyfibot,rnyberg/pyfibot,lepinkainen/pyfibot,aapa/pyfibot,lepinkainen/pyfibot,huqa/pyfibot
|
import urllib
def command_oraakkeli(bot, user, channel, args):
"""Asks a question from the oracle (http://www.lintukoto.net/viihde/oraakkeli/)"""
if not args: return
args = urllib.quote_plus(args)
answer = getUrl("http://www.lintukoto.net/viihde/oraakkeli/index.php?kysymys=%s&html=0" % args).getContent()
bot.say(channel, "Oraakkeli vastaa: %s" % answer)
Update oracle module for UTF-8
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@99 dda364a1-ef19-0410-af65-756c83048fb2
|
import urllib
def command_oraakkeli(bot, user, channel, args):
"""Asks a question from the oracle (http://www.lintukoto.net/viihde/oraakkeli/)"""
if not args: return
args = urllib.quote_plus(args)
answer = getUrl("http://www.lintukoto.net/viihde/oraakkeli/index.php?kysymys=%s&html=0" % args).getContent()
answer = unicode(answer)
answer = answer.encode("utf-8")
bot.say(channel, "Oraakkeli vastaa: %s" % answer)
|
<commit_before>
import urllib
def command_oraakkeli(bot, user, channel, args):
"""Asks a question from the oracle (http://www.lintukoto.net/viihde/oraakkeli/)"""
if not args: return
args = urllib.quote_plus(args)
answer = getUrl("http://www.lintukoto.net/viihde/oraakkeli/index.php?kysymys=%s&html=0" % args).getContent()
bot.say(channel, "Oraakkeli vastaa: %s" % answer)
<commit_msg>Update oracle module for UTF-8
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@99 dda364a1-ef19-0410-af65-756c83048fb2<commit_after>
|
import urllib
def command_oraakkeli(bot, user, channel, args):
"""Asks a question from the oracle (http://www.lintukoto.net/viihde/oraakkeli/)"""
if not args: return
args = urllib.quote_plus(args)
answer = getUrl("http://www.lintukoto.net/viihde/oraakkeli/index.php?kysymys=%s&html=0" % args).getContent()
answer = unicode(answer)
answer = answer.encode("utf-8")
bot.say(channel, "Oraakkeli vastaa: %s" % answer)
|
import urllib
def command_oraakkeli(bot, user, channel, args):
"""Asks a question from the oracle (http://www.lintukoto.net/viihde/oraakkeli/)"""
if not args: return
args = urllib.quote_plus(args)
answer = getUrl("http://www.lintukoto.net/viihde/oraakkeli/index.php?kysymys=%s&html=0" % args).getContent()
bot.say(channel, "Oraakkeli vastaa: %s" % answer)
Update oracle module for UTF-8
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@99 dda364a1-ef19-0410-af65-756c83048fb2
import urllib
def command_oraakkeli(bot, user, channel, args):
"""Asks a question from the oracle (http://www.lintukoto.net/viihde/oraakkeli/)"""
if not args: return
args = urllib.quote_plus(args)
answer = getUrl("http://www.lintukoto.net/viihde/oraakkeli/index.php?kysymys=%s&html=0" % args).getContent()
answer = unicode(answer)
answer = answer.encode("utf-8")
bot.say(channel, "Oraakkeli vastaa: %s" % answer)
|
<commit_before>
import urllib
def command_oraakkeli(bot, user, channel, args):
"""Asks a question from the oracle (http://www.lintukoto.net/viihde/oraakkeli/)"""
if not args: return
args = urllib.quote_plus(args)
answer = getUrl("http://www.lintukoto.net/viihde/oraakkeli/index.php?kysymys=%s&html=0" % args).getContent()
bot.say(channel, "Oraakkeli vastaa: %s" % answer)
<commit_msg>Update oracle module for UTF-8
git-svn-id: 056f9092885898c4775d98c479d2d33d00273e45@99 dda364a1-ef19-0410-af65-756c83048fb2<commit_after>
import urllib
def command_oraakkeli(bot, user, channel, args):
"""Asks a question from the oracle (http://www.lintukoto.net/viihde/oraakkeli/)"""
if not args: return
args = urllib.quote_plus(args)
answer = getUrl("http://www.lintukoto.net/viihde/oraakkeli/index.php?kysymys=%s&html=0" % args).getContent()
answer = unicode(answer)
answer = answer.encode("utf-8")
bot.say(channel, "Oraakkeli vastaa: %s" % answer)
|
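One caveat on the Python 2 fix above: unicode(answer) with no codec argument decodes as ASCII, so a non-ASCII oracle reply would still raise UnicodeDecodeError; unicode(answer, "utf-8") is the explicit spelling. A minimal round-trip sketch of the intended byte handling (standalone, Python 3 syntax; the sample string is invented):

raw = "Oraakkeli vastaa: hyvä kysymys".encode("utf-8")  # bytes off the wire

text = raw.decode("utf-8")   # bytes -> text, codec named explicitly
wire = text.encode("utf-8")  # text -> bytes for IRC output
assert wire == raw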
35cc1959ebfc209a7b2b15e7a323afc88474b3e5
|
py/largest-palindrome-product.py
|
py/largest-palindrome-product.py
|
class Solution(object):
def largestPalindrome(self, n):
"""
:type n: int
:rtype: int
"""
if n == 1:
return 9
for x in xrange(3, 10 ** n):
# two numbers 10 ** n - i, 10 ** n - j
# x = i + j
# reverse(10 ** n - x) = i * j
y = int(str(10 ** n - x)[::-1])
D = x * x - 4 * y
if D >= 0:
sqrt_D = int((D + 0.5) ** .5)
if sqrt_D ** 2 == D:
return ((10 ** n - x) * 10 ** n + y) % 1337
|
Add py solution for 479. Largest Palindrome Product
|
Add py solution for 479. Largest Palindrome Product
479. Largest Palindrome Product: https://leetcode.com/problems/largest-palindrome-product/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 479. Largest Palindrome Product
479. Largest Palindrome Product: https://leetcode.com/problems/largest-palindrome-product/
|
class Solution(object):
def largestPalindrome(self, n):
"""
:type n: int
:rtype: int
"""
if n == 1:
return 9
for x in xrange(3, 10 ** n):
# two numbers 10 ** n - i, 10 ** n - j
# x = i + j
# reverse(10 ** n - x) = i * j
y = int(str(10 ** n - x)[::-1])
D = x * x - 4 * y
if D >= 0:
sqrt_D = int((D + 0.5) ** .5)
if sqrt_D ** 2 == D:
return ((10 ** n - x) * 10 ** n + y) % 1337
|
<commit_before><commit_msg>Add py solution for 479. Largest Palindrome Product
479. Largest Palindrome Product: https://leetcode.com/problems/largest-palindrome-product/<commit_after>
|
class Solution(object):
def largestPalindrome(self, n):
"""
:type n: int
:rtype: int
"""
if n == 1:
return 9
for x in xrange(3, 10 ** n):
# two numbers 10 ** n - i, 10 ** n - j
# x = i + j
# reverse(10 ** n - x) = i * j
y = int(str(10 ** n - x)[::-1])
D = x * x - 4 * y
if D >= 0:
sqrt_D = int((D + 0.5) ** .5)
if sqrt_D ** 2 == D:
return ((10 ** n - x) * 10 ** n + y) % 1337
|
Add py solution for 479. Largest Palindrome Product
479. Largest Palindrome Product: https://leetcode.com/problems/largest-palindrome-product/class Solution(object):
def largestPalindrome(self, n):
"""
:type n: int
:rtype: int
"""
if n == 1:
return 9
for x in xrange(3, 10 ** n):
# two numbers 10 ** n - i, 10 ** n - j
# x = i + j
# reverse(10 ** n - x) = i * j
y = int(str(10 ** n - x)[::-1])
D = x * x - 4 * y
if D >= 0:
sqrt_D = int((D + 0.5) ** .5)
if sqrt_D ** 2 == D:
return ((10 ** n - x) * 10 ** n + y) % 1337
|
<commit_before><commit_msg>Add py solution for 479. Largest Palindrome Product
479. Largest Palindrome Product: https://leetcode.com/problems/largest-palindrome-product/<commit_after>class Solution(object):
def largestPalindrome(self, n):
"""
:type n: int
:rtype: int
"""
if n == 1:
return 9
for x in xrange(3, 10 ** n):
# two numbers 10 ** n - i, 10 ** n - j
# x = i + j
# reverse(10 ** n - x) = i * j
y = int(str(10 ** n - x)[::-1])
D = x * x - 4 * y
if D >= 0:
sqrt_D = int((D + 0.5) ** .5)
if sqrt_D ** 2 == D:
return ((10 ** n - x) * 10 ** n + y) % 1337
|
|
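The comments in the solution above encode the key identity: write the candidate palindrome as (10**n - x) * 10**n + y, where y is the digit-reversal of 10**n - x; factors 10**n - i and 10**n - j then satisfy i + j = x and i * j = y, so they exist exactly when the discriminant x*x - 4*y is a perfect square. A brute-force cross-check for n = 2 (illustrative only; far too slow for larger n):

def brute_force(n):
    best = 0
    for a in range(10 ** (n - 1), 10 ** n):
        for b in range(a, 10 ** n):
            p = a * b
            if str(p) == str(p)[::-1]:
                best = max(best, p)
    return best

assert brute_force(2) == 9009          # 91 * 99
assert brute_force(2) % 1337 == 987    # matches the solution's return value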
18d10b310d83de6b6244fd09b9fc742db5ced6e2
|
py/assign-cookies.py
|
py/assign-cookies.py
|
from collections import Counter
class Solution(object):
def findContentChildren(self, children, s):
"""
        :type children: List[int]
:type s: List[int]
:rtype: int
"""
if not s:
return 0
cookies = Counter(s)
it = iter(sorted(cookies))
cur = it.next()
children.sort()
ans = 0
try:
for child in children:
while cookies[cur] <= 0 or cur < child:
cur = it.next()
cookies[cur] -= 1
ans += 1
except StopIteration:
pass
return ans
|
Add py solution for 455. Assign Cookies
|
Add py solution for 455. Assign Cookies
455. Assign Cookies: https://leetcode.com/problems/assign-cookies/
|
Python
|
apache-2.0
|
ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode,ckclark/leetcode
|
Add py solution for 455. Assign Cookies
455. Assign Cookies: https://leetcode.com/problems/assign-cookies/
|
from collections import Counter
class Solution(object):
def findContentChildren(self, children, s):
"""
        :type children: List[int]
:type s: List[int]
:rtype: int
"""
if not s:
return 0
cookies = Counter(s)
it = iter(sorted(cookies))
cur = it.next()
children.sort()
ans = 0
try:
for child in children:
while cookies[cur] <= 0 or cur < child:
cur = it.next()
cookies[cur] -= 1
ans += 1
except StopIteration:
pass
return ans
|
<commit_before><commit_msg>Add py solution for 455. Assign Cookies
455. Assign Cookies: https://leetcode.com/problems/assign-cookies/<commit_after>
|
from collections import Counter
class Solution(object):
def findContentChildren(self, children, s):
"""
        :type children: List[int]
:type s: List[int]
:rtype: int
"""
if not s:
return 0
cookies = Counter(s)
it = iter(sorted(cookies))
cur = it.next()
children.sort()
ans = 0
try:
for child in children:
while cookies[cur] <= 0 or cur < child:
cur = it.next()
cookies[cur] -= 1
ans += 1
except StopIteration:
pass
return ans
|
Add py solution for 455. Assign Cookies
455. Assign Cookies: https://leetcode.com/problems/assign-cookies/from collections import Counter
class Solution(object):
def findContentChildren(self, children, s):
"""
        :type children: List[int]
:type s: List[int]
:rtype: int
"""
if not s:
return 0
cookies = Counter(s)
it = iter(sorted(cookies))
cur = it.next()
children.sort()
ans = 0
try:
for child in children:
while cookies[cur] <= 0 or cur < child:
cur = it.next()
cookies[cur] -= 1
ans += 1
except StopIteration:
pass
return ans
|
<commit_before><commit_msg>Add py solution for 455. Assign Cookies
455. Assign Cookies: https://leetcode.com/problems/assign-cookies/<commit_after>from collections import Counter
class Solution(object):
def findContentChildren(self, children, s):
"""
        :type children: List[int]
:type s: List[int]
:rtype: int
"""
if not s:
return 0
cookies = Counter(s)
it = iter(sorted(cookies))
cur = it.next()
children.sort()
ans = 0
try:
for child in children:
while cookies[cur] <= 0 or cur < child:
cur = it.next()
cookies[cur] -= 1
ans += 1
except StopIteration:
pass
return ans
|
|
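The solution above walks distinct cookie sizes through a Counter; the more common greedy is a single two-pointer pass over both sorted lists. A minimal equivalent sketch for comparison (Python 3; not the recorded solution):

def find_content_children(greed, sizes):
    greed.sort()
    sizes.sort()
    child = 0
    for cookie in sizes:                  # each cookie assigned at most once
        if child < len(greed) and greed[child] <= cookie:
            child += 1                    # smallest unmet child is satisfied
    return child

assert find_content_children([1, 2, 3], [1, 1]) == 1
assert find_content_children([1, 2], [1, 2, 3]) == 2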
192ec9e61839f9ea154a737cd219b3ca76bd7812
|
Worker.py
|
Worker.py
|
import os
import numpy as np
import scarplet

class Worker(object):
def __init__(self):
self.finished = False
class GridProcessor(Worker):
def __init__(self):
self.pid = os.getpid()
self.age = None
self.angle = None
self.data_dir = None
self.results_dir = None
def set_data_dir(self, data_dir):
self.data_dir = data_dir
def set_results_dir(self, results_dir):
self.results_dir = results_dir
def match_template(self, age, angle):
self.age = age
self.angle = angle
self.amp, self.snr = scarplet.match_template(self.data_dir, age, angle)
class Reducer(Worker):
def __init__(self):
self.best_fn = None
self.results_fn = None
def set_best_fn(self, best_fn):
self.best_fn = best_fn
def update_best_estimates(self, this_fn):
best_params = np.load(self.best_fn)
this_params = np.load(this_fn)
mask = this_params[-1,:,:] > best_params[-1,:,:]
best_params[:, mask] = this_params[:, mask]
np.save(self.best_fn, best_params)
|
Check if data has nodata mask
|
Check if data has nodata mask
|
Python
|
mit
|
stgl/scarplet,rmsare/scarplet
|
Check if data has nodata mask
|
import os
import numpy as np
import scarplet

class Worker(object):
def __init__(self):
self.finished = False
class GridProcessor(Worker):
def __init__(self):
self.pid = os.getpid()
self.age = None
self.angle = None
self.data_dir = None
self.results_dir = None
def set_data_dir(self, data_dir):
self.data_dir = data_dir
def set_results_dir(self, results_dir):
self.results_dir = results_dir
def match_template(self, age, angle):
self.age = age
self.angle = angle
self.amp, self.snr = scarplet.match_template(self.data_dir, age, angle)
class Reducer(Worker):
def __init__(self):
self.best_fn = None
self.results_fn = None
def set_best_fn(self, best_fn):
self.best_fn = best_fn
def update_best_estimates(self, this_fn):
best_params = np.load(self.best_fn)
this_params = np.load(this_fn)
mask = this_params[-1,:,:] > best_params[-1,:,:]
best_params[:, mask] = this_params[:, mask]
np.save(self.best_fn, best_params)
|
<commit_before><commit_msg>Check if data has nodata mask<commit_after>
|
import os
import numpy as np
import scarplet

class Worker(object):
def __init__(self):
self.finished = False
class GridProcessor(Worker):
def __init__(self):
self.pid = os.getpid()
self.age = None
self.angle = None
self.data_dir = None
self.results_dir = None
def set_data_dir(self, data_dir):
self.data_dir = data_dir
def set_results_dir(self, results_dir):
self.results_dir = results_dir
def match_template(self, age, angle):
self.age = age
self.angle = angle
self.amp, self.snr = scarplet.match_template(self.data_dir, age, angle)
class Reducer(Worker):
def __init__(self):
self.best_fn = None
self.results_fn = None
def set_best_fn(self, best_fn):
self.best_fn = best_fn
def update_best_estimates(self, this_fn):
best_params = np.load(self.best_fn)
this_params = np.load(this_fn)
mask = this_params[-1,:,:] > best_params[-1,:,:]
best_params[:, mask] = this_params[:, mask]
np.save(self.best_fn, best_params)
|
Check if data has nodata mask
import os
import numpy as np
import scarplet

class Worker(object):
def __init__(self):
self.finished = False
class GridProcessor(Worker):
def __init__(self):
self.pid = os.getpid()
self.age = None
self.angle = None
self.data_dir = None
self.results_dir = None
def set_data_dir(self, data_dir):
self.data_dir = data_dir
def set_results_dir(self, results_dir):
self.results_dir = results_dir
def match_template(self, age, angle):
self.age = age
self.angle = angle
self.amp, self.snr = scarplet.match_template(self.data_dir, age, angle)
class Reducer(Worker):
def __init__(self):
self.best_fn = None
self.results_fn = None
def set_best_fn(self, best_fn):
self.best_fn = best_fn
def update_best_estimates(self, this_fn):
best_params = np.load(self.best_fn)
this_params = np.load(this_fn)
mask = this_params[-1,:,:] > best_params[-1,:,:]
best_params[:, mask] = this_params[:, mask]
np.save(self.best_fn, best_params)
|
<commit_before><commit_msg>Check if data has nodata mask<commit_after>
import os
import numpy as np
import scarplet

class Worker(object):
def __init__(self):
self.finished = False
class GridProcessor(Worker):
def __init__(self):
self.pid = os.getpid()
self.age = None
self.angle = None
self.data_dir = None
self.results_dir = None
def set_data_dir(self, data_dir):
self.data_dir = data_dir
def set_results_dir(self, results_dir):
self.results_dir = results_dir
def match_template(self, age, angle):
self.age = age
self.angle = angle
self.amp, self.snr = scarplet.match_template(self.data_dir, age, angle)
class Reducer(Worker):
def __init__(self):
self.best_fn = None
self.results_fn = None
def set_best_fn(self, best_fn):
self.best_fn = best_fn
def update_best_estimates(self, this_fn):
best_params = np.load(self.best_fn)
this_params = np.load(this_fn)
mask = this_params[-1,:,:] > best_params[-1,:,:]
best_params[:, mask] = this_params[:, mask]
np.save(self.best_fn, best_params)
|
|
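update_best_estimates above merges parameter grids by keeping, per pixel, whichever stack has the larger value in its last row (a fitness or signal-to-noise layer). A small self-contained illustration of that boolean-mask merge; the 3x2x2 shapes are assumptions for display only:

import numpy as np

best = np.zeros((3, 2, 2))                 # rows: param0, param1, fitness
best[-1] = [[0.5, 0.9], [0.1, 0.4]]

new = np.ones((3, 2, 2))
new[-1] = [[0.7, 0.2], [0.3, 0.8]]

mask = new[-1, :, :] > best[-1, :, :]      # pixels where the new fit wins
best[:, mask] = new[:, mask]               # overwrite every row at those pixels

assert best[-1].tolist() == [[0.7, 0.9], [0.3, 0.8]]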
2a214050a30048eab177f696d891a33c2860bb55
|
pymatgen/symmetry/tests/test_spacegroup.py
|
pymatgen/symmetry/tests/test_spacegroup.py
|
#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Add a unittest for spacegroup. Still very basic.
|
Add a unittest for spacegroup. Still very basic.
|
Python
|
mit
|
rousseab/pymatgen,Bismarrck/pymatgen,Bismarrck/pymatgen,rousseab/pymatgen,sonium0/pymatgen,yanikou19/pymatgen,rousseab/pymatgen,Dioptas/pymatgen,Bismarrck/pymatgen,ctoher/pymatgen,sonium0/pymatgen,migueldiascosta/pymatgen,Dioptas/pymatgen,Bismarrck/pymatgen,yanikou19/pymatgen,yanikou19/pymatgen,migueldiascosta/pymatgen,ctoher/pymatgen,migueldiascosta/pymatgen,ctoher/pymatgen,sonium0/pymatgen,Bismarrck/pymatgen
|
Add a unittest for spacegroup. Still very basic.
|
#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
<commit_before><commit_msg>Add a unittest for spacegroup. Still very basic.<commit_after>
|
#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
Add a unittest for spacegroup. Still very basic.#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
<commit_before><commit_msg>Add a unittest for spacegroup. Still very basic.<commit_after>#!/usr/bin/env python
'''
Created on Mar 12, 2012
'''
from __future__ import division
__author__="Shyue Ping Ong"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "0.1"
__maintainer__ = "Shyue Ping Ong"
__email__ = "shyue@mit.edu"
__date__ = "Mar 12, 2012"
import unittest
import os
from pymatgen.core.structure import PeriodicSite
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder
import pymatgen
test_dir = os.path.join(os.path.dirname(os.path.abspath(pymatgen.__file__)), '..', 'test_files')
class SpacegroupTest(unittest.TestCase):
def setUp(self):
p = Poscar.from_file(os.path.join(test_dir, 'POSCAR'))
self.structure = p.struct
self.sg1 = SymmetryFinder(self.structure, 0.001).get_spacegroup()
self.sg2 = Spacegroup.from_spacegroup_number(62)
def test_are_symmetrically_equivalent(self):
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [2,3]]
self.assertTrue(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertTrue(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
sites1 = [self.structure[i] for i in [0,1]]
sites2 = [self.structure[i] for i in [0,2]]
self.assertFalse(self.sg1.are_symmetrically_equivalent(sites1, sites2, 1e-3))
self.assertFalse(self.sg2.are_symmetrically_equivalent(sites1, sites2, 1e-3))
if __name__ == "__main__":
#import sys;sys.argv = ['', 'Test.testName']
unittest.main()
|
|
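The test above compares two routes to the same spacegroup: detection from a structure via SymmetryFinder, and direct construction from international number 62 (Pnma). A usage sketch of that round trip, using only calls that appear in the record; the POSCAR path is a placeholder:

import os

from pymatgen.io.vaspio import Poscar
from pymatgen.symmetry.spacegroup import Spacegroup
from pymatgen.symmetry.spglib_adaptor import SymmetryFinder

struct = Poscar.from_file(os.path.join("test_files", "POSCAR")).struct
sg_detected = SymmetryFinder(struct, 0.001).get_spacegroup()
sg_declared = Spacegroup.from_spacegroup_number(62)

sites = [struct[0], struct[1]]
print(sg_detected.are_symmetrically_equivalent(sites, sites, 1e-3))  # True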
e3fdb634be82394703db6d2cc0bf1091646197d8
|
skbeam/core/tests/test_smoothing.py
|
skbeam/core/tests/test_smoothing.py
|
# ######################################################################
# Copyright (c) 2014, Brookhaven Science Associates, Brookhaven #
# National Laboratory. All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# * Redistributions in binary form must reproduce the above copyright #
# notice this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# * Neither the name of the Brookhaven Science Associates, Brookhaven #
# National Laboratory nor the names of its contributors may be used #
# to endorse or promote products derived from this software without #
# specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE #
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, #
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES #
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OTHERWISE) ARISING #
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
########################################################################
import numpy as np
from numpy.testing import assert_array_almost_equal
from scipy.signal import savgol_filter
from skbeam.core.smoothing import sgolay2d
def test_SG():
x = np.array([2, 2, 2, 5, 2, 1, 0, 1, 4, 9, 2, 2, 2])
x2d = np.vstack([x]*2)
smooth_sp = savgol_filter(x, 3, 1)
smooth_sk = sgolay2d(x2d, 3, 1)
assert_array_almost_equal(smooth_sp, smooth_sk)
|
Add test of smoothing module
|
Add test of smoothing module
|
Python
|
bsd-3-clause
|
tacaswell/scikit-beam,Nikea/scikit-xray,tacaswell/scikit-xray,tacaswell/scikit-beam,scikit-xray/scikit-xray,scikit-xray/scikit-xray,tacaswell/scikit-xray,Nikea/scikit-xray,tacaswell/scikit-beam,scikit-xray/scikit-xray,Nikea/scikit-xray,tacaswell/scikit-xray
|
Add test of smoothing module
|
# ######################################################################
# Copyright (c) 2014, Brookhaven Science Associates, Brookhaven #
# National Laboratory. All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# * Redistributions in binary form must reproduce the above copyright #
# notice this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# * Neither the name of the Brookhaven Science Associates, Brookhaven #
# National Laboratory nor the names of its contributors may be used #
# to endorse or promote products derived from this software without #
# specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE #
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, #
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES #
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OTHERWISE) ARISING #
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
########################################################################
import numpy as np
from numpy.testing import assert_array_almost_equal
from scipy.signal import savgol_filter
from skbeam.core.smoothing import sgolay2d
def test_SG():
x = np.array([2, 2, 2, 5, 2, 1, 0, 1, 4, 9, 2, 2, 2])
x2d = np.vstack([x]*2)
smooth_sp = savgol_filter(x, 3, 1)
smooth_sk = sgolay2d(x2d, 3, 1)
assert_array_almost_equal(smooth_sp, smooth_sk)
|
<commit_before><commit_msg>Add test of smoothing module<commit_after>
|
# ######################################################################
# Copyright (c) 2014, Brookhaven Science Associates, Brookhaven #
# National Laboratory. All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# * Redistributions in binary form must reproduce the above copyright #
# notice this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# * Neither the name of the Brookhaven Science Associates, Brookhaven #
# National Laboratory nor the names of its contributors may be used #
# to endorse or promote products derived from this software without #
# specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE #
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, #
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES #
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OTHERWISE) ARISING #
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
########################################################################
import numpy as np
from numpy.testing import assert_array_almost_equal
from scipy.signal import savgol_filter
from skbeam.core.smoothing import sgolay2d
def test_SG():
x = np.array([2, 2, 2, 5, 2, 1, 0, 1, 4, 9, 2, 2, 2])
x2d = np.vstack([x]*2)
smooth_sp = savgol_filter(x, 3, 1)
smooth_sk = sgolay2d(x2d, 3, 1)
assert_array_almost_equal(smooth_sp, smooth_sk)
|
Add test of smoothing module# ######################################################################
# Copyright (c) 2014, Brookhaven Science Associates, Brookhaven #
# National Laboratory. All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# * Redistributions in binary form must reproduce the above copyright #
# notice this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# * Neither the name of the Brookhaven Science Associates, Brookhaven #
# National Laboratory nor the names of its contributors may be used #
# to endorse or promote products derived from this software without #
# specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE #
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, #
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES #
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OTHERWISE) ARISING #
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
########################################################################
import numpy as np
from numpy.testing import assert_array_almost_equal
from scipy.signal import savgol_filter
from skbeam.core.smoothing import sgolay2d
def test_SG():
x = np.array([2, 2, 2, 5, 2, 1, 0, 1, 4, 9, 2, 2, 2])
x2d = np.vstack([x]*2)
smooth_sp = savgol_filter(x, 3, 1)
smooth_sk = sgolay2d(x2d, 3, 1)
assert_array_almost_equal(smooth_sp, smooth_sk)
|
<commit_before><commit_msg>Add test of smoothing module<commit_after># ######################################################################
# Copyright (c) 2014, Brookhaven Science Associates, Brookhaven #
# National Laboratory. All rights reserved. #
# #
# Redistribution and use in source and binary forms, with or without #
# modification, are permitted provided that the following conditions #
# are met: #
# #
# * Redistributions of source code must retain the above copyright #
# notice, this list of conditions and the following disclaimer. #
# #
# * Redistributions in binary form must reproduce the above copyright #
# notice this list of conditions and the following disclaimer in #
# the documentation and/or other materials provided with the #
# distribution. #
# #
# * Neither the name of the Brookhaven Science Associates, Brookhaven #
# National Laboratory nor the names of its contributors may be used #
# to endorse or promote products derived from this software without #
# specific prior written permission. #
# #
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS #
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT #
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS #
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE #
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, #
# INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES #
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR #
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) #
# HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, #
# STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OTHERWISE) ARISING #
# IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE #
# POSSIBILITY OF SUCH DAMAGE. #
########################################################################
import numpy as np
from numpy.testing import assert_array_almost_equal
from scipy.signal import savgol_filter
from skbeam.core.smoothing import sgolay2d
def test_SG():
x = np.array([2, 2, 2, 5, 2, 1, 0, 1, 4, 9, 2, 2, 2])
x2d = np.vstack([x]*2)
smooth_sp = savgol_filter(x, 3, 1)
smooth_sk = sgolay2d(x2d, 3, 1)
assert_array_almost_equal(smooth_sp, smooth_sk)
|
|
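The test above checks a 2-D Savitzky-Golay implementation against SciPy's 1-D filter applied row-wise. A minimal reminder of the SciPy call it uses as the reference, with window length 3 and polynomial order 1 (a local linear fit, which at interior points reduces to a centered 3-point moving average):

import numpy as np
from scipy.signal import savgol_filter

x = np.array([2, 2, 2, 5, 2, 1, 0, 1, 4, 9, 2, 2, 2], dtype=float)
smooth = savgol_filter(x, window_length=3, polyorder=1)
# Away from the edges, order-1/window-3 smoothing is a 3-point mean:
assert abs(smooth[3] - (2 + 5 + 2) / 3.0) < 1e-12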
497f5085143322d4b9d3ad23d35d30cdf852d1f6
|
test/unit/sorting/test_heap_sort.py
|
test/unit/sorting/test_heap_sort.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
from helper.read_data_file import read_int_array
from sorting.heap_sort import sort
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
class HeapSortTester(unittest.TestCase):
# Test sort in default order, i.e., in ascending order.
def test_sort_default(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array)
expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
self.assertEqual(expect, array)
# Test sort in ascending order.
def test_sort_ascending(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array, 'asc')
expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
self.assertEqual(expect, array)
# Test sort in descending order.
def test_sort_descending(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array, 'desc')
expect = [969, 864, 702, 647, 637, 589, 567, 445, 444, 417, 140, 113, 86, 76, 65]
self.assertEqual(expect, array)
if __name__ == '__main__':
unittest.main()
|
Add unit test for heap sort implementation.
|
Add unit test for heap sort implementation.
|
Python
|
mit
|
weichen2046/algorithm-study,weichen2046/algorithm-study
|
Add unit test for heap sort implementation.
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
from helper.read_data_file import read_int_array
from sorting.heap_sort import sort
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
class HeapSortTester(unittest.TestCase):
# Test sort in default order, i.e., in ascending order.
def test_sort_default(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array)
expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
self.assertEqual(expect, array)
# Test sort in ascending order.
def test_sort_ascending(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array, 'asc')
expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
self.assertEqual(expect, array)
# Test sort in descending order.
def test_sort_descending(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array, 'desc')
expect = [969, 864, 702, 647, 637, 589, 567, 445, 444, 417, 140, 113, 86, 76, 65]
self.assertEqual(expect, array)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unit test for heap sort implementation.<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
from helper.read_data_file import read_int_array
from sorting.heap_sort import sort
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
class HeapSortTester(unittest.TestCase):
# Test sort in default order, i.e., in ascending order.
def test_sort_default(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array)
expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
self.assertEqual(expect, array)
# Test sort in ascending order.
def test_sort_ascending(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array, 'asc')
expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
self.assertEqual(expect, array)
# Test sort in descending order.
def test_sort_descending(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array, 'desc')
expect = [969, 864, 702, 647, 637, 589, 567, 445, 444, 417, 140, 113, 86, 76, 65]
self.assertEqual(expect, array)
if __name__ == '__main__':
unittest.main()
|
Add unit test for heap sort implementation.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
from helper.read_data_file import read_int_array
from sorting.heap_sort import sort
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
class HeapSortTester(unittest.TestCase):
# Test sort in default order, i.e., in ascending order.
def test_sort_default(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array)
expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
self.assertEqual(expect, array)
# Test sort in ascending order.
def test_sort_ascending(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array, 'asc')
expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
self.assertEqual(expect, array)
# Test sort in descending order.
def test_sort_descending(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array, 'desc')
expect = [969, 864, 702, 647, 637, 589, 567, 445, 444, 417, 140, 113, 86, 76, 65]
self.assertEqual(expect, array)
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add unit test for heap sort implementation.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
import unittest
from helper.read_data_file import read_int_array
from sorting.heap_sort import sort
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
class HeapSortTester(unittest.TestCase):
# Test sort in default order, i.e., in ascending order.
def test_sort_default(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array)
expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
self.assertEqual(expect, array)
# Test sort in ascending order.
def test_sort_ascending(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array, 'asc')
expect = [65, 76, 86, 113, 140, 417, 444, 445, 567, 589, 637, 647, 702, 864, 969]
self.assertEqual(expect, array)
# Test sort in descending order.
def test_sort_descending(self):
array = read_int_array(os.path.join(BASE_DIR, 'data1.data'))
array = sort(array, 'desc')
expect = [969, 864, 702, 647, 637, 589, 567, 445, 444, 417, 140, 113, 86, 76, 65]
self.assertEqual(expect, array)
if __name__ == '__main__':
unittest.main()
|
|
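The tests above exercise a sort(array, order) API but do not show the algorithm itself. A minimal heap sort sketch with the same call shape, built on the standard-library heap; the 'asc'/'desc' flag mirrors the tests, and this is an illustration rather than the module under test:

import heapq

def sort(array, order='asc'):
    heap = list(array)
    heapq.heapify(heap)                               # O(n) min-heap build
    out = [heapq.heappop(heap) for _ in range(len(heap))]
    return out if order == 'asc' else out[::-1]

assert sort([3, 1, 2]) == [1, 2, 3]
assert sort([3, 1, 2], 'desc') == [3, 2, 1]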
3d52040663eaef07aa3e9d13500819b4633f1187
|
typhon/tests/spareice/test_array.py
|
typhon/tests/spareice/test_array.py
|
import numpy as np
from typhon.spareice import Array, GroupedArrays
import xarray as xr
class TestArray:
"""Testing the array methods."""
pass
class TestGroupedArrays:
"""Testing the GroupedArrays methods."""
def test_dict(self):
# TODO: Implement test for export to / import from python dictionary
# TODO: objects
pass
def test_equal(self):
a = GroupedArrays()
a["group1/group2/data"] = np.arange(1, 100).astype(float)
a["group1/data"] = np.arange(1, 10).astype(float)
a["data"] = np.arange(-30, 10).astype(float)
# Check whether it does find equality:
assert a == a
# Check whether it does find inequality because the variables have
# different lengths:
assert a != a[:5]
# Check whether it does find inequality because a variable's content is
# different but has the same length:
b = a.copy()
b["data"] = np.arange(-20, 20).astype(float)
assert a != b
# Check whether it does find inequality (if one variable does not exist
# in the other group):
del b["data"]
assert a != b
def test_xarray(self):
# TODO: Implement test for export to / import from xarray.Dataset
pass
|
Implement test class for Array and GroupedArrays
|
Implement test class for Array and GroupedArrays
|
Python
|
mit
|
atmtools/typhon,atmtools/typhon
|
Implement test class for Array and GroupedArrays
|
import numpy as np
from typhon.spareice import Array, GroupedArrays
import xarray as xr
class TestArray:
"""Testing the array methods."""
pass
class TestGroupedArrays:
"""Testing the GroupedArrays methods."""
def test_dict(self):
# TODO: Implement test for export to / import from python dictionary
# TODO: objects
pass
def test_equal(self):
a = GroupedArrays()
a["group1/group2/data"] = np.arange(1, 100).astype(float)
a["group1/data"] = np.arange(1, 10).astype(float)
a["data"] = np.arange(-30, 10).astype(float)
# Check whether it does find equality:
assert a == a
# Check whether it does find inequality because the variables have
# different lengths:
assert a != a[:5]
# Check whether it does find inequality because a variable's content is
# different but has the same length:
b = a.copy()
b["data"] = np.arange(-20, 20).astype(float)
assert a != b
# Check whether it does find inequality (if one variable does not exist
# in the other group):
del b["data"]
assert a != b
def test_xarray(self):
# TODO: Implement test for export to / import from xarray.Dataset
pass
|
<commit_before><commit_msg>Implement test class for Array and GroupedArrays<commit_after>
|
import numpy as np
from typhon.spareice import Array, GroupedArrays
import xarray as xr
class TestArray:
"""Testing the array methods."""
pass
class TestGroupedArrays:
"""Testing the GroupedArrays methods."""
def test_dict(self):
# TODO: Implement test for export to / import from python dictionary
# TODO: objects
pass
def test_equal(self):
a = GroupedArrays()
a["group1/group2/data"] = np.arange(1, 100).astype(float)
a["group1/data"] = np.arange(1, 10).astype(float)
a["data"] = np.arange(-30, 10).astype(float)
# Check whether it does find equality:
assert a == a
# Check whether it does find inequality because the variables have
# different lengths:
assert a != a[:5]
# Check whether it does find inequality because a variable's content is
# different but has the same length:
b = a.copy()
b["data"] = np.arange(-20, 20).astype(float)
assert a != b
# Check whether it does find inequality (if one variable does not exist
# in the other group):
del b["data"]
assert a != b
def test_xarray(self):
# TODO: Implement test for export to / import from xarray.Dataset
pass
|
Implement test class for Array and GroupedArraysimport numpy as np
from typhon.spareice import Array, GroupedArrays
import xarray as xr
class TestArray:
"""Testing the array methods."""
pass
class TestGroupedArrays:
"""Testing the GroupedArrays methods."""
def test_dict(self):
        # TODO: Implement test for export to / import from python dictionary objects
pass
def test_equal(self):
a = GroupedArrays()
a["group1/group2/data"] = np.arange(1, 100).astype(float)
a["group1/data"] = np.arange(1, 10).astype(float)
a["data"] = np.arange(-30, 10).astype(float)
# Check whether it does find equality:
assert a == a
# Check whether it does find inequality because the variables have
# different lengths:
assert a != a[:5]
# Check whether it does find inequality because a variable's content is
# different but has the same length:
b = a.copy()
b["data"] = np.arange(-20, 20).astype(float)
assert a != b
# Check whether it does find inequality (if one variable does not exist
# in the other group):
del b["data"]
assert a != b
def test_xarray(self):
# TODO: Implement test for export to / import from xarray.Dataset
pass
|
<commit_before><commit_msg>Implement test class for Array and GroupedArrays<commit_after>import numpy as np
from typhon.spareice import Array, GroupedArrays
import xarray as xr
class TestArray:
"""Testing the array methods."""
pass
class TestGroupedArrays:
"""Testing the GroupedArrays methods."""
def test_dict(self):
        # TODO: Implement test for export to / import from python dictionary objects
pass
def test_equal(self):
a = GroupedArrays()
a["group1/group2/data"] = np.arange(1, 100).astype(float)
a["group1/data"] = np.arange(1, 10).astype(float)
a["data"] = np.arange(-30, 10).astype(float)
# Check whether it does find equality:
assert a == a
# Check whether it does find inequality because the variables have
# different lengths:
assert a != a[:5]
# Check whether it does find inequality because a variable's content is
# different but has the same length:
b = a.copy()
b["data"] = np.arange(-20, 20).astype(float)
assert a != b
# Check whether it does find inequality (if one variable does not exist
# in the other group):
del b["data"]
assert a != b
def test_xarray(self):
# TODO: Implement test for export to / import from xarray.Dataset
pass
|
|
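The record above leaves test_dict as a TODO. A sketch of what a round-trip version might look like, assuming GroupedArrays exposes to_dict/from_dict methods; those method names are not confirmed anywhere in this record and are purely illustrative:

import numpy as np
from typhon.spareice import GroupedArrays

def test_dict_roundtrip():
    # Round-trip through a plain dictionary and compare using the
    # GroupedArrays equality exercised in test_equal above.
    # to_dict/from_dict are assumed names; the record only marks this TODO.
    a = GroupedArrays()
    a["group1/data"] = np.arange(5).astype(float)
    assert GroupedArrays.from_dict(a.to_dict()) == a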
3c3259024310e3ed28bc71c8bb6fa60d608a049c
|
spacy/tests/doc/test_morphanalysis.py
|
spacy/tests/doc/test_morphanalysis.py
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
import numpy
from spacy.attrs import IS_ALPHA, IS_DIGIT, IS_LOWER, IS_PUNCT, IS_TITLE, IS_STOP
from spacy.symbols import VERB
from spacy.vocab import Vocab
from spacy.tokens import Doc
@pytest.fixture
def i_has(en_tokenizer):
doc = en_tokenizer("I has")
doc[0].tag_ = "PRP"
doc[1].tag_ = "VBZ"
return doc
def test_token_morph_id(i_has):
assert i_has[0].morph.id
assert i_has[1].morph.id != 0
assert i_has[0].morph.id != i_has[1].morph.id
def test_morph_props(i_has):
assert i_has[0].morph.pron_type == i_has.vocab.strings["PronType_prs"]
assert i_has[1].morph.pron_type == 0
def test_morph_iter(i_has):
assert list(i_has[0].morph) == ["PronType_prs"]
assert list(i_has[1].morph) == ["Number_sing", "Person_three", "VerbForm_fin"]
|
Add test for morph analysis
|
Add test for morph analysis
|
Python
|
mit
|
spacy-io/spaCy,explosion/spaCy,honnibal/spaCy,honnibal/spaCy,explosion/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,explosion/spaCy,spacy-io/spaCy,honnibal/spaCy,explosion/spaCy,spacy-io/spaCy
|
Add test for morph analysis
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
import numpy
from spacy.attrs import IS_ALPHA, IS_DIGIT, IS_LOWER, IS_PUNCT, IS_TITLE, IS_STOP
from spacy.symbols import VERB
from spacy.vocab import Vocab
from spacy.tokens import Doc
@pytest.fixture
def i_has(en_tokenizer):
doc = en_tokenizer("I has")
doc[0].tag_ = "PRP"
doc[1].tag_ = "VBZ"
return doc
def test_token_morph_id(i_has):
assert i_has[0].morph.id
assert i_has[1].morph.id != 0
assert i_has[0].morph.id != i_has[1].morph.id
def test_morph_props(i_has):
assert i_has[0].morph.pron_type == i_has.vocab.strings["PronType_prs"]
assert i_has[1].morph.pron_type == 0
def test_morph_iter(i_has):
assert list(i_has[0].morph) == ["PronType_prs"]
assert list(i_has[1].morph) == ["Number_sing", "Person_three", "VerbForm_fin"]
|
<commit_before><commit_msg>Add test for morph analysis<commit_after>
|
# coding: utf-8
from __future__ import unicode_literals
import pytest
import numpy
from spacy.attrs import IS_ALPHA, IS_DIGIT, IS_LOWER, IS_PUNCT, IS_TITLE, IS_STOP
from spacy.symbols import VERB
from spacy.vocab import Vocab
from spacy.tokens import Doc
@pytest.fixture
def i_has(en_tokenizer):
doc = en_tokenizer("I has")
doc[0].tag_ = "PRP"
doc[1].tag_ = "VBZ"
return doc
def test_token_morph_id(i_has):
assert i_has[0].morph.id
assert i_has[1].morph.id != 0
assert i_has[0].morph.id != i_has[1].morph.id
def test_morph_props(i_has):
assert i_has[0].morph.pron_type == i_has.vocab.strings["PronType_prs"]
assert i_has[1].morph.pron_type == 0
def test_morph_iter(i_has):
assert list(i_has[0].morph) == ["PronType_prs"]
assert list(i_has[1].morph) == ["Number_sing", "Person_three", "VerbForm_fin"]
|
Add test for morph analysis# coding: utf-8
from __future__ import unicode_literals
import pytest
import numpy
from spacy.attrs import IS_ALPHA, IS_DIGIT, IS_LOWER, IS_PUNCT, IS_TITLE, IS_STOP
from spacy.symbols import VERB
from spacy.vocab import Vocab
from spacy.tokens import Doc
@pytest.fixture
def i_has(en_tokenizer):
doc = en_tokenizer("I has")
doc[0].tag_ = "PRP"
doc[1].tag_ = "VBZ"
return doc
def test_token_morph_id(i_has):
assert i_has[0].morph.id
assert i_has[1].morph.id != 0
assert i_has[0].morph.id != i_has[1].morph.id
def test_morph_props(i_has):
assert i_has[0].morph.pron_type == i_has.vocab.strings["PronType_prs"]
assert i_has[1].morph.pron_type == 0
def test_morph_iter(i_has):
assert list(i_has[0].morph) == ["PronType_prs"]
assert list(i_has[1].morph) == ["Number_sing", "Person_three", "VerbForm_fin"]
|
<commit_before><commit_msg>Add test for morph analysis<commit_after># coding: utf-8
from __future__ import unicode_literals
import pytest
import numpy
from spacy.attrs import IS_ALPHA, IS_DIGIT, IS_LOWER, IS_PUNCT, IS_TITLE, IS_STOP
from spacy.symbols import VERB
from spacy.vocab import Vocab
from spacy.tokens import Doc
@pytest.fixture
def i_has(en_tokenizer):
doc = en_tokenizer("I has")
doc[0].tag_ = "PRP"
doc[1].tag_ = "VBZ"
return doc
def test_token_morph_id(i_has):
assert i_has[0].morph.id
assert i_has[1].morph.id != 0
assert i_has[0].morph.id != i_has[1].morph.id
def test_morph_props(i_has):
assert i_has[0].morph.pron_type == i_has.vocab.strings["PronType_prs"]
assert i_has[1].morph.pron_type == 0
def test_morph_iter(i_has):
assert list(i_has[0].morph) == ["PronType_prs"]
assert list(i_has[1].morph) == ["Number_sing", "Person_three", "VerbForm_fin"]
|
|
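The last test in the record above asserts the morph features of both tokens in one body. A minimal sketch of the same check expressed as one parametrized case per token, using only the i_has fixture and the token.morph iteration behaviour already shown; nothing else is assumed:

import pytest

@pytest.mark.parametrize("index, expected", [
    (0, ["PronType_prs"]),
    (1, ["Number_sing", "Person_three", "VerbForm_fin"]),
])
def test_morph_iter_parametrized(i_has, index, expected):
    # Same assertions as test_morph_iter, split into one case per token.
    assert list(i_has[index].morph) == expected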
39c28f76540da294d9dd4adf4f84ae266922498d
|
test/test_clientagent.py
|
test/test_clientagent.py
|
#!/usr/bin/env python2
import unittest
from socket import *
from common import *
from testdc import *
CONFIG = """\
messagedirector:
bind: 127.0.0.1:57123
general:
dc_files:
- %r
roles:
- type: clientagent
bind: 127.0.0.1:57128
version: "Sword Art Online v5.1"
""" % test_dc
VERSION = 'Sword Art Online v5.1'
class TestClientAgent(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.daemon = Daemon(CONFIG)
cls.daemon.start()
s = socket(AF_INET, SOCK_STREAM)
s.connect(('127.0.0.1', 57123))
cls.server = MDConnection(s)
@classmethod
def tearDownClass(cls):
cls.server.close()
cls.daemon.stop()
def assertDisconnect(self, s, reason_code):
while True:
dg = s.recv()
dgi = DatagramIterator(dg)
if dgi.read_uint16() == CLIENT_GO_GET_LOST:
self.assertEqual(dgi.read_uint16(), reason_code)
s.close()
return
def connect(self):
s = socket(AF_INET, SOCK_STREAM)
s.connect(('127.0.0.1', 57128))
client = ClientConnection(s)
return client
def test_hello(self):
# First, see if the CA ensures that the first datagram is a HELLO.
client = self.connect()
dg = Datagram()
dg.add_uint16(5) # invalid msgtype
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_NO_HELLO)
# Next, see if the version gets validated:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(DC_HASH)
dg.add_string('Equestria Online v5.7')
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_BAD_VERSION)
# Now dchash validation:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(0x12345678)
dg.add_string(VERSION)
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_BAD_DCHASH)
# If everything is correct, it should simply allow us in:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(DC_HASH)
dg.add_string(VERSION)
client.send(dg)
dg = Datagram()
dg.add_uint16(CLIENT_HELLO_RESP)
self.assertTrue(client.expect(dg))
client.close()
if __name__ == '__main__':
unittest.main()
|
Create clientagent unittest file, with test for CLIENT_HELLO behavior.
|
tests: Create clientagent unittest file, with test for CLIENT_HELLO behavior.
|
Python
|
bsd-3-clause
|
pizcogirl/Astron,blindsighttf2/Astron,ketoo/Astron,pizcogirl/Astron,ketoo/Astron,blindsighttf2/Astron,blindsighttf2/Astron,ketoo/Astron,blindsighttf2/Astron,pizcogirl/Astron,ketoo/Astron,pizcogirl/Astron
|
tests: Create clientagent unittest file, with test for CLIENT_HELLO behavior.
|
#!/usr/bin/env python2
import unittest
from socket import *
from common import *
from testdc import *
CONFIG = """\
messagedirector:
bind: 127.0.0.1:57123
general:
dc_files:
- %r
roles:
- type: clientagent
bind: 127.0.0.1:57128
version: "Sword Art Online v5.1"
""" % test_dc
VERSION = 'Sword Art Online v5.1'
class TestClientAgent(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.daemon = Daemon(CONFIG)
cls.daemon.start()
s = socket(AF_INET, SOCK_STREAM)
s.connect(('127.0.0.1', 57123))
cls.server = MDConnection(s)
@classmethod
def tearDownClass(cls):
cls.server.close()
cls.daemon.stop()
def assertDisconnect(self, s, reason_code):
while True:
dg = s.recv()
dgi = DatagramIterator(dg)
if dgi.read_uint16() == CLIENT_GO_GET_LOST:
self.assertEqual(dgi.read_uint16(), reason_code)
s.close()
return
def connect(self):
s = socket(AF_INET, SOCK_STREAM)
s.connect(('127.0.0.1', 57128))
client = ClientConnection(s)
return client
def test_hello(self):
# First, see if the CA ensures that the first datagram is a HELLO.
client = self.connect()
dg = Datagram()
dg.add_uint16(5) # invalid msgtype
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_NO_HELLO)
# Next, see if the version gets validated:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(DC_HASH)
dg.add_string('Equestria Online v5.7')
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_BAD_VERSION)
# Now dchash validation:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(0x12345678)
dg.add_string(VERSION)
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_BAD_DCHASH)
# If everything is correct, it should simply allow us in:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(DC_HASH)
dg.add_string(VERSION)
client.send(dg)
dg = Datagram()
dg.add_uint16(CLIENT_HELLO_RESP)
self.assertTrue(client.expect(dg))
client.close()
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>tests: Create clientagent unittest file, with test for CLIENT_HELLO behavior.<commit_after>
|
#!/usr/bin/env python2
import unittest
from socket import *
from common import *
from testdc import *
CONFIG = """\
messagedirector:
bind: 127.0.0.1:57123
general:
dc_files:
- %r
roles:
- type: clientagent
bind: 127.0.0.1:57128
version: "Sword Art Online v5.1"
""" % test_dc
VERSION = 'Sword Art Online v5.1'
class TestClientAgent(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.daemon = Daemon(CONFIG)
cls.daemon.start()
s = socket(AF_INET, SOCK_STREAM)
s.connect(('127.0.0.1', 57123))
cls.server = MDConnection(s)
@classmethod
def tearDownClass(cls):
cls.server.close()
cls.daemon.stop()
def assertDisconnect(self, s, reason_code):
while True:
dg = s.recv()
dgi = DatagramIterator(dg)
if dgi.read_uint16() == CLIENT_GO_GET_LOST:
self.assertEqual(dgi.read_uint16(), reason_code)
s.close()
return
def connect(self):
s = socket(AF_INET, SOCK_STREAM)
s.connect(('127.0.0.1', 57128))
client = ClientConnection(s)
return client
def test_hello(self):
# First, see if the CA ensures that the first datagram is a HELLO.
client = self.connect()
dg = Datagram()
dg.add_uint16(5) # invalid msgtype
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_NO_HELLO)
# Next, see if the version gets validated:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(DC_HASH)
dg.add_string('Equestria Online v5.7')
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_BAD_VERSION)
# Now dchash validation:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(0x12345678)
dg.add_string(VERSION)
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_BAD_DCHASH)
# If everything is correct, it should simply allow us in:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(DC_HASH)
dg.add_string(VERSION)
client.send(dg)
dg = Datagram()
dg.add_uint16(CLIENT_HELLO_RESP)
self.assertTrue(client.expect(dg))
client.close()
if __name__ == '__main__':
unittest.main()
|
tests: Create clientagent unittest file, with test for CLIENT_HELLO behavior.#!/usr/bin/env python2
import unittest
from socket import *
from common import *
from testdc import *
CONFIG = """\
messagedirector:
bind: 127.0.0.1:57123
general:
dc_files:
- %r
roles:
- type: clientagent
bind: 127.0.0.1:57128
version: "Sword Art Online v5.1"
""" % test_dc
VERSION = 'Sword Art Online v5.1'
class TestClientAgent(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.daemon = Daemon(CONFIG)
cls.daemon.start()
s = socket(AF_INET, SOCK_STREAM)
s.connect(('127.0.0.1', 57123))
cls.server = MDConnection(s)
@classmethod
def tearDownClass(cls):
cls.server.close()
cls.daemon.stop()
def assertDisconnect(self, s, reason_code):
while True:
dg = s.recv()
dgi = DatagramIterator(dg)
if dgi.read_uint16() == CLIENT_GO_GET_LOST:
self.assertEqual(dgi.read_uint16(), reason_code)
s.close()
return
def connect(self):
s = socket(AF_INET, SOCK_STREAM)
s.connect(('127.0.0.1', 57128))
client = ClientConnection(s)
return client
def test_hello(self):
# First, see if the CA ensures that the first datagram is a HELLO.
client = self.connect()
dg = Datagram()
dg.add_uint16(5) # invalid msgtype
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_NO_HELLO)
# Next, see if the version gets validated:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(DC_HASH)
dg.add_string('Equestria Online v5.7')
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_BAD_VERSION)
# Now dchash validation:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(0x12345678)
dg.add_string(VERSION)
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_BAD_DCHASH)
# If everything is correct, it should simply allow us in:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(DC_HASH)
dg.add_string(VERSION)
client.send(dg)
dg = Datagram()
dg.add_uint16(CLIENT_HELLO_RESP)
self.assertTrue(client.expect(dg))
client.close()
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>tests: Create clientagent unittest file, with test for CLIENT_HELLO behavior.<commit_after>#!/usr/bin/env python2
import unittest
from socket import *
from common import *
from testdc import *
CONFIG = """\
messagedirector:
bind: 127.0.0.1:57123
general:
dc_files:
- %r
roles:
- type: clientagent
bind: 127.0.0.1:57128
version: "Sword Art Online v5.1"
""" % test_dc
VERSION = 'Sword Art Online v5.1'
class TestClientAgent(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.daemon = Daemon(CONFIG)
cls.daemon.start()
s = socket(AF_INET, SOCK_STREAM)
s.connect(('127.0.0.1', 57123))
cls.server = MDConnection(s)
@classmethod
def tearDownClass(cls):
cls.server.close()
cls.daemon.stop()
def assertDisconnect(self, s, reason_code):
while True:
dg = s.recv()
dgi = DatagramIterator(dg)
if dgi.read_uint16() == CLIENT_GO_GET_LOST:
self.assertEqual(dgi.read_uint16(), reason_code)
s.close()
return
def connect(self):
s = socket(AF_INET, SOCK_STREAM)
s.connect(('127.0.0.1', 57128))
client = ClientConnection(s)
return client
def test_hello(self):
# First, see if the CA ensures that the first datagram is a HELLO.
client = self.connect()
dg = Datagram()
dg.add_uint16(5) # invalid msgtype
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_NO_HELLO)
# Next, see if the version gets validated:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(DC_HASH)
dg.add_string('Equestria Online v5.7')
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_BAD_VERSION)
# Now dchash validation:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(0x12345678)
dg.add_string(VERSION)
client.send(dg)
self.assertDisconnect(client, CLIENT_DISCONNECT_BAD_DCHASH)
# If everything is correct, it should simply allow us in:
client = self.connect()
dg = Datagram()
dg.add_uint16(CLIENT_HELLO)
dg.add_uint32(DC_HASH)
dg.add_string(VERSION)
client.send(dg)
dg = Datagram()
dg.add_uint16(CLIENT_HELLO_RESP)
self.assertTrue(client.expect(dg))
client.close()
if __name__ == '__main__':
unittest.main()
|
|
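test_hello above ends by performing a valid handshake inline. A sketch of a reusable helper method for TestClientAgent that later tests could start from; every call in it (connect, Datagram, add_uint16, add_uint32, add_string, send, expect) already appears in the test, only the helper itself is new:

    def connect_with_hello(self):
        # Open a connection and complete a valid HELLO handshake so a test
        # can begin from an accepted client instead of repeating the dance.
        client = self.connect()
        dg = Datagram()
        dg.add_uint16(CLIENT_HELLO)
        dg.add_uint32(DC_HASH)
        dg.add_string(VERSION)
        client.send(dg)
        expected = Datagram()
        expected.add_uint16(CLIENT_HELLO_RESP)
        self.assertTrue(client.expect(expected))
        return client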
b5d0d935f21c76166cd767b81a08a59cce04f9cb
|
livereload/__init__.py
|
livereload/__init__.py
|
"""django-livereload"""
__version__ = '0.1.dev'
__license__ = 'BSD License'
__author__ = 'Fantomas42'
__email__ = 'fantomas42@gmail.com'
__url__ = 'https://github.com/Fantomas42/django-livereload'
|
Add meta information for the package
|
Add meta information for the package
|
Python
|
bsd-3-clause
|
Fantomas42/django-livereload,kbussell/django-livereload
|
Add meta information for the package
|
"""django-livereload"""
__version__ = '0.1.dev'
__license__ = 'BSD License'
__author__ = 'Fantomas42'
__email__ = 'fantomas42@gmail.com'
__url__ = 'https://github.com/Fantomas42/django-livereload'
|
<commit_before><commit_msg>Add meta information for the package<commit_after>
|
"""django-livereload"""
__version__ = '0.1.dev'
__license__ = 'BSD License'
__author__ = 'Fantomas42'
__email__ = 'fantomas42@gmail.com'
__url__ = 'https://github.com/Fantomas42/django-livereload'
|
Add meta information for the package"""django-livereload"""
__version__ = '0.1.dev'
__license__ = 'BSD License'
__author__ = 'Fantomas42'
__email__ = 'fantomas42@gmail.com'
__url__ = 'https://github.com/Fantomas42/django-livereload'
|
<commit_before><commit_msg>Add meta information for the package<commit_after>"""django-livereload"""
__version__ = '0.1.dev'
__license__ = 'BSD License'
__author__ = 'Fantomas42'
__email__ = 'fantomas42@gmail.com'
__url__ = 'https://github.com/Fantomas42/django-livereload'
|
|
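The dunder attributes added above are the kind of metadata a setup script typically consumes. A sketch of a setup.py reading them; the actual packaging layout of django-livereload is not part of this record, so the keyword choices here are assumptions:

# Hypothetical setup.py; only the dunder names come from the record above.
from setuptools import setup, find_packages

import livereload

setup(
    name='django-livereload',
    version=livereload.__version__,
    license=livereload.__license__,
    author=livereload.__author__,
    author_email=livereload.__email__,
    url=livereload.__url__,
    packages=find_packages(),
)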
7fa9fb24262c5ced8d09a2de34fd412cc5aa3758
|
private/realclearpolitics-scraper/realclearpolitics/spiders/spider.py
|
private/realclearpolitics-scraper/realclearpolitics/spiders/spider.py
|
import scrapy
from realclearpolitics.items import TableItem
class RcpSpider(scrapy.Spider):
name = "realclearpoliticsSpider"
start_urls = []
def __init__(self, url, state_code):
self.url = url
self.state_code = state_code
def start_requests(self):
return [scrapy.FormRequest(self.url,
callback=self.parse)]
def parse(self, response):
table = response.css('.data').pop()
legend = table.css('tr')[0]
fieldNames = legend.css('th::text').extract()
nb_fields = len(fieldNames)
items = []
contentLines = table.css('tr')[1::]
for line in contentLines:
item = TableItem()
values = line.css('td::text, td span::text, td a::text').extract()
for i in range(nb_fields):
item[fieldNames[i]] = values[i]
item['locale'] = self.state_code
items.append(item)
return items
|
import scrapy
from realclearpolitics.items import TableItem
class RcpSpider(scrapy.Spider):
name = "realclearpoliticsSpider"
start_urls = []
    columns = ['Poll', 'Date', 'Sample', 'Spread']
def __init__(self, url, state_code):
self.url = url
self.state_code = state_code
def start_requests(self):
return [scrapy.FormRequest(self.url,
callback=self.parse)]
def parse(self, response):
table = response.css('.data').pop()
legend = table.css('tr')[0]
fieldNames = legend.css('th::text').extract()
nb_fields = len(fieldNames)
items = []
contentLines = table.css('tr')[1::]
for line in contentLines:
item = TableItem()
item['field'] = {}
values = line.css('td::text, td span::text, td a::text').extract()
for i in range(nb_fields):
if fieldNames[i] in RcpSpider.columns:
item[fieldNames[i]] = values[i]
else:
item['field'][fieldNames[i]] = values[i]
item['locale'] = self.state_code
items.append(item)
return items
|
Put candidate score in field object
|
Put candidate score in field object
|
Python
|
mit
|
dpxxdp/berniemetrics,Rumel/berniemetrics,dpxxdp/berniemetrics,fpagnoux/berniemetrics,fpagnoux/berniemetrics,dpxxdp/berniemetrics,fpagnoux/berniemetrics,Rumel/berniemetrics,dpxxdp/berniemetrics,fpagnoux/berniemetrics,Rumel/berniemetrics,Rumel/berniemetrics
|
import scrapy
from realclearpolitics.items import TableItem
class RcpSpider(scrapy.Spider):
name = "realclearpoliticsSpider"
start_urls = []
def __init__(self, url, state_code):
self.url = url
self.state_code = state_code
def start_requests(self):
return [scrapy.FormRequest(self.url,
callback=self.parse)]
def parse(self, response):
table = response.css('.data').pop()
legend = table.css('tr')[0]
fieldNames = legend.css('th::text').extract()
nb_fields = len(fieldNames)
items = []
contentLines = table.css('tr')[1::]
for line in contentLines:
item = TableItem()
values = line.css('td::text, td span::text, td a::text').extract()
for i in range(nb_fields):
item[fieldNames[i]] = values[i]
item['locale'] = self.state_code
items.append(item)
return items
Put candidate score in field object
|
import scrapy
from realclearpolitics.items import TableItem
class RcpSpider(scrapy.Spider):
name = "realclearpoliticsSpider"
start_urls = []
    columns = ['Poll', 'Date', 'Sample', 'Spread']
def __init__(self, url, state_code):
self.url = url
self.state_code = state_code
def start_requests(self):
return [scrapy.FormRequest(self.url,
callback=self.parse)]
def parse(self, response):
table = response.css('.data').pop()
legend = table.css('tr')[0]
fieldNames = legend.css('th::text').extract()
nb_fields = len(fieldNames)
items = []
contentLines = table.css('tr')[1::]
for line in contentLines:
item = TableItem()
item['field'] = {}
values = line.css('td::text, td span::text, td a::text').extract()
for i in range(nb_fields):
if fieldNames[i] in RcpSpider.columns:
item[fieldNames[i]] = values[i]
else:
item['field'][fieldNames[i]] = values[i]
item['locale'] = self.state_code
items.append(item)
return items
|
<commit_before>import scrapy
from realclearpolitics.items import TableItem
class RcpSpider(scrapy.Spider):
name = "realclearpoliticsSpider"
start_urls = []
def __init__(self, url, state_code):
self.url = url
self.state_code = state_code
def start_requests(self):
return [scrapy.FormRequest(self.url,
callback=self.parse)]
def parse(self, response):
table = response.css('.data').pop()
legend = table.css('tr')[0]
fieldNames = legend.css('th::text').extract()
nb_fields = len(fieldNames)
items = []
contentLines = table.css('tr')[1::]
for line in contentLines:
item = TableItem()
values = line.css('td::text, td span::text, td a::text').extract()
for i in range(nb_fields):
item[fieldNames[i]] = values[i]
item['locale'] = self.state_code
items.append(item)
return items
<commit_msg>Put candidate score in field object<commit_after>
|
import scrapy
from realclearpolitics.items import TableItem
class RcpSpider(scrapy.Spider):
name = "realclearpoliticsSpider"
start_urls = []
    columns = ['Poll', 'Date', 'Sample', 'Spread']
def __init__(self, url, state_code):
self.url = url
self.state_code = state_code
def start_requests(self):
return [scrapy.FormRequest(self.url,
callback=self.parse)]
def parse(self, response):
table = response.css('.data').pop()
legend = table.css('tr')[0]
fieldNames = legend.css('th::text').extract()
nb_fields = len(fieldNames)
items = []
contentLines = table.css('tr')[1::]
for line in contentLines:
item = TableItem()
item['field'] = {}
values = line.css('td::text, td span::text, td a::text').extract()
for i in range(nb_fields):
if fieldNames[i] in RcpSpider.columns:
item[fieldNames[i]] = values[i]
else:
item['field'][fieldNames[i]] = values[i]
item['locale'] = self.state_code
items.append(item)
return items
|
import scrapy
from realclearpolitics.items import TableItem
class RcpSpider(scrapy.Spider):
name = "realclearpoliticsSpider"
start_urls = []
def __init__(self, url, state_code):
self.url = url
self.state_code = state_code
def start_requests(self):
return [scrapy.FormRequest(self.url,
callback=self.parse)]
def parse(self, response):
table = response.css('.data').pop()
legend = table.css('tr')[0]
fieldNames = legend.css('th::text').extract()
nb_fields = len(fieldNames)
items = []
contentLines = table.css('tr')[1::]
for line in contentLines:
item = TableItem()
values = line.css('td::text, td span::text, td a::text').extract()
for i in range(nb_fields):
item[fieldNames[i]] = values[i]
item['locale'] = self.state_code
items.append(item)
return items
Put candidate score in field objectimport scrapy
from realclearpolitics.items import TableItem
class RcpSpider(scrapy.Spider):
name = "realclearpoliticsSpider"
start_urls = []
    columns = ['Poll', 'Date', 'Sample', 'Spread']
def __init__(self, url, state_code):
self.url = url
self.state_code = state_code
def start_requests(self):
return [scrapy.FormRequest(self.url,
callback=self.parse)]
def parse(self, response):
table = response.css('.data').pop()
legend = table.css('tr')[0]
fieldNames = legend.css('th::text').extract()
nb_fields = len(fieldNames)
items = []
contentLines = table.css('tr')[1::]
for line in contentLines:
item = TableItem()
item['field'] = {}
values = line.css('td::text, td span::text, td a::text').extract()
for i in range(nb_fields):
if fieldNames[i] in RcpSpider.columns:
item[fieldNames[i]] = values[i]
else:
item['field'][fieldNames[i]] = values[i]
item['locale'] = self.state_code
items.append(item)
return items
|
<commit_before>import scrapy
from realclearpolitics.items import TableItem
class RcpSpider(scrapy.Spider):
name = "realclearpoliticsSpider"
start_urls = []
def __init__(self, url, state_code):
self.url = url
self.state_code = state_code
def start_requests(self):
return [scrapy.FormRequest(self.url,
callback=self.parse)]
def parse(self, response):
table = response.css('.data').pop()
legend = table.css('tr')[0]
fieldNames = legend.css('th::text').extract()
nb_fields = len(fieldNames)
items = []
contentLines = table.css('tr')[1::]
for line in contentLines:
item = TableItem()
values = line.css('td::text, td span::text, td a::text').extract()
for i in range(nb_fields):
item[fieldNames[i]] = values[i]
item['locale'] = self.state_code
items.append(item)
return items
<commit_msg>Put candidate score in field object<commit_after>import scrapy
from realclearpolitics.items import TableItem
class RcpSpider(scrapy.Spider):
name = "realclearpoliticsSpider"
start_urls = []
    columns = ['Poll', 'Date', 'Sample', 'Spread']
def __init__(self, url, state_code):
self.url = url
self.state_code = state_code
def start_requests(self):
return [scrapy.FormRequest(self.url,
callback=self.parse)]
def parse(self, response):
table = response.css('.data').pop()
legend = table.css('tr')[0]
fieldNames = legend.css('th::text').extract()
nb_fields = len(fieldNames)
items = []
contentLines = table.css('tr')[1::]
for line in contentLines:
item = TableItem()
item['field'] = {}
values = line.css('td::text, td span::text, td a::text').extract()
for i in range(nb_fields):
if fieldNames[i] in RcpSpider.columns:
item[fieldNames[i]] = values[i]
else:
item['field'][fieldNames[i]] = values[i]
item['locale'] = self.state_code
items.append(item)
return items
|
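The diff above starts writing into item['field'], which only works if TableItem declares such a key. The items module is not shown in this record, so the following is a guess at the minimum definition the spider needs, using the standard scrapy.Item/scrapy.Field API:

import scrapy

class TableItem(scrapy.Item):
    # One declared Field per fixed column from RcpSpider.columns, a
    # dict-valued 'field' for the per-candidate scores, and 'locale'
    # for the state code.  This definition is assumed, not taken from
    # the record.
    Poll = scrapy.Field()
    Date = scrapy.Field()
    Sample = scrapy.Field()
    Spread = scrapy.Field()
    field = scrapy.Field()
    locale = scrapy.Field()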
e9105d4c0fcf7ff90b96e764cbf8acdb94856a7a
|
tests/executables/echo_server.py
|
tests/executables/echo_server.py
|
import sys
import socket
import select
from papa.utils import cast_string
__author__ = 'Scott Maxwell'
if len(sys.argv) != 2:
sys.stderr.write('Need one file descriptor\n')
sys.exit(1)
listen_socket = socket.fromfd(int(sys.argv[1]), socket.AF_INET, socket.SOCK_STREAM)
connection, address = listen_socket.accept()
while True:
read_sockets = select.select([connection], [], [])[0]
sock = read_sockets[0]
data = sock.recv(100)
if data:
sock.send(data)
sys.stdout.write(cast_string(data))
else:
break
|
Add simple echo server test app
|
Add simple echo server test app
|
Python
|
mit
|
scottkmaxwell/papa
|
Add simple echo server test app
|
import sys
import socket
import select
from papa.utils import cast_string
__author__ = 'Scott Maxwell'
if len(sys.argv) != 2:
sys.stderr.write('Need one file descriptor\n')
sys.exit(1)
listen_socket = socket.fromfd(int(sys.argv[1]), socket.AF_INET, socket.SOCK_STREAM)
connection, address = listen_socket.accept()
while True:
read_sockets = select.select([connection], [], [])[0]
sock = read_sockets[0]
data = sock.recv(100)
if data:
sock.send(data)
sys.stdout.write(cast_string(data))
else:
break
|
<commit_before><commit_msg>Add simple echo server test app<commit_after>
|
import sys
import socket
import select
from papa.utils import cast_string
__author__ = 'Scott Maxwell'
if len(sys.argv) != 2:
sys.stderr.write('Need one file descriptor\n')
sys.exit(1)
listen_socket = socket.fromfd(int(sys.argv[1]), socket.AF_INET, socket.SOCK_STREAM)
connection, address = listen_socket.accept()
while True:
read_sockets = select.select([connection], [], [])[0]
sock = read_sockets[0]
data = sock.recv(100)
if data:
sock.send(data)
sys.stdout.write(cast_string(data))
else:
break
|
Add simple echo server test appimport sys
import socket
import select
from papa.utils import cast_string
__author__ = 'Scott Maxwell'
if len(sys.argv) != 2:
sys.stderr.write('Need one file descriptor\n')
sys.exit(1)
listen_socket = socket.fromfd(int(sys.argv[1]), socket.AF_INET, socket.SOCK_STREAM)
connection, address = listen_socket.accept()
while True:
read_sockets = select.select([connection], [], [])[0]
sock = read_sockets[0]
data = sock.recv(100)
if data:
sock.send(data)
sys.stdout.write(cast_string(data))
else:
break
|
<commit_before><commit_msg>Add simple echo server test app<commit_after>import sys
import socket
import select
from papa.utils import cast_string
__author__ = 'Scott Maxwell'
if len(sys.argv) != 2:
sys.stderr.write('Need one file descriptor\n')
sys.exit(1)
listen_socket = socket.fromfd(int(sys.argv[1]), socket.AF_INET, socket.SOCK_STREAM)
connection, address = listen_socket.accept()
while True:
read_sockets = select.select([connection], [], [])[0]
sock = read_sockets[0]
data = sock.recv(100)
if data:
sock.send(data)
sys.stdout.write(cast_string(data))
else:
break
|
|
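A small client snippet that could exercise the echo server above from a test, using only the standard library; the port is whatever address the harness bound before handing its file descriptor to the script, so it is a parameter here rather than a known value:

import socket

def echo_once(port, payload=b'hello'):
    # Connect to the echo server, send one payload, and return the echo.
    # recv(100) mirrors the server's read size above.
    client = socket.create_connection(('127.0.0.1', port))
    try:
        client.sendall(payload)
        return client.recv(100)
    finally:
        client.close()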
41c6a71e2a9e013966df06e3b5f458aa9a902bc8
|
tests/test_core.py
|
tests/test_core.py
|
import pytest
from mock import Mock
from saleor.core.utils import (
Country, get_country_by_ip, get_currency_for_country)
@pytest.mark.parametrize('ip_data, expected_country', [
({'country': {'iso_code': 'PL'}}, Country('PL')),
({'country': {'iso_code': 'UNKNOWN'}}, None),
(None, None),
({}, None),
({'country': {}}, None)])
def test_get_country_by_ip(ip_data, expected_country, monkeypatch):
monkeypatch.setattr(
'saleor.core.utils.georeader.get',
Mock(return_value=ip_data))
country = get_country_by_ip('127.0.0.1')
assert country == expected_country
@pytest.mark.parametrize('country, expected_currency', [
(Country('PL'), 'PLN'),
(Country('US'), 'USD'),
(Country('GB'), 'GBP')])
def test_get_currency_for_country(country, expected_currency, monkeypatch):
currency = get_currency_for_country(country)
assert currency == expected_currency
|
import pytest
from mock import Mock
from saleor.core.utils import (
Country, get_country_by_ip, get_currency_for_country, create_superuser)
from saleor.userprofile.models import User
@pytest.mark.parametrize('ip_data, expected_country', [
({'country': {'iso_code': 'PL'}}, Country('PL')),
({'country': {'iso_code': 'UNKNOWN'}}, None),
(None, None),
({}, None),
({'country': {}}, None)])
def test_get_country_by_ip(ip_data, expected_country, monkeypatch):
monkeypatch.setattr(
'saleor.core.utils.georeader.get',
Mock(return_value=ip_data))
country = get_country_by_ip('127.0.0.1')
assert country == expected_country
@pytest.mark.parametrize('country, expected_currency', [
(Country('PL'), 'PLN'),
(Country('US'), 'USD'),
(Country('GB'), 'GBP')])
def test_get_currency_for_country(country, expected_currency, monkeypatch):
currency = get_currency_for_country(country)
assert currency == expected_currency
def test_create_superuser(db, client):
credentials = {'email': 'admin@example.com', 'password': 'admin'}
# Test admin creation
assert User.objects.all().count() == 0
create_superuser(credentials)
assert User.objects.all().count() == 1
admin = User.objects.all().first()
assert admin.is_superuser
# Test duplicating
create_superuser(credentials)
assert User.objects.all().count() == 1
# Test logging in
response = client.post('/account/login/',
{'login': credentials['email'],
'password': credentials['password']},
follow=True)
assert response.context['request'].user == admin
|
Add populatedb admin creation test
|
Add populatedb admin creation test
|
Python
|
bsd-3-clause
|
car3oon/saleor,mociepka/saleor,jreigel/saleor,mociepka/saleor,tfroehlich82/saleor,maferelo/saleor,itbabu/saleor,itbabu/saleor,HyperManTT/ECommerceSaleor,KenMutemi/saleor,UITools/saleor,tfroehlich82/saleor,KenMutemi/saleor,maferelo/saleor,maferelo/saleor,UITools/saleor,mociepka/saleor,jreigel/saleor,jreigel/saleor,UITools/saleor,UITools/saleor,car3oon/saleor,HyperManTT/ECommerceSaleor,KenMutemi/saleor,itbabu/saleor,car3oon/saleor,tfroehlich82/saleor,HyperManTT/ECommerceSaleor,UITools/saleor
|
import pytest
from mock import Mock
from saleor.core.utils import (
Country, get_country_by_ip, get_currency_for_country)
@pytest.mark.parametrize('ip_data, expected_country', [
({'country': {'iso_code': 'PL'}}, Country('PL')),
({'country': {'iso_code': 'UNKNOWN'}}, None),
(None, None),
({}, None),
({'country': {}}, None)])
def test_get_country_by_ip(ip_data, expected_country, monkeypatch):
monkeypatch.setattr(
'saleor.core.utils.georeader.get',
Mock(return_value=ip_data))
country = get_country_by_ip('127.0.0.1')
assert country == expected_country
@pytest.mark.parametrize('country, expected_currency', [
(Country('PL'), 'PLN'),
(Country('US'), 'USD'),
(Country('GB'), 'GBP')])
def test_get_currency_for_country(country, expected_currency, monkeypatch):
currency = get_currency_for_country(country)
assert currency == expected_currency
Add populatedb admin creation test
|
import pytest
from mock import Mock
from saleor.core.utils import (
Country, get_country_by_ip, get_currency_for_country, create_superuser)
from saleor.userprofile.models import User
@pytest.mark.parametrize('ip_data, expected_country', [
({'country': {'iso_code': 'PL'}}, Country('PL')),
({'country': {'iso_code': 'UNKNOWN'}}, None),
(None, None),
({}, None),
({'country': {}}, None)])
def test_get_country_by_ip(ip_data, expected_country, monkeypatch):
monkeypatch.setattr(
'saleor.core.utils.georeader.get',
Mock(return_value=ip_data))
country = get_country_by_ip('127.0.0.1')
assert country == expected_country
@pytest.mark.parametrize('country, expected_currency', [
(Country('PL'), 'PLN'),
(Country('US'), 'USD'),
(Country('GB'), 'GBP')])
def test_get_currency_for_country(country, expected_currency, monkeypatch):
currency = get_currency_for_country(country)
assert currency == expected_currency
def test_create_superuser(db, client):
credentials = {'email': 'admin@example.com', 'password': 'admin'}
# Test admin creation
assert User.objects.all().count() == 0
create_superuser(credentials)
assert User.objects.all().count() == 1
admin = User.objects.all().first()
assert admin.is_superuser
# Test duplicating
create_superuser(credentials)
assert User.objects.all().count() == 1
# Test logging in
response = client.post('/account/login/',
{'login': credentials['email'],
'password': credentials['password']},
follow=True)
assert response.context['request'].user == admin
|
<commit_before>import pytest
from mock import Mock
from saleor.core.utils import (
Country, get_country_by_ip, get_currency_for_country)
@pytest.mark.parametrize('ip_data, expected_country', [
({'country': {'iso_code': 'PL'}}, Country('PL')),
({'country': {'iso_code': 'UNKNOWN'}}, None),
(None, None),
({}, None),
({'country': {}}, None)])
def test_get_country_by_ip(ip_data, expected_country, monkeypatch):
monkeypatch.setattr(
'saleor.core.utils.georeader.get',
Mock(return_value=ip_data))
country = get_country_by_ip('127.0.0.1')
assert country == expected_country
@pytest.mark.parametrize('country, expected_currency', [
(Country('PL'), 'PLN'),
(Country('US'), 'USD'),
(Country('GB'), 'GBP')])
def test_get_currency_for_country(country, expected_currency, monkeypatch):
currency = get_currency_for_country(country)
assert currency == expected_currency
<commit_msg>Add populatedb admin creation test<commit_after>
|
import pytest
from mock import Mock
from saleor.core.utils import (
Country, get_country_by_ip, get_currency_for_country, create_superuser)
from saleor.userprofile.models import User
@pytest.mark.parametrize('ip_data, expected_country', [
({'country': {'iso_code': 'PL'}}, Country('PL')),
({'country': {'iso_code': 'UNKNOWN'}}, None),
(None, None),
({}, None),
({'country': {}}, None)])
def test_get_country_by_ip(ip_data, expected_country, monkeypatch):
monkeypatch.setattr(
'saleor.core.utils.georeader.get',
Mock(return_value=ip_data))
country = get_country_by_ip('127.0.0.1')
assert country == expected_country
@pytest.mark.parametrize('country, expected_currency', [
(Country('PL'), 'PLN'),
(Country('US'), 'USD'),
(Country('GB'), 'GBP')])
def test_get_currency_for_country(country, expected_currency, monkeypatch):
currency = get_currency_for_country(country)
assert currency == expected_currency
def test_create_superuser(db, client):
credentials = {'email': 'admin@example.com', 'password': 'admin'}
# Test admin creation
assert User.objects.all().count() == 0
create_superuser(credentials)
assert User.objects.all().count() == 1
admin = User.objects.all().first()
assert admin.is_superuser
# Test duplicating
create_superuser(credentials)
assert User.objects.all().count() == 1
# Test logging in
response = client.post('/account/login/',
{'login': credentials['email'],
'password': credentials['password']},
follow=True)
assert response.context['request'].user == admin
|
import pytest
from mock import Mock
from saleor.core.utils import (
Country, get_country_by_ip, get_currency_for_country)
@pytest.mark.parametrize('ip_data, expected_country', [
({'country': {'iso_code': 'PL'}}, Country('PL')),
({'country': {'iso_code': 'UNKNOWN'}}, None),
(None, None),
({}, None),
({'country': {}}, None)])
def test_get_country_by_ip(ip_data, expected_country, monkeypatch):
monkeypatch.setattr(
'saleor.core.utils.georeader.get',
Mock(return_value=ip_data))
country = get_country_by_ip('127.0.0.1')
assert country == expected_country
@pytest.mark.parametrize('country, expected_currency', [
(Country('PL'), 'PLN'),
(Country('US'), 'USD'),
(Country('GB'), 'GBP')])
def test_get_currency_for_country(country, expected_currency, monkeypatch):
currency = get_currency_for_country(country)
assert currency == expected_currency
Add populatedb admin creation testimport pytest
from mock import Mock
from saleor.core.utils import (
Country, get_country_by_ip, get_currency_for_country, create_superuser)
from saleor.userprofile.models import User
@pytest.mark.parametrize('ip_data, expected_country', [
({'country': {'iso_code': 'PL'}}, Country('PL')),
({'country': {'iso_code': 'UNKNOWN'}}, None),
(None, None),
({}, None),
({'country': {}}, None)])
def test_get_country_by_ip(ip_data, expected_country, monkeypatch):
monkeypatch.setattr(
'saleor.core.utils.georeader.get',
Mock(return_value=ip_data))
country = get_country_by_ip('127.0.0.1')
assert country == expected_country
@pytest.mark.parametrize('country, expected_currency', [
(Country('PL'), 'PLN'),
(Country('US'), 'USD'),
(Country('GB'), 'GBP')])
def test_get_currency_for_country(country, expected_currency, monkeypatch):
currency = get_currency_for_country(country)
assert currency == expected_currency
def test_create_superuser(db, client):
credentials = {'email': 'admin@example.com', 'password': 'admin'}
# Test admin creation
assert User.objects.all().count() == 0
create_superuser(credentials)
assert User.objects.all().count() == 1
admin = User.objects.all().first()
assert admin.is_superuser
# Test duplicating
create_superuser(credentials)
assert User.objects.all().count() == 1
# Test logging in
response = client.post('/account/login/',
{'login': credentials['email'],
'password': credentials['password']},
follow=True)
assert response.context['request'].user == admin
|
<commit_before>import pytest
from mock import Mock
from saleor.core.utils import (
Country, get_country_by_ip, get_currency_for_country)
@pytest.mark.parametrize('ip_data, expected_country', [
({'country': {'iso_code': 'PL'}}, Country('PL')),
({'country': {'iso_code': 'UNKNOWN'}}, None),
(None, None),
({}, None),
({'country': {}}, None)])
def test_get_country_by_ip(ip_data, expected_country, monkeypatch):
monkeypatch.setattr(
'saleor.core.utils.georeader.get',
Mock(return_value=ip_data))
country = get_country_by_ip('127.0.0.1')
assert country == expected_country
@pytest.mark.parametrize('country, expected_currency', [
(Country('PL'), 'PLN'),
(Country('US'), 'USD'),
(Country('GB'), 'GBP')])
def test_get_currency_for_country(country, expected_currency, monkeypatch):
currency = get_currency_for_country(country)
assert currency == expected_currency
<commit_msg>Add populatedb admin creation test<commit_after>import pytest
from mock import Mock
from saleor.core.utils import (
Country, get_country_by_ip, get_currency_for_country, create_superuser)
from saleor.userprofile.models import User
@pytest.mark.parametrize('ip_data, expected_country', [
({'country': {'iso_code': 'PL'}}, Country('PL')),
({'country': {'iso_code': 'UNKNOWN'}}, None),
(None, None),
({}, None),
({'country': {}}, None)])
def test_get_country_by_ip(ip_data, expected_country, monkeypatch):
monkeypatch.setattr(
'saleor.core.utils.georeader.get',
Mock(return_value=ip_data))
country = get_country_by_ip('127.0.0.1')
assert country == expected_country
@pytest.mark.parametrize('country, expected_currency', [
(Country('PL'), 'PLN'),
(Country('US'), 'USD'),
(Country('GB'), 'GBP')])
def test_get_currency_for_country(country, expected_currency, monkeypatch):
currency = get_currency_for_country(country)
assert currency == expected_currency
def test_create_superuser(db, client):
credentials = {'email': 'admin@example.com', 'password': 'admin'}
# Test admin creation
assert User.objects.all().count() == 0
create_superuser(credentials)
assert User.objects.all().count() == 1
admin = User.objects.all().first()
assert admin.is_superuser
# Test duplicating
create_superuser(credentials)
assert User.objects.all().count() == 1
# Test logging in
response = client.post('/account/login/',
{'login': credentials['email'],
'password': credentials['password']},
follow=True)
assert response.context['request'].user == admin
|
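The new test above pins down create_superuser's contract: it creates exactly one superuser and is idempotent on the email. The implementation itself is not in this record; a minimal sketch consistent with those assertions might look like this, with the default flags being assumptions:

from saleor.userprofile.models import User

def create_superuser(credentials):
    # get_or_create keyed on the email makes the duplication check in the
    # test pass; the defaults dict is a guess at sensible superuser flags.
    user, created = User.objects.get_or_create(
        email=credentials['email'],
        defaults={'is_active': True, 'is_staff': True, 'is_superuser': True})
    if created:
        user.set_password(credentials['password'])
        user.save()
    return user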
1da0e093bd2a1fd70964dd0b80c0a554d46fd3e9
|
src/tests/client/test_stacking.py
|
src/tests/client/test_stacking.py
|
# Copyright (c) 2016, LE GOFF Vincent
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of ytranslate nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from mock import MagicMock, call
from models import MockClient
from scripting.alias import Alias
class TestCommandStacking(MockClient):
"""Test of command stacking."""
def test_without(self):
"""Test without any command stacking."""
self.client.write(u"say 1;say 2")
self.client.transport.write.assert_called_once_with(
"say 1;say 2\r\n")
def test_simple(self):
"""Test simple command stacking with a ; sign."""
def get_setting(address):
"""Private function to return a set of default settings."""
default = {
"options.input.command_stacking": ";",
"options.general.encoding": "utf-8",
}
return default[address]
self.client.factory.engine.settings.__getitem__ = MagicMock(
side_effect=get_setting)
self.client.write(u"say 1;say 2")
calls = [call("say 1\r\n"), call("say 2\r\n")]
self.client.transport.write.assert_has_calls(calls)
def test_special(self):
"""Test command stacking with a special character."""
def get_setting(address):
"""Private function to return a set of default settings."""
default = {
"options.input.command_stacking": u"\x82",
"options.general.encoding": "utf-8",
}
return default[address]
self.client.factory.engine.settings.__getitem__ = MagicMock(
side_effect=get_setting)
self.client.write(u"say 1\x82say 2")
calls = [call("say 1\r\n"), call("say 2\r\n")]
self.client.transport.write.assert_has_calls(calls)
|
Add tests for command stacking
|
Add tests for command stacking
|
Python
|
bsd-3-clause
|
vlegoff/cocomud
|
Add tests for command stacking
|
# Copyright (c) 2016, LE GOFF Vincent
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of ytranslate nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from mock import MagicMock, call
from models import MockClient
from scripting.alias import Alias
class TestCommandStacking(MockClient):
"""Test of command stacking."""
def test_without(self):
"""Test without any command stacking."""
self.client.write(u"say 1;say 2")
self.client.transport.write.assert_called_once_with(
"say 1;say 2\r\n")
def test_simple(self):
"""Test simple command stacking with a ; sign."""
def get_setting(address):
"""Private function to return a set of default settings."""
default = {
"options.input.command_stacking": ";",
"options.general.encoding": "utf-8",
}
return default[address]
self.client.factory.engine.settings.__getitem__ = MagicMock(
side_effect=get_setting)
self.client.write(u"say 1;say 2")
calls = [call("say 1\r\n"), call("say 2\r\n")]
self.client.transport.write.assert_has_calls(calls)
def test_special(self):
"""Test command stacking with a special character."""
def get_setting(address):
"""Private function to return a set of default settings."""
default = {
"options.input.command_stacking": u"\x82",
"options.general.encoding": "utf-8",
}
return default[address]
self.client.factory.engine.settings.__getitem__ = MagicMock(
side_effect=get_setting)
self.client.write(u"say 1\x82say 2")
calls = [call("say 1\r\n"), call("say 2\r\n")]
self.client.transport.write.assert_has_calls(calls)
|
<commit_before><commit_msg>Add tests for command stacking<commit_after>
|
# Copyright (c) 2016, LE GOFF Vincent
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of ytranslate nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from mock import MagicMock, call
from models import MockClient
from scripting.alias import Alias
class TestCommandStacking(MockClient):
"""Test of command stacking."""
def test_without(self):
"""Test without any command stacking."""
self.client.write(u"say 1;say 2")
self.client.transport.write.assert_called_once_with(
"say 1;say 2\r\n")
def test_simple(self):
"""Test simple command stacking with a ; sign."""
def get_setting(address):
"""Private function to return a set of default settings."""
default = {
"options.input.command_stacking": ";",
"options.general.encoding": "utf-8",
}
return default[address]
self.client.factory.engine.settings.__getitem__ = MagicMock(
side_effect=get_setting)
self.client.write(u"say 1;say 2")
calls = [call("say 1\r\n"), call("say 2\r\n")]
self.client.transport.write.assert_has_calls(calls)
def test_special(self):
"""Test command stacking with a special character."""
def get_setting(address):
"""Private function to return a set of default settings."""
default = {
"options.input.command_stacking": u"\x82",
"options.general.encoding": "utf-8",
}
return default[address]
self.client.factory.engine.settings.__getitem__ = MagicMock(
side_effect=get_setting)
self.client.write(u"say 1\x82say 2")
calls = [call("say 1\r\n"), call("say 2\r\n")]
self.client.transport.write.assert_has_calls(calls)
|
Add tests for command stacking# Copyright (c) 2016, LE GOFF Vincent
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of ytranslate nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from mock import MagicMock, call
from models import MockClient
from scripting.alias import Alias
class TestCommandStacking(MockClient):
"""Test of command stacking."""
def test_without(self):
"""Test without any command stacking."""
self.client.write(u"say 1;say 2")
self.client.transport.write.assert_called_once_with(
"say 1;say 2\r\n")
def test_simple(self):
"""Test simple command stacking with a ; sign."""
def get_setting(address):
"""Private function to return a set of default settings."""
default = {
"options.input.command_stacking": ";",
"options.general.encoding": "utf-8",
}
return default[address]
self.client.factory.engine.settings.__getitem__ = MagicMock(
side_effect=get_setting)
self.client.write(u"say 1;say 2")
calls = [call("say 1\r\n"), call("say 2\r\n")]
self.client.transport.write.assert_has_calls(calls)
def test_special(self):
"""Test command stacking with a special character."""
def get_setting(address):
"""Private function to return a set of default settings."""
default = {
"options.input.command_stacking": u"\x82",
"options.general.encoding": "utf-8",
}
return default[address]
self.client.factory.engine.settings.__getitem__ = MagicMock(
side_effect=get_setting)
self.client.write(u"say 1\x82say 2")
calls = [call("say 1\r\n"), call("say 2\r\n")]
self.client.transport.write.assert_has_calls(calls)
|
<commit_before><commit_msg>Add tests for command stacking<commit_after># Copyright (c) 2016, LE GOFF Vincent
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of ytranslate nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from mock import MagicMock, call
from models import MockClient
class TestCommandStacking(MockClient):
"""Test of command stacking."""
def test_without(self):
"""Test without any command stacking."""
self.client.write(u"say 1;say 2")
self.client.transport.write.assert_called_once_with(
"say 1;say 2\r\n")
def test_simple(self):
"""Test simple command stacking with a ; sign."""
def get_setting(address):
"""Private function to return a set of default settings."""
default = {
"options.input.command_stacking": ";",
"options.general.encoding": "utf-8",
}
return default[address]
self.client.factory.engine.settings.__getitem__ = MagicMock(
side_effect=get_setting)
self.client.write(u"say 1;say 2")
calls = [call("say 1\r\n"), call("say 2\r\n")]
self.client.transport.write.assert_has_calls(calls)
def test_special(self):
"""Test command stacking with a special character."""
def get_setting(address):
"""Private function to return a set of default settings."""
default = {
"options.input.command_stacking": u"\x82",
"options.general.encoding": "utf-8",
}
return default[address]
self.client.factory.engine.settings.__getitem__ = MagicMock(
side_effect=get_setting)
self.client.write(u"say 1\x82say 2")
calls = [call("say 1\r\n"), call("say 2\r\n")]
self.client.transport.write.assert_has_calls(calls)
|
|
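Both stacking tests above rebuild the same get_setting closure by hand. A small helper like the sketch below would remove that duplication; this is a hypothetical refactor, not part of the committed suite, and it assumes MockClient exposes self.client.factory.engine.settings exactly as the tests do.

from mock import MagicMock

def stub_settings(client, command_stacking, encoding="utf-8"):
    """Hypothetical helper: stub the engine settings lookups for one test."""
    defaults = {
        "options.input.command_stacking": command_stacking,
        "options.general.encoding": encoding,
    }
    # settings[address] goes through __getitem__, so a side_effect bound to
    # the defaults dict routes every lookup to the expected value.
    client.factory.engine.settings.__getitem__ = MagicMock(
        side_effect=defaults.__getitem__)

A test would then start with stub_settings(self.client, ";") before calling self.client.write(...).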
2dcfbc9dfecef4920a8dec9f3d2362f5ece13612
|
sympy/printing/tests/test_numpy.py
|
sympy/printing/tests/test_numpy.py
|
from sympy import Piecewise
from sympy.abc import x
from sympy.printing.lambdarepr import NumPyPrinter
def test_numpy_piecewise_regression():
"""
NumPyPrinter needs to print Piecewise()'s choicelist as a list to avoid
breaking compatibility with numpy 1.8. This is not necessary in numpy 1.9+.
See gh-9747 and gh-9749 for details.
"""
p = Piecewise((1, x < 0), (0, True))
assert NumPyPrinter().doprint(p) == 'select([x < 0,True], [1,0], default=nan)'
|
Add test for NumPyPrinter regression
|
Add test for NumPyPrinter regression
|
Python
|
bsd-3-clause
|
kevalds51/sympy,aktech/sympy,maniteja123/sympy,madan96/sympy,atreyv/sympy,madan96/sympy,jbbskinny/sympy,Vishluck/sympy,iamutkarshtiwari/sympy,skidzo/sympy,chaffra/sympy,jbbskinny/sympy,saurabhjn76/sympy,kaichogami/sympy,sahmed95/sympy,abhiii5459/sympy,wyom/sympy,wyom/sympy,drufat/sympy,oliverlee/sympy,kumarkrishna/sympy,oliverlee/sympy,Vishluck/sympy,saurabhjn76/sympy,iamutkarshtiwari/sympy,shikil/sympy,kaichogami/sympy,VaibhavAgarwalVA/sympy,cswiercz/sympy,shikil/sympy,Davidjohnwilson/sympy,sahmed95/sympy,chaffra/sympy,yashsharan/sympy,Arafatk/sympy,mcdaniel67/sympy,rahuldan/sympy,jaimahajan1997/sympy,maniteja123/sympy,kumarkrishna/sympy,Vishluck/sympy,yashsharan/sympy,Shaswat27/sympy,kevalds51/sympy,chaffra/sympy,rahuldan/sympy,emon10005/sympy,jaimahajan1997/sympy,kaushik94/sympy,iamutkarshtiwari/sympy,debugger22/sympy,mafiya69/sympy,jerli/sympy,Titan-C/sympy,Shaswat27/sympy,sampadsaha5/sympy,cswiercz/sympy,grevutiu-gabriel/sympy,madan96/sympy,atreyv/sympy,Shaswat27/sympy,wanglongqi/sympy,kaichogami/sympy,Curious72/sympy,mafiya69/sympy,wyom/sympy,yukoba/sympy,lindsayad/sympy,ga7g08/sympy,hargup/sympy,AkademieOlympia/sympy,ga7g08/sympy,abhiii5459/sympy,yashsharan/sympy,debugger22/sympy,sampadsaha5/sympy,souravsingh/sympy,pandeyadarsh/sympy,moble/sympy,jbbskinny/sympy,ChristinaZografou/sympy,moble/sympy,drufat/sympy,Designist/sympy,kaushik94/sympy,debugger22/sympy,lindsayad/sympy,MechCoder/sympy,ChristinaZografou/sympy,emon10005/sympy,MechCoder/sympy,abhiii5459/sympy,ga7g08/sympy,mafiya69/sympy,postvakje/sympy,grevutiu-gabriel/sympy,farhaanbukhsh/sympy,skidzo/sympy,jerli/sympy,kevalds51/sympy,skidzo/sympy,yukoba/sympy,farhaanbukhsh/sympy,jerli/sympy,moble/sympy,drufat/sympy,ahhda/sympy,rahuldan/sympy,sahmed95/sympy,AkademieOlympia/sympy,mcdaniel67/sympy,jaimahajan1997/sympy,Curious72/sympy,mcdaniel67/sympy,postvakje/sympy,AkademieOlympia/sympy,VaibhavAgarwalVA/sympy,Titan-C/sympy,pandeyadarsh/sympy,cswiercz/sympy,sampadsaha5/sympy,oliverlee/sympy,aktech/sympy,Designist/sympy,maniteja123/sympy,saurabhjn76/sympy,Designist/sympy,Curious72/sympy,hargup/sympy,Arafatk/sympy,atreyv/sympy,wanglongqi/sympy,wanglongqi/sympy,MechCoder/sympy,souravsingh/sympy,ahhda/sympy,postvakje/sympy,yukoba/sympy,kaushik94/sympy,aktech/sympy,ChristinaZografou/sympy,farhaanbukhsh/sympy,kumarkrishna/sympy,Davidjohnwilson/sympy,shikil/sympy,emon10005/sympy,hargup/sympy,Arafatk/sympy,lindsayad/sympy,Titan-C/sympy,pandeyadarsh/sympy,Davidjohnwilson/sympy,VaibhavAgarwalVA/sympy,ahhda/sympy,grevutiu-gabriel/sympy,souravsingh/sympy
|
Add test for NumPyPrinter regression
|
from sympy import Piecewise
from sympy.abc import x
from sympy.printing.lambdarepr import NumPyPrinter
def test_numpy_piecewise_regression():
"""
NumPyPrinter needs to print Piecewise()'s choicelist as a list to avoid
breaking compatibility with numpy 1.8. This is not necessary in numpy 1.9+.
See gh-9747 and gh-9749 for details.
"""
p = Piecewise((1, x < 0), (0, True))
assert NumPyPrinter().doprint(p) == 'select([x < 0,True], [1,0], default=nan)'
|
<commit_before><commit_msg>Add test for NumPyPrinter regression<commit_after>
|
from sympy import Piecewise
from sympy.abc import x
from sympy.printing.lambdarepr import NumPyPrinter
def test_numpy_piecewise_regression():
"""
NumPyPrinter needs to print Piecewise()'s choicelist as a list to avoid
breaking compatibility with numpy 1.8. This is not necessary in numpy 1.9+.
See gh-9747 and gh-9749 for details.
"""
p = Piecewise((1, x < 0), (0, True))
assert NumPyPrinter().doprint(p) == 'select([x < 0,True], [1,0], default=nan)'
|
Add test for NumPyPrinter regressionfrom sympy import Piecewise
from sympy.abc import x
from sympy.printing.lambdarepr import NumPyPrinter
def test_numpy_piecewise_regression():
"""
NumPyPrinter needs to print Piecewise()'s choicelist as a list to avoid
breaking compatibility with numpy 1.8. This is not necessary in numpy 1.9+.
See gh-9747 and gh-9749 for details.
"""
p = Piecewise((1, x < 0), (0, True))
assert NumPyPrinter().doprint(p) == 'select([x < 0,True], [1,0], default=nan)'
|
<commit_before><commit_msg>Add test for NumPyPrinter regression<commit_after>from sympy import Piecewise
from sympy.abc import x
from sympy.printing.lambdarepr import NumPyPrinter
def test_numpy_piecewise_regression():
"""
NumPyPrinter needs to print Piecewise()'s choicelist as a list to avoid
breaking compatibility with numpy 1.8. This is not necessary in numpy 1.9+.
See gh-9747 and gh-9749 for details.
"""
p = Piecewise((1, x < 0), (0, True))
assert NumPyPrinter().doprint(p) == 'select([x < 0,True], [1,0], default=nan)'
|
|
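For context on the string pinned above: evaluated in lambdify's NumPy namespace, select([x < 0, True], [1, 0], default=nan) picks 1 wherever x < 0 and falls back to 0 elsewhere. The sketch below reproduces that behavior with NumPy directly; the explicit all-True boolean array stands in for the literal True condition and is an illustrative assumption, not what lambdify emits verbatim.

import numpy as np

x = np.array([-2.0, -1.0, 0.0, 1.0])
# Equivalent of select([x < 0, True], [1, 0], default=nan); the choicelist
# has to be a plain list for numpy 1.8, which is what the test guards.
result = np.select([x < 0, np.ones_like(x, dtype=bool)], [1, 0], default=np.nan)
print(result)  # [ 1.  1.  0.  0.]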
a20255d2a2531eff982b78c15f5fc4d5cc1ec621
|
tests/integration/suite/test_istio.py
|
tests/integration/suite/test_istio.py
|
import os
import pytest
import subprocess
from .common import random_str
from .conftest import cluster_and_client, ClusterContext
kube_fname = os.path.join(os.path.dirname(os.path.realpath(__file__)),
"k8s_kube_config")
istio_crd_url = "https://raw.githubusercontent.com/istio/istio/1.1.5" \
"/install/kubernetes/helm/istio-init/files/crd-10.yaml"
def test_virtual_service(admin_pc):
client = admin_pc.client
ns = admin_pc.cluster.client.create_namespace(
name=random_str(),
projectId=admin_pc.project.id)
name = random_str()
client.create_virtualService(
name=name,
namespaceId=ns.id,
hosts=["test"],
http=[{
"route": [
{
"destination": {
"host": "test",
"subset": "v1"
}
}
]
}],
)
virtualServices = client.list_virtualService(
namespaceId=ns.id
)
assert len(virtualServices) == 1
client.delete(virtualServices.data[0])
client.delete(ns)
def test_destination_rule(admin_pc):
client = admin_pc.client
ns = admin_pc.cluster.client.create_namespace(
name=random_str(),
projectId=admin_pc.project.id)
name = random_str()
client.create_destinationRule(
name=name,
namespaceId=ns.id,
host="test",
subsets=[{
"name": "v1",
"labels": {
"version": "v1",
}
}],
)
destinationRules = client.list_destinationRule(
namespaceId=ns.id
)
assert len(destinationRules) == 1
client.delete(destinationRules.data[0])
client.delete(ns)
@pytest.fixture(scope='module', autouse=True)
def install_crd(admin_mc):
cluster, client = cluster_and_client('local', admin_mc.client)
cc = ClusterContext(admin_mc, cluster, client)
create_kubeconfig(cc.cluster)
try:
return subprocess.check_output(
'kubectl apply ' +
' --kubeconfig ' + kube_fname +
' -f ' + istio_crd_url,
stderr=subprocess.STDOUT, shell=True,
)
except subprocess.CalledProcessError as err:
print('kubectl error: ' + str(err.output))
raise err
def teardown_module(module):
try:
return subprocess.check_output(
'kubectl delete ' +
' --kubeconfig ' + kube_fname +
' -f ' + istio_crd_url,
stderr=subprocess.STDOUT, shell=True,
)
except subprocess.CalledProcessError as err:
print('kubectl error: ' + str(err.output))
raise err
def create_kubeconfig(cluster):
generateKubeConfigOutput = cluster.generateKubeconfig()
print(generateKubeConfigOutput.config)
    with open(kube_fname, "w") as kube_file:
        kube_file.write(generateKubeConfigOutput.config)
|
Add integration tests for VirtualService and DestinationRule
|
Add integration tests for VirtualService and DestinationRule
|
Python
|
apache-2.0
|
rancherio/rancher,rancherio/rancher,cjellick/rancher,rancher/rancher,rancher/rancher,cjellick/rancher,rancher/rancher,cjellick/rancher,rancher/rancher
|
Add integration tests for VirtualService and DestinationRule
|
import os
import pytest
import subprocess
from .common import random_str
from .conftest import cluster_and_client, ClusterContext
kube_fname = os.path.join(os.path.dirname(os.path.realpath(__file__)),
"k8s_kube_config")
istio_crd_url = "https://raw.githubusercontent.com/istio/istio/1.1.5" \
"/install/kubernetes/helm/istio-init/files/crd-10.yaml"
def test_virtual_service(admin_pc):
client = admin_pc.client
ns = admin_pc.cluster.client.create_namespace(
name=random_str(),
projectId=admin_pc.project.id)
name = random_str()
client.create_virtualService(
name=name,
namespaceId=ns.id,
hosts=["test"],
http=[{
"route": [
{
"destination": {
"host": "test",
"subset": "v1"
}
}
]
}],
)
virtualServices = client.list_virtualService(
namespaceId=ns.id
)
assert len(virtualServices) == 1
client.delete(virtualServices.data[0])
client.delete(ns)
def test_destination_rule(admin_pc):
client = admin_pc.client
ns = admin_pc.cluster.client.create_namespace(
name=random_str(),
projectId=admin_pc.project.id)
name = random_str()
client.create_destinationRule(
name=name,
namespaceId=ns.id,
host="test",
subsets=[{
"name": "v1",
"labels": {
"version": "v1",
}
}],
)
destinationRules = client.list_destinationRule(
namespaceId=ns.id
)
assert len(destinationRules) == 1
client.delete(destinationRules.data[0])
client.delete(ns)
@pytest.fixture(scope='module', autouse=True)
def install_crd(admin_mc):
cluster, client = cluster_and_client('local', admin_mc.client)
cc = ClusterContext(admin_mc, cluster, client)
create_kubeconfig(cc.cluster)
try:
return subprocess.check_output(
'kubectl apply ' +
' --kubeconfig ' + kube_fname +
' -f ' + istio_crd_url,
stderr=subprocess.STDOUT, shell=True,
)
except subprocess.CalledProcessError as err:
print('kubectl error: ' + str(err.output))
raise err
def teardown_module(module):
try:
return subprocess.check_output(
'kubectl delete ' +
' --kubeconfig ' + kube_fname +
' -f ' + istio_crd_url,
stderr=subprocess.STDOUT, shell=True,
)
except subprocess.CalledProcessError as err:
print('kubectl error: ' + str(err.output))
raise err
def create_kubeconfig(cluster):
generateKubeConfigOutput = cluster.generateKubeconfig()
print(generateKubeConfigOutput.config)
    with open(kube_fname, "w") as kube_file:
        kube_file.write(generateKubeConfigOutput.config)
|
<commit_before><commit_msg>Add integration tests for VirtualService and DestinationRule<commit_after>
|
import os
import pytest
import subprocess
from .common import random_str
from .conftest import cluster_and_client, ClusterContext
kube_fname = os.path.join(os.path.dirname(os.path.realpath(__file__)),
"k8s_kube_config")
istio_crd_url = "https://raw.githubusercontent.com/istio/istio/1.1.5" \
"/install/kubernetes/helm/istio-init/files/crd-10.yaml"
def test_virtual_service(admin_pc):
client = admin_pc.client
ns = admin_pc.cluster.client.create_namespace(
name=random_str(),
projectId=admin_pc.project.id)
name = random_str()
client.create_virtualService(
name=name,
namespaceId=ns.id,
hosts=["test"],
http=[{
"route": [
{
"destination": {
"host": "test",
"subset": "v1"
}
}
]
}],
)
virtualServices = client.list_virtualService(
namespaceId=ns.id
)
assert len(virtualServices) == 1
client.delete(virtualServices.data[0])
client.delete(ns)
def test_destination_rule(admin_pc):
client = admin_pc.client
ns = admin_pc.cluster.client.create_namespace(
name=random_str(),
projectId=admin_pc.project.id)
name = random_str()
client.create_destinationRule(
name=name,
namespaceId=ns.id,
host="test",
subsets=[{
"name": "v1",
"labels": {
"version": "v1",
}
}],
)
destinationRules = client.list_destinationRule(
namespaceId=ns.id
)
assert len(destinationRules) == 1
client.delete(destinationRules.data[0])
client.delete(ns)
@pytest.fixture(scope='module', autouse=True)
def install_crd(admin_mc):
cluster, client = cluster_and_client('local', admin_mc.client)
cc = ClusterContext(admin_mc, cluster, client)
create_kubeconfig(cc.cluster)
try:
return subprocess.check_output(
'kubectl apply ' +
' --kubeconfig ' + kube_fname +
' -f ' + istio_crd_url,
stderr=subprocess.STDOUT, shell=True,
)
except subprocess.CalledProcessError as err:
print('kubectl error: ' + str(err.output))
raise err
def teardown_module(module):
try:
return subprocess.check_output(
'kubectl delete ' +
' --kubeconfig ' + kube_fname +
' -f ' + istio_crd_url,
stderr=subprocess.STDOUT, shell=True,
)
except subprocess.CalledProcessError as err:
print('kubectl error: ' + str(err.output))
raise err
def create_kubeconfig(cluster):
generateKubeConfigOutput = cluster.generateKubeconfig()
print(generateKubeConfigOutput.config)
    with open(kube_fname, "w") as kube_file:
        kube_file.write(generateKubeConfigOutput.config)
|
Add integration tests for VirtualService and DestinationRuleimport os
import pytest
import subprocess
from .common import random_str
from .conftest import cluster_and_client, ClusterContext
kube_fname = os.path.join(os.path.dirname(os.path.realpath(__file__)),
"k8s_kube_config")
istio_crd_url = "https://raw.githubusercontent.com/istio/istio/1.1.5" \
"/install/kubernetes/helm/istio-init/files/crd-10.yaml"
def test_virtual_service(admin_pc):
client = admin_pc.client
ns = admin_pc.cluster.client.create_namespace(
name=random_str(),
projectId=admin_pc.project.id)
name = random_str()
client.create_virtualService(
name=name,
namespaceId=ns.id,
hosts=["test"],
http=[{
"route": [
{
"destination": {
"host": "test",
"subset": "v1"
}
}
]
}],
)
virtualServices = client.list_virtualService(
namespaceId=ns.id
)
assert len(virtualServices) == 1
client.delete(virtualServices.data[0])
client.delete(ns)
def test_destination_rule(admin_pc):
client = admin_pc.client
ns = admin_pc.cluster.client.create_namespace(
name=random_str(),
projectId=admin_pc.project.id)
name = random_str()
client.create_destinationRule(
name=name,
namespaceId=ns.id,
host="test",
subsets=[{
"name": "v1",
"labels": {
"version": "v1",
}
}],
)
destinationRules = client.list_destinationRule(
namespaceId=ns.id
)
assert len(destinationRules) == 1
client.delete(destinationRules.data[0])
client.delete(ns)
@pytest.fixture(scope='module', autouse=True)
def install_crd(admin_mc):
cluster, client = cluster_and_client('local', admin_mc.client)
cc = ClusterContext(admin_mc, cluster, client)
create_kubeconfig(cc.cluster)
try:
return subprocess.check_output(
'kubectl apply ' +
' --kubeconfig ' + kube_fname +
' -f ' + istio_crd_url,
stderr=subprocess.STDOUT, shell=True,
)
except subprocess.CalledProcessError as err:
print('kubectl error: ' + str(err.output))
raise err
def teardown_module(module):
try:
return subprocess.check_output(
'kubectl delete ' +
' --kubeconfig ' + kube_fname +
' -f ' + istio_crd_url,
stderr=subprocess.STDOUT, shell=True,
)
except subprocess.CalledProcessError as err:
print('kubectl error: ' + str(err.output))
raise err
def create_kubeconfig(cluster):
generateKubeConfigOutput = cluster.generateKubeconfig()
print(generateKubeConfigOutput.config)
    with open(kube_fname, "w") as kube_file:
        kube_file.write(generateKubeConfigOutput.config)
|
<commit_before><commit_msg>Add integration tests for VirtualService and DestinationRule<commit_after>import os
import pytest
import subprocess
from .common import random_str
from .conftest import cluster_and_client, ClusterContext
kube_fname = os.path.join(os.path.dirname(os.path.realpath(__file__)),
"k8s_kube_config")
istio_crd_url = "https://raw.githubusercontent.com/istio/istio/1.1.5" \
"/install/kubernetes/helm/istio-init/files/crd-10.yaml"
def test_virtual_service(admin_pc):
client = admin_pc.client
ns = admin_pc.cluster.client.create_namespace(
name=random_str(),
projectId=admin_pc.project.id)
name = random_str()
client.create_virtualService(
name=name,
namespaceId=ns.id,
hosts=["test"],
http=[{
"route": [
{
"destination": {
"host": "test",
"subset": "v1"
}
}
]
}],
)
virtualServices = client.list_virtualService(
namespaceId=ns.id
)
assert len(virtualServices) == 1
client.delete(virtualServices.data[0])
client.delete(ns)
def test_destination_rule(admin_pc):
client = admin_pc.client
ns = admin_pc.cluster.client.create_namespace(
name=random_str(),
projectId=admin_pc.project.id)
name = random_str()
client.create_destinationRule(
name=name,
namespaceId=ns.id,
host="test",
subsets=[{
"name": "v1",
"labels": {
"version": "v1",
}
}],
)
destinationRules = client.list_destinationRule(
namespaceId=ns.id
)
assert len(destinationRules) == 1
client.delete(destinationRules.data[0])
client.delete(ns)
@pytest.fixture(scope='module', autouse=True)
def install_crd(admin_mc):
cluster, client = cluster_and_client('local', admin_mc.client)
cc = ClusterContext(admin_mc, cluster, client)
create_kubeconfig(cc.cluster)
try:
return subprocess.check_output(
'kubectl apply ' +
' --kubeconfig ' + kube_fname +
' -f ' + istio_crd_url,
stderr=subprocess.STDOUT, shell=True,
)
except subprocess.CalledProcessError as err:
print('kubectl error: ' + str(err.output))
raise err
def teardown_module(module):
try:
return subprocess.check_output(
'kubectl delete ' +
' --kubeconfig ' + kube_fname +
' -f ' + istio_crd_url,
stderr=subprocess.STDOUT, shell=True,
)
except subprocess.CalledProcessError as err:
print('kubectl error: ' + str(err.output))
raise err
def create_kubeconfig(cluster):
generateKubeConfigOutput = cluster.generateKubeconfig()
print(generateKubeConfigOutput.config)
    with open(kube_fname, "w") as kube_file:
        kube_file.write(generateKubeConfigOutput.config)
|
|
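A note on the subprocess calls in the fixture above: they build one shell string and run it with shell=True, which works but splices kube_fname and the URL into a shell command. A hypothetical variant using an argument list, sketched below, avoids the shell entirely while keeping the same check_output semantics; the helper name is illustrative.

import subprocess

def kubectl(verb, kubeconfig, manifest_url):
    # Same behavior as the fixture's check_output calls, minus shell=True;
    # each argument is passed through unmodified instead of being re-parsed.
    return subprocess.check_output(
        ["kubectl", verb, "--kubeconfig", kubeconfig, "-f", manifest_url],
        stderr=subprocess.STDOUT,
    )

install_crd would call kubectl("apply", kube_fname, istio_crd_url) and teardown_module kubectl("delete", kube_fname, istio_crd_url).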
79eff5aa96e54120f5f020ac45058088b3b77813
|
tools/insert_dp_events_into_traceX.py
|
tools/insert_dp_events_into_traceX.py
|
#!/usr/bin/env python
import argparse
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import sts.replay_event as replay_events
from sts.dataplane_traces.trace import Trace
from sts.input_traces.input_logger import InputLogger
from sts.log_processing.superlog_parser import parse
parser = argparse.ArgumentParser()
parser.add_argument('input', metavar="INPUT",
                    help='The input json event trace to be converted')
parser.add_argument('-d', '--dp-trace-path', dest="dp_trace_path", default=None,
help='''Optional path to the dataplane trace file. '''
''' Default: `dirname`/dataplane.trace ''')
args = parser.parse_args()
def main(args):
if args.dp_trace_path is None:
        args.dp_trace_path = os.path.join(
            os.path.dirname(args.input), "dataplane.trace")
dp_trace = Trace(args.dp_trace_path).dataplane_trace
event_logger = InputLogger(output_path="/tmp/events.trace")
event_logger.open()
with open(args.input) as input_file:
trace = parse(input_file)
for event in trace:
if type(event) == replay_events.TrafficInjection:
event.dp_event = dp_trace.pop(0)
event_logger.log_input_event(event)
event_logger.output.close()
if __name__ == '__main__':
main(args)
|
Add tool for removing need for TrafficInjection backwards compatibility
|
Add tool for removing need for TrafficInjection backwards compatibility
|
Python
|
apache-2.0
|
ucb-sts/sts,jmiserez/sts,ucb-sts/sts,jmiserez/sts
|
Add tool for removing need for TrafficInjection backwards compatibility
|
#!/usr/bin/env python
import argparse
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import sts.replay_event as replay_events
from sts.dataplane_traces.trace import Trace
from sts.input_traces.input_logger import InputLogger
from sts.log_processing.superlog_parser import parse
parser = argparse.ArgumentParser()
parser.add_argument('input', metavar="INPUT",
                    help='The input json event trace to be converted')
parser.add_argument('-d', '--dp-trace-path', dest="dp_trace_path", default=None,
help='''Optional path to the dataplane trace file. '''
''' Default: `dirname`/dataplane.trace ''')
args = parser.parse_args()
def main(args):
if args.dp_trace_path is None:
        args.dp_trace_path = os.path.join(
            os.path.dirname(args.input), "dataplane.trace")
dp_trace = Trace(args.dp_trace_path).dataplane_trace
event_logger = InputLogger(output_path="/tmp/events.trace")
event_logger.open()
with open(args.input) as input_file:
trace = parse(input_file)
for event in trace:
if type(event) == replay_events.TrafficInjection:
event.dp_event = dp_trace.pop(0)
event_logger.log_input_event(event)
event_logger.output.close()
if __name__ == '__main__':
main(args)
|
<commit_before><commit_msg>Add tool for removing need for TrafficInjection backwards compatibility<commit_after>
|
#!/usr/bin/env python
import argparse
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import sts.replay_event as replay_events
from sts.dataplane_traces.trace import Trace
from sts.input_traces.input_logger import InputLogger
from sts.log_processing.superlog_parser import parse
parser = argparse.ArgumentParser()
parser.add_argument('input', metavar="INPUT",
                    help='The input json event trace to be converted')
parser.add_argument('-d', '--dp-trace-path', dest="dp_trace_path", default=None,
help='''Optional path to the dataplane trace file. '''
''' Default: `dirname`/dataplane.trace ''')
args = parser.parse_args()
def main(args):
if args.dp_trace_path is None:
        args.dp_trace_path = os.path.join(
            os.path.dirname(args.input), "dataplane.trace")
dp_trace = Trace(args.dp_trace_path).dataplane_trace
event_logger = InputLogger(output_path="/tmp/events.trace")
event_logger.open()
with open(args.input) as input_file:
trace = parse(input_file)
for event in trace:
if type(event) == replay_events.TrafficInjection:
event.dp_event = dp_trace.pop(0)
event_logger.log_input_event(event)
event_logger.output.close()
if __name__ == '__main__':
main(args)
|
Add tool for removing need for TrafficInjection backwards compatibility#!/usr/bin/env python
import argparse
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import sts.replay_event as replay_events
from sts.dataplane_traces.trace import Trace
from sts.input_traces.input_logger import InputLogger
from sts.log_processing.superlog_parser import parse
parser = argparse.ArgumentParser()
parser.add_argument('input', metavar="INPUT",
                    help='The input json event trace to be converted')
parser.add_argument('-d', '--dp-trace-path', dest="dp_trace_path", default=None,
help='''Optional path to the dataplane trace file. '''
''' Default: `dirname`/dataplane.trace ''')
args = parser.parse_args()
def main(args):
if args.dp_trace_path is None:
        args.dp_trace_path = os.path.join(
            os.path.dirname(args.input), "dataplane.trace")
dp_trace = Trace(args.dp_trace_path).dataplane_trace
event_logger = InputLogger(output_path="/tmp/events.trace")
event_logger.open()
with open(args.input) as input_file:
trace = parse(input_file)
for event in trace:
if type(event) == replay_events.TrafficInjection:
event.dp_event = dp_trace.pop(0)
event_logger.log_input_event(event)
event_logger.output.close()
if __name__ == '__main__':
main(args)
|
<commit_before><commit_msg>Add tool for removing need for TrafficInjection backwards compatibility<commit_after>#!/usr/bin/env python
import argparse
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import sts.replay_event as replay_events
from sts.dataplane_traces.trace import Trace
from sts.input_traces.input_logger import InputLogger
from sts.log_processing.superlog_parser import parse
parser = argparse.ArgumentParser()
parser.add_argument('input', metavar="INPUT",
                    help='The input json event trace to be converted')
parser.add_argument('-d', '--dp-trace-path', dest="dp_trace_path", default=None,
help='''Optional path to the dataplane trace file. '''
''' Default: `dirname`/dataplane.trace ''')
args = parser.parse_args()
def main(args):
if args.dp_trace_path is None:
        args.dp_trace_path = os.path.join(
            os.path.dirname(args.input), "dataplane.trace")
dp_trace = Trace(args.dp_trace_path).dataplane_trace
event_logger = InputLogger(output_path="/tmp/events.trace")
event_logger.open()
with open(args.input) as input_file:
trace = parse(input_file)
for event in trace:
if type(event) == replay_events.TrafficInjection:
event.dp_event = dp_trace.pop(0)
event_logger.log_input_event(event)
event_logger.output.close()
if __name__ == '__main__':
main(args)
|
|
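The heart of the tool above is the pairing step: walk the parsed events in order and attach the next dataplane event to every TrafficInjection. The generator below sketches that pop-and-attach pattern without the STS classes; the events, dp_events, and is_injection names are placeholders for illustration, not STS types.

def attach_dp_events(events, dp_events, is_injection):
    """Pair each injection event with the next dataplane event, in order."""
    dp_iter = iter(dp_events)
    for event in events:
        if is_injection(event):
            # Consuming the iterator preserves the one-to-one trace ordering
            # that the pop(0) calls in the tool rely on.
            event.dp_event = next(dp_iter)
        yield event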
86f35f14c91116c3f00e0227a2eeb10c58f20d4b
|
pymetabiosis/test/test_wrapper.py
|
pymetabiosis/test/test_wrapper.py
|
from pymetabiosis.module import import_module
from pymetabiosis.wrapper import MetabiosisWrapper
def test_getattr_on_module():
sqlite = import_module("sqlite")
assert isinstance(sqlite, MetabiosisWrapper)
connect = sqlite.connect
assert isinstance(connect, MetabiosisWrapper)
assert repr(connect).startswith("<function connect at ")
|
Add a test for module.attribute
|
Add a test for module.attribute
|
Python
|
mit
|
prabhuramachandran/pymetabiosis,rguillebert/pymetabiosis
|
Add a test for module.attribute
|
from pymetabiosis.module import import_module
from pymetabiosis.wrapper import MetabiosisWrapper
def test_getattr_on_module():
sqlite = import_module("sqlite")
assert isinstance(sqlite, MetabiosisWrapper)
connect = sqlite.connect
assert isinstance(connect, MetabiosisWrapper)
assert repr(connect).startswith("<function connect at ")
|
<commit_before><commit_msg>Add a test for module.attribute<commit_after>
|
from pymetabiosis.module import import_module
from pymetabiosis.wrapper import MetabiosisWrapper
def test_getattr_on_module():
sqlite = import_module("sqlite")
assert isinstance(sqlite, MetabiosisWrapper)
connect = sqlite.connect
assert isinstance(connect, MetabiosisWrapper)
assert repr(connect).startswith("<function connect at ")
|
Add a test for module.attributefrom pymetabiosis.module import import_module
from pymetabiosis.wrapper import MetabiosisWrapper
def test_getattr_on_module():
sqlite = import_module("sqlite")
assert isinstance(sqlite, MetabiosisWrapper)
connect = sqlite.connect
assert isinstance(connect, MetabiosisWrapper)
assert repr(connect).startswith("<function connect at ")
|
<commit_before><commit_msg>Add a test for module.attribute<commit_after>from pymetabiosis.module import import_module
from pymetabiosis.wrapper import MetabiosisWrapper
def test_getattr_on_module():
sqlite = import_module("sqlite")
assert isinstance(sqlite, MetabiosisWrapper)
connect = sqlite.connect
assert isinstance(connect, MetabiosisWrapper)
assert repr(connect).startswith("<function connect at ")
|
|
c279862c8be98d697f9340a7a5ed96cc17744b08
|
icekit/admin_forms.py
|
icekit/admin_forms.py
|
from django.contrib.auth.forms import PasswordResetForm
class PasswordResetForm(PasswordResetForm):
"""
An extended Password reset form designed for staff users.
    It limits the users who are allowed to use this form, and so receive a
    password reset email, to staff users only.
    """
    def get_users(self, email):
        """
        Make sure users are staff users.
        In addition to the other PasswordResetForm conditions, ensure
that the user is a staff user before sending them a password
reset email.
:param email: Textual email address.
:return: List of users.
"""
return (u for u in super(PasswordResetForm, self).get_users(email) if u.is_staff)
|
Add admin form for password reset to limit to staff.
|
Add admin form for password reset to limit to staff.
|
Python
|
mit
|
ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit,ic-labs/django-icekit
|
Add admin form for password reset to limit to staff.
|
from django.contrib.auth.forms import PasswordResetForm
class PasswordResetForm(PasswordResetForm):
"""
An extended Password reset form designed for staff users.
    It limits the users who are allowed to use this form, and so receive a
    password reset email, to staff users only.
    """
    def get_users(self, email):
        """
        Make sure users are staff users.
        In addition to the other PasswordResetForm conditions, ensure
that the user is a staff user before sending them a password
reset email.
:param email: Textual email address.
:return: List of users.
"""
return (u for u in super(PasswordResetForm, self).get_users(email) if u.is_staff)
|
<commit_before><commit_msg>Add admin form for password reset to limit to staff.<commit_after>
|
from django.contrib.auth.forms import PasswordResetForm
class PasswordResetForm(PasswordResetForm):
"""
An extended Password reset form designed for staff users.
    It limits the users who are allowed to use this form, and so receive a
    password reset email, to staff users only.
    """
    def get_users(self, email):
        """
        Make sure users are staff users.
        In addition to the other PasswordResetForm conditions, ensure
that the user is a staff user before sending them a password
reset email.
:param email: Textual email address.
:return: List of users.
"""
return (u for u in super(PasswordResetForm, self).get_users(email) if u.is_staff)
|
Add admin form for password reset to limit to staff.from django.contrib.auth.forms import PasswordResetForm
class PasswordResetForm(PasswordResetForm):
"""
An extended Password reset form designed for staff users.
    It limits the users who are allowed to use this form, and so receive a
    password reset email, to staff users only.
    """
    def get_users(self, email):
        """
        Make sure users are staff users.
        In addition to the other PasswordResetForm conditions, ensure
that the user is a staff user before sending them a password
reset email.
:param email: Textual email address.
:return: List of users.
"""
return (u for u in super(PasswordResetForm, self).get_users(email) if u.is_staff)
|
<commit_before><commit_msg>Add admin form for password reset to limit to staff.<commit_after>from django.contrib.auth.forms import PasswordResetForm
class PasswordResetForm(PasswordResetForm):
"""
An extended Password reset form designed for staff users.
    It limits the users who are allowed to use this form, and so receive a
    password reset email, to staff users only.
    """
    def get_users(self, email):
        """
        Make sure users are staff users.
        In addition to the other PasswordResetForm conditions, ensure
that the user is a staff user before sending them a password
reset email.
:param email: Textual email address.
:return: List of users.
"""
return (u for u in super(PasswordResetForm, self).get_users(email) if u.is_staff)
|
|
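To actually route staff password resets through this form, it has to be handed to a reset view. The URLconf sketch below shows one way to do that with Django's class-based auth views (Django 1.11+); the URL pattern and view name are assumptions for illustration, not how icekit wires it up.

from django.conf.urls import url
from django.contrib.auth import views as auth_views
from icekit.admin_forms import PasswordResetForm

urlpatterns = [
    # PasswordResetView only emails the users returned by get_users(),
    # so non-staff addresses are silently skipped.
    url(r'^admin/password_reset/$',
        auth_views.PasswordResetView.as_view(form_class=PasswordResetForm),
        name='admin_password_reset'),
]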
19887c6d56761d1417b49f2e26787b9233bbdcae
|
Problems/newtonRaphson.py
|
Problems/newtonRaphson.py
|
#!/Applications/anaconda/envs/Python3/bin
def main():
# X^2 - 1
p_1 = (-1, 0, 1)
r_1 = newton_raphson(p_1)
print('Polynomial {} has root of {}'.format(print_poly(p_1), r_1))
# X^3 - 27
p_2 = (-27, 0, 0, 1)
r_2 = newton_raphson(p_2)
print('Polynomial {} has root of {}'.format(print_poly(p_2), r_2))
def print_poly(polynomial):
'''
    Creates a string for a given polynomial, entered as a tuple of coefficients where the index i of the item represents the power of the variable x.
'''
if polynomial[0] == 0:
output = ''
else:
output = str(polynomial[0])
for i, a in enumerate(polynomial):
if a == 0 or i == 0:
continue
output = str(a) + 'x^' + str(i) + ' + ' + output
return output
def eval_polynomial(polynomial, x):
'''
Inputs a tuple of coefficients a_i where the index i of the item represents the power of the variable x. Evaluates and returns the polynomial for given value of x.
'''
result = 0
for i, a in enumerate(polynomial):
result += a * (x ** i)
return result
def compute_derivative(polynomial):
'''
Inputs a tuple of coefficients a_i where the index i of the item represents the power of the variable x. Returns a tuple in similar form that represents the derivative of the given polynomial.
'''
derivative = []
    for i, a in enumerate(polynomial):
        derivative.append(i * a)
return tuple(derivative[1:])
def newton_raphson(polynomial, epsilon=0.01):
'''
General approximation to find the roots of a polynomial in one variable.
p(x) = a_n * x^n + a_n-1 * x^n-1 + ... + a_1 * x + a_0
Find r such that p(r) = 0
If g is an approximation of the root, then g - p(g)/p'(g) is a better approximation, where p' is the derivative of p.
Input is tuple of coefficients a_i where the index i of the item represents the power of the variable x
'''
guess = 1
derivative = compute_derivative(polynomial)
p_of_guess = eval_polynomial(polynomial, guess)
d_of_guess = eval_polynomial(derivative, guess)
while abs(p_of_guess) > epsilon:
guess = guess - (p_of_guess / d_of_guess)
p_of_guess = eval_polynomial(polynomial, guess)
d_of_guess = eval_polynomial(derivative, guess)
return guess
if __name__ == '__main__':
main()
|
Add Newton-Raphson polynomial approximation problem
|
Add Newton-Raphson polynomial approximation problem
|
Python
|
mit
|
HKuz/Test_Code
|
Add Newton-Raphson polynomial approximation problem
|
#!/Applications/anaconda/envs/Python3/bin
def main():
# X^2 - 1
p_1 = (-1, 0, 1)
r_1 = newton_raphson(p_1)
print('Polynomial {} has root of {}'.format(print_poly(p_1), r_1))
# X^3 - 27
p_2 = (-27, 0, 0, 1)
r_2 = newton_raphson(p_2)
print('Polynomial {} has root of {}'.format(print_poly(p_2), r_2))
def print_poly(polynomial):
'''
    Creates a string for a given polynomial, entered as a tuple of coefficients where the index i of the item represents the power of the variable x.
'''
if polynomial[0] == 0:
output = ''
else:
output = str(polynomial[0])
for i, a in enumerate(polynomial):
if a == 0 or i == 0:
continue
output = str(a) + 'x^' + str(i) + ' + ' + output
return output
def eval_polynomial(polynomial, x):
'''
Inputs a tuple of coefficients a_i where the index i of the item represents the power of the variable x. Evaluates and returns the polynomial for given value of x.
'''
result = 0
for i, a in enumerate(polynomial):
result += a * (x ** i)
return result
def compute_derivative(polynomial):
'''
Inputs a tuple of coefficients a_i where the index i of the item represents the power of the variable x. Returns a tuple in similar form that represents the derivative of the given polynomial.
'''
derivative = []
    for i, a in enumerate(polynomial):
        derivative.append(i * a)
return tuple(derivative[1:])
def newton_raphson(polynomial, epsilon=0.01):
'''
General approximation to find the roots of a polynomial in one variable.
p(x) = a_n * x^n + a_n-1 * x^n-1 + ... + a_1 * x + a_0
Find r such that p(r) = 0
If g is an approximation of the root, then g - p(g)/p'(g) is a better approximation, where p' is the derivative of p.
Input is tuple of coefficients a_i where the index i of the item represents the power of the variable x
'''
guess = 1
derivative = compute_derivative(polynomial)
p_of_guess = eval_polynomial(polynomial, guess)
d_of_guess = eval_polynomial(derivative, guess)
while abs(p_of_guess) > epsilon:
guess = guess - (p_of_guess / d_of_guess)
p_of_guess = eval_polynomial(polynomial, guess)
d_of_guess = eval_polynomial(derivative, guess)
return guess
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add Newton-Raphson polynomial approximation problem<commit_after>
|
#!/Applications/anaconda/envs/Python3/bin
def main():
# X^2 - 1
p_1 = (-1, 0, 1)
r_1 = newton_raphson(p_1)
print('Polynomial {} has root of {}'.format(print_poly(p_1), r_1))
# X^3 - 27
p_2 = (-27, 0, 0, 1)
r_2 = newton_raphson(p_2)
print('Polynomial {} has root of {}'.format(print_poly(p_2), r_2))
def print_poly(polynomial):
'''
    Creates a string for a given polynomial, entered as a tuple of coefficients where the index i of the item represents the power of the variable x.
'''
if polynomial[0] == 0:
output = ''
else:
output = str(polynomial[0])
for i, a in enumerate(polynomial):
if a == 0 or i == 0:
continue
output = str(a) + 'x^' + str(i) + ' + ' + output
return output
def eval_polynomial(polynomial, x):
'''
Inputs a tuple of coefficients a_i where the index i of the item represents the power of the variable x. Evaluates and returns the polynomial for given value of x.
'''
result = 0
for i, a in enumerate(polynomial):
result += a * (x ** i)
return result
def compute_derivative(polynomial):
'''
Inputs a tuple of coefficients a_i where the index i of the item represents the power of the variable x. Returns a tuple in similar form that represents the derivative of the given polynomial.
'''
derivative = []
    for i, a in enumerate(polynomial):
        derivative.append(i * a)
return tuple(derivative[1:])
def newton_raphson(polynomial, epsilon=0.01):
'''
General approximation to find the roots of a polynomial in one variable.
p(x) = a_n * x^n + a_n-1 * x^n-1 + ... + a_1 * x + a_0
Find r such that p(r) = 0
If g is an approximation of the root, then g - p(g)/p'(g) is a better approximation, where p' is the derivative of p.
Input is tuple of coefficients a_i where the index i of the item represents the power of the variable x
'''
guess = 1
derivative = compute_derivative(polynomial)
p_of_guess = eval_polynomial(polynomial, guess)
d_of_guess = eval_polynomial(derivative, guess)
while abs(p_of_guess) > epsilon:
guess = guess - (p_of_guess / d_of_guess)
p_of_guess = eval_polynomial(polynomial, guess)
d_of_guess = eval_polynomial(derivative, guess)
return guess
if __name__ == '__main__':
main()
|
Add Newton-Raphson polynomial approximation problem#!/Applications/anaconda/envs/Python3/bin
def main():
# X^2 - 1
p_1 = (-1, 0, 1)
r_1 = newton_raphson(p_1)
print('Polynomial {} has root of {}'.format(print_poly(p_1), r_1))
# X^3 - 27
p_2 = (-27, 0, 0, 1)
r_2 = newton_raphson(p_2)
print('Polynomial {} has root of {}'.format(print_poly(p_2), r_2))
def print_poly(polynomial):
'''
    Creates a string for a given polynomial, entered as a tuple of coefficients where the index i of the item represents the power of the variable x.
'''
if polynomial[0] == 0:
output = ''
else:
output = str(polynomial[0])
for i, a in enumerate(polynomial):
if a == 0 or i == 0:
continue
output = str(a) + 'x^' + str(i) + ' + ' + output
return output
def eval_polynomial(polynomial, x):
'''
Inputs a tuple of coefficients a_i where the index i of the item represents the power of the variable x. Evaluates and returns the polynomial for given value of x.
'''
result = 0
for i, a in enumerate(polynomial):
result += a * (x ** i)
return result
def compute_derivative(polynomial):
'''
Inputs a tuple of coefficients a_i where the index i of the item represents the power of the variable x. Returns a tuple in similar form that represents the derivative of the given polynomial.
'''
derivative = []
    for i, a in enumerate(polynomial):
        derivative.append(i * a)
return tuple(derivative[1:])
def newton_raphson(polynomial, epsilon=0.01):
'''
General approximation to find the roots of a polynomial in one variable.
p(x) = a_n * x^n + a_n-1 * x^n-1 + ... + a_1 * x + a_0
Find r such that p(r) = 0
If g is an approximation of the root, then g - p(g)/p'(g) is a better approximation, where p' is the derivative of p.
Input is tuple of coefficients a_i where the index i of the item represents the power of the variable x
'''
guess = 1
derivative = compute_derivative(polynomial)
p_of_guess = eval_polynomial(polynomial, guess)
d_of_guess = eval_polynomial(derivative, guess)
while abs(p_of_guess) > epsilon:
guess = guess - (p_of_guess / d_of_guess)
p_of_guess = eval_polynomial(polynomial, guess)
d_of_guess = eval_polynomial(derivative, guess)
return guess
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add Newton-Raphson polynomial approximation problem<commit_after>#!/Applications/anaconda/envs/Python3/bin
def main():
# X^2 - 1
p_1 = (-1, 0, 1)
r_1 = newton_raphson(p_1)
print('Polynomial {} has root of {}'.format(print_poly(p_1), r_1))
# X^3 - 27
p_2 = (-27, 0, 0, 1)
r_2 = newton_raphson(p_2)
print('Polynomial {} has root of {}'.format(print_poly(p_2), r_2))
def print_poly(polynomial):
'''
    Creates a string for a given polynomial, entered as a tuple of coefficients where the index i of the item represents the power of the variable x.
'''
if polynomial[0] == 0:
output = ''
else:
output = str(polynomial[0])
for i, a in enumerate(polynomial):
if a == 0 or i == 0:
continue
output = str(a) + 'x^' + str(i) + ' + ' + output
return output
def eval_polynomial(polynomial, x):
'''
Inputs a tuple of coefficients a_i where the index i of the item represents the power of the variable x. Evaluates and returns the polynomial for given value of x.
'''
result = 0
for i, a in enumerate(polynomial):
result += a * (x ** i)
return result
def compute_derivative(polynomial):
'''
Inputs a tuple of coefficients a_i where the index i of the item represents the power of the variable x. Returns a tuple in similar form that represents the derivative of the given polynomial.
'''
derivative = []
    for i, a in enumerate(polynomial):
        derivative.append(i * a)
return tuple(derivative[1:])
def newton_raphson(polynomial, epsilon=0.01):
'''
General approximation to find the roots of a polynomial in one variable.
p(x) = a_n * x^n + a_n-1 * x^n-1 + ... + a_1 * x + a_0
Find r such that p(r) = 0
If g is an approximation of the root, then g - p(g)/p'(g) is a better approximation, where p' is the derivative of p.
Input is tuple of coefficients a_i where the index i of the item represents the power of the variable x
'''
guess = 1
derivative = compute_derivative(polynomial)
p_of_guess = eval_polynomial(polynomial, guess)
d_of_guess = eval_polynomial(derivative, guess)
while abs(p_of_guess) > epsilon:
guess = guess - (p_of_guess / d_of_guess)
p_of_guess = eval_polynomial(polynomial, guess)
d_of_guess = eval_polynomial(derivative, guess)
return guess
if __name__ == '__main__':
main()
|
|
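To see the update rule g - p(g)/p'(g) converge in practice, the short trace below applies the same helpers to p(x) = x^2 - 2, whose positive root is sqrt(2), roughly 1.41421. It assumes it runs in the same module as the script above; the fixed iteration count is an illustrative safeguard rather than part of the tool.

poly = (-2, 0, 1)  # x^2 - 2
guess = 1.0
for _ in range(5):  # plenty of iterations at epsilon = 0.01
    p_of_guess = eval_polynomial(poly, guess)
    d_of_guess = eval_polynomial(compute_derivative(poly), guess)
    guess = guess - p_of_guess / d_of_guess
    print(round(guess, 6))
# prints 1.5, 1.416667, 1.414216, 1.414214, 1.414214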
7b9e7e3c2e0c6274dd4dc67621bdac6111e8d56f
|
improved_decorator.py
|
improved_decorator.py
|
from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
import os
def validate_twilio_request(f):
"""Validates that incoming requests genuinely originated from Twilio"""
@wraps(f)
def decorated_function(*args, **kwargs):
# Create an instance of the RequestValidator class
validator = RequestValidator(os.environ.get('TWILIO_AUTH_TOKEN'))
# Validate the request using its URL, POST data,
# and X-TWILIO-SIGNATURE header
request_valid = validator.validate(
request.url,
request.form,
request.headers.get('X-TWILIO-SIGNATURE', ''))
# Continue processing the request if it's valid, return a 403 error if
# it's not
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
|
Add better decorator for testing
|
Add better decorator for testing
|
Python
|
mit
|
TwilioDevEd/webhooks-example-flask
|
Add better decorator for testing
|
from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
import os
def validate_twilio_request(f):
"""Validates that incoming requests genuinely originated from Twilio"""
@wraps(f)
def decorated_function(*args, **kwargs):
# Create an instance of the RequestValidator class
validator = RequestValidator(os.environ.get('TWILIO_AUTH_TOKEN'))
# Validate the request using its URL, POST data,
# and X-TWILIO-SIGNATURE header
request_valid = validator.validate(
request.url,
request.form,
request.headers.get('X-TWILIO-SIGNATURE', ''))
# Continue processing the request if it's valid, return a 403 error if
# it's not
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
|
<commit_before><commit_msg>Add better decorator for testing<commit_after>
|
from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
import os
def validate_twilio_request(f):
"""Validates that incoming requests genuinely originated from Twilio"""
@wraps(f)
def decorated_function(*args, **kwargs):
# Create an instance of the RequestValidator class
validator = RequestValidator(os.environ.get('TWILIO_AUTH_TOKEN'))
# Validate the request using its URL, POST data,
# and X-TWILIO-SIGNATURE header
request_valid = validator.validate(
request.url,
request.form,
request.headers.get('X-TWILIO-SIGNATURE', ''))
# Continue processing the request if it's valid, return a 403 error if
# it's not
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
|
Add better decorator for testingfrom flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
import os
def validate_twilio_request(f):
"""Validates that incoming requests genuinely originated from Twilio"""
@wraps(f)
def decorated_function(*args, **kwargs):
# Create an instance of the RequestValidator class
validator = RequestValidator(os.environ.get('TWILIO_AUTH_TOKEN'))
# Validate the request using its URL, POST data,
# and X-TWILIO-SIGNATURE header
request_valid = validator.validate(
request.url,
request.form,
request.headers.get('X-TWILIO-SIGNATURE', ''))
# Continue processing the request if it's valid, return a 403 error if
# it's not
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
|
<commit_before><commit_msg>Add better decorator for testing<commit_after>from flask import abort, current_app, request
from functools import wraps
from twilio.util import RequestValidator
import os
def validate_twilio_request(f):
"""Validates that incoming requests genuinely originated from Twilio"""
@wraps(f)
def decorated_function(*args, **kwargs):
# Create an instance of the RequestValidator class
validator = RequestValidator(os.environ.get('TWILIO_AUTH_TOKEN'))
# Validate the request using its URL, POST data,
# and X-TWILIO-SIGNATURE header
request_valid = validator.validate(
request.url,
request.form,
request.headers.get('X-TWILIO-SIGNATURE', ''))
# Continue processing the request if it's valid, return a 403 error if
# it's not
if request_valid or current_app.debug:
return f(*args, **kwargs)
else:
return abort(403)
return decorated_function
|
|
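Applied to a Flask app, the decorator sits between the route registration and the view, so the signature check runs before any webhook logic. The /voice route and TwiML body below are placeholders for illustration; only the decorator usage itself comes from the module above.

from flask import Flask
from improved_decorator import validate_twilio_request

app = Flask(__name__)

@app.route('/voice', methods=['POST'])
@validate_twilio_request  # rejects requests with a bad X-Twilio-Signature
def voice():
    # Reached only for validated requests (or when app.debug is set).
    return '<Response><Say>Hello from a verified webhook.</Say></Response>'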
0c62015b0eca96f3118ee099fb1d4f375738a888
|
CodeFights/stringsRearrangement.py
|
CodeFights/stringsRearrangement.py
|
#!/usr/local/bin/python
# Code Fights Strings Rearrangement Problem
import itertools
def stringsRearrangement(inputArray):
def f(x, y):
c = 0
for i in range(len(x)):
if x[i] != y[i]:
c += 1
if c == 1:
return True
return False
    for k in itertools.permutations(inputArray, len(inputArray)):
r = True
for i in range(len(k) - 1):
if not f(k[i], k[i + 1]):
r = False
if r:
return True
return False
def main():
tests = [
[["aba", "bbb", "bab"], False],
[["ab", "bb", "aa"], True],
[["q", "q"], False],
[["zzzzab", "zzzzbb", "zzzzaa"], True],
[["ab", "ad", "ef", "eg"], False],
[["abc", "bef", "bcc", "bec", "bbc", "bdc"], True],
[["abc", "abx", "axx", "abc"], False],
[["abc", "abx", "axx", "abx", "abc"], True],
[["f", "g", "a", "h"], True]
]
for t in tests:
res = stringsRearrangement(t[0])
ans = t[1]
if ans == res:
print("PASSED: stringsRearrangement({}) returned {}"
.format(t[0], res))
else:
print(("FAILED: stringsRearrangement({}) returned {},"
"answer: {}").format(t[0], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights strings rearrangement problem
|
Solve Code Fights strings rearrangement problem
|
Python
|
mit
|
HKuz/Test_Code
|
Solve Code Fights strings rearrangement problem
|
#!/usr/local/bin/python
# Code Fights Strings Rearrangement Problem
import itertools
def stringsRearrangement(inputArray):
def f(x, y):
c = 0
for i in range(len(x)):
if x[i] != y[i]:
c += 1
if c == 1:
return True
return False
    for k in itertools.permutations(inputArray, len(inputArray)):
r = True
for i in range(len(k) - 1):
if not f(k[i], k[i + 1]):
r = False
if r:
return True
return False
def main():
tests = [
[["aba", "bbb", "bab"], False],
[["ab", "bb", "aa"], True],
[["q", "q"], False],
[["zzzzab", "zzzzbb", "zzzzaa"], True],
[["ab", "ad", "ef", "eg"], False],
[["abc", "bef", "bcc", "bec", "bbc", "bdc"], True],
[["abc", "abx", "axx", "abc"], False],
[["abc", "abx", "axx", "abx", "abc"], True],
[["f", "g", "a", "h"], True]
]
for t in tests:
res = stringsRearrangement(t[0])
ans = t[1]
if ans == res:
print("PASSED: stringsRearrangement({}) returned {}"
.format(t[0], res))
else:
print(("FAILED: stringsRearrangement({}) returned {},"
"answer: {}").format(t[0], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights strings rearrangement problem<commit_after>
|
#!/usr/local/bin/python
# Code Fights Strings Rearrangement Problem
import itertools
def stringsRearrangement(inputArray):
def f(x, y):
c = 0
for i in range(len(x)):
if x[i] != y[i]:
c += 1
if c == 1:
return True
return False
    for k in itertools.permutations(inputArray, len(inputArray)):
r = True
for i in range(len(k) - 1):
if not f(k[i], k[i + 1]):
r = False
if r:
return True
return False
def main():
tests = [
[["aba", "bbb", "bab"], False],
[["ab", "bb", "aa"], True],
[["q", "q"], False],
[["zzzzab", "zzzzbb", "zzzzaa"], True],
[["ab", "ad", "ef", "eg"], False],
[["abc", "bef", "bcc", "bec", "bbc", "bdc"], True],
[["abc", "abx", "axx", "abc"], False],
[["abc", "abx", "axx", "abx", "abc"], True],
[["f", "g", "a", "h"], True]
]
for t in tests:
res = stringsRearrangement(t[0])
ans = t[1]
if ans == res:
print("PASSED: stringsRearrangement({}) returned {}"
.format(t[0], res))
else:
print(("FAILED: stringsRearrangement({}) returned {},"
"answer: {}").format(t[0], res, ans))
if __name__ == '__main__':
main()
|
Solve Code Fights strings rearrangement problem#!/usr/local/bin/python
# Code Fights Strings Rearrangement Problem
import itertools
def stringsRearrangement(inputArray):
def f(x, y):
c = 0
for i in range(len(x)):
if x[i] != y[i]:
c += 1
if c == 1:
return True
return False
    for k in itertools.permutations(inputArray, len(inputArray)):
r = True
for i in range(len(k) - 1):
if not f(k[i], k[i + 1]):
r = False
if r:
return True
return False
def main():
tests = [
[["aba", "bbb", "bab"], False],
[["ab", "bb", "aa"], True],
[["q", "q"], False],
[["zzzzab", "zzzzbb", "zzzzaa"], True],
[["ab", "ad", "ef", "eg"], False],
[["abc", "bef", "bcc", "bec", "bbc", "bdc"], True],
[["abc", "abx", "axx", "abc"], False],
[["abc", "abx", "axx", "abx", "abc"], True],
[["f", "g", "a", "h"], True]
]
for t in tests:
res = stringsRearrangement(t[0])
ans = t[1]
if ans == res:
print("PASSED: stringsRearrangement({}) returned {}"
.format(t[0], res))
else:
print(("FAILED: stringsRearrangement({}) returned {},"
"answer: {}").format(t[0], res, ans))
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Solve Code Fights strings rearrangement problem<commit_after>#!/usr/local/bin/python
# Code Fights Strings Rearrangement Problem
import itertools
def stringsRearrangement(inputArray):
def f(x, y):
c = 0
for i in range(len(x)):
if x[i] != y[i]:
c += 1
if c == 1:
return True
return False
    for k in itertools.permutations(inputArray, len(inputArray)):
r = True
for i in range(len(k) - 1):
if not f(k[i], k[i + 1]):
r = False
if r:
return True
return False
def main():
tests = [
[["aba", "bbb", "bab"], False],
[["ab", "bb", "aa"], True],
[["q", "q"], False],
[["zzzzab", "zzzzbb", "zzzzaa"], True],
[["ab", "ad", "ef", "eg"], False],
[["abc", "bef", "bcc", "bec", "bbc", "bdc"], True],
[["abc", "abx", "axx", "abc"], False],
[["abc", "abx", "axx", "abx", "abc"], True],
[["f", "g", "a", "h"], True]
]
for t in tests:
res = stringsRearrangement(t[0])
ans = t[1]
if ans == res:
print("PASSED: stringsRearrangement({}) returned {}"
.format(t[0], res))
else:
print(("FAILED: stringsRearrangement({}) returned {},"
"answer: {}").format(t[0], res, ans))
if __name__ == '__main__':
main()
|
|
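The helper f above counts mismatched positions with an explicit loop; since the problem guarantees equal-length strings, the same differs-by-exactly-one check can be written as a one-liner. This is an equivalent sketch, not a change to the solution, and the brute-force permutation scan it would feed remains O(n! * n * m) for n strings of length m.

def differs_by_one(x, y):
    # zip pairs characters position by position; summing the booleans
    # counts mismatches, exactly like the counter c in f().
    return sum(a != b for a, b in zip(x, y)) == 1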
fb2f4abee96a209c2c2066ed92e9b22533f501c1
|
Arrays/longest_1s_by_replacing_0.py
|
Arrays/longest_1s_by_replacing_0.py
|
import unittest
"""
Given an array of 0s and 1s, find the position of 0 to be replaced with 1 to get longest contiguous
sequence of 1s.
Input: 1 1 0 0 1 0 1 1 1 0 1 1 1
Output: index 9
Input: 1 1 1 1 0
Output: index 4
Input: 01010
Output: index 2
"""
"""
Approach:
1. The idea is to keep track of 2 indexes - prev_zero and prev_prev_zero.
2. If current number is zero, calculate the difference between current index and prev_prev_zero.
3. This difference minus 1 is the number of 1s around prev_zero.
4. Update max_difference so far by comparing with difference from previous step.
5. Return the prev_zero index corresponding to max_difference as the answer.
"""
def index_of_zero_to_get_longest_1s(ones_and_zeros):
prev_zero = -1
prev_prev_zero = -1
end = len(ones_and_zeros)
max_index = -1
max_1s_length = 0
for i in range(end):
if ones_and_zeros[i] == 0:
if i-prev_prev_zero > max_1s_length:
max_1s_length = i-prev_prev_zero
max_index = prev_zero
prev_prev_zero = prev_zero
prev_zero = i
if end-prev_prev_zero > max_1s_length:
max_index = prev_zero
return max_index
class TestMax1s(unittest.TestCase):
def test_max_1s_length(self):
ones_and_zeros = [1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), 9)
ones_and_zeros = [1, 1, 1, 1, 0]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), 4)
ones_and_zeros = [1, 1]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), -1)
|
Index of zero to get longest 1s
|
Index of zero to get longest 1s
|
Python
|
mit
|
prathamtandon/g4gproblems
|
Index of zero to get longest 1s
|
import unittest
"""
Given an array of 0s and 1s, find the position of 0 to be replaced with 1 to get longest contiguous
sequence of 1s.
Input: 1 1 0 0 1 0 1 1 1 0 1 1 1
Output: index 9
Input: 1 1 1 1 0
Output: index 4
Input: 01010
Output: index 2
"""
"""
Approach:
1. The idea is to keep track of 2 indexes - prev_zero and prev_prev_zero.
2. If current number is zero, calculate the difference between current index and prev_prev_zero.
3. This difference minus 1 is the number of 1s around prev_zero.
4. Update max_difference so far by comparing with difference from previous step.
5. Return the prev_zero index corresponding to max_difference as the answer.
"""
def index_of_zero_to_get_longest_1s(ones_and_zeros):
prev_zero = -1
prev_prev_zero = -1
end = len(ones_and_zeros)
max_index = -1
max_1s_length = 0
for i in range(end):
if ones_and_zeros[i] == 0:
if i-prev_prev_zero > max_1s_length:
max_1s_length = i-prev_prev_zero
max_index = prev_zero
prev_prev_zero = prev_zero
prev_zero = i
if end-prev_prev_zero > max_1s_length:
max_index = prev_zero
return max_index
class TestMax1s(unittest.TestCase):
def test_max_1s_length(self):
ones_and_zeros = [1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), 9)
ones_and_zeros = [1, 1, 1, 1, 0]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), 4)
ones_and_zeros = [1, 1]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), -1)
|
<commit_before><commit_msg>Index of zero to get longest 1s<commit_after>
|
import unittest
"""
Given an array of 0s and 1s, find the position of 0 to be replaced with 1 to get longest contiguous
sequence of 1s.
Input: 1 1 0 0 1 0 1 1 1 0 1 1 1
Output: index 9
Input: 1 1 1 1 0
Output: index 4
Input: 01010
Output: index 2
"""
"""
Approach:
1. The idea is to keep track of 2 indexes - prev_zero and prev_prev_zero.
2. If current number is zero, calculate the difference between current index and prev_prev_zero.
3. This difference minus 1 is the number of 1s around prev_zero.
4. Update max_difference so far by comparing with difference from previous step.
5. Return the prev_zero index corresponding to max_difference as the answer.
"""
def index_of_zero_to_get_longest_1s(ones_and_zeros):
prev_zero = -1
prev_prev_zero = -1
end = len(ones_and_zeros)
max_index = -1
max_1s_length = 0
for i in range(end):
if ones_and_zeros[i] == 0:
if i-prev_prev_zero > max_1s_length:
max_1s_length = i-prev_prev_zero
max_index = prev_zero
prev_prev_zero = prev_zero
prev_zero = i
if end-prev_prev_zero > max_1s_length:
max_index = prev_zero
return max_index
class TestMax1s(unittest.TestCase):
def test_max_1s_length(self):
ones_and_zeros = [1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), 9)
ones_and_zeros = [1, 1, 1, 1, 0]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), 4)
ones_and_zeros = [1, 1]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), -1)
|
Index of zero to get longest 1simport unittest
"""
Given an array of 0s and 1s, find the position of 0 to be replaced with 1 to get longest contiguous
sequence of 1s.
Input: 1 1 0 0 1 0 1 1 1 0 1 1 1
Output: index 9
Input: 1 1 1 1 0
Output: index 4
Input: 01010
Output: index 2
"""
"""
Approach:
1. The idea is to keep track of 2 indexes - prev_zero and prev_prev_zero.
2. If current number is zero, calculate the difference between current index and prev_prev_zero.
3. This difference minus 1 is the number of 1s around prev_zero.
4. Update max_difference so far by comparing with difference from previous step.
5. Return the prev_zero index corresponding to max_difference as the answer.
"""
def index_of_zero_to_get_longest_1s(ones_and_zeros):
prev_zero = -1
prev_prev_zero = -1
end = len(ones_and_zeros)
max_index = -1
max_1s_length = 0
for i in range(end):
if ones_and_zeros[i] == 0:
if i-prev_prev_zero > max_1s_length:
max_1s_length = i-prev_prev_zero
max_index = prev_zero
prev_prev_zero = prev_zero
prev_zero = i
if end-prev_prev_zero > max_1s_length:
max_index = prev_zero
return max_index
class TestMax1s(unittest.TestCase):
def test_max_1s_length(self):
ones_and_zeros = [1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), 9)
ones_and_zeros = [1, 1, 1, 1, 0]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), 4)
ones_and_zeros = [1, 1]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), -1)
|
<commit_before><commit_msg>Index of zero to get longest 1s<commit_after>import unittest
"""
Given an array of 0s and 1s, find the position of 0 to be replaced with 1 to get longest contiguous
sequence of 1s.
Input: 1 1 0 0 1 0 1 1 1 0 1 1 1
Output: index 9
Input: 1 1 1 1 0
Output: index 4
Input: 01010
Output: index 2
"""
"""
Approach:
1. The idea is to keep track of 2 indexes - prev_zero and prev_prev_zero.
2. If current number is zero, calculate the difference between current index and prev_prev_zero.
3. This difference minus 1 is the number of 1s around prev_zero.
4. Update max_difference so far by comparing with difference from previous step.
5. Return the prev_zero index corresponding to max_difference as the answer.
"""
def index_of_zero_to_get_longest_1s(ones_and_zeros):
prev_zero = -1
prev_prev_zero = -1
end = len(ones_and_zeros)
max_index = -1
max_1s_length = 0
for i in range(end):
if ones_and_zeros[i] == 0:
if i-prev_prev_zero > max_1s_length:
max_1s_length = i-prev_prev_zero
max_index = prev_zero
prev_prev_zero = prev_zero
prev_zero = i
if end-prev_prev_zero > max_1s_length:
max_index = prev_zero
return max_index
class TestMax1s(unittest.TestCase):
def test_max_1s_length(self):
ones_and_zeros = [1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), 9)
ones_and_zeros = [1, 1, 1, 1, 0]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), 4)
ones_and_zeros = [1, 1]
self.assertEqual(index_of_zero_to_get_longest_1s(ones_and_zeros), -1)
|
|
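The record above derives its answer from prev_zero/prev_prev_zero bookkeeping. For contrast, a sliding-window sketch that tolerates at most one zero inside the window gives the same O(n)-time, O(1)-space result and generalizes directly to "flip at most k zeros". This is an illustrative editorial addition, not part of the dataset record; the names are hypothetical.

def index_of_zero_sliding_window(bits):
    best_index = -1
    best_length = 0
    left = 0            # left edge of the current window
    last_zero = -1      # index of the only zero allowed inside the window
    zero_count = 0
    for right, bit in enumerate(bits):
        if bit == 0:
            if zero_count == 1:
                left = last_zero + 1   # evict the previous zero from the window
            else:
                zero_count = 1
            last_zero = right
        if zero_count == 1 and right - left + 1 > best_length:
            best_length = right - left + 1
            best_index = last_zero
    return best_index   # -1 when the input has no zero, matching the tests above

# Matches the record's expectations:
# index_of_zero_sliding_window([1, 1, 0, 0, 1, 0, 1, 1, 1, 0, 1, 1, 1]) == 9
# index_of_zero_sliding_window([1, 1, 1, 1, 0]) == 4
# index_of_zero_sliding_window([1, 1]) == -1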
c2592b39fde98ac8ba46c165deb4a1245954f3a1
|
tests/test_crawler.py
|
tests/test_crawler.py
|
import warnings
import unittest
from scrapy.crawler import Crawler
from scrapy.settings import Settings
from scrapy.utils.spider import DefaultSpider
from scrapy.utils.misc import load_object
class CrawlerTestCase(unittest.TestCase):
def setUp(self):
self.crawler = Crawler(DefaultSpider, Settings())
def test_deprecated_attribute_spiders(self):
with warnings.catch_warnings(record=True) as w:
spiders = self.crawler.spiders
self.assertEqual(len(w), 1)
self.assertIn("Crawler.spiders", str(w[0].message))
sm_cls = load_object(self.crawler.settings['SPIDER_MANAGER_CLASS'])
self.assertIsInstance(spiders, sm_cls)
self.crawler.spiders
self.assertEqual(len(w), 1, "Warn deprecated access only once")
|
import warnings
import unittest
from twisted.internet import defer
from scrapy.crawler import Crawler, CrawlerRunner
from scrapy.settings import Settings
from scrapy.utils.spider import DefaultSpider
from scrapy.utils.misc import load_object
class CrawlerTestCase(unittest.TestCase):
def setUp(self):
self.crawler = Crawler(DefaultSpider, Settings())
def test_deprecated_attribute_spiders(self):
with warnings.catch_warnings(record=True) as w:
spiders = self.crawler.spiders
self.assertEqual(len(w), 1)
self.assertIn("Crawler.spiders", str(w[0].message))
sm_cls = load_object(self.crawler.settings['SPIDER_MANAGER_CLASS'])
self.assertIsInstance(spiders, sm_cls)
self.crawler.spiders
self.assertEqual(len(w), 1, "Warn deprecated access only once")
class CrawlerRunnerTest(unittest.TestCase):
def setUp(self):
self.crawler_runner = CrawlerRunner(Settings())
@defer.inlineCallbacks
def test_populate_spidercls_settings(self):
spider_settings = {'TEST1': 'spider', 'TEST2': 'spider'}
project_settings = {'TEST1': 'project', 'TEST3': 'project'}
class CustomSettingsSpider(DefaultSpider):
custom_settings = spider_settings
self.crawler_runner.settings.setdict(project_settings,
priority='project')
yield self.crawler_runner.crawl(CustomSettingsSpider)
crawler = self.crawler_runner.crawlers.pop()
self.assertEqual(crawler.settings.get('TEST1'), 'spider')
self.assertEqual(crawler.settings.get('TEST2'), 'spider')
self.assertEqual(crawler.settings.get('TEST3'), 'project')
|
Test verifying that CrawlerRunner populates spider class settings
|
Test verifying that CrawlerRunner populates spider class settings
|
Python
|
bsd-3-clause
|
elijah513/scrapy,darkrho/scrapy-scrapy,hansenDise/scrapy,dracony/scrapy,eliasdorneles/scrapy,URXtech/scrapy,olorz/scrapy,dgillis/scrapy,nguyenhongson03/scrapy,crasker/scrapy,heamon7/scrapy,kimimj/scrapy,Djlavoy/scrapy,nikgr95/scrapy,Timeship/scrapy,eLRuLL/scrapy,jdemaeyer/scrapy,finfish/scrapy,github-account-because-they-want-it/scrapy,yidongliu/scrapy,sigma-random/scrapy,github-account-because-they-want-it/scrapy,rdowinton/scrapy,mgedmin/scrapy,kimimj/scrapy,Digenis/scrapy,dgillis/scrapy,coderabhishek/scrapy,mlyundin/scrapy,pawelmhm/scrapy,rahulsharma1991/scrapy,ArturGaspar/scrapy,kmike/scrapy,aivarsk/scrapy,ramiro/scrapy,Preetwinder/scrapy,olorz/scrapy,eLRuLL/scrapy,chekunkov/scrapy,Adai0808/scrapy-1,agusc/scrapy,haiiiiiyun/scrapy,beni55/scrapy,zorojean/scrapy,carlosp420/scrapy,CENDARI/scrapy,pranjalpatil/scrapy,Ryezhang/scrapy,AaronTao1990/scrapy,xiao26/scrapy,snowdream1314/scrapy,hansenDise/scrapy,GregoryVigoTorres/scrapy,Bourneer/scrapy,jorik041/scrapy,liyy7/scrapy,IvanGavran/scrapy,csalazar/scrapy,huoxudong125/scrapy,curita/scrapy,KublaikhanGeek/scrapy,Partoo/scrapy,aivarsk/scrapy,dangra/scrapy,smaty1/scrapy,ssteo/scrapy,csalazar/scrapy,fqul/scrapy,Lucifer-Kim/scrapy,olafdietsche/scrapy,yusofm/scrapy,YeelerG/scrapy,livepy/scrapy,jdemaeyer/scrapy,redapple/scrapy,elijah513/scrapy,carlosp420/scrapy,elacuesta/scrapy,codebhendi/scrapy,tntC4stl3/scrapy,cyrixhero/scrapy,fqul/scrapy,dracony/scrapy,mlyundin/scrapy,ashishnerkar1/scrapy,moraesnicol/scrapy,tagatac/scrapy,yidongliu/scrapy,beni55/scrapy,fontenele/scrapy,tagatac/scrapy,zjuwangg/scrapy,jiezhu2007/scrapy,Digenis/scrapy,zackslash/scrapy,songfj/scrapy,arush0311/scrapy,eliasdorneles/scrapy,hwsyy/scrapy,kazitanvirahsan/scrapy,stenskjaer/scrapy,w495/scrapy,dangra/scrapy,hectoruelo/scrapy,Preetwinder/scrapy,z-fork/scrapy,yidongliu/scrapy,ENjOyAbLE1991/scrapy,elacuesta/scrapy,raphaelfruneaux/scrapy,Chenmxs/scrapy,pfctdayelise/scrapy,ylcolala/scrapy,rolando/scrapy,ylcolala/scrapy,tliber/scrapy,jc0n/scrapy,amboxer21/scrapy,Allianzcortex/scrapy,rolando-contrib/scrapy,wenyu1001/scrapy,dacjames/scrapy,fontenele/scrapy,Ryezhang/scrapy,cyberplant/scrapy,URXtech/scrapy,olafdietsche/scrapy,Slater-Victoroff/scrapy,JacobStevenR/scrapy,webmakin/scrapy,cleydson/scrapy,liyy7/scrapy,wenyu1001/scrapy,github-account-because-they-want-it/scrapy,zjuwangg/scrapy,agreen/scrapy,irwinlove/scrapy,ylcolala/scrapy,tntC4stl3/scrapy,dangra/scrapy,fqul/scrapy,raphaelfruneaux/scrapy,farhan0581/scrapy,yarikoptic/scrapy,dacjames/scrapy,rolando/scrapy,dhenyjarasandy/scrapy,zackslash/scrapy,rklabs/scrapy,stenskjaer/scrapy,YeelerG/scrapy,tagatac/scrapy,hyrole/scrapy,zhangtao11/scrapy,shaform/scrapy,umrashrf/scrapy,crasker/scrapy,z-fork/scrapy,pranjalpatil/scrapy,webmakin/scrapy,dhenyjarasandy/scrapy,ArturGaspar/scrapy,rolando-contrib/scrapy,Slater-Victoroff/scrapy,famorted/scrapy,johnardavies/scrapy,pablohoffman/scrapy,redapple/scrapy,pablohoffman/scrapy,ssh-odoo/scrapy,zhangtao11/scrapy,1yvT0s/scrapy,wzyuliyang/scrapy,Partoo/scrapy,xiao26/scrapy,ssteo/scrapy,fontenele/scrapy,pablohoffman/scrapy,Parlin-Galanodel/scrapy,hyrole/scrapy,jc0n/scrapy,agusc/scrapy,ENjOyAbLE1991/scrapy,gbirke/scrapy,lacrazyboy/scrapy,kashyap32/scrapy,nowopen/scrapy,pawelmhm/scrapy,crasker/scrapy,kalessin/scrapy,kalessin/scrapy,nowopen/scrapy,scorphus/scrapy,amboxer21/scrapy,legendtkl/scrapy,nfunato/scrapy,Preetwinder/scrapy,nfunato/scrapy,KublaikhanGeek/scrapy,moraesnicol/scrapy,livepy/scrapy,pranjalpatil/scrapy,jc0n/scrapy,fpy171/scrapy,tntC4stl3/scrapy,finfish/scrapy,agreen/scrapy,TarasRudnyk/scrap
y,URXtech/scrapy,cleydson/scrapy,AaronTao1990/scrapy,wzyuliyang/scrapy,pfctdayelise/scrapy,Adai0808/scrapy-1,Allianzcortex/scrapy,scrapy/scrapy,xiao26/scrapy,cyberplant/scrapy,irwinlove/scrapy,arush0311/scrapy,pawelmhm/scrapy,IvanGavran/scrapy,coderabhishek/scrapy,Cnfc19932/scrapy,taito/scrapy,kalessin/scrapy,umrashrf/scrapy,CENDARI/scrapy,famorted/scrapy,curita/scrapy,avtoritet/scrapy,zhangtao11/scrapy,fafaman/scrapy,legendtkl/scrapy,hbwzhsh/scrapy,dhenyjarasandy/scrapy,kimimj/scrapy,hectoruelo/scrapy,JacobStevenR/scrapy,godfreyy/scrapy,rolando/scrapy,shaform/scrapy,lacrazyboy/scrapy,godfreyy/scrapy,chekunkov/scrapy,jamesblunt/scrapy,jorik041/scrapy,tliber/scrapy,arush0311/scrapy,pombredanne/scrapy,WilliamKinaan/scrapy,ssteo/scrapy,cyrixhero/scrapy,wangjun/scrapy,famorted/scrapy,kazitanvirahsan/scrapy,smaty1/scrapy,carlosp420/scrapy,mlyundin/scrapy,Djlavoy/scrapy,kmike/scrapy,umrashrf/scrapy,OpenWhere/scrapy,dgillis/scrapy,dracony/scrapy,amboxer21/scrapy,Digenis/scrapy,shaform/scrapy,johnardavies/scrapy,jiezhu2007/scrapy,rdowinton/scrapy,CodeJuan/scrapy,ssh-odoo/scrapy,codebhendi/scrapy,Parlin-Galanodel/scrapy,rklabs/scrapy,eliasdorneles/scrapy,rootAvish/scrapy,AaronTao1990/scrapy,legendtkl/scrapy,Djlavoy/scrapy,nikgr95/scrapy,fpy171/scrapy,Zephor5/scrapy,jorik041/scrapy,godfreyy/scrapy,zjuwangg/scrapy,IvanGavran/scrapy,CodeJuan/scrapy,Cnfc19932/scrapy,z-fork/scrapy,avtoritet/scrapy,bmess/scrapy,jiezhu2007/scrapy,hectoruelo/scrapy,ssh-odoo/scrapy,TarasRudnyk/scrapy,Lucifer-Kim/scrapy,fpy171/scrapy,YeelerG/scrapy,Chenmxs/scrapy,profjrr/scrapy,1yvT0s/scrapy,yusofm/scrapy,raphaelfruneaux/scrapy,cyberplant/scrapy,csalazar/scrapy,Geeglee/scrapy,cursesun/scrapy,zackslash/scrapy,farhan0581/scrapy,aivarsk/scrapy,Timeship/scrapy,Bourneer/scrapy,zorojean/scrapy,GregoryVigoTorres/scrapy,cleydson/scrapy,webmakin/scrapy,fafaman/scrapy,stenskjaer/scrapy,KublaikhanGeek/scrapy,rklabs/scrapy,nguyenhongson03/scrapy,snowdream1314/scrapy,WilliamKinaan/scrapy,scorphus/scrapy,taito/scrapy,kazitanvirahsan/scrapy,jeffreyjinfeng/scrapy,OpenWhere/scrapy,Ryezhang/scrapy,rahulsharma1991/scrapy,profjrr/scrapy,1yvT0s/scrapy,ramiro/scrapy,nikgr95/scrapy,rahulsharma1991/scrapy,ENjOyAbLE1991/scrapy,mgedmin/scrapy,songfj/scrapy,GregoryVigoTorres/scrapy,cursesun/scrapy,scrapy/scrapy,ashishnerkar1/scrapy,foromer4/scrapy,hyrole/scrapy,nfunato/scrapy,kashyap32/scrapy,haiiiiiyun/scrapy,CodeJuan/scrapy,yusofm/scrapy,codebhendi/scrapy,Zephor5/scrapy,songfj/scrapy,rolando-contrib/scrapy,Geeglee/scrapy,bmess/scrapy,Chenmxs/scrapy,wenyu1001/scrapy,livepy/scrapy,Geeglee/scrapy,jeffreyjinfeng/scrapy,liyy7/scrapy,hwsyy/scrapy,eLRuLL/scrapy,hansenDise/scrapy,gbirke/scrapy,farhan0581/scrapy,Adai0808/scrapy-1,wangjun/scrapy,dacjames/scrapy,Partoo/scrapy,Allianzcortex/scrapy,coderabhishek/scrapy,scorphus/scrapy,ramiro/scrapy,scrapy/scrapy,lacrazyboy/scrapy,cursesun/scrapy,irwinlove/scrapy,rdowinton/scrapy,chekunkov/scrapy,curita/scrapy,nguyenhongson03/scrapy,elijah513/scrapy,heamon7/scrapy,TarasRudnyk/scrapy,yarikoptic/scrapy,haiiiiiyun/scrapy,huoxudong125/scrapy,starrify/scrapy,Slater-Victoroff/scrapy,snowdream1314/scrapy,nowopen/scrapy,tliber/scrapy,pombredanne/scrapy,Bourneer/scrapy,ArturGaspar/scrapy,Timeship/scrapy,barraponto/scrapy,hbwzhsh/scrapy,hbwzhsh/scrapy,kashyap32/scrapy,CENDARI/scrapy,pombredanne/scrapy,agusc/scrapy,starrify/scrapy,Parlin-Galanodel/scrapy,bmess/scrapy,zorojean/scrapy,profjrr/scrapy,jdemaeyer/scrapy,mgedmin/scrapy,w495/scrapy,jeffreyjinfeng/scrapy,Zephor5/scrapy,wangjun/scrapy,barraponto/scrapy,johnardavies/sc
rapy,rootAvish/scrapy,w495/scrapy,wujuguang/scrapy,barraponto/scrapy,Cnfc19932/scrapy,agreen/scrapy,wujuguang/scrapy,finfish/scrapy,olafdietsche/scrapy,WilliamKinaan/scrapy,olorz/scrapy,wujuguang/scrapy,foromer4/scrapy,darkrho/scrapy-scrapy,JacobStevenR/scrapy,foromer4/scrapy,hwsyy/scrapy,yarikoptic/scrapy,jamesblunt/scrapy,pfctdayelise/scrapy,elacuesta/scrapy,sigma-random/scrapy,cyrixhero/scrapy,beni55/scrapy,darkrho/scrapy-scrapy,wzyuliyang/scrapy,starrify/scrapy,redapple/scrapy,moraesnicol/scrapy,rootAvish/scrapy,avtoritet/scrapy,fafaman/scrapy,smaty1/scrapy,heamon7/scrapy,OpenWhere/scrapy,Lucifer-Kim/scrapy,taito/scrapy,kmike/scrapy,huoxudong125/scrapy
|
import warnings
import unittest
from scrapy.crawler import Crawler
from scrapy.settings import Settings
from scrapy.utils.spider import DefaultSpider
from scrapy.utils.misc import load_object
class CrawlerTestCase(unittest.TestCase):
def setUp(self):
self.crawler = Crawler(DefaultSpider, Settings())
def test_deprecated_attribute_spiders(self):
with warnings.catch_warnings(record=True) as w:
spiders = self.crawler.spiders
self.assertEqual(len(w), 1)
self.assertIn("Crawler.spiders", str(w[0].message))
sm_cls = load_object(self.crawler.settings['SPIDER_MANAGER_CLASS'])
self.assertIsInstance(spiders, sm_cls)
self.crawler.spiders
self.assertEqual(len(w), 1, "Warn deprecated access only once")
Test verifying that CrawlerRunner populates spider class settings
|
import warnings
import unittest
from twisted.internet import defer
from scrapy.crawler import Crawler, CrawlerRunner
from scrapy.settings import Settings
from scrapy.utils.spider import DefaultSpider
from scrapy.utils.misc import load_object
class CrawlerTestCase(unittest.TestCase):
def setUp(self):
self.crawler = Crawler(DefaultSpider, Settings())
def test_deprecated_attribute_spiders(self):
with warnings.catch_warnings(record=True) as w:
spiders = self.crawler.spiders
self.assertEqual(len(w), 1)
self.assertIn("Crawler.spiders", str(w[0].message))
sm_cls = load_object(self.crawler.settings['SPIDER_MANAGER_CLASS'])
self.assertIsInstance(spiders, sm_cls)
self.crawler.spiders
self.assertEqual(len(w), 1, "Warn deprecated access only once")
class CrawlerRunnerTest(unittest.TestCase):
def setUp(self):
self.crawler_runner = CrawlerRunner(Settings())
@defer.inlineCallbacks
def test_populate_spidercls_settings(self):
spider_settings = {'TEST1': 'spider', 'TEST2': 'spider'}
project_settings = {'TEST1': 'project', 'TEST3': 'project'}
class CustomSettingsSpider(DefaultSpider):
custom_settings = spider_settings
self.crawler_runner.settings.setdict(project_settings,
priority='project')
yield self.crawler_runner.crawl(CustomSettingsSpider)
crawler = self.crawler_runner.crawlers.pop()
self.assertEqual(crawler.settings.get('TEST1'), 'spider')
self.assertEqual(crawler.settings.get('TEST2'), 'spider')
self.assertEqual(crawler.settings.get('TEST3'), 'project')
|
<commit_before>import warnings
import unittest
from scrapy.crawler import Crawler
from scrapy.settings import Settings
from scrapy.utils.spider import DefaultSpider
from scrapy.utils.misc import load_object
class CrawlerTestCase(unittest.TestCase):
def setUp(self):
self.crawler = Crawler(DefaultSpider, Settings())
def test_deprecated_attribute_spiders(self):
with warnings.catch_warnings(record=True) as w:
spiders = self.crawler.spiders
self.assertEqual(len(w), 1)
self.assertIn("Crawler.spiders", str(w[0].message))
sm_cls = load_object(self.crawler.settings['SPIDER_MANAGER_CLASS'])
self.assertIsInstance(spiders, sm_cls)
self.crawler.spiders
self.assertEqual(len(w), 1, "Warn deprecated access only once")
<commit_msg>Test verifying that CrawlerRunner populates spider class settings<commit_after>
|
import warnings
import unittest
from twisted.internet import defer
from scrapy.crawler import Crawler, CrawlerRunner
from scrapy.settings import Settings
from scrapy.utils.spider import DefaultSpider
from scrapy.utils.misc import load_object
class CrawlerTestCase(unittest.TestCase):
def setUp(self):
self.crawler = Crawler(DefaultSpider, Settings())
def test_deprecated_attribute_spiders(self):
with warnings.catch_warnings(record=True) as w:
spiders = self.crawler.spiders
self.assertEqual(len(w), 1)
self.assertIn("Crawler.spiders", str(w[0].message))
sm_cls = load_object(self.crawler.settings['SPIDER_MANAGER_CLASS'])
self.assertIsInstance(spiders, sm_cls)
self.crawler.spiders
self.assertEqual(len(w), 1, "Warn deprecated access only once")
class CrawlerRunnerTest(unittest.TestCase):
def setUp(self):
self.crawler_runner = CrawlerRunner(Settings())
@defer.inlineCallbacks
def test_populate_spidercls_settings(self):
spider_settings = {'TEST1': 'spider', 'TEST2': 'spider'}
project_settings = {'TEST1': 'project', 'TEST3': 'project'}
class CustomSettingsSpider(DefaultSpider):
custom_settings = spider_settings
self.crawler_runner.settings.setdict(project_settings,
priority='project')
yield self.crawler_runner.crawl(CustomSettingsSpider)
crawler = self.crawler_runner.crawlers.pop()
self.assertEqual(crawler.settings.get('TEST1'), 'spider')
self.assertEqual(crawler.settings.get('TEST2'), 'spider')
self.assertEqual(crawler.settings.get('TEST3'), 'project')
|
import warnings
import unittest
from scrapy.crawler import Crawler
from scrapy.settings import Settings
from scrapy.utils.spider import DefaultSpider
from scrapy.utils.misc import load_object
class CrawlerTestCase(unittest.TestCase):
def setUp(self):
self.crawler = Crawler(DefaultSpider, Settings())
def test_deprecated_attribute_spiders(self):
with warnings.catch_warnings(record=True) as w:
spiders = self.crawler.spiders
self.assertEqual(len(w), 1)
self.assertIn("Crawler.spiders", str(w[0].message))
sm_cls = load_object(self.crawler.settings['SPIDER_MANAGER_CLASS'])
self.assertIsInstance(spiders, sm_cls)
self.crawler.spiders
self.assertEqual(len(w), 1, "Warn deprecated access only once")
Test verifying that CrawlerRunner populates spider class settingsimport warnings
import unittest
from twisted.internet import defer
from scrapy.crawler import Crawler, CrawlerRunner
from scrapy.settings import Settings
from scrapy.utils.spider import DefaultSpider
from scrapy.utils.misc import load_object
class CrawlerTestCase(unittest.TestCase):
def setUp(self):
self.crawler = Crawler(DefaultSpider, Settings())
def test_deprecated_attribute_spiders(self):
with warnings.catch_warnings(record=True) as w:
spiders = self.crawler.spiders
self.assertEqual(len(w), 1)
self.assertIn("Crawler.spiders", str(w[0].message))
sm_cls = load_object(self.crawler.settings['SPIDER_MANAGER_CLASS'])
self.assertIsInstance(spiders, sm_cls)
self.crawler.spiders
self.assertEqual(len(w), 1, "Warn deprecated access only once")
class CrawlerRunnerTest(unittest.TestCase):
def setUp(self):
self.crawler_runner = CrawlerRunner(Settings())
@defer.inlineCallbacks
def test_populate_spidercls_settings(self):
spider_settings = {'TEST1': 'spider', 'TEST2': 'spider'}
project_settings = {'TEST1': 'project', 'TEST3': 'project'}
class CustomSettingsSpider(DefaultSpider):
custom_settings = spider_settings
self.crawler_runner.settings.setdict(project_settings,
priority='project')
yield self.crawler_runner.crawl(CustomSettingsSpider)
crawler = self.crawler_runner.crawlers.pop()
self.assertEqual(crawler.settings.get('TEST1'), 'spider')
self.assertEqual(crawler.settings.get('TEST2'), 'spider')
self.assertEqual(crawler.settings.get('TEST3'), 'project')
|
<commit_before>import warnings
import unittest
from scrapy.crawler import Crawler
from scrapy.settings import Settings
from scrapy.utils.spider import DefaultSpider
from scrapy.utils.misc import load_object
class CrawlerTestCase(unittest.TestCase):
def setUp(self):
self.crawler = Crawler(DefaultSpider, Settings())
def test_deprecated_attribute_spiders(self):
with warnings.catch_warnings(record=True) as w:
spiders = self.crawler.spiders
self.assertEqual(len(w), 1)
self.assertIn("Crawler.spiders", str(w[0].message))
sm_cls = load_object(self.crawler.settings['SPIDER_MANAGER_CLASS'])
self.assertIsInstance(spiders, sm_cls)
self.crawler.spiders
self.assertEqual(len(w), 1, "Warn deprecated access only once")
<commit_msg>Test verifying that CrawlerRunner populates spider class settings<commit_after>import warnings
import unittest
from twisted.internet import defer
from scrapy.crawler import Crawler, CrawlerRunner
from scrapy.settings import Settings
from scrapy.utils.spider import DefaultSpider
from scrapy.utils.misc import load_object
class CrawlerTestCase(unittest.TestCase):
def setUp(self):
self.crawler = Crawler(DefaultSpider, Settings())
def test_deprecated_attribute_spiders(self):
with warnings.catch_warnings(record=True) as w:
spiders = self.crawler.spiders
self.assertEqual(len(w), 1)
self.assertIn("Crawler.spiders", str(w[0].message))
sm_cls = load_object(self.crawler.settings['SPIDER_MANAGER_CLASS'])
self.assertIsInstance(spiders, sm_cls)
self.crawler.spiders
self.assertEqual(len(w), 1, "Warn deprecated access only once")
class CrawlerRunnerTest(unittest.TestCase):
def setUp(self):
self.crawler_runner = CrawlerRunner(Settings())
@defer.inlineCallbacks
def test_populate_spidercls_settings(self):
spider_settings = {'TEST1': 'spider', 'TEST2': 'spider'}
project_settings = {'TEST1': 'project', 'TEST3': 'project'}
class CustomSettingsSpider(DefaultSpider):
custom_settings = spider_settings
self.crawler_runner.settings.setdict(project_settings,
priority='project')
yield self.crawler_runner.crawl(CustomSettingsSpider)
crawler = self.crawler_runner.crawlers.pop()
self.assertEqual(crawler.settings.get('TEST1'), 'spider')
self.assertEqual(crawler.settings.get('TEST2'), 'spider')
self.assertEqual(crawler.settings.get('TEST3'), 'project')
|
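The test added above hinges on one rule: per-spider custom_settings are applied at a higher priority than project settings, so spider values win on key collisions. The toy model below is a hypothetical sketch of that rule only -- it is not Scrapy's Settings implementation, and the numeric priorities are invented for illustration.

PRIORITIES = {'default': 0, 'project': 20, 'spider': 30}   # illustrative values

class PrioritySettings:
    def __init__(self):
        self._values = {}                    # key -> (value, priority)

    def setdict(self, values, priority):
        rank = PRIORITIES[priority]
        for key, value in values.items():
            if key not in self._values or rank >= self._values[key][1]:
                self._values[key] = (value, rank)

    def get(self, key):
        entry = self._values.get(key)
        return entry[0] if entry else None

settings = PrioritySettings()
settings.setdict({'TEST1': 'project', 'TEST3': 'project'}, priority='project')
settings.setdict({'TEST1': 'spider', 'TEST2': 'spider'}, priority='spider')
assert settings.get('TEST1') == 'spider'    # spider overrides project
assert settings.get('TEST3') == 'project'   # untouched project value survives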
f6fabf476e49d0d857217c428ed2ad1af7f034cd
|
candidates/management/commands/candidates_delete_everything_from_popit.py
|
candidates/management/commands/candidates_delete_everything_from_popit.py
|
import json
from candidates.models import PopItPerson
from candidates.popit import PopItApiMixin, get_base_url
from django.core.management.base import BaseCommand
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
message = "WARNING: this will delete all people, posts, " \
"organizations and\nmemberships from the PopIt instance:" + \
"\n\n " + get_base_url() + "\n\nIf you really want to do " + \
"this, type 'YES':"
self.stdout.write(message)
user_response = raw_input()
if user_response != 'YES':
self.stdout.write("Aborting, since you didn't type 'YES'.")
return
for collection in (
'memberships',
'posts',
'organizations',
'persons',
):
self.stdout.write("Deleting from collection: " + collection)
api_collection = getattr(self.api, collection)
# We have to be careful here - if you try to step to page
# 2 after deleting everything on page 1, then lots of
# objects will be missed. Instead, just get the first page
# until there's nothing left.
while True:
results = api_collection.get()
for o in results['result']:
object_id = o['id']
api_collection(object_id).delete()
if not results.get('has_more'):
break
|
Add a command to delete all objects from PopIt
|
Add a command to delete all objects from PopIt
|
Python
|
agpl-3.0
|
YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,neavouli/yournextrepresentative,YoQuieroSaber/yournextrepresentative,datamade/yournextmp-popit,DemocracyClub/yournextrepresentative,YoQuieroSaber/yournextrepresentative,neavouli/yournextrepresentative,neavouli/yournextrepresentative,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,datamade/yournextmp-popit,datamade/yournextmp-popit,mysociety/yournextmp-popit,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,openstate/yournextrepresentative,YoQuieroSaber/yournextrepresentative,mysociety/yournextmp-popit,neavouli/yournextrepresentative,mysociety/yournextrepresentative,mysociety/yournextmp-popit,mysociety/yournextmp-popit,neavouli/yournextrepresentative,openstate/yournextrepresentative,mysociety/yournextrepresentative,openstate/yournextrepresentative,datamade/yournextmp-popit,mysociety/yournextmp-popit,mysociety/yournextrepresentative,mysociety/yournextrepresentative,DemocracyClub/yournextrepresentative,openstate/yournextrepresentative
|
Add a command to delete all objects from PopIt
|
import json
from candidates.models import PopItPerson
from candidates.popit import PopItApiMixin, get_base_url
from django.core.management.base import BaseCommand
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
message = "WARNING: this will delete all people, posts, " \
"organizations and\nmemberships from the PopIt instance:" + \
"\n\n " + get_base_url() + "\n\nIf you really want to do " + \
"this, type 'YES':"
self.stdout.write(message)
user_response = raw_input()
if user_response != 'YES':
self.stdout.write("Aborting, since you didn't type 'YES'.")
return
for collection in (
'memberships',
'posts',
'organizations',
'persons',
):
self.stdout.write("Deleting from collection: " + collection)
api_collection = getattr(self.api, collection)
# We have to be careful here - if you try to step to page
# 2 after deleting everything on page 1, then lots of
# objects will be missed. Instead, just get the first page
# until there's nothing left.
while True:
results = api_collection.get()
for o in results['result']:
object_id = o['id']
api_collection(object_id).delete()
if not results.get('has_more'):
break
|
<commit_before><commit_msg>Add a command to delete all objects from PopIt<commit_after>
|
import json
from candidates.models import PopItPerson
from candidates.popit import PopItApiMixin, get_base_url
from django.core.management.base import BaseCommand
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
message = "WARNING: this will delete all people, posts, " \
"organizations and\nmemberships from the PopIt instance:" + \
"\n\n " + get_base_url() + "\n\nIf you really want to do " + \
"this, type 'YES':"
self.stdout.write(message)
user_response = raw_input()
if user_response != 'YES':
self.stdout.write("Aborting, since you didn't type 'YES'.")
return
for collection in (
'memberships',
'posts',
'organizations',
'persons',
):
self.stdout.write("Deleting from collection: " + collection)
api_collection = getattr(self.api, collection)
# We have to be careful here - if you try to step to page
# 2 after deleting everything on page 1, then lots of
# objects will be missed. Instead, just get the first page
# until there's nothing left.
while True:
results = api_collection.get()
for o in results['result']:
object_id = o['id']
api_collection(object_id).delete()
if not results.get('has_more'):
break
|
Add a command to delete all objects from PopItimport json
from candidates.models import PopItPerson
from candidates.popit import PopItApiMixin, get_base_url
from django.core.management.base import BaseCommand
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
message = "WARNING: this will delete all people, posts, " \
"organizations and\nmemberships from the PopIt instance:" + \
"\n\n " + get_base_url() + "\n\nIf you really want to do " + \
"this, type 'YES':"
self.stdout.write(message)
user_response = raw_input()
if user_response != 'YES':
self.stdout.write("Aborting, since you didn't type 'YES'.")
return
for collection in (
'memberships',
'posts',
'organizations',
'persons',
):
self.stdout.write("Deleting from collection: " + collection)
api_collection = getattr(self.api, collection)
# We have to be careful here - if you try to step to page
# 2 after deleting everything on page 1, then lots of
# objects will be missed. Instead, just get the first page
# until there's nothing left.
while True:
results = api_collection.get()
for o in results['result']:
object_id = o['id']
api_collection(object_id).delete()
if not results.get('has_more'):
break
|
<commit_before><commit_msg>Add a command to delete all objects from PopIt<commit_after>import json
from candidates.models import PopItPerson
from candidates.popit import PopItApiMixin, get_base_url
from django.core.management.base import BaseCommand
class Command(PopItApiMixin, BaseCommand):
def handle(self, **options):
message = "WARNING: this will delete all people, posts, " \
"organizations and\nmemberships from the PopIt instance:" + \
"\n\n " + get_base_url() + "\n\nIf you really want to do " + \
"this, type 'YES':"
self.stdout.write(message)
user_response = raw_input()
if user_response != 'YES':
self.stdout.write("Aborting, since you didn't type 'YES'.")
return
for collection in (
'memberships',
'posts',
'organizations',
'persons',
):
self.stdout.write("Deleting from collection: " + collection)
api_collection = getattr(self.api, collection)
# We have to be careful here - if you try to step to page
# 2 after deleting everything on page 1, then lots of
# objects will be missed. Instead, just get the first page
# until there's nothing left.
while True:
results = api_collection.get()
for o in results['result']:
object_id = o['id']
api_collection(object_id).delete()
if not results.get('has_more'):
break
|
|
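The in-code comment in the command above ("We have to be careful here...") describes a real pagination pitfall. The toy demo below reproduces it with a plain list standing in for the PopIt collection -- no API calls involved, and the numbers are arbitrary.

items = list(range(10))
page_size = 4

# Wrong: advancing the page index while deleting skips survivors,
# because everything shifts left after each delete.
broken = list(items)
page_index = 0
while page_index * page_size < len(broken):
    for obj in broken[page_index * page_size:(page_index + 1) * page_size]:
        broken.remove(obj)
    page_index += 1
print(broken)    # [4, 5, 6, 7] -- these were never visited

# Right (what the command does): keep re-fetching the first page until empty.
fixed = list(items)
while fixed:
    for obj in fixed[:page_size]:
        fixed.remove(obj)
print(fixed)     # []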
e9605dd5432806435dec26f7e23ebdf25074943b
|
blueLed.py
|
blueLed.py
|
'''
Dr Who Box: Blue Effects LED
'''
from __future__ import print_function
import RPi.GPIO as GPIO
import time
from multiprocessing import Process
import math
# Define PINS
LED = 18
# Use numbering based on P1 header
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.setup(LED, GPIO.OUT, GPIO.LOW)
def pulsateLed():
pwm = GPIO.PWM(LED, 100)
pwm.start(0)
values = [math.sin(x*math.pi/180.0) for x in range (0,181)]
values = [int(100*x**3) for x in values]
increasing = True
count = 0
delay = 0.02
pwm.start(0)
while True:
pwm.ChangeDutyCycle(values[count])
time.sleep(delay)
if increasing:
count += 1
else:
count -= 1
if (count >= len(values)-1):
increasing = False
if (count <= 0):
increasing = True
# Wait forever...
try:
p = Process(target=pulsateLed)
p.start()
while True:
time.sleep(1)
print(time.asctime(),'and python is running!')
except:
GPIO.cleanup()
p.terminate()
|
Add stand-alone demo of pulsating LED using multiprocess.
|
Add stand-alone demo of pulsating LED using multiprocess.
|
Python
|
mit
|
davidb24v/drwho
|
Add stand-alone demo of pulsating LED using multiprocess.
|
'''
Dr Who Box: Blue Effects LED
'''
from __future__ import print_function
import RPi.GPIO as GPIO
import time
from multiprocessing import Process
import math
# Define PINS
LED = 18
# Use numbering based on P1 header
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.setup(LED, GPIO.OUT, GPIO.LOW)
def pulsateLed():
pwm = GPIO.PWM(LED, 100)
pwm.start(0)
values = [math.sin(x*math.pi/180.0) for x in range (0,181)]
values = [int(100*x**3) for x in values]
increasing = True
count = 0
delay = 0.02
pwm.start(0)
while True:
pwm.ChangeDutyCycle(values[count])
time.sleep(delay)
if increasing:
count += 1
else:
count -= 1
if (count >= len(values)-1):
increasing = False
if (count <= 0):
increasing = True
# Wait forever...
try:
p = Process(target=pulsateLed)
p.start()
while True:
time.sleep(1)
print(time.asctime(),'and python is running!')
except:
GPIO.cleanup()
p.terminate()
|
<commit_before><commit_msg>Add stand-alone demo of pulsating LED using multiprocess.<commit_after>
|
'''
Dr Who Box: Blue Effects LED
'''
from __future__ import print_function
import RPi.GPIO as GPIO
import time
from multiprocessing import Process
import math
# Define PINS
LED = 18
# Use numbering based on P1 header
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.setup(LED, GPIO.OUT, GPIO.LOW)
def pulsateLed():
pwm = GPIO.PWM(LED, 100)
pwm.start(0)
values = [math.sin(x*math.pi/180.0) for x in range (0,181)]
values = [int(100*x**3) for x in values]
increasing = True
count = 0
delay = 0.02
pwm.start(0)
while True:
pwm.ChangeDutyCycle(values[count])
time.sleep(delay)
if increasing:
count += 1
else:
count -= 1
if (count >= len(values)-1):
increasing = False
if (count <= 0):
increasing = True
# Wait forever...
try:
p = Process(target=pulsateLed)
p.start()
while True:
time.sleep(1)
print(time.asctime(),'and python is running!')
except:
GPIO.cleanup()
p.terminate()
|
Add stand-alone demo of pulsating LED using multiprocess.'''
Dr Who Box: Blue Effects LED
'''
from __future__ import print_function
import RPi.GPIO as GPIO
import time
from multiprocessing import Process
import math
# Define PINS
LED = 18
# Use numbering based on P1 header
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.setup(LED, GPIO.OUT, GPIO.LOW)
def pulsateLed():
pwm = GPIO.PWM(LED, 100)
pwm.start(0)
values = [math.sin(x*math.pi/180.0) for x in range (0,181)]
values = [int(100*x**3) for x in values]
increasing = True
count = 0
delay = 0.02
pwm.start(0)
while True:
pwm.ChangeDutyCycle(values[count])
time.sleep(delay)
if increasing:
count += 1
else:
count -= 1
if (count >= len(values)-1):
increasing = False
if (count <= 0):
increasing = True
# Wait forever...
try:
p = Process(target=pulsateLed)
p.start()
while True:
time.sleep(1)
print(time.asctime(),'and python is running!')
except:
GPIO.cleanup()
p.terminate()
|
<commit_before><commit_msg>Add stand-alone demo of pulsating LED using multiprocess.<commit_after>'''
Dr Who Box: Blue Effects LED
'''
from __future__ import print_function
import RPi.GPIO as GPIO
import time
from multiprocessing import Process
import math
# Define PINS
LED = 18
# Use numbering based on P1 header
GPIO.setmode(GPIO.BOARD)
GPIO.setwarnings(False)
GPIO.setup(LED, GPIO.OUT, GPIO.LOW)
def pulsateLed():
pwm = GPIO.PWM(LED, 100)
pwm.start(0)
values = [math.sin(x*math.pi/180.0) for x in range (0,181)]
values = [int(100*x**3) for x in values]
increasing = True
count = 0
delay = 0.02
pwm.start(0)
while True:
pwm.ChangeDutyCycle(values[count])
time.sleep(delay)
if increasing:
count += 1
else:
count -= 1
if (count >= len(values)-1):
increasing = False
if (count <= 0):
increasing = True
# Wait forever...
try:
p = Process(target=pulsateLed)
p.start()
while True:
time.sleep(1)
print(time.asctime(),'and python is running!')
except:
GPIO.cleanup()
p.terminate()
|
|
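A note on the duty-cycle table the script builds: a half sine over 0-180 degrees gives a smooth rise and fall, and cubing it compresses the low end so the fade looks closer to linear to the eye -- a rough stand-in for gamma correction. That perceptual reading is editorial, not stated in the commit. The snippet below rebuilds the same table without touching GPIO:

import math

values = [math.sin(x * math.pi / 180.0) for x in range(0, 181)]   # 0 -> 1 -> 0
values = [int(100 * v ** 3) for v in values]                      # perceptual shaping

print(values[0], values[90], values[180])   # 0 100 0: dark, full, dark
print(values[30], values[45], values[60])   # 12 35 64: cubing keeps early steps dim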
93e5e1b8d36e6488044c619e9015aa2e7c5ab29b
|
dynamic_programming/edit_distance.py
|
dynamic_programming/edit_distance.py
|
"""
Author : Turfa Auliarachman
Date : October 12, 2016
This is a pure Python implementation of Dynamic Programming solution to the edit distance problem.
The problem is :
Given two strings A and B. Find the minimum number of operations to string B such that A = B. The permitted operations are removal, insertion, and substitution.
"""
class EditDistance:
"""
Use :
solver = EditDistance()
editDistanceResult = solver.solve(firstString, secondString)
"""
def __init__(self):
self.__prepare__()
def __prepare__(self, N = 0, M = 0):
self.dp = [[-1 for y in range(0,M)] for x in range(0,N)]
def __solveDP(self, x, y):
if (x==-1):
return y+1
elif (y==-1):
return x+1
elif (self.dp[x][y]>-1):
return self.dp[x][y]
else:
if (self.A[x]==self.B[y]):
self.dp[x][y] = self.__solveDP(x-1,y-1)
else:
self.dp[x][y] = 1+min(self.__solveDP(x,y-1), self.__solveDP(x-1,y), self.__solveDP(x-1,y-1))
return self.dp[x][y]
def solve(self, A, B):
if isinstance(A,bytes):
A = A.decode('ascii')
if isinstance(B,bytes):
B = B.decode('ascii')
self.A = str(A)
self.B = str(B)
self.__prepare__(len(A), len(B))
return self.__solveDP(len(A)-1, len(B)-1)
if __name__ == '__main__':
import sys
if sys.version_info.major < 3:
input_function = raw_input
else:
input_function = input
solver = EditDistance()
print("****************** Testing Edit Distance DP Algorithm ******************")
print()
print("Enter the first string: ", end="")
S1 = input_function()
print("Enter the second string: ", end="")
S2 = input_function()
print()
print("The minimum Edit Distance is: %d" % (solver.solve(S1, S2)))
print()
print("*************** End of Testing Edit Distance DP Algorithm ***************")
|
Add Edit Distance DP Algorithm
|
Add Edit Distance DP Algorithm
|
Python
|
mit
|
TheAlgorithms/Python
|
Add Edit Distance DP Algorithm
|
"""
Author : Turfa Auliarachman
Date : October 12, 2016
This is a pure Python implementation of Dynamic Programming solution to the edit distance problem.
The problem is :
Given two strings A and B. Find the minimum number of operations to string B such that A = B. The permitted operations are removal, insertion, and substitution.
"""
class EditDistance:
"""
Use :
solver = EditDistance()
editDistanceResult = solver.solve(firstString, secondString)
"""
def __init__(self):
self.__prepare__()
def __prepare__(self, N = 0, M = 0):
self.dp = [[-1 for y in range(0,M)] for x in range(0,N)]
def __solveDP(self, x, y):
if (x==-1):
return y+1
elif (y==-1):
return x+1
elif (self.dp[x][y]>-1):
return self.dp[x][y]
else:
if (self.A[x]==self.B[y]):
self.dp[x][y] = self.__solveDP(x-1,y-1)
else:
self.dp[x][y] = 1+min(self.__solveDP(x,y-1), self.__solveDP(x-1,y), self.__solveDP(x-1,y-1))
return self.dp[x][y]
def solve(self, A, B):
if isinstance(A,bytes):
A = A.decode('ascii')
if isinstance(B,bytes):
B = B.decode('ascii')
self.A = str(A)
self.B = str(B)
self.__prepare__(len(A), len(B))
return self.__solveDP(len(A)-1, len(B)-1)
if __name__ == '__main__':
import sys
if sys.version_info.major < 3:
input_function = raw_input
else:
input_function = input
solver = EditDistance()
print("****************** Testing Edit Distance DP Algorithm ******************")
print()
print("Enter the first string: ", end="")
S1 = input_function()
print("Enter the second string: ", end="")
S2 = input_function()
print()
print("The minimum Edit Distance is: %d" % (solver.solve(S1, S2)))
print()
print("*************** End of Testing Edit Distance DP Algorithm ***************")
|
<commit_before><commit_msg>Add Edit Distance DP Algorithm<commit_after>
|
"""
Author : Turfa Auliarachman
Date : October 12, 2016
This is a pure Python implementation of Dynamic Programming solution to the edit distance problem.
The problem is :
Given two strings A and B. Find the minimum number of operations to string B such that A = B. The permitted operations are removal, insertion, and substitution.
"""
class EditDistance:
"""
Use :
solver = EditDistance()
editDistanceResult = solver.solve(firstString, secondString)
"""
def __init__(self):
self.__prepare__()
def __prepare__(self, N = 0, M = 0):
self.dp = [[-1 for y in range(0,M)] for x in range(0,N)]
def __solveDP(self, x, y):
if (x==-1):
return y+1
elif (y==-1):
return x+1
elif (self.dp[x][y]>-1):
return self.dp[x][y]
else:
if (self.A[x]==self.B[y]):
self.dp[x][y] = self.__solveDP(x-1,y-1)
else:
self.dp[x][y] = 1+min(self.__solveDP(x,y-1), self.__solveDP(x-1,y), self.__solveDP(x-1,y-1))
return self.dp[x][y]
def solve(self, A, B):
if isinstance(A,bytes):
A = A.decode('ascii')
if isinstance(B,bytes):
B = B.decode('ascii')
self.A = str(A)
self.B = str(B)
self.__prepare__(len(A), len(B))
return self.__solveDP(len(A)-1, len(B)-1)
if __name__ == '__main__':
import sys
if sys.version_info.major < 3:
input_function = raw_input
else:
input_function = input
solver = EditDistance()
print("****************** Testing Edit Distance DP Algorithm ******************")
print()
print("Enter the first string: ", end="")
S1 = input_function()
print("Enter the second string: ", end="")
S2 = input_function()
print()
print("The minimum Edit Distance is: %d" % (solver.solve(S1, S2)))
print()
print("*************** End of Testing Edit Distance DP Algorithm ***************")
|
Add Edit Distance DP Algorithm"""
Author : Turfa Auliarachman
Date : October 12, 2016
This is a pure Python implementation of Dynamic Programming solution to the edit distance problem.
The problem is :
Given two strings A and B. Find the minimum number of operations to string B such that A = B. The permitted operations are removal, insertion, and substitution.
"""
class EditDistance:
"""
Use :
solver = EditDistance()
editDistanceResult = solver.solve(firstString, secondString)
"""
def __init__(self):
self.__prepare__()
def __prepare__(self, N = 0, M = 0):
self.dp = [[-1 for y in range(0,M)] for x in range(0,N)]
def __solveDP(self, x, y):
if (x==-1):
return y+1
elif (y==-1):
return x+1
elif (self.dp[x][y]>-1):
return self.dp[x][y]
else:
if (self.A[x]==self.B[y]):
self.dp[x][y] = self.__solveDP(x-1,y-1)
else:
self.dp[x][y] = 1+min(self.__solveDP(x,y-1), self.__solveDP(x-1,y), self.__solveDP(x-1,y-1))
return self.dp[x][y]
def solve(self, A, B):
if isinstance(A,bytes):
A = A.decode('ascii')
if isinstance(B,bytes):
B = B.decode('ascii')
self.A = str(A)
self.B = str(B)
self.__prepare__(len(A), len(B))
return self.__solveDP(len(A)-1, len(B)-1)
if __name__ == '__main__':
import sys
if sys.version_info.major < 3:
input_function = raw_input
else:
input_function = input
solver = EditDistance()
print("****************** Testing Edit Distance DP Algorithm ******************")
print()
print("Enter the first string: ", end="")
S1 = input_function()
print("Enter the second string: ", end="")
S2 = input_function()
print()
print("The minimum Edit Distance is: %d" % (solver.solve(S1, S2)))
print()
print("*************** End of Testing Edit Distance DP Algorithm ***************")
|
<commit_before><commit_msg>Add Edit Distance DP Algorithm<commit_after>"""
Author : Turfa Auliarachman
Date : October 12, 2016
This is a pure Python implementation of Dynamic Programming solution to the edit distance problem.
The problem is :
Given two strings A and B. Find the minimum number of operations to string B such that A = B. The permitted operations are removal, insertion, and substitution.
"""
class EditDistance:
"""
Use :
solver = EditDistance()
editDistanceResult = solver.solve(firstString, secondString)
"""
def __init__(self):
self.__prepare__()
def __prepare__(self, N = 0, M = 0):
self.dp = [[-1 for y in range(0,M)] for x in range(0,N)]
def __solveDP(self, x, y):
if (x==-1):
return y+1
elif (y==-1):
return x+1
elif (self.dp[x][y]>-1):
return self.dp[x][y]
else:
if (self.A[x]==self.B[y]):
self.dp[x][y] = self.__solveDP(x-1,y-1)
else:
self.dp[x][y] = 1+min(self.__solveDP(x,y-1), self.__solveDP(x-1,y), self.__solveDP(x-1,y-1))
return self.dp[x][y]
def solve(self, A, B):
if isinstance(A,bytes):
A = A.decode('ascii')
if isinstance(B,bytes):
B = B.decode('ascii')
self.A = str(A)
self.B = str(B)
self.__prepare__(len(A), len(B))
return self.__solveDP(len(A)-1, len(B)-1)
if __name__ == '__main__':
import sys
if sys.version_info.major < 3:
input_function = raw_input
else:
input_function = input
solver = EditDistance()
print("****************** Testing Edit Distance DP Algorithm ******************")
print()
print("Enter the first string: ", end="")
S1 = input_function()
print("Enter the second string: ", end="")
S2 = input_function()
print()
print("The minimum Edit Distance is: %d" % (solver.solve(S1, S2)))
print()
print("*************** End of Testing Edit Distance DP Algorithm ***************")
|
|
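The class above memoizes a top-down recursion; the same table can be filled bottom-up, which avoids Python's recursion limit on long inputs. An illustrative stand-alone version (not part of the commit):

def edit_distance_iterative(a, b):
    n, m = len(a), len(b)
    dp = [[0] * (m + 1) for _ in range(n + 1)]
    for i in range(n + 1):
        dp[i][0] = i                              # delete all of a[:i]
    for j in range(m + 1):
        dp[0][j] = j                              # insert all of b[:j]
    for i in range(1, n + 1):
        for j in range(1, m + 1):
            if a[i - 1] == b[j - 1]:
                dp[i][j] = dp[i - 1][j - 1]
            else:
                dp[i][j] = 1 + min(dp[i - 1][j],       # removal
                                   dp[i][j - 1],       # insertion
                                   dp[i - 1][j - 1])   # substitution
    return dp[n][m]

assert edit_distance_iterative("kitten", "sitting") == 3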
3a08fa5f2dba14c2527f1a381d6bb7dbaaf94553
|
dhash.py
|
dhash.py
|
'''
- a way to separate entries by key to know which machine to store it at
- eviction strategy:
* FIFO
* LRU
* LFU
- access pattern:
* write-thru
* write-around
* write-back
- latency vs consistency vs availability
- how would multithreading work? locks?
- collision resolution method
- resizing
* consistent hashing
* HRW (Rendezvous) hashing
'''
class DHash(object):
def __init__(self, config={}):
if not config:
config = {
'size': 1024,
'resizing_method': 'consistent-hashing',
'access_pattern': 'write-around',
                'eviction_strategy': 'LRU'
}
self.size = config['size']
self.resizing_method = config['resizing_method']
self.access_pattern = config['access_pattern']
def read(self, key):
pass
def write(self, key, value):
pass
class MockNode(object):
"""Implement a node.
Node will run in a separate thread and emulate a separate machine.
"""
pass
class MockDB(object):
"""Emulate database via file read/write."""
pass
class WriteThru(object):
"""Implement write-thru access pattern."""
pass
class WriteAround(object):
"""Implement write-around access pattern."""
pass
class WriteBack(object):
"""Implement write-back access pattern."""
pass
class LRU(object):
"""Implement Least-Recently-Used eviction strategy."""
pass
class LFU(object):
"""Implement Least-Frequently-Used eviction strategy."""
pass
class ConsistentHashRing(object):
"""Implement a consistent hashing ring."""
pass
class RendezvousHashing(object):
"""Implement Highest Random Weight hashing method."""
pass
if __name__ == '__main__':
dhash = DHash()
|
Add basic config functionality and stubs for MockNode, MockDB, as well as different resizing methods, access patterns and eviction strategies.
|
Add basic config functionality and stubs for MockNode, MockDB, as well as different resizing methods, access patterns and eviction strategies.
|
Python
|
mit
|
gudnm/dhash
|
Add basic config functionality and stubs for MockNode, MockDB, as well as different resizing methods, access patterns and eviction strategies.
|
'''
- a way to separate entries by key to know which machine to store it at
- eviction strategy:
* FIFO
* LRU
* LFU
- access pattern:
* write-thru
* write-around
* write-back
- latency vs consistency vs availability
- how would multithreading work? locks?
- collision resolution method
- resizing
* consistent hashing
* HRW (Rendezvous) hashing
'''
class DHash(object):
def __init__(self, config={}):
if not config:
config = {
'size': 1024,
'resizing_method': 'consistent-hashing',
'access_pattern': 'write-around',
                'eviction_strategy': 'LRU'
}
self.size = config['size']
self.resizing_method = config['resizing_method']
self.access_pattern = config['access_pattern']
def read(self, key):
pass
def write(self, key, value):
pass
class MockNode(object):
"""Implement a node.
Node will run in a separate thread and emulate a separate machine.
"""
pass
class MockDB(object):
"""Emulate database via file read/write."""
pass
class WriteThru(object):
"""Implement write-thru access pattern."""
pass
class WriteAround(object):
"""Implement write-around access pattern."""
pass
class WriteBack(object):
"""Implement write-back access pattern."""
pass
class LRU(object):
"""Implement Least-Recently-Used eviction strategy."""
pass
class LFU(object):
"""Implement Least-Frequently-Used eviction strategy."""
pass
class ConsistentHashRing(object):
"""Implement a consistent hashing ring."""
pass
class RendezvousHashing(object):
"""Implement Highest Random Weight hashing method."""
pass
if __name__ == '__main__':
dhash = DHash()
|
<commit_before><commit_msg>Add basic config functionality and stubs for MockNode, MockDB, as well as different resizing methods, access patterns and eviction strategies.<commit_after>
|
'''
- a way to separate entries by key to know which machine to store it at
- eviction strategy:
* FIFO
* LRU
* LFU
- access pattern:
* write-thru
* write-around
* write-back
- latency vs consistency vs availability
- how would multithreading work? locks?
- collision resolution method
- resizing
* consistent hashing
* HRW (Rendezvous) hashing
'''
class DHash(object):
def __init__(self, config={}):
if not config:
config = {
'size': 1024,
'resizing_method': 'consistent-hashing',
'access_pattern': 'write-around',
                'eviction_strategy': 'LRU'
}
self.size = config['size']
self.resizing_method = config['resizing_method']
self.access_pattern = config['access_pattern']
def read(self, key):
pass
def write(self, key, value):
pass
class MockNode(object):
"""Implement a node.
Node will run in a separate thread and emulate a separate machine.
"""
pass
class MockDB(object):
"""Emulate database via file read/write."""
pass
class WriteThru(object):
"""Implement write-thru access pattern."""
pass
class WriteAround(object):
"""Implement write-around access pattern."""
pass
class WriteBack(object):
"""Implement write-back access pattern."""
pass
class LRU(object):
"""Implement Least-Recently-Used eviction strategy."""
pass
class LFU(object):
"""Implement Least-Frequently-Used eviction strategy."""
pass
class ConsistentHashRing(object):
"""Implement a consistent hashing ring."""
pass
class RendezvousHashing(object):
"""Implement Highest Random Weight hashing method."""
pass
if __name__ == '__main__':
dhash = DHash()
|
Add basic config functionality and stubs for MockNode, MockDB, as well as different resizing methods, access patterns and eviction strategies.'''
- a way to separate entries by key to know which machine to store it at
- eviction strategy:
* FIFO
* LRU
* LFU
- access pattern:
* write-thru
* write-around
* write-back
- latency vs consistency vs availability
- how would multithreading work? locks?
- collision resolution method
- resizing
* consistent hashing
* HRW (Rendezvous) hashing
'''
class DHash(object):
def __init__(self, config={}):
if not config:
config = {
'size': 1024,
'resizing_method': 'consistent-hashing',
'access_pattern': 'write-around',
                'eviction_strategy': 'LRU'
}
self.size = config['size']
self.resizing_method = config['resizing_method']
self.access_pattern = config['access_pattern']
def read(self, key):
pass
def write(self, key, value):
pass
class MockNode(object):
"""Implement a node.
Node will run in a separate thread and emulate a separate machine.
"""
pass
class MockDB(object):
"""Emulate database via file read/write."""
pass
class WriteThru(object):
"""Implement write-thru access pattern."""
pass
class WriteAround(object):
"""Implement write-around access pattern."""
pass
class WriteBack(object):
"""Implement write-back access pattern."""
pass
class LRU(object):
"""Implement Least-Recently-Used eviction strategy."""
pass
class LFU(object):
"""Implement Least-Frequently-Used eviction strategy."""
pass
class ConsistentHashRing(object):
"""Implement a consistent hashing ring."""
pass
class RendezvousHashing(object):
"""Implement Highest Random Weight hashing method."""
pass
if __name__ == '__main__':
dhash = DHash()
|
<commit_before><commit_msg>Add basic config functionality and stubs for MockNode, MockDB, as well as different resizing methods, access patterns and eviction strategies.<commit_after>'''
- a way to separate entries by key to know which machine to store it at
- eviction strategy:
* FIFO
* LRU
* LFU
- access pattern:
* write-thru
* write-around
* write-back
- latency vs consistency vs availability
- how would multithreading work? locks?
- collision resolution method
- resizing
* consistent hashing
* HRW (Rendezvous) hashing
'''
class DHash(object):
def __init__(self, config={}):
if not config:
config = {
'size': 1024,
'resizing_method': 'consistent-hashing',
'access_pattern': 'write-around',
                'eviction_strategy': 'LRU'
}
self.size = config['size']
self.resizing_method = config['resizing_method']
self.access_pattern = config['access_pattern']
def read(self, key):
pass
def write(self, key, value):
pass
class MockNode(object):
"""Implement a node.
Node will run in a separate thread and emulate a separate machine.
"""
pass
class MockDB(object):
"""Emulate database via file read/write."""
pass
class WriteThru(object):
"""Implement write-thru access pattern."""
pass
class WriteAround(object):
"""Implement write-around access pattern."""
pass
class WriteBack(object):
"""Implement write-back access pattern."""
pass
class LRU(object):
"""Implement Least-Recently-Used eviction strategy."""
pass
class LFU(object):
"""Implement Least-Frequently-Used eviction strategy."""
pass
class ConsistentHashRing(object):
"""Implement a consistent hashing ring."""
pass
class RendezvousHashing(object):
"""Implement Highest Random Weight hashing method."""
pass
if __name__ == '__main__':
dhash = DHash()
|
|
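Of the resizing stubs above, RendezvousHashing (HRW) is the simpler one to flesh out: every node scores every key, the highest score wins, and removing a node only remaps the keys that node owned. The sketch below is one minimal reading of that stub; the md5-based scoring is an arbitrary illustrative choice, not something the commit specifies.

import hashlib

def hrw_node(key, nodes):
    # Return the node with the highest hash weight for this key.
    def score(node):
        digest = hashlib.md5(('%s:%s' % (node, key)).encode()).hexdigest()
        return int(digest, 16)
    return max(nodes, key=score)

nodes = ['node-a', 'node-b', 'node-c']
print(hrw_node('user:42', nodes))          # stable for a fixed node set
print(hrw_node('user:42', nodes[:2]))      # changes only if 'node-c' owned the key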
67320776e7c93004d023c71f6e855fea88f03d7c
|
statsmodels/tsa/tests/test_x13.py
|
statsmodels/tsa/tests/test_x13.py
|
from nose import SkipTest
from numpy.testing import assert_
from statsmodels.tsa.base.datetools import dates_from_range
from statsmodels.tsa.x12 import _find_x12, select_arima_order
x13path = _find_x12()
if x13path is False:
_have_x13 = False
else:
_have_x13 = True
class TestX13(object):
@classmethod
def setupClass(cls):
if not _have_x13:
raise SkipTest('X13/X12 not available')
import pandas as pd
from statsmodels.datasets import macrodata, co2
dta = macrodata.load_pandas().data
dates = dates_from_range('1959Q1', '2009Q3')
index = pd.DatetimeIndex(dates)
dta.index = index
cls.quarterly_data = dta.dropna()
dta = co2.load_pandas().data
dta['co2'] = dta.co2.interpolate()
cls.monthly_data = dta.resample('M')
cls.monthly_start_data = dta.resample('MS')
def test_select_arima_order(self):
res = select_arima_order(self.monthly_data)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.monthly_start_data)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.monthly_data.co2)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.monthly_start_data.co2)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.quarterly_data[['realgdp']])
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.quarterly_data.realgdp)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
|
Add some smoke tests for x12/x13
|
TST: Add some smoke tests for x12/x13
|
Python
|
bsd-3-clause
|
gef756/statsmodels,adammenges/statsmodels,musically-ut/statsmodels,hlin117/statsmodels,josef-pkt/statsmodels,statsmodels/statsmodels,saketkc/statsmodels,DonBeo/statsmodels,saketkc/statsmodels,alekz112/statsmodels,ChadFulton/statsmodels,wzbozon/statsmodels,bashtage/statsmodels,DonBeo/statsmodels,wwf5067/statsmodels,astocko/statsmodels,hlin117/statsmodels,cbmoore/statsmodels,alekz112/statsmodels,wdurhamh/statsmodels,gef756/statsmodels,musically-ut/statsmodels,nvoron23/statsmodels,ChadFulton/statsmodels,YihaoLu/statsmodels,wzbozon/statsmodels,bert9bert/statsmodels,detrout/debian-statsmodels,wkfwkf/statsmodels,kiyoto/statsmodels,cbmoore/statsmodels,josef-pkt/statsmodels,nvoron23/statsmodels,detrout/debian-statsmodels,cbmoore/statsmodels,hainm/statsmodels,adammenges/statsmodels,gef756/statsmodels,wdurhamh/statsmodels,bzero/statsmodels,kiyoto/statsmodels,hainm/statsmodels,YihaoLu/statsmodels,DonBeo/statsmodels,waynenilsen/statsmodels,nguyentu1602/statsmodels,waynenilsen/statsmodels,DonBeo/statsmodels,jstoxrocky/statsmodels,hainm/statsmodels,wkfwkf/statsmodels,statsmodels/statsmodels,gef756/statsmodels,wdurhamh/statsmodels,phobson/statsmodels,kiyoto/statsmodels,statsmodels/statsmodels,hlin117/statsmodels,wzbozon/statsmodels,yl565/statsmodels,bzero/statsmodels,astocko/statsmodels,bert9bert/statsmodels,bsipocz/statsmodels,Averroes/statsmodels,jstoxrocky/statsmodels,detrout/debian-statsmodels,hainm/statsmodels,yl565/statsmodels,astocko/statsmodels,yl565/statsmodels,yl565/statsmodels,jseabold/statsmodels,wkfwkf/statsmodels,ChadFulton/statsmodels,kiyoto/statsmodels,huongttlan/statsmodels,gef756/statsmodels,josef-pkt/statsmodels,jstoxrocky/statsmodels,cbmoore/statsmodels,statsmodels/statsmodels,saketkc/statsmodels,edhuckle/statsmodels,huongttlan/statsmodels,nvoron23/statsmodels,josef-pkt/statsmodels,wwf5067/statsmodels,wwf5067/statsmodels,adammenges/statsmodels,nguyentu1602/statsmodels,statsmodels/statsmodels,bert9bert/statsmodels,edhuckle/statsmodels,nguyentu1602/statsmodels,alekz112/statsmodels,ChadFulton/statsmodels,musically-ut/statsmodels,yl565/statsmodels,bsipocz/statsmodels,jseabold/statsmodels,nvoron23/statsmodels,nvoron23/statsmodels,bashtage/statsmodels,jseabold/statsmodels,waynenilsen/statsmodels,phobson/statsmodels,Averroes/statsmodels,cbmoore/statsmodels,YihaoLu/statsmodels,kiyoto/statsmodels,wzbozon/statsmodels,bert9bert/statsmodels,detrout/debian-statsmodels,wwf5067/statsmodels,bzero/statsmodels,jseabold/statsmodels,wdurhamh/statsmodels,edhuckle/statsmodels,bert9bert/statsmodels,huongttlan/statsmodels,YihaoLu/statsmodels,phobson/statsmodels,bsipocz/statsmodels,Averroes/statsmodels,ChadFulton/statsmodels,wzbozon/statsmodels,adammenges/statsmodels,edhuckle/statsmodels,huongttlan/statsmodels,edhuckle/statsmodels,bashtage/statsmodels,bzero/statsmodels,josef-pkt/statsmodels,wkfwkf/statsmodels,josef-pkt/statsmodels,bashtage/statsmodels,Averroes/statsmodels,saketkc/statsmodels,DonBeo/statsmodels,wdurhamh/statsmodels,phobson/statsmodels,wkfwkf/statsmodels,astocko/statsmodels,jseabold/statsmodels,bashtage/statsmodels,saketkc/statsmodels,bashtage/statsmodels,jstoxrocky/statsmodels,bsipocz/statsmodels,bzero/statsmodels,ChadFulton/statsmodels,statsmodels/statsmodels,alekz112/statsmodels,hlin117/statsmodels,waynenilsen/statsmodels,phobson/statsmodels,YihaoLu/statsmodels,musically-ut/statsmodels,nguyentu1602/statsmodels
|
TST: Add some smoke tests for x12/x13
|
from nose import SkipTest
from numpy.testing import assert_
from statsmodels.tsa.base.datetools import dates_from_range
from statsmodels.tsa.x12 import _find_x12, select_arima_order
x13path = _find_x12()
if x13path is False:
_have_x13 = False
else:
_have_x13 = True
class TestX13(object):
@classmethod
def setupClass(cls):
if not _have_x13:
raise SkipTest('X13/X12 not available')
import pandas as pd
from statsmodels.datasets import macrodata, co2
dta = macrodata.load_pandas().data
dates = dates_from_range('1959Q1', '2009Q3')
index = pd.DatetimeIndex(dates)
dta.index = index
cls.quarterly_data = dta.dropna()
dta = co2.load_pandas().data
dta['co2'] = dta.co2.interpolate()
cls.monthly_data = dta.resample('M')
cls.monthly_start_data = dta.resample('MS')
def test_select_arima_order(self):
res = select_arima_order(self.monthly_data)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.monthly_start_data)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.monthly_data.co2)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.monthly_start_data.co2)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.quarterly_data[['realgdp']])
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.quarterly_data.realgdp)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
|
<commit_before><commit_msg>TST: Add some smoke tests for x12/x13<commit_after>
|
from nose import SkipTest
from numpy.testing import assert_
from statsmodels.tsa.base.datetools import dates_from_range
from statsmodels.tsa.x12 import _find_x12, select_arima_order
x13path = _find_x12()
if x13path is False:
_have_x13 = False
else:
_have_x13 = True
class TestX13(object):
@classmethod
def setupClass(cls):
if not _have_x13:
raise SkipTest('X13/X12 not available')
import pandas as pd
from statsmodels.datasets import macrodata, co2
dta = macrodata.load_pandas().data
dates = dates_from_range('1959Q1', '2009Q3')
index = pd.DatetimeIndex(dates)
dta.index = index
cls.quarterly_data = dta.dropna()
dta = co2.load_pandas().data
dta['co2'] = dta.co2.interpolate()
cls.monthly_data = dta.resample('M')
cls.monthly_start_data = dta.resample('MS')
def test_select_arima_order(self):
res = select_arima_order(self.monthly_data)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.monthly_start_data)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.monthly_data.co2)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.monthly_start_data.co2)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.quarterly_data[['realgdp']])
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
res = select_arima_order(self.quarterly_data.realgdp)
assert_(isinstance(res.order, tuple))
assert_(isinstance(res.sorder, tuple))
|
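A minimal sketch of the same skip-if-X13-unavailable idea using pytest instead of nose; the _find_x12 import path is taken from the commit above, while the marker usage and test body are illustrative:

import pytest
from statsmodels.tsa.x12 import _find_x12

# Assumption: pytest and statsmodels are installed; only _find_x12 comes from the commit.
x13path = _find_x12()

@pytest.mark.skipif(x13path is False, reason='X13/X12 not available')
def test_select_arima_order_smoke():
    # Would mirror the nose-based smoke assertions in the commit above.
    pass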
|
bd79e93b12f2d0563492a2c89813927d18c06ac1
|
tensorflow/python/tpu/__init__.py
|
tensorflow/python/tpu/__init__.py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Ops related to Tensor Processing Units."""
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Ops related to Tensor Processing Units."""
import os
os.environ['TPU_ML_PLATFORM'] = 'Tensorflow'
|
Initialize TPU_ML_PLATFORM env variable with `Tensorflow`.
|
Initialize TPU_ML_PLATFORM env variable with `Tensorflow`.
PiperOrigin-RevId: 474832980
|
Python
|
apache-2.0
|
paolodedios/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,karllessard/tensorflow,karllessard/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-experimental_link_static_libraries_once,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,yongtang/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,Intel-tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,paolodedios/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,paolodedios/tensorflow,karllessard/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,tensorflow/tensorflow-experimental_link_static_libraries_once,yongtang/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_saved_model,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow-pywrap_tf_optimizer,tensorflow/tensorflow,yongtang/tensorflow,yongtang/tensorflow,yongtang/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,yongtang/tensorflow,karllessard/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,Intel-tensorflow/tensorflow,tensorflow/tensorflow,paolodedios/tensorflow,karllessard/tensorflow,karllessard/tensorflow,tensorflow/tensorflow-experimental_link_static_libraries_once,karllessard/tensorflow,tensorflow/tensorflow-pywrap_saved_model,karllessard/tensorflow,paolodedios/tensorflow,tensorflow/tensorflow-pywrap_saved_model,Intel-tensorflow/tensorflow,tensorflow/tensorflow-pywrap_saved_model
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Ops related to Tensor Processing Units."""
Initialize TPU_ML_PLATFORM env variable with `Tensorflow`.
PiperOrigin-RevId: 474832980
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Ops related to Tensor Processing Units."""
import os
os.environ['TPU_ML_PLATFORM'] = 'Tensorflow'
|
<commit_before># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Ops related to Tensor Processing Units."""
<commit_msg>Initialize TPU_ML_PLATFORM env variable with `Tensorflow`.
PiperOrigin-RevId: 474832980<commit_after>
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Ops related to Tensor Processing Units."""
import os
os.environ['TPU_ML_PLATFORM'] = 'Tensorflow'
|
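A hypothetical consumer of the environment variable set above; the key name 'TPU_ML_PLATFORM' comes from the commit, while the reader function is illustrative:

import os

def get_ml_platform(default='Unknown'):
    # 'TPU_ML_PLATFORM' is set by tensorflow.python.tpu at import time.
    return os.environ.get('TPU_ML_PLATFORM', default)

print(get_ml_platform())  # 'Tensorflow' once the module above has been imported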
|
6aace4f414aa004551b03702c1dd2f5b5e9c8143
|
tests/test_caster.py
|
tests/test_caster.py
|
"""Additional tests for the caster to ensure full code coverage.
"""
import pytest
def test_corner():
from aflow.caster import cast
assert cast("numbers", "spinD", None) is None
assert cast("numbers", "spinD", "garbage") is None
|
Increase test coverage for corner cases.
|
Increase test coverage for corner cases.
|
Python
|
mit
|
rosenbrockc/aflow
|
Increase test coverage for corner cases.
|
"""Additional tests for the caster to ensure full code coverage.
"""
import pytest
def test_corner():
from aflow.caster import cast
assert cast("numbers", "spinD", None) is None
assert cast("numbers", "spinD", "garbage") is None
|
<commit_before><commit_msg>Increase test coverage for corner cases.<commit_after>
|
"""Additional tests for the caster to ensure full code coverage.
"""
import pytest
def test_corner():
from aflow.caster import cast
assert cast("numbers", "spinD", None) is None
assert cast("numbers", "spinD", "garbage") is None
|
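The two assertions pin down a None-on-bad-input contract for cast(). A self-contained sketch of that contract in isolation; aflow's real caster is richer, and only the behaviour exercised by the tests is reproduced here:

def cast_number(value):
    # Illustrative stand-in for aflow.caster.cast on numeric entries.
    if value is None:
        return None
    try:
        return float(value)
    except (TypeError, ValueError):
        return None

assert cast_number(None) is None
assert cast_number("garbage") is None
assert cast_number("3.5") == 3.5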
|
7de33228e89139605f7eb20ebd7ba3192306c123
|
dataedit/management/commands/mirror.py
|
dataedit/management/commands/mirror.py
|
from django.core.management.base import BaseCommand, CommandError
from sqlalchemy.orm.session import sessionmaker
from api.connection import _get_engine
from dataedit.models import Schema, Table
from dataedit.views import schema_whitelist
import sqlalchemy as sqla
class Command(BaseCommand):
def handle(self, *args, **options):
engine = _get_engine()
inspector = sqla.inspect(engine)
real_tables = {(schema, table_name) for schema in schema_whitelist
for table_name in inspector.get_table_names(schema=schema) if schema in schema_whitelist}
table_objects = {(t.schema.name, t.name) for t in Table.objects.all() if t.schema.name in schema_whitelist}
for schema, table in table_objects.difference(real_tables):
Table.objects.get(name=table, schema__name=schema).delete()
for schema, table in real_tables.difference(table_objects):
            schema_object, _ = Schema.objects.get_or_create(name=schema)
            Table.objects.create(name=table, schema=schema_object)
|
Add transition method for legacy databases
|
Add transition method for legacy databases
|
Python
|
agpl-3.0
|
openego/oeplatform,openego/oeplatform,openego/oeplatform,openego/oeplatform
|
Add transition method for legacy databases
|
from django.core.management.base import BaseCommand, CommandError
from sqlalchemy.orm.session import sessionmaker
from api.connection import _get_engine
from dataedit.models import Schema, Table
from dataedit.views import schema_whitelist
import sqlalchemy as sqla
class Command(BaseCommand):
def handle(self, *args, **options):
engine = _get_engine()
inspector = sqla.inspect(engine)
real_tables = {(schema, table_name) for schema in schema_whitelist
for table_name in inspector.get_table_names(schema=schema) if schema in schema_whitelist}
table_objects = {(t.schema.name, t.name) for t in Table.objects.all() if t.schema.name in schema_whitelist}
for schema, table in table_objects.difference(real_tables):
Table.objects.get(name=table, schema__name=schema).delete()
for schema, table in real_tables.difference(table_objects):
            schema_object, _ = Schema.objects.get_or_create(name=schema)
            Table.objects.create(name=table, schema=schema_object)
|
<commit_before><commit_msg>Add transition method for legacy databases<commit_after>
|
from django.core.management.base import BaseCommand, CommandError
from sqlalchemy.orm.session import sessionmaker
from api.connection import _get_engine
from dataedit.models import Schema, Table
from dataedit.views import schema_whitelist
import sqlalchemy as sqla
class Command(BaseCommand):
def handle(self, *args, **options):
engine = _get_engine()
inspector = sqla.inspect(engine)
real_tables = {(schema, table_name) for schema in schema_whitelist
for table_name in inspector.get_table_names(schema=schema) if schema in schema_whitelist}
table_objects = {(t.schema.name, t.name) for t in Table.objects.all() if t.schema.name in schema_whitelist}
for schema, table in table_objects.difference(real_tables):
Table.objects.get(name=table, schema__name=schema).delete()
for schema, table in real_tables.difference(table_objects):
            schema_object, _ = Schema.objects.get_or_create(name=schema)
            Table.objects.create(name=table, schema=schema_object)
|
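The command reconciles Django's Table rows with the tables SQLAlchemy actually sees, using two set differences. A self-contained sketch of that reconciliation step; the data is made up, since the real command derives both sets from the database:

real_tables = {('model_draft', 'plants'), ('model_draft', 'grids')}
table_objects = {('model_draft', 'grids'), ('model_draft', 'orphan')}

stale = table_objects - real_tables    # Table rows whose backing table is gone
missing = real_tables - table_objects  # real tables with no Table row yet
print(stale)    # {('model_draft', 'orphan')}
print(missing)  # {('model_draft', 'plants')}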
|
e4e70331e95a515d669e576ebcdb854d141a5a84
|
Tools/remove-trailing-whitespace.py
|
Tools/remove-trailing-whitespace.py
|
#!/usr/bin/python
# This file is a part of the OpenSurgSim project.
# Copyright 2012-2013, SimQuest Solutions Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Remove all trailing whitespace from a file.
Typical usage:
Tools/remove-trailing-whitespace.py Foo.h
or:
find SurgSim \( -name '*.h' -o -name '*.cpp' \) -print \
| xargs python Tools/remove-trailing-whitespace.py
"""
import argparse
import sys
import re
def slurp_raw_lines(file):
try:
with open(file, 'rb') as f:
return f.readlines()
except IOError as e:
print >> sys.stderr, e
return None
def spew_raw_lines(file, lines):
try:
with open(file, 'wb') as f:
for line in lines:
f.write(line)
return True
except IOError as e:
print >> sys.stderr, e
return False
def update(file, lines):
if lines is None:
return None
eol = "\n"
if len(lines) and re.search(r'\r\n$', lines[0]):
eol = "\r\n"
result = map(lambda x: x.rstrip() + eol, lines)
if result == lines:
return None
return result
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Remove all trailing whitespace from a file.")
parser.add_argument('files', metavar='FILE', nargs='*',
help='The file names to modify.')
args = parser.parse_args()
touched = False
for file in args.files:
lines = update(file, slurp_raw_lines(file))
if lines is not None:
spew_raw_lines(file, lines)
print "Updated", file
touched = True
if not touched:
print "{}: Nothing to update!".format(sys.argv[0])
|
Add a script to remove trailing whitespace from a file.
|
Add a script to remove trailing whitespace from a file.
Typical usage:
Tools/remove-trailing-whitespace.py Foo.h
or:
find SurgSim \( -name '*.h' -o -name '*.cpp' \) -print \
| xargs python Tools/remove-trailing-whitespace.py
|
Python
|
apache-2.0
|
simquest/opensurgsim,simquest/opensurgsim,simquest/opensurgsim,simquest/opensurgsim
|
Add a script to remove trailing whitespace from a file.
Typical usage:
Tools/remove-trailing-whitespace.py Foo.h
or:
find SurgSim \( -name '*.h' -o -name '*.cpp' \) -print \
| xargs python Tools/remove-trailing-whitespace.py
|
#!/usr/bin/python
# This file is a part of the OpenSurgSim project.
# Copyright 2012-2013, SimQuest Solutions Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Remove all trailing whitespace from a file.
Typical usage:
Tools/remove-trailing-whitespace.py Foo.h
or:
find SurgSim \( -name '*.h' -o -name '*.cpp' \) -print \
| xargs python Tools/remove-trailing-whitespace.py
"""
import argparse
import sys
import re
def slurp_raw_lines(file):
try:
with open(file, 'rb') as f:
return f.readlines()
except IOError as e:
print >> sys.stderr, e
return None
def spew_raw_lines(file, lines):
try:
with open(file, 'wb') as f:
for line in lines:
f.write(line)
return True
except IOError as e:
print >> sys.stderr, e
return False
def update(file, lines):
if lines is None:
return None
eol = "\n"
if len(lines) and re.search(r'\r\n$', lines[0]):
eol = "\r\n"
result = map(lambda x: x.rstrip() + eol, lines)
if result == lines:
return None
return result
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Remove all trailing whitespace from a file.")
parser.add_argument('files', metavar='FILE', nargs='*',
help='The file names to modify.')
args = parser.parse_args()
touched = False
for file in args.files:
lines = update(file, slurp_raw_lines(file))
if lines is not None:
spew_raw_lines(file, lines)
print "Updated", file
touched = True
if not touched:
print "{}: Nothing to update!".format(sys.argv[0])
|
<commit_before><commit_msg>Add a script to remove trailing whitespace from a file.
Typical usage:
Tools/remove-trailing-whitespace.py Foo.h
or:
find SurgSim \( -name '*.h' -o -name '*.cpp' \) -print \
| xargs python Tools/remove-trailing-whitespace.py<commit_after>
|
#!/usr/bin/python
# This file is a part of the OpenSurgSim project.
# Copyright 2012-2013, SimQuest Solutions Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Remove all trailing whitespace from a file.
Typical usage:
Tools/remove-trailing-whitespace.py Foo.h
or:
find SurgSim \( -name '*.h' -o -name '*.cpp' \) -print \
| xargs python Tools/remove-trailing-whitespace.py
"""
import argparse
import sys
import re
def slurp_raw_lines(file):
try:
with open(file, 'rb') as f:
return f.readlines()
except IOError as e:
print >> sys.stderr, e
return None
def spew_raw_lines(file, lines):
try:
with open(file, 'wb') as f:
for line in lines:
f.write(line)
return True
except IOError as e:
print >> sys.stderr, e
return False
def update(file, lines):
if lines is None:
return None
eol = "\n"
if len(lines) and re.search(r'\r\n$', lines[0]):
eol = "\r\n"
result = map(lambda x: x.rstrip() + eol, lines)
if result == lines:
return None
return result
if __name__ == '__main__':
parser = argparse.ArgumentParser(
description="Remove all trailing whitespace from a file.")
parser.add_argument('files', metavar='FILE', nargs='*',
help='The file names to modify.')
args = parser.parse_args()
touched = False
for file in args.files:
lines = update(file, slurp_raw_lines(file))
if lines is not None:
spew_raw_lines(file, lines)
print "Updated", file
touched = True
if not touched:
print "{}: Nothing to update!".format(sys.argv[0])
|
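The script is Python 2 (bare print statements, map() consumed as a list). A Python 3 sketch of its core step, keeping the detect-CRLF-then-rstrip behaviour of update(); the behaviour is inferred from the commit, not taken from a later port:

import re

def strip_trailing_whitespace(lines):
    # Preserve CRLF endings if the first line uses them, as update() does.
    eol = "\r\n" if lines and re.search(r'\r\n$', lines[0]) else "\n"
    result = [line.rstrip() + eol for line in lines]
    return None if result == lines else result  # None means nothing to change

print(strip_trailing_whitespace(["foo  \n", "bar\t\n"]))  # ['foo\n', 'bar\n']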
|
960a12ee047a915e29258fcafcf642be11b838ed
|
overlay/DataRegion.py
|
overlay/DataRegion.py
|
class DataRegion:
def __init__(self):
self.data = []
def addTimeData(self, data_time, data_value):
self.data.append((data_time, data_value))
def isTimeInRegion(self, secs_since_epoch):
return self.data[0][0] <= secs_since_epoch <= self.data[-1][0]
def dataInTimeRegion(self, start_time, end_time):
in_region = False
result = []
for item in self.data:
# print(item[0])
if not in_region:
if start_time <= item[0] <= end_time:
in_region = True
result.append(item)
else:
if end_time < item[0]:
in_region = False
else:
result.append(item)
return result
def interpolatedValueAtTime(self, secs_since_epoch):
if secs_since_epoch < self.data[0][0]:
return None
elif self.data[-1][0] < secs_since_epoch:
return None
else:
start = None
end = None
for (time, value) in self.data:
if time == secs_since_epoch:
return value
else:
if time <= secs_since_epoch:
start = (time, value)
elif secs_since_epoch < time:
if end is None:
end = (time, value)
time_delta = end[0] - start[0]
                        percent = (secs_since_epoch - start[0]) / time_delta
value_delta = end[1] - start[1]
return start[1] + value_delta * percent
|
Create class to hold regions of data
|
Create class to hold regions of data
|
Python
|
mit
|
gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,gizmo-cda/g2x,thelonious/g2x,thelonious/g2x
|
Create class to hold regions of data
|
class DataRegion:
def __init__(self):
self.data = []
def addTimeData(self, data_time, data_value):
self.data.append((data_time, data_value))
def isTimeInRegion(self, secs_since_epoch):
return self.data[0][0] <= secs_since_epoch <= self.data[-1][0]
def dataInTimeRegion(self, start_time, end_time):
in_region = False
result = []
for item in self.data:
# print(item[0])
if not in_region:
if start_time <= item[0] <= end_time:
in_region = True
result.append(item)
else:
if end_time < item[0]:
in_region = False
else:
result.append(item)
return result
def interpolatedValueAtTime(self, secs_since_epoch):
if secs_since_epoch < self.data[0][0]:
return None
elif self.data[-1][0] < secs_since_epoch:
return None
else:
start = None
end = None
for (time, value) in self.data:
if time == secs_since_epoch:
return value
else:
if time <= secs_since_epoch:
start = (time, value)
elif secs_since_epoch < time:
if end is None:
end = (time, value)
time_delta = end[0] - start[0]
                        percent = (secs_since_epoch - start[0]) / time_delta
value_delta = end[1] - start[1]
return start[1] + value_delta * percent
|
<commit_before><commit_msg>Create class to hold regions of data<commit_after>
|
class DataRegion:
def __init__(self):
self.data = []
def addTimeData(self, data_time, data_value):
self.data.append((data_time, data_value))
def isTimeInRegion(self, secs_since_epoch):
return self.data[0][0] <= secs_since_epoch <= self.data[-1][0]
def dataInTimeRegion(self, start_time, end_time):
in_region = False
result = []
for item in self.data:
# print(item[0])
if not in_region:
if start_time <= item[0] <= end_time:
in_region = True
result.append(item)
else:
if end_time < item[0]:
in_region = False
else:
result.append(item)
return result
def interpolatedValueAtTime(self, secs_since_epoch):
if secs_since_epoch < self.data[0][0]:
return None
elif self.data[-1][0] < secs_since_epoch:
return None
else:
start = None
end = None
for (time, value) in self.data:
if time == secs_since_epoch:
return value
else:
if time <= secs_since_epoch:
start = (time, value)
elif secs_since_epoch < time:
if end is None:
end = (time, value)
time_delta = end[0] - start[0]
                        percent = (secs_since_epoch - start[0]) / time_delta
value_delta = end[1] - start[1]
return start[1] + value_delta * percent
|
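A worked example of the linear interpolation interpolatedValueAtTime() performs between the bracketing samples; with samples (t0, v0) and (t1, v1), the blend fraction is (t - t0) / (t1 - t0):

start = (100.0, 10.0)  # (time, value) sample before t
end = (200.0, 30.0)    # (time, value) sample after t
t = 150.0

percent = (t - start[0]) / (end[0] - start[0])  # 0.5
value = start[1] + (end[1] - start[1]) * percent
print(value)  # 20.0, halfway between the two sample values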
|
6373384ddc2836fc23db27d1b230b9f4228a0e38
|
bluebottle/cms/migrations/0062_auto_20200812_1514.py
|
bluebottle/cms/migrations/0062_auto_20200812_1514.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-07-17 08:37
from __future__ import unicode_literals
from django.core.exceptions import ObjectDoesNotExist
from django.db import migrations, models, connection
from django.utils.translation import activate, _trans
from tenant_extras.middleware import tenant_translation
from parler.models import TranslatableModelMixin
def remove_old_statistic_block_from_homepage(apps, schema_editor):
StatsContent = apps.get_model('cms', 'StatsContent')
ContentType = apps.get_model('contenttypes', 'ContentType')
for stats_content in StatsContent.objects.all():
if stats_content.placeholder and stats_content.placeholder.parent_type.model == 'homepage':
            print(stats_content)
stats_content.stats.all().delete()
with connection.cursor() as c:
c.execute(
'delete from contentitem_cms_statscontent where contentitem_ptr_id = {};'.format(
stats_content.contentitem_ptr_id
)
)
class Migration(migrations.Migration):
dependencies = [
('cms', '0061_auto_20200812_1030'),
]
operations = [
migrations.RunPython(
remove_old_statistic_block_from_homepage,
migrations.RunPython.noop
)
]
|
Add migration that deletes old statscontent objects from homepage
|
Add migration that deletes old statscontent objects from homepage
|
Python
|
bsd-3-clause
|
onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle,onepercentclub/bluebottle
|
Add migration that deletes old statscontent objects from homepage
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-07-17 08:37
from __future__ import unicode_literals
from django.core.exceptions import ObjectDoesNotExist
from django.db import migrations, models, connection
from django.utils.translation import activate, _trans
from tenant_extras.middleware import tenant_translation
from parler.models import TranslatableModelMixin
def remove_old_statistic_block_from_homepage(apps, schema_editor):
StatsContent = apps.get_model('cms', 'StatsContent')
ContentType = apps.get_model('contenttypes', 'ContentType')
for stats_content in StatsContent.objects.all():
if stats_content.placeholder and stats_content.placeholder.parent_type.model == 'homepage':
            print(stats_content)
stats_content.stats.all().delete()
with connection.cursor() as c:
c.execute(
'delete from contentitem_cms_statscontent where contentitem_ptr_id = {};'.format(
stats_content.contentitem_ptr_id
)
)
class Migration(migrations.Migration):
dependencies = [
('cms', '0061_auto_20200812_1030'),
]
operations = [
migrations.RunPython(
remove_old_statistic_block_from_homepage,
migrations.RunPython.noop
)
]
|
<commit_before><commit_msg>Add migration that deletes old statscontent objects from homepage<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.15 on 2020-07-17 08:37
from __future__ import unicode_literals
from django.core.exceptions import ObjectDoesNotExist
from django.db import migrations, models, connection
from django.utils.translation import activate, _trans
from tenant_extras.middleware import tenant_translation
from parler.models import TranslatableModelMixin
def remove_old_statistic_block_from_homepage(apps, schema_editor):
StatsContent = apps.get_model('cms', 'StatsContent')
ContentType = apps.get_model('contenttypes', 'ContentType')
for stats_content in StatsContent.objects.all():
if stats_content.placeholder and stats_content.placeholder.parent_type.model == 'homepage':
            print(stats_content)
stats_content.stats.all().delete()
with connection.cursor() as c:
c.execute(
'delete from contentitem_cms_statscontent where contentitem_ptr_id = {};'.format(
stats_content.contentitem_ptr_id
)
)
class Migration(migrations.Migration):
dependencies = [
('cms', '0061_auto_20200812_1030'),
]
operations = [
migrations.RunPython(
remove_old_statistic_block_from_homepage,
migrations.RunPython.noop
)
]
|
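The migration follows the standard one-way RunPython pattern, with noop as the reverse operation. A generic sketch of that shape; the app label, model, and filter are placeholders, not taken from bluebottle:

from django.db import migrations

def forwards(apps, schema_editor):
    # Use the historical model via apps.get_model, never a direct import.
    SomeModel = apps.get_model('someapp', 'SomeModel')
    SomeModel.objects.filter(stale=True).delete()

class Migration(migrations.Migration):
    dependencies = [('someapp', '0001_initial')]
    operations = [
        migrations.RunPython(forwards, migrations.RunPython.noop),
    ]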
|
aa43336c74376ac9ea70ee407d936c28cfc7ccab
|
MOS6502.py
|
MOS6502.py
|
import z3


class Register(object):
def __init__(self, name, bitwidth):
self.name = name
self.value = 0
self.past = [0] # maybe for rewind?
self.symbVal = [z3.BitVec(self.name, bitwidth)] # maybe for
# symbolic
# execution?
def GetValue(self):
return self.value
def SetValue(self, value):
self.value = value
def GetCurrentSymb(self):
return self.symbVal[-1]
def GetInitialSymb(self):
return self.symbVal[0]
def SetInitialSymb(self, value):
self.symbVal[0] = value
class CPU(object):
def __init__(self, bitwidth, baseAddress):
self.regs = { 'A': Register('A', bitwidth),
'X': Register('X', bitwidth),
'Y': Register('Y', bitwidth),
'PC': Register('PC', bitwidth * 2),
'S': Register('S', bitwidth),
'P': Register('P', bitwidth)}
self.bitwidth = bitwidth
self.ptrSize = 2 # 2 bytes for PC
self.memory = []
self.pastMemory = []
        self.symbMemory = z3.Array('mem', z3.BitVecSort(bitwidth), z3.BitVecSort(8))
if (baseAddress):
self.regs['PC'].SetValue(baseAddress)
def ReadMemory(self, address): # always read 1 byte
return self.memory[address]
def SetMemory(self, address, value): # always write 1 byte
self.memory[address] = value
return value
    def GetRegister(self, name):
return self.regs[name].GetValue()
def SetRegister(self, name, value):
self.regs[name].SetValue(value)
return value
|
Add register and CPU stuff
|
Add register and CPU stuff
|
Python
|
bsd-2-clause
|
pusscat/refNes
|
Add register and CPU stuff
|
import z3


class Register(object):
def __init__(self, name, bitwidth):
self.name = name
self.value = 0
self.past = [0] # maybe for rewind?
self.symbVal = [z3.BitVec(self.name, bitwidth)] # maybe for
# symbolic
# execution?
def GetValue(self):
return self.value
def SetValue(self, value):
self.value = value
def GetCurrentSymb(self):
return self.symbVal[-1]
def GetInitialSymb(self):
return self.symbVal[0]
def SetInitialSymb(self, value):
self.symbVal[0] = value
class CPU(object):
def __init__(self, bitwidth, baseAddress):
self.regs = { 'A': Register('A', bitwidth),
'X': Register('X', bitwidth),
'Y': Register('Y', bitwidth),
'PC': Register('PC', bitwidth * 2),
'S': Register('S', bitwidth),
'P': Register('P', bitwidth)}
self.bitwidth = bitwidth
self.ptrSize = 2 # 2 bytes for PC
self.memory = []
self.pastMemory = []
        self.symbMemory = z3.Array('mem', z3.BitVecSort(bitwidth), z3.BitVecSort(8))
if (baseAddress):
self.regs['PC'].SetValue(baseAddress)
def ReadMemory(self, address): # always read 1 byte
return self.memory[address]
def SetMemory(self, address, value): # always write 1 byte
self.memory[address] = value
return value
    def GetRegister(self, name):
return self.regs[name].GetValue()
def SetRegister(self, name, value):
self.regs[name].SetValue(value)
return value
|
<commit_before><commit_msg>Add register and CPU stuff<commit_after>
|
import z3


class Register(object):
def __init__(self, name, bitwidth):
self.name = name
self.value = 0
self.past = [0] # maybe for rewind?
self.symbVal = [z3.BitVec(self.name, bitwidth)] # maybe for
# symbolic
# execution?
def GetValue(self):
return self.value
def SetValue(self, value):
self.value = value
def GetCurrentSymb(self):
return self.symbVal[-1]
def GetInitialSymb(self):
return self.symbVal[0]
def SetInitialSymb(self, value):
self.symbVal[0] = value
class CPU(object):
def __init__(self, bitwidth, baseAddress):
self.regs = { 'A': Register('A', bitwidth),
'X': Register('X', bitwidth),
'Y': Register('Y', bitwidth),
'PC': Register('PC', bitwidth * 2),
'S': Register('S', bitwidth),
'P': Register('P', bitwidth)}
self.bitwidth = bitwidth
self.ptrSize = 2 # 2 bytes for PC
self.memory = []
self.pastMemory = []
        self.symbMemory = z3.Array('mem', z3.BitVecSort(bitwidth), z3.BitVecSort(8))
if (baseAddress):
self.regs['PC'].SetValue(baseAddress)
def ReadMemory(self, address): # always read 1 byte
return self.memory[address]
def SetMemory(self, address, value): # always write 1 byte
self.memory[address] = value
return value
    def GetRegister(self, name):
return self.regs[name].GetValue()
def SetRegister(self, name, value):
self.regs[name].SetValue(value)
return value
|
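A minimal z3 sketch of what the symbolic register values above enable; it requires the z3-solver package, and the query itself is illustrative:

import z3

a = z3.BitVec('A', 8)      # symbolic 8-bit accumulator, as in Register.symbVal
solver = z3.Solver()
solver.add(a + 1 == 0)     # which concrete A wraps to zero after an increment?
if solver.check() == z3.sat:
    print(solver.model()[a])  # 255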
|
2f143f3eeaff68b2d871d4cc8e6c1e212bdf246b
|
tools/trigger-job.py
|
tools/trigger-job.py
|
#!/usr/bin/env python
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This script can be used to manually trigger a job in the same way that
# Zuul does. At the moment, it only supports the post set of Zuul
# parameters.
import argparse
import time
import json
from uuid import uuid4
import gear
def main():
c = gear.Client()
parser = argparse.ArgumentParser(description='Trigger a Zuul job.')
parser.add_argument('--job', dest='job', required=True,
help='Job Name')
parser.add_argument('--project', dest='project', required=True,
help='Project name')
parser.add_argument('--pipeline', dest='pipeline', default='release',
help='Zuul pipeline')
parser.add_argument('--refname', dest='refname',
help='Ref name')
parser.add_argument('--oldrev', dest='oldrev',
default='0000000000000000000000000000000000000000',
help='Old revision (SHA)')
parser.add_argument('--newrev', dest='newrev',
help='New revision (SHA)')
args = parser.parse_args()
data = {'ZUUL_PIPELINE': args.pipeline,
'ZUUL_PROJECT': args.project,
'ZUUL_UUID': str(uuid4().hex),
'ZUUL_REF': args.refname,
'ZUUL_REFNAME': args.refname,
'ZUUL_OLDREV': args.oldrev,
'ZUUL_NEWREV': args.newrev,
'ZUUL_SHORT_OLDREV': args.oldrev[:7],
'ZUUL_SHORT_NEWREV': args.newrev[:7],
'ZUUL_COMMIT': args.newrev,
}
c.addServer('127.0.0.1', 4730)
c.waitForServer()
job = gear.Job("build:%s" % args.job,
json.dumps(data),
unique=data['ZUUL_UUID'])
c.submitJob(job)
while not job.complete:
time.sleep(1)
if __name__ == '__main__':
main()
|
Add manual job triggering script.
|
Add manual job triggering script.
Change-Id: Ic396c845e5f61a98d3b0e8c3fc08c6f9dbfffc6a
Reviewed-on: https://review.openstack.org/34437
Reviewed-by: Jeremy Stanley <98883b97e35b7bb1ac282bb863f2e9d917db0454@yuggoth.org>
Approved: Clark Boylan <635fc31d01fa593720982fa0568e87170cedbe2c@gmail.com>
Reviewed-by: Clark Boylan <635fc31d01fa593720982fa0568e87170cedbe2c@gmail.com>
Tested-by: Jenkins
|
Python
|
apache-2.0
|
devdattakulkarni/zuul_messaging,devdattakulkarni/zuul_messaging,gooddata/zuul,gooddata/zuul,wikimedia/integration-zuul,wikimedia/integration-zuul,gooddata/zuul,gooddata/zuul,wikimedia/integration-zuul,devdattakulkarni/zuul_messaging,gooddata/zuul,wikimedia/integration-zuul
|
Add manual job triggering script.
Change-Id: Ic396c845e5f61a98d3b0e8c3fc08c6f9dbfffc6a
Reviewed-on: https://review.openstack.org/34437
Reviewed-by: Jeremy Stanley <98883b97e35b7bb1ac282bb863f2e9d917db0454@yuggoth.org>
Approved: Clark Boylan <635fc31d01fa593720982fa0568e87170cedbe2c@gmail.com>
Reviewed-by: Clark Boylan <635fc31d01fa593720982fa0568e87170cedbe2c@gmail.com>
Tested-by: Jenkins
|
#!/usr/bin/env python
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This script can be used to manually trigger a job in the same way that
# Zuul does. At the moment, it only supports the post set of Zuul
# parameters.
import argparse
import time
import json
from uuid import uuid4
import gear
def main():
c = gear.Client()
parser = argparse.ArgumentParser(description='Trigger a Zuul job.')
parser.add_argument('--job', dest='job', required=True,
help='Job Name')
parser.add_argument('--project', dest='project', required=True,
help='Project name')
parser.add_argument('--pipeline', dest='pipeline', default='release',
help='Zuul pipeline')
parser.add_argument('--refname', dest='refname',
help='Ref name')
parser.add_argument('--oldrev', dest='oldrev',
default='0000000000000000000000000000000000000000',
help='Old revision (SHA)')
parser.add_argument('--newrev', dest='newrev',
help='New revision (SHA)')
args = parser.parse_args()
data = {'ZUUL_PIPELINE': args.pipeline,
'ZUUL_PROJECT': args.project,
'ZUUL_UUID': str(uuid4().hex),
'ZUUL_REF': args.refname,
'ZUUL_REFNAME': args.refname,
'ZUUL_OLDREV': args.oldrev,
'ZUUL_NEWREV': args.newrev,
'ZUUL_SHORT_OLDREV': args.oldrev[:7],
'ZUUL_SHORT_NEWREV': args.newrev[:7],
'ZUUL_COMMIT': args.newrev,
}
c.addServer('127.0.0.1', 4730)
c.waitForServer()
job = gear.Job("build:%s" % args.job,
json.dumps(data),
unique=data['ZUUL_UUID'])
c.submitJob(job)
while not job.complete:
time.sleep(1)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add manual job triggering script.
Change-Id: Ic396c845e5f61a98d3b0e8c3fc08c6f9dbfffc6a
Reviewed-on: https://review.openstack.org/34437
Reviewed-by: Jeremy Stanley <98883b97e35b7bb1ac282bb863f2e9d917db0454@yuggoth.org>
Approved: Clark Boylan <635fc31d01fa593720982fa0568e87170cedbe2c@gmail.com>
Reviewed-by: Clark Boylan <635fc31d01fa593720982fa0568e87170cedbe2c@gmail.com>
Tested-by: Jenkins<commit_after>
|
#!/usr/bin/env python
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This script can be used to manually trigger a job in the same way that
# Zuul does. At the moment, it only supports the post set of Zuul
# parameters.
import argparse
import time
import json
from uuid import uuid4
import gear
def main():
c = gear.Client()
parser = argparse.ArgumentParser(description='Trigger a Zuul job.')
parser.add_argument('--job', dest='job', required=True,
help='Job Name')
parser.add_argument('--project', dest='project', required=True,
help='Project name')
parser.add_argument('--pipeline', dest='pipeline', default='release',
help='Zuul pipeline')
parser.add_argument('--refname', dest='refname',
help='Ref name')
parser.add_argument('--oldrev', dest='oldrev',
default='0000000000000000000000000000000000000000',
help='Old revision (SHA)')
parser.add_argument('--newrev', dest='newrev',
help='New revision (SHA)')
args = parser.parse_args()
data = {'ZUUL_PIPELINE': args.pipeline,
'ZUUL_PROJECT': args.project,
'ZUUL_UUID': str(uuid4().hex),
'ZUUL_REF': args.refname,
'ZUUL_REFNAME': args.refname,
'ZUUL_OLDREV': args.oldrev,
'ZUUL_NEWREV': args.newrev,
'ZUUL_SHORT_OLDREV': args.oldrev[:7],
'ZUUL_SHORT_NEWREV': args.newrev[:7],
'ZUUL_COMMIT': args.newrev,
}
c.addServer('127.0.0.1', 4730)
c.waitForServer()
job = gear.Job("build:%s" % args.job,
json.dumps(data),
unique=data['ZUUL_UUID'])
c.submitJob(job)
while not job.complete:
time.sleep(1)
if __name__ == '__main__':
main()
|
Add manual job triggering script.
Change-Id: Ic396c845e5f61a98d3b0e8c3fc08c6f9dbfffc6a
Reviewed-on: https://review.openstack.org/34437
Reviewed-by: Jeremy Stanley <98883b97e35b7bb1ac282bb863f2e9d917db0454@yuggoth.org>
Approved: Clark Boylan <635fc31d01fa593720982fa0568e87170cedbe2c@gmail.com>
Reviewed-by: Clark Boylan <635fc31d01fa593720982fa0568e87170cedbe2c@gmail.com>
Tested-by: Jenkins#!/usr/bin/env python
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This script can be used to manually trigger a job in the same way that
# Zuul does. At the moment, it only supports the post set of Zuul
# parameters.
import argparse
import time
import json
from uuid import uuid4
import gear
def main():
c = gear.Client()
parser = argparse.ArgumentParser(description='Trigger a Zuul job.')
parser.add_argument('--job', dest='job', required=True,
help='Job Name')
parser.add_argument('--project', dest='project', required=True,
help='Project name')
parser.add_argument('--pipeline', dest='pipeline', default='release',
help='Zuul pipeline')
parser.add_argument('--refname', dest='refname',
help='Ref name')
parser.add_argument('--oldrev', dest='oldrev',
default='0000000000000000000000000000000000000000',
help='Old revision (SHA)')
parser.add_argument('--newrev', dest='newrev',
help='New revision (SHA)')
args = parser.parse_args()
data = {'ZUUL_PIPELINE': args.pipeline,
'ZUUL_PROJECT': args.project,
'ZUUL_UUID': str(uuid4().hex),
'ZUUL_REF': args.refname,
'ZUUL_REFNAME': args.refname,
'ZUUL_OLDREV': args.oldrev,
'ZUUL_NEWREV': args.newrev,
'ZUUL_SHORT_OLDREV': args.oldrev[:7],
'ZUUL_SHORT_NEWREV': args.newrev[:7],
'ZUUL_COMMIT': args.newrev,
}
c.addServer('127.0.0.1', 4730)
c.waitForServer()
job = gear.Job("build:%s" % args.job,
json.dumps(data),
unique=data['ZUUL_UUID'])
c.submitJob(job)
while not job.complete:
time.sleep(1)
if __name__ == '__main__':
main()
|
<commit_before><commit_msg>Add manual job triggering script.
Change-Id: Ic396c845e5f61a98d3b0e8c3fc08c6f9dbfffc6a
Reviewed-on: https://review.openstack.org/34437
Reviewed-by: Jeremy Stanley <98883b97e35b7bb1ac282bb863f2e9d917db0454@yuggoth.org>
Approved: Clark Boylan <635fc31d01fa593720982fa0568e87170cedbe2c@gmail.com>
Reviewed-by: Clark Boylan <635fc31d01fa593720982fa0568e87170cedbe2c@gmail.com>
Tested-by: Jenkins<commit_after>#!/usr/bin/env python
# Copyright 2013 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# This script can be used to manually trigger a job in the same way that
# Zuul does. At the moment, it only supports the post set of Zuul
# parameters.
import argparse
import time
import json
from uuid import uuid4
import gear
def main():
c = gear.Client()
parser = argparse.ArgumentParser(description='Trigger a Zuul job.')
parser.add_argument('--job', dest='job', required=True,
help='Job Name')
parser.add_argument('--project', dest='project', required=True,
help='Project name')
parser.add_argument('--pipeline', dest='pipeline', default='release',
help='Zuul pipeline')
parser.add_argument('--refname', dest='refname',
help='Ref name')
parser.add_argument('--oldrev', dest='oldrev',
default='0000000000000000000000000000000000000000',
help='Old revision (SHA)')
parser.add_argument('--newrev', dest='newrev',
help='New revision (SHA)')
args = parser.parse_args()
data = {'ZUUL_PIPELINE': args.pipeline,
'ZUUL_PROJECT': args.project,
'ZUUL_UUID': str(uuid4().hex),
'ZUUL_REF': args.refname,
'ZUUL_REFNAME': args.refname,
'ZUUL_OLDREV': args.oldrev,
'ZUUL_NEWREV': args.newrev,
'ZUUL_SHORT_OLDREV': args.oldrev[:7],
'ZUUL_SHORT_NEWREV': args.newrev[:7],
'ZUUL_COMMIT': args.newrev,
}
c.addServer('127.0.0.1', 4730)
c.waitForServer()
job = gear.Job("build:%s" % args.job,
json.dumps(data),
unique=data['ZUUL_UUID'])
c.submitJob(job)
while not job.complete:
time.sleep(1)
if __name__ == '__main__':
main()
|
|
e9619cdf7292de21a97562cdc068be7d26402cac
|
readthedocs/oauth/management/commands/sync_vcs_data.py
|
readthedocs/oauth/management/commands/sync_vcs_data.py
|
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from readthedocs.oauth.tasks import sync_remote_repositories
class Command(BaseCommand):
help = "Sync OAuth RemoteRepository and RemoteOrganization"
def add_arguments(self, parser):
parser.add_argument(
'--queue',
type=str,
default='resync-oauth',
help='Celery queue name.',
)
parser.add_argument(
'--users',
nargs='*',
type=str,
default=[],
help='Re-sync VCS provider data for specific users only.',
)
parser.add_argument(
'--skip-users',
nargs='*',
type=str,
default=[],
help='Skip re-sync VCS provider data for specific users.',
)
parser.add_argument(
'--max-users',
type=int,
default=100,
help='Maximum number of users that should be synced.',
)
def handle(self, *args, **options):
queue = options.get('queue')
sync_users = options.get('users')
skip_users = options.get('skip_users')
max_users = options.get('max_users')
# Filter users who have social accounts connected
        # and have no remote repository relations
users = User.objects.filter(
socialaccount__isnull=False,
remote_repository_relations__isnull=True
).distinct()
if sync_users:
users = users.filter(username__in=sync_users)
if skip_users:
users = users.exclude(username__in=skip_users)
users_to_sync = users.values_list('id', flat=True)[:max_users]
self.stdout.write(
self.style.SUCCESS(
'Found %s user(s) with the given parameters' % users.count()
)
)
self.stdout.write(
self.style.SUCCESS(
'Re-syncing VCS Providers for %s user(s)' % len(users_to_sync)
)
)
for user_id in users_to_sync:
# Trigger Sync Remote Repository Tasks for users
sync_remote_repositories.apply_async(
args=[user_id], queue=queue
)
|
Add management command to Sync RemoteRepositories and RemoteOrganizations
|
Add management command to Sync RemoteRepositories and RemoteOrganizations
|
Python
|
mit
|
rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org,rtfd/readthedocs.org
|
Add management command to Sync RemoteRepositories and RemoteOrganizations
|
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from readthedocs.oauth.tasks import sync_remote_repositories
class Command(BaseCommand):
help = "Sync OAuth RemoteRepository and RemoteOrganization"
def add_arguments(self, parser):
parser.add_argument(
'--queue',
type=str,
default='resync-oauth',
help='Celery queue name.',
)
parser.add_argument(
'--users',
nargs='*',
type=str,
default=[],
help='Re-sync VCS provider data for specific users only.',
)
parser.add_argument(
'--skip-users',
nargs='*',
type=str,
default=[],
help='Skip re-sync VCS provider data for specific users.',
)
parser.add_argument(
'--max-users',
type=int,
default=100,
help='Maximum number of users that should be synced.',
)
def handle(self, *args, **options):
queue = options.get('queue')
sync_users = options.get('users')
skip_users = options.get('skip_users')
max_users = options.get('max_users')
# Filter users who have social accounts connected
        # and have no remote repository relations
users = User.objects.filter(
socialaccount__isnull=False,
remote_repository_relations__isnull=True
).distinct()
if sync_users:
users = users.filter(username__in=sync_users)
if skip_users:
users = users.exclude(username__in=skip_users)
users_to_sync = users.values_list('id', flat=True)[:max_users]
self.stdout.write(
self.style.SUCCESS(
'Found %s user(s) with the given parameters' % users.count()
)
)
self.stdout.write(
self.style.SUCCESS(
'Re-syncing VCS Providers for %s user(s)' % len(users_to_sync)
)
)
for user_id in users_to_sync:
# Trigger Sync Remote Repository Tasks for users
sync_remote_repositories.apply_async(
args=[user_id], queue=queue
)
|
<commit_before><commit_msg>Add management command to Sync RemoteRepositories and RemoteOrganizations<commit_after>
|
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from readthedocs.oauth.tasks import sync_remote_repositories
class Command(BaseCommand):
help = "Sync OAuth RemoteRepository and RemoteOrganization"
def add_arguments(self, parser):
parser.add_argument(
'--queue',
type=str,
default='resync-oauth',
help='Celery queue name.',
)
parser.add_argument(
'--users',
nargs='*',
type=str,
default=[],
help='Re-sync VCS provider data for specific users only.',
)
parser.add_argument(
'--skip-users',
nargs='*',
type=str,
default=[],
help='Skip re-sync VCS provider data for specific users.',
)
parser.add_argument(
'--max-users',
type=int,
default=100,
help='Maximum number of users that should be synced.',
)
def handle(self, *args, **options):
queue = options.get('queue')
sync_users = options.get('users')
skip_users = options.get('skip_users')
max_users = options.get('max_users')
# Filter users who have social accounts connected
        # and have no remote repository relations
users = User.objects.filter(
socialaccount__isnull=False,
remote_repository_relations__isnull=True
).distinct()
if sync_users:
users = users.filter(username__in=sync_users)
if skip_users:
users = users.exclude(username__in=skip_users)
users_to_sync = users.values_list('id', flat=True)[:max_users]
self.stdout.write(
self.style.SUCCESS(
'Found %s user(s) with the given parameters' % users.count()
)
)
self.stdout.write(
self.style.SUCCESS(
'Re-syncing VCS Providers for %s user(s)' % len(users_to_sync)
)
)
for user_id in users_to_sync:
# Trigger Sync Remote Repository Tasks for users
sync_remote_repositories.apply_async(
args=[user_id], queue=queue
)
|
Add management command to Sync RemoteRepositories and RemoteOrganizationsfrom django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from readthedocs.oauth.tasks import sync_remote_repositories
class Command(BaseCommand):
help = "Sync OAuth RemoteRepository and RemoteOrganization"
def add_arguments(self, parser):
parser.add_argument(
'--queue',
type=str,
default='resync-oauth',
help='Celery queue name.',
)
parser.add_argument(
'--users',
nargs='*',
type=str,
default=[],
help='Re-sync VCS provider data for specific users only.',
)
parser.add_argument(
'--skip-users',
nargs='*',
type=str,
default=[],
help='Skip re-sync VCS provider data for specific users.',
)
parser.add_argument(
'--max-users',
type=int,
default=100,
help='Maximum number of users that should be synced.',
)
def handle(self, *args, **options):
queue = options.get('queue')
sync_users = options.get('users')
skip_users = options.get('skip_users')
max_users = options.get('max_users')
# Filter users who have social accounts connected
        # and have no remote repository relations
users = User.objects.filter(
socialaccount__isnull=False,
remote_repository_relations__isnull=True
).distinct()
if sync_users:
users = users.filter(username__in=sync_users)
if skip_users:
users = users.exclude(username__in=skip_users)
users_to_sync = users.values_list('id', flat=True)[:max_users]
self.stdout.write(
self.style.SUCCESS(
'Found %s user(s) with the given parameters' % users.count()
)
)
self.stdout.write(
self.style.SUCCESS(
'Re-syncing VCS Providers for %s user(s)' % len(users_to_sync)
)
)
for user_id in users_to_sync:
# Trigger Sync Remote Repository Tasks for users
sync_remote_repositories.apply_async(
args=[user_id], queue=queue
)
|
<commit_before><commit_msg>Add management command to Sync RemoteRepositories and RemoteOrganizations<commit_after>from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from readthedocs.oauth.tasks import sync_remote_repositories
class Command(BaseCommand):
help = "Sync OAuth RemoteRepository and RemoteOrganization"
def add_arguments(self, parser):
parser.add_argument(
'--queue',
type=str,
default='resync-oauth',
help='Celery queue name.',
)
parser.add_argument(
'--users',
nargs='*',
type=str,
default=[],
help='Re-sync VCS provider data for specific users only.',
)
parser.add_argument(
'--skip-users',
nargs='*',
type=str,
default=[],
help='Skip re-sync VCS provider data for specific users.',
)
parser.add_argument(
'--max-users',
type=int,
default=100,
help='Maximum number of users that should be synced.',
)
def handle(self, *args, **options):
queue = options.get('queue')
sync_users = options.get('users')
skip_users = options.get('skip_users')
max_users = options.get('max_users')
# Filter users who have social accounts connected
        # and have no remote repository relations
users = User.objects.filter(
socialaccount__isnull=False,
remote_repository_relations__isnull=True
).distinct()
if sync_users:
users = users.filter(username__in=sync_users)
if skip_users:
users = users.exclude(username__in=skip_users)
users_to_sync = users.values_list('id', flat=True)[:max_users]
self.stdout.write(
self.style.SUCCESS(
'Found %s user(s) with the given parameters' % users.count()
)
)
self.stdout.write(
self.style.SUCCESS(
'Re-syncing VCS Providers for %s user(s)' % len(users_to_sync)
)
)
for user_id in users_to_sync:
# Trigger Sync Remote Repository Tasks for users
sync_remote_repositories.apply_async(
args=[user_id], queue=queue
)
|
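A sketch of running the management command programmatically, assuming a configured Django environment; the username is illustrative, and from a shell one would call `python manage.py sync_vcs_data ...` instead:

from django.core.management import call_command

call_command(
    'sync_vcs_data',
    queue='resync-oauth',
    max_users=10,
    users=['example-user'],  # hypothetical username
)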
|
f96919f5091126c96082acdcf430df9b93a97d06
|
migrations/versions/0378_remove_doc_download_perm.py
|
migrations/versions/0378_remove_doc_download_perm.py
|
"""
Revision ID: 0378_remove_doc_download_perm
Revises: 0377_populate_org_brand_pools
Create Date: 2022-10-12 11:55:28.906151
"""
from alembic import op
revision = "0378_remove_doc_download_perm"
down_revision = "0377_populate_org_brand_pools"
def upgrade():
op.execute("DELETE FROM service_permissions WHERE permission = 'document_download_verify_email'")
op.execute("DELETE FROM service_permission_types WHERE name = 'document_download_verify_email'")
def downgrade():
pass
|
Remove temp doc download service perms
|
Remove temp doc download service perms
We added a service permission to gate the new document download security
features. Now that these have been released, let's remove the
permission.
|
Python
|
mit
|
alphagov/notifications-api,alphagov/notifications-api
|
Remove temp doc download service perms
We added a service permission to gate the new document download security
features. Now that these have been released, let's remove the
permission.
|
"""
Revision ID: 0378_remove_doc_download_perm
Revises: 0377_populate_org_brand_pools
Create Date: 2022-10-12 11:55:28.906151
"""
from alembic import op
revision = "0378_remove_doc_download_perm"
down_revision = "0377_populate_org_brand_pools"
def upgrade():
op.execute("DELETE FROM service_permissions WHERE permission = 'document_download_verify_email'")
op.execute("DELETE FROM service_permission_types WHERE name = 'document_download_verify_email'")
def downgrade():
pass
|
<commit_before><commit_msg>Remove temp doc download service perms
We added a service permission to gate the new document download security
features. Now that these have been released, let's remove the
permission.<commit_after>
|
"""
Revision ID: 0378_remove_doc_download_perm
Revises: 0377_populate_org_brand_pools
Create Date: 2022-10-12 11:55:28.906151
"""
from alembic import op
revision = "0378_remove_doc_download_perm"
down_revision = "0377_populate_org_brand_pools"
def upgrade():
op.execute("DELETE FROM service_permissions WHERE permission = 'document_download_verify_email'")
op.execute("DELETE FROM service_permission_types WHERE name = 'document_download_verify_email'")
def downgrade():
pass
|
Remove temp doc download service perms
We added a service permission to gate the new document download security
features. Now that these have been released, let's remove the
permission."""
Revision ID: 0378_remove_doc_download_perm
Revises: 0377_populate_org_brand_pools
Create Date: 2022-10-12 11:55:28.906151
"""
from alembic import op
revision = "0378_remove_doc_download_perm"
down_revision = "0377_populate_org_brand_pools"
def upgrade():
op.execute("DELETE FROM service_permissions WHERE permission = 'document_download_verify_email'")
op.execute("DELETE FROM service_permission_types WHERE name = 'document_download_verify_email'")
def downgrade():
pass
|
<commit_before><commit_msg>Remove temp doc download service perms
We added a service permission to gate the new document download security
features. Now that these have been released, let's remove the
permission.<commit_after>"""
Revision ID: 0378_remove_doc_download_perm
Revises: 0377_populate_org_brand_pools
Create Date: 2022-10-12 11:55:28.906151
"""
from alembic import op
revision = "0378_remove_doc_download_perm"
down_revision = "0377_populate_org_brand_pools"
def upgrade():
op.execute("DELETE FROM service_permissions WHERE permission = 'document_download_verify_email'")
op.execute("DELETE FROM service_permission_types WHERE name = 'document_download_verify_email'")
def downgrade():
pass
|
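The downgrade above is deliberately a no-op because the deleted rows cannot be recovered. A hedged sketch of a partially reversible variant, assuming service_permission_types needs only its name column; the per-service permission rows would still be lost:

def downgrade():
    op.execute(
        "INSERT INTO service_permission_types (name) "
        "VALUES ('document_download_verify_email')"
    )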
|
d82a6fa96391630a17520c7ff8dbb23918c2a8aa
|
test/conserve-p/lj-fluid.py
|
test/conserve-p/lj-fluid.py
|
#! /usr/bin/env hoomd
from hoomd_script import *
init.create_random(N=10000, phi_p=0.2)
lj = pair.lj(r_cut=2.5)
lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0, alpha=1.0)
all = group.all()
integrate.mode_standard(dt=0.005)
integrate.nvt(group=all, T=1.2, tau=0.5)
analyze.log(filename = 'thermo.log', quantities = ['potential_energy', 'kinetic_energy', 'nvt_reservoir_energy','momentum'], period=1000)
run(100e6)
|
Test script for evaluating momentum conservation
|
Test script for evaluating momentum conservation
git-svn-id: 0c14c15b0cdbdea73634a2d7d7d743954bb0522f@3197 fa922fa7-2fde-0310-acd8-f43f465a7996
|
Python
|
bsd-3-clause
|
joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue
|
Test script for evaluating momentum conservation
git-svn-id: 0c14c15b0cdbdea73634a2d7d7d743954bb0522f@3197 fa922fa7-2fde-0310-acd8-f43f465a7996
|
#! /usr/bin/env hoomd
from hoomd_script import *
init.create_random(N=10000, phi_p=0.2)
lj = pair.lj(r_cut=2.5)
lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0, alpha=1.0)
all = group.all()
integrate.mode_standard(dt=0.005)
integrate.nvt(group=all, T=1.2, tau=0.5)
analyze.log(filename = 'thermo.log', quantities = ['potential_energy', 'kinetic_energy', 'nvt_reservoir_energy','momentum'], period=1000)
run(100e6)
|
<commit_before><commit_msg>Test script for evaluating momentum conservation
git-svn-id: 0c14c15b0cdbdea73634a2d7d7d743954bb0522f@3197 fa922fa7-2fde-0310-acd8-f43f465a7996<commit_after>
|
#! /usr/bin/env hoomd
from hoomd_script import *
init.create_random(N=10000, phi_p=0.2)
lj = pair.lj(r_cut=2.5)
lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0, alpha=1.0)
all = group.all()
integrate.mode_standard(dt=0.005)
integrate.nvt(group=all, T=1.2, tau=0.5)
analyze.log(filename = 'thermo.log', quantities = ['potential_energy', 'kinetic_energy', 'nvt_reservoir_energy','momentum'], period=1000)
run(100e6)
|
Test script for evaluating momentum conservation
git-svn-id: 0c14c15b0cdbdea73634a2d7d7d743954bb0522f@3197 fa922fa7-2fde-0310-acd8-f43f465a7996#! /usr/bin/env hoomd
from hoomd_script import *
init.create_random(N=10000, phi_p=0.2)
lj = pair.lj(r_cut=2.5)
lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0, alpha=1.0)
all = group.all()
integrate.mode_standard(dt=0.005)
integrate.nvt(group=all, T=1.2, tau=0.5)
analyze.log(filename = 'thermo.log', quantities = ['potential_energy', 'kinetic_energy', 'nvt_reservoir_energy','momentum'], period=1000)
run(100e6)
|
<commit_before><commit_msg>Test script for evaluating momentum conservation
git-svn-id: 0c14c15b0cdbdea73634a2d7d7d743954bb0522f@3197 fa922fa7-2fde-0310-acd8-f43f465a7996<commit_after>#! /usr/bin/env hoomd
from hoomd_script import *
init.create_random(N=10000, phi_p=0.2)
lj = pair.lj(r_cut=2.5)
lj.pair_coeff.set('A', 'A', epsilon=1.0, sigma=1.0, alpha=1.0)
all = group.all()
integrate.mode_standard(dt=0.005)
integrate.nvt(group=all, T=1.2, tau=0.5)
analyze.log(filename = 'thermo.log', quantities = ['potential_energy', 'kinetic_energy', 'nvt_reservoir_energy','momentum'], period=1000)
run(100e6)
|
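A minimal post-processing sketch for checking momentum conservation from the resulting log; it assumes analyze.log writes a plain (uncommented) header row naming the quantities, so the column name below mirrors the quantities list:

import numpy as np

data = np.genfromtxt('thermo.log', names=True)
drift = data['momentum'][-1] - data['momentum'][0]
print('momentum drift over run: %g' % drift)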
|
7cb7fa17720864669ac1e0c3fe361e3925415169
|
client/python/plot_request_times.py
|
client/python/plot_request_times.py
|
import requests
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
for monitoring_data in r.json():
    print('URL: ' + monitoring_data['urlToMonitor']['url'])
|
Add stub for python client
|
Add stub for python client
|
Python
|
mit
|
gernd/simple-site-mon
|
Add stub for python client
|
import requests
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
for monitoring_data in r.json():
    print('URL: ' + monitoring_data['urlToMonitor']['url'])
|
<commit_before><commit_msg>Add stub for python client<commit_after>
|
import requests
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
for monitoring_data in r.json():
    print('URL: ' + monitoring_data['urlToMonitor']['url'])
|
Add stub for python clientimport requests
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
for monitoring_data in r.json():
    print('URL: ' + monitoring_data['urlToMonitor']['url'])
|
<commit_before><commit_msg>Add stub for python client<commit_after>import requests
r = requests.get('http://localhost:8081/monitor_results/1')
print(r.json())
for monitoring_data in r.json():
    print('URL: ' + monitoring_data['urlToMonitor']['url'])
|
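The stub above only prints URLs; a hedged sketch of the plotting this file is presumably building toward, assuming each monitoring record also carries a numeric response-time field (the key name responseTime is an assumption):

import requests
import matplotlib.pyplot as plt

records = requests.get('http://localhost:8081/monitor_results/1').json()
times = [rec['responseTime'] for rec in records]  # hypothetical field name
plt.plot(times, marker='o')
plt.xlabel('measurement #')
plt.ylabel('response time')
plt.show()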
|
7e5982299e8b5b07239f7fb1aa088e7eafd08f88
|
tardis/montecarlo/enum.py
|
tardis/montecarlo/enum.py
|
from ctypes import c_int
class EnumerationType(type(c_int)):
def __new__(metacls, name, bases, dictionary):
if not "_members_" in dictionary:
_members_ = {}
for key, value in dictionary.items():
if not key.startswith("_"):
_members_[key] = value
dictionary["_members_"] = _members_
else:
            _members_ = dictionary["_members_"]
        dictionary["_reverse_map_"] = {value: key for key, value in _members_.items()}
        cls = type(c_int).__new__(metacls, name, bases, dictionary)
for key, value in cls._members_.items():
globals()[key] = value
return cls
class CEnumeration(c_int):
__metaclass__ = EnumerationType
_members_ = {}
def __eq__(self, other):
if isinstance(other, int):
return self.value == other
return type(self) == type(other) and self.value == other.value
|
Add metaclass and class for CEnumeration adoption.
|
Add metaclass and class for CEnumeration adoption.
|
Python
|
bsd-3-clause
|
kaushik94/tardis,Tobychev/tardis,kaushik94/tardis,Tobychev/tardis,Tobychev/tardis,orbitfold/tardis,orbitfold/tardis,kaushik94/tardis,orbitfold/tardis,orbitfold/tardis,kaushik94/tardis
|
Add metaclass and class for CEnumeration adoption.
|
from ctypes import c_int
class EnumerationType(type(c_int)):
def __new__(metacls, name, bases, dictionary):
if not "_members_" in dictionary:
_members_ = {}
for key, value in dictionary.items():
if not key.startswith("_"):
_members_[key] = value
dictionary["_members_"] = _members_
else:
            _members_ = dictionary["_members_"]
        dictionary["_reverse_map_"] = {value: key for key, value in _members_.items()}
        cls = type(c_int).__new__(metacls, name, bases, dictionary)
for key, value in cls._members_.items():
globals()[key] = value
return cls
class CEnumeration(c_int):
__metaclass__ = EnumerationType
_members_ = {}
def __eq__(self, other):
if isinstance(other, int):
return self.value == other
return type(self) == type(other) and self.value == other.value
|
<commit_before><commit_msg>Add metaclass and class for CEnumeration adoption.<commit_after>
|
from ctypes import c_int
class EnumerationType(type(c_int)):
def __new__(metacls, name, bases, dictionary):
if not "_members_" in dictionary:
_members_ = {}
for key, value in dictionary.items():
if not key.startswith("_"):
_members_[key] = value
dictionary["_members_"] = _members_
else:
            _members_ = dictionary["_members_"]
        dictionary["_reverse_map_"] = {value: key for key, value in _members_.items()}
        cls = type(c_int).__new__(metacls, name, bases, dictionary)
for key, value in cls._members_.items():
globals()[key] = value
return cls
class CEnumeration(c_int):
__metaclass__ = EnumerationType
_members_ = {}
def __eq__(self, other):
if isinstance(other, int):
return self.value == other
return type(self) == type(other) and self.value == other.value
|
Add metaclass and class for CEnumeration adoption.from ctypes import c_int
class EnumerationType(type(c_int)):
def __new__(metacls, name, bases, dictionary):
if not "_members_" in dictionary:
_members_ = {}
for key, value in dictionary.items():
if not key.startswith("_"):
_members_[key] = value
dictionary["_members_"] = _members_
else:
            _members_ = dictionary["_members_"]
        dictionary["_reverse_map_"] = {value: key for key, value in _members_.items()}
        cls = type(c_int).__new__(metacls, name, bases, dictionary)
for key, value in cls._members_.items():
globals()[key] = value
return cls
class CEnumeration(c_int):
__metaclass__ = EnumerationType
_members_ = {}
def __eq__(self, other):
if isinstance(other, int):
return self.value == other
return type(self) == type(other) and self.value == other.value
|
<commit_before><commit_msg>Add metaclass and class for CEnumeration adoption.<commit_after>from ctypes import c_int
class EnumerationType(type(c_int)):
def __new__(metacls, name, bases, dictionary):
if not "_members_" in dictionary:
_members_ = {}
for key, value in dictionary.items():
if not key.startswith("_"):
_members_[key] = value
dictionary["_members_"] = _members_
else:
            _members_ = dictionary["_members_"]
        dictionary["_reverse_map_"] = {value: key for key, value in _members_.items()}
        cls = type(c_int).__new__(metacls, name, bases, dictionary)
for key, value in cls._members_.items():
globals()[key] = value
return cls
class CEnumeration(c_int):
__metaclass__ = EnumerationType
_members_ = {}
def __eq__(self, other):
if isinstance(other, int):
return self.value == other
return type(self) == type(other) and self.value == other.value
|
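A usage sketch for the metaclass pair above under Python 2 semantics (the __metaclass__ hook only fires on Python 2); the member names are illustrative:

class PacketStatus(CEnumeration):
    _members_ = {'IN_PROCESS': 0, 'EMITTED': 1, 'REABSORBED': 2}

status = PacketStatus(1)
assert status == 1                # compares against plain ints
assert status == PacketStatus(1)  # and against same-typed values
# Note: the metaclass also injects IN_PROCESS/EMITTED/REABSORBED into the
# globals() of the module where EnumerationType is defined.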
|
a5d99114ac3c7d146f0874744d6a54b27e9a990f
|
search/generate_db.py
|
search/generate_db.py
|
from random_words import RandomWords
from faker import Factory
import random
import sqlite3
NUM_GENERATE = 100
CATEGORIES = ['miscellaneous', 'appliance', 'bedding', 'toys', 'books', 'clothing',
'seasonal', 'electronics', 'household', 'kitchen', 'sports']
def generate_postings(count):
postings = []
for i in range(count):
faker = Factory.create()
rw = RandomWords()
posting = {
'name': faker.name(),
'address': faker.city() + ", " + faker.state(),
'email': faker.email(),
'phone': random.randint(1000000000, 9999999999),
'title': " ".join(rw.random_words(count=2)),
'description': " ".join(rw.random_words(count=8)),
'category': random.choice(CATEGORIES),
}
postings.append(posting)
return postings
def write_database(postings):
    conn = sqlite3.connect('givegetgreen_db')
    c = conn.cursor()
    for post in postings:
        # name email phone address category description title
        c.execute('''INSERT INTO posting_posting VALUES (null, '%s', '%s', '%s', '%s', '%s', '%s', '%s')'''
                  % (post['name'], post['email'], post['phone'], post['address'], post['category'], post['description'], post['title']))
    conn.commit()
    conn.close()
    print(str(len(postings)) + " postings written to database")
if __name__ == '__main__':
postings = generate_postings(NUM_GENERATE)
write_database(postings)
|
Implement auto generate db postings
|
Implement auto generate db postings
|
Python
|
mit
|
njbbaer/give-get-green,njbbaer/give-get-green,njbbaer/give-get-green
|
Implement auto generate db postings
|
from random_words import RandomWords
from faker import Factory
import random
import sqlite3
NUM_GENERATE = 100
CATEGORIES = ['miscellaneous', 'appliance', 'bedding', 'toys', 'books', 'clothing',
'seasonal', 'electronics', 'household', 'kitchen', 'sports']
def generate_postings(count):
postings = []
for i in range(count):
faker = Factory.create()
rw = RandomWords()
posting = {
'name': faker.name(),
'address': faker.city() + ", " + faker.state(),
'email': faker.email(),
'phone': random.randint(1000000000, 9999999999),
'title': " ".join(rw.random_words(count=2)),
'description': " ".join(rw.random_words(count=8)),
'category': random.choice(CATEGORIES),
}
postings.append(posting)
return postings
def write_database(postings):
    conn = sqlite3.connect('givegetgreen_db')
    c = conn.cursor()
    for post in postings:
        # name email phone address category description title
        c.execute('''INSERT INTO posting_posting VALUES (null, '%s', '%s', '%s', '%s', '%s', '%s', '%s')'''
                  % (post['name'], post['email'], post['phone'], post['address'], post['category'], post['description'], post['title']))
    conn.commit()
    conn.close()
    print(str(len(postings)) + " postings written to database")
if __name__ == '__main__':
postings = generate_postings(NUM_GENERATE)
write_database(postings)
|
<commit_before><commit_msg>Implement auto generate db postings<commit_after>
|
from random_words import RandomWords
from faker import Factory
import random
import sqlite3
NUM_GENERATE = 100
CATEGORIES = ['miscellaneous', 'appliance', 'bedding', 'toys', 'books', 'clothing',
'seasonal', 'electronics', 'household', 'kitchen', 'sports']
def generate_postings(count):
postings = []
for i in range(count):
faker = Factory.create()
rw = RandomWords()
posting = {
'name': faker.name(),
'address': faker.city() + ", " + faker.state(),
'email': faker.email(),
'phone': random.randint(1000000000, 9999999999),
'title': " ".join(rw.random_words(count=2)),
'description': " ".join(rw.random_words(count=8)),
'category': random.choice(CATEGORIES),
}
postings.append(posting)
return postings
def write_database(postings):
    conn = sqlite3.connect('givegetgreen_db')
    c = conn.cursor()
    for post in postings:
        # name email phone address category description title
        c.execute('''INSERT INTO posting_posting VALUES (null, '%s', '%s', '%s', '%s', '%s', '%s', '%s')'''
                  % (post['name'], post['email'], post['phone'], post['address'], post['category'], post['description'], post['title']))
    conn.commit()
    conn.close()
    print(str(len(postings)) + " postings written to database")
if __name__ == '__main__':
postings = generate_postings(NUM_GENERATE)
write_database(postings)
|
Implement auto generate db postingsfrom random_words import RandomWords
from faker import Factory
import random
import sqlite3
NUM_GENERATE = 100
CATEGORIES = ['miscellaneous', 'appliance', 'bedding', 'toys', 'books', 'clothing',
'seasonal', 'electronics', 'household', 'kitchen', 'sports']
def generate_postings(count):
postings = []
for i in range(count):
faker = Factory.create()
rw = RandomWords()
posting = {
'name': faker.name(),
'address': faker.city() + ", " + faker.state(),
'email': faker.email(),
'phone': random.randint(1000000000, 9999999999),
'title': " ".join(rw.random_words(count=2)),
'description': " ".join(rw.random_words(count=8)),
'category': random.choice(CATEGORIES),
}
postings.append(posting)
return postings
def write_database(postings):
    conn = sqlite3.connect('givegetgreen_db')
    c = conn.cursor()
    for post in postings:
        # name email phone address category description title
        c.execute('''INSERT INTO posting_posting VALUES (null, '%s', '%s', '%s', '%s', '%s', '%s', '%s')'''
                  % (post['name'], post['email'], post['phone'], post['address'], post['category'], post['description'], post['title']))
    conn.commit()
    conn.close()
    print(str(len(postings)) + " postings written to database")
if __name__ == '__main__':
postings = generate_postings(NUM_GENERATE)
write_database(postings)
|
<commit_before><commit_msg>Implement auto generate db postings<commit_after>from random_words import RandomWords
from faker import Factory
import random
import sqlite3
NUM_GENERATE = 100
CATEGORIES = ['miscellaneous', 'appliance', 'bedding', 'toys', 'books', 'clothing',
'seasonal', 'electronics', 'household', 'kitchen', 'sports']
def generate_postings(count):
postings = []
for i in range(count):
faker = Factory.create()
rw = RandomWords()
posting = {
'name': faker.name(),
'address': faker.city() + ", " + faker.state(),
'email': faker.email(),
'phone': random.randint(1000000000, 9999999999),
'title': " ".join(rw.random_words(count=2)),
'description': " ".join(rw.random_words(count=8)),
'category': random.choice(CATEGORIES),
}
postings.append(posting)
return postings
def write_database(postings):
    conn = sqlite3.connect('givegetgreen_db')
    c = conn.cursor()
    for post in postings:
        # name email phone address category description title
        c.execute('''INSERT INTO posting_posting VALUES (null, '%s', '%s', '%s', '%s', '%s', '%s', '%s')'''
                  % (post['name'], post['email'], post['phone'], post['address'], post['category'], post['description'], post['title']))
    conn.commit()
    conn.close()
    print(str(len(postings)) + " postings written to database")
if __name__ == '__main__':
postings = generate_postings(NUM_GENERATE)
write_database(postings)
|
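A hedged variant of write_database using parameterized queries, which sidesteps the quoting pitfalls of %-interpolated SQL; it assumes the same eight-column table layout as above:

def write_database_safe(postings):
    conn = sqlite3.connect('givegetgreen_db')
    c = conn.cursor()
    c.executemany(
        'INSERT INTO posting_posting VALUES (null, ?, ?, ?, ?, ?, ?, ?)',
        [(p['name'], p['email'], p['phone'], p['address'],
          p['category'], p['description'], p['title']) for p in postings],
    )
    conn.commit()
    conn.close()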
|
881d7ed1e2ad8be504c02d29ccaaac4e12ea0605
|
scripts/band_width.py
|
scripts/band_width.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
from ph_plotter.common_arguments_adder import CommonArgumentsAdder
def run(variables):
from ph_plotter.band_error_plotter import BandWidthPlotter
BandWidthPlotter(variables).run()
def main():
import argparse
parser = argparse.ArgumentParser()
CommonArgumentsAdder().add_common_arguments(parser)
parser.add_argument("--data_file",
default="sf_fitted.hdf5",
type=str,
help="Filename of data.")
args = parser.parse_args()
print(vars(args))
run(vars(args))
if __name__ == "__main__":
main()
|
Add the script to plot band structures with bandwidths
|
Add the script to plot band structures with bandwidths
|
Python
|
mit
|
yuzie007/ph_plotter
|
Add the script to plot band structures with bandwidths
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
from ph_plotter.common_arguments_adder import CommonArgumentsAdder
def run(variables):
from ph_plotter.band_error_plotter import BandWidthPlotter
BandWidthPlotter(variables).run()
def main():
import argparse
parser = argparse.ArgumentParser()
CommonArgumentsAdder().add_common_arguments(parser)
parser.add_argument("--data_file",
default="sf_fitted.hdf5",
type=str,
help="Filename of data.")
args = parser.parse_args()
print(vars(args))
run(vars(args))
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add the script to plot band structures with bandwidths<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
from ph_plotter.common_arguments_adder import CommonArgumentsAdder
def run(variables):
from ph_plotter.band_error_plotter import BandWidthPlotter
BandWidthPlotter(variables).run()
def main():
import argparse
parser = argparse.ArgumentParser()
CommonArgumentsAdder().add_common_arguments(parser)
parser.add_argument("--data_file",
default="sf_fitted.hdf5",
type=str,
help="Filename of data.")
args = parser.parse_args()
print(vars(args))
run(vars(args))
if __name__ == "__main__":
main()
|
Add the script to plot band structures with bandwidths#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
from ph_plotter.common_arguments_adder import CommonArgumentsAdder
def run(variables):
from ph_plotter.band_error_plotter import BandWidthPlotter
BandWidthPlotter(variables).run()
def main():
import argparse
parser = argparse.ArgumentParser()
CommonArgumentsAdder().add_common_arguments(parser)
parser.add_argument("--data_file",
default="sf_fitted.hdf5",
type=str,
help="Filename of data.")
args = parser.parse_args()
print(vars(args))
run(vars(args))
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add the script to plot band structures with bandwidths<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
from __future__ import (absolute_import, division,
print_function, unicode_literals)
__author__ = "Yuji Ikeda"
from ph_plotter.common_arguments_adder import CommonArgumentsAdder
def run(variables):
from ph_plotter.band_error_plotter import BandWidthPlotter
BandWidthPlotter(variables).run()
def main():
import argparse
parser = argparse.ArgumentParser()
CommonArgumentsAdder().add_common_arguments(parser)
parser.add_argument("--data_file",
default="sf_fitted.hdf5",
type=str,
help="Filename of data.")
args = parser.parse_args()
print(vars(args))
run(vars(args))
if __name__ == "__main__":
main()
|
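A sketch of driving the plotter without argparse; data_file is the only option defined in this script, and any further keys that CommonArgumentsAdder would normally register are not shown here, so this is a minimal, assumption-laden setup:

from ph_plotter.band_error_plotter import BandWidthPlotter

variables = {'data_file': 'sf_fitted.hdf5'}
BandWidthPlotter(variables).run()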
|
e567bdb0ed315f6e00be7d541e5d034fe926eeb6
|
scripts/migration/migrate_citation_addons_list_id.py
|
scripts/migration/migrate_citation_addons_list_id.py
|
import logging
import sys
from modularodm import Q
from framework.mongo import database
from framework.transactions.context import TokuTransaction
from website.app import init_app
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
PROVIDERS = ['mendeley', 'zotero']
def migrate_list_id_field(document, provider):
try:
database['{}nodesettings'.format(provider)].find_and_modify(
{'_id': document['_id']},
{
'$set': {
'list_id': document['{}_list_id'.format(provider)]
}
}
)
database['{}nodesettings'.format(provider)].find_and_modify(
{'_id': document['_id']},
{
'$unset': {
'{}_list_id'.format(provider): ''
}
}
)
except Exception:
return False
return True
def verify_node_settings_document(document, provider):
try:
assert('_id' in document)
assert('{}_list_id'.format(provider) in document)
except AssertionError:
return False
return True
def migrate(dry_run=True):
documents_no_list_id = {}
documents_migration_failed = {}
documents_migrated = {}
for provider in PROVIDERS:
documents_migrated[provider] = []
documents_migration_failed[provider] = []
documents_no_list_id[provider] = []
for document in database['{}nodesettings'.format(provider)].find():
if verify_node_settings_document(document, provider):
if migrate_list_id_field(document, provider):
documents_migrated[provider].append(document)
else:
documents_migration_failed[provider].append(document)
else:
documents_no_list_id[provider].append(document)
for provider in PROVIDERS:
if documents_migrated[provider]:
logger.info('Successfully migrated {0} {1} node settings documents:\n{2}'.format(
len(documents_migrated[provider]), provider, [e['_id'] for e in documents_migrated[provider]]
))
if documents_no_list_id[provider]:
logger.error('Failed to migrate {0} {1} node settings documents due to no {1}_list_id field:\n{2}'.format(
len(documents_no_list_id[provider]), provider, [e['_id'] for e in documents_no_list_id[provider]]
))
if documents_migration_failed[provider]:
logger.error('Failed to migrate {0} {1} node settings documents for unknown reason:\n{2}'.format(
len(documents_migration_failed[provider]), provider, [e['_id'] for e in documents_migration_failed[provider]]
))
if dry_run:
raise RuntimeError('Dry run, transaction rolled back.')
def main():
dry_run = False
if '--dry' in sys.argv:
dry_run = True
if not dry_run:
script_utils.add_file_logger(logger, __file__)
init_app(set_backends=True, routes=False)
with TokuTransaction():
migrate(dry_run=dry_run)
if __name__ == "__main__":
main()
|
Add migration script for [citations]nodesettings <provider>_list_id --> list_id
|
Add migration script for [citations]nodesettings
<provider>_list_id --> list_id
|
Python
|
apache-2.0
|
mfraezz/osf.io,leb2dg/osf.io,asanfilippo7/osf.io,chennan47/osf.io,kch8qx/osf.io,brianjgeiger/osf.io,hmoco/osf.io,monikagrabowska/osf.io,caseyrollins/osf.io,SSJohns/osf.io,alexschiller/osf.io,mattclark/osf.io,doublebits/osf.io,HalcyonChimera/osf.io,acshi/osf.io,zamattiac/osf.io,felliott/osf.io,chrisseto/osf.io,CenterForOpenScience/osf.io,doublebits/osf.io,samchrisinger/osf.io,Nesiehr/osf.io,samchrisinger/osf.io,amyshi188/osf.io,hmoco/osf.io,CenterForOpenScience/osf.io,RomanZWang/osf.io,Johnetordoff/osf.io,mfraezz/osf.io,wearpants/osf.io,leb2dg/osf.io,abought/osf.io,cwisecarver/osf.io,cwisecarver/osf.io,amyshi188/osf.io,samchrisinger/osf.io,SSJohns/osf.io,emetsger/osf.io,asanfilippo7/osf.io,amyshi188/osf.io,kch8qx/osf.io,mluo613/osf.io,cslzchen/osf.io,laurenrevere/osf.io,hmoco/osf.io,leb2dg/osf.io,HalcyonChimera/osf.io,adlius/osf.io,chennan47/osf.io,baylee-d/osf.io,rdhyee/osf.io,DanielSBrown/osf.io,kwierman/osf.io,monikagrabowska/osf.io,icereval/osf.io,emetsger/osf.io,HalcyonChimera/osf.io,DanielSBrown/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,felliott/osf.io,caneruguz/osf.io,brianjgeiger/osf.io,kwierman/osf.io,pattisdr/osf.io,adlius/osf.io,DanielSBrown/osf.io,laurenrevere/osf.io,binoculars/osf.io,RomanZWang/osf.io,RomanZWang/osf.io,caneruguz/osf.io,emetsger/osf.io,saradbowman/osf.io,SSJohns/osf.io,wearpants/osf.io,HalcyonChimera/osf.io,RomanZWang/osf.io,chrisseto/osf.io,aaxelb/osf.io,crcresearch/osf.io,TomBaxter/osf.io,chrisseto/osf.io,baylee-d/osf.io,abought/osf.io,erinspace/osf.io,amyshi188/osf.io,mluo613/osf.io,zamattiac/osf.io,TomHeatwole/osf.io,felliott/osf.io,mluke93/osf.io,cwisecarver/osf.io,chennan47/osf.io,rdhyee/osf.io,acshi/osf.io,TomHeatwole/osf.io,cwisecarver/osf.io,icereval/osf.io,monikagrabowska/osf.io,jnayak1/osf.io,crcresearch/osf.io,kwierman/osf.io,RomanZWang/osf.io,binoculars/osf.io,doublebits/osf.io,saradbowman/osf.io,mluo613/osf.io,cslzchen/osf.io,CenterForOpenScience/osf.io,felliott/osf.io,abought/osf.io,alexschiller/osf.io,kch8qx/osf.io,binoculars/osf.io,mluke93/osf.io,zachjanicki/osf.io,kch8qx/osf.io,Nesiehr/osf.io,rdhyee/osf.io,jnayak1/osf.io,aaxelb/osf.io,crcresearch/osf.io,Johnetordoff/osf.io,brianjgeiger/osf.io,hmoco/osf.io,mfraezz/osf.io,asanfilippo7/osf.io,mattclark/osf.io,doublebits/osf.io,acshi/osf.io,baylee-d/osf.io,zamattiac/osf.io,TomBaxter/osf.io,CenterForOpenScience/osf.io,wearpants/osf.io,monikagrabowska/osf.io,zachjanicki/osf.io,wearpants/osf.io,mluke93/osf.io,erinspace/osf.io,asanfilippo7/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,laurenrevere/osf.io,rdhyee/osf.io,alexschiller/osf.io,alexschiller/osf.io,Nesiehr/osf.io,caneruguz/osf.io,mfraezz/osf.io,aaxelb/osf.io,acshi/osf.io,mluke93/osf.io,doublebits/osf.io,caneruguz/osf.io,aaxelb/osf.io,TomHeatwole/osf.io,leb2dg/osf.io,mluo613/osf.io,chrisseto/osf.io,sloria/osf.io,kwierman/osf.io,mattclark/osf.io,pattisdr/osf.io,samchrisinger/osf.io,SSJohns/osf.io,DanielSBrown/osf.io,Johnetordoff/osf.io,adlius/osf.io,sloria/osf.io,jnayak1/osf.io,sloria/osf.io,mluo613/osf.io,pattisdr/osf.io,abought/osf.io,TomHeatwole/osf.io,zachjanicki/osf.io,zachjanicki/osf.io,emetsger/osf.io,caseyrollins/osf.io,acshi/osf.io,jnayak1/osf.io,cslzchen/osf.io,erinspace/osf.io,icereval/osf.io,brianjgeiger/osf.io,kch8qx/osf.io,cslzchen/osf.io,TomBaxter/osf.io,zamattiac/osf.io,caseyrollins/osf.io,adlius/osf.io
|
Add migration script for [citations]nodesettings
<provider>_list_id --> list_id
|
import logging
import sys
from modularodm import Q
from framework.mongo import database
from framework.transactions.context import TokuTransaction
from website.app import init_app
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
PROVIDERS = ['mendeley', 'zotero']
def migrate_list_id_field(document, provider):
try:
database['{}nodesettings'.format(provider)].find_and_modify(
{'_id': document['_id']},
{
'$set': {
'list_id': document['{}_list_id'.format(provider)]
}
}
)
database['{}nodesettings'.format(provider)].find_and_modify(
{'_id': document['_id']},
{
'$unset': {
'{}_list_id'.format(provider): ''
}
}
)
except Exception:
return False
return True
def verify_node_settings_document(document, provider):
try:
assert('_id' in document)
assert('{}_list_id'.format(provider) in document)
except AssertionError:
return False
return True
def migrate(dry_run=True):
documents_no_list_id = {}
documents_migration_failed = {}
documents_migrated = {}
for provider in PROVIDERS:
documents_migrated[provider] = []
documents_migration_failed[provider] = []
documents_no_list_id[provider] = []
for document in database['{}nodesettings'.format(provider)].find():
if verify_node_settings_document(document, provider):
if migrate_list_id_field(document, provider):
documents_migrated[provider].append(document)
else:
documents_migration_failed[provider].append(document)
else:
documents_no_list_id[provider].append(document)
for provider in PROVIDERS:
if documents_migrated[provider]:
logger.info('Successfully migrated {0} {1} node settings documents:\n{2}'.format(
len(documents_migrated[provider]), provider, [e['_id'] for e in documents_migrated[provider]]
))
if documents_no_list_id[provider]:
logger.error('Failed to migrate {0} {1} node settings documents due to no {1}_list_id field:\n{2}'.format(
len(documents_no_list_id[provider]), provider, [e['_id'] for e in documents_no_list_id[provider]]
))
if documents_migration_failed[provider]:
logger.error('Failed to migrate {0} {1} node settings documents for unknown reason:\n{2}'.format(
len(documents_migration_failed[provider]), provider, [e['_id'] for e in documents_migration_failed[provider]]
))
if dry_run:
raise RuntimeError('Dry run, transaction rolled back.')
def main():
dry_run = False
if '--dry' in sys.argv:
dry_run = True
if not dry_run:
script_utils.add_file_logger(logger, __file__)
init_app(set_backends=True, routes=False)
with TokuTransaction():
migrate(dry_run=dry_run)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add migration script for [citations]nodesettings
<provider>_list_id --> list_id<commit_after>
|
import logging
import sys
from modularodm import Q
from framework.mongo import database
from framework.transactions.context import TokuTransaction
from website.app import init_app
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
PROVIDERS = ['mendeley', 'zotero']
def migrate_list_id_field(document, provider):
try:
database['{}nodesettings'.format(provider)].find_and_modify(
{'_id': document['_id']},
{
'$set': {
'list_id': document['{}_list_id'.format(provider)]
}
}
)
database['{}nodesettings'.format(provider)].find_and_modify(
{'_id': document['_id']},
{
'$unset': {
'{}_list_id'.format(provider): ''
}
}
)
except Exception:
return False
return True
def verify_node_settings_document(document, provider):
try:
assert('_id' in document)
assert('{}_list_id'.format(provider) in document)
except AssertionError:
return False
return True
def migrate(dry_run=True):
documents_no_list_id = {}
documents_migration_failed = {}
documents_migrated = {}
for provider in PROVIDERS:
documents_migrated[provider] = []
documents_migration_failed[provider] = []
documents_no_list_id[provider] = []
for document in database['{}nodesettings'.format(provider)].find():
if verify_node_settings_document(document, provider):
if migrate_list_id_field(document, provider):
documents_migrated[provider].append(document)
else:
documents_migration_failed[provider].append(document)
else:
documents_no_list_id[provider].append(document)
for provider in PROVIDERS:
if documents_migrated[provider]:
logger.info('Successfully migrated {0} {1} node settings documents:\n{2}'.format(
len(documents_migrated[provider]), provider, [e['_id'] for e in documents_migrated[provider]]
))
if documents_no_list_id[provider]:
logger.error('Failed to migrate {0} {1} node settings documents due to no {1}_list_id field:\n{2}'.format(
len(documents_no_list_id[provider]), provider, [e['_id'] for e in documents_no_list_id[provider]]
))
if documents_migration_failed[provider]:
logger.error('Failed to migrate {0} {1} node settings documents for unknown reason:\n{2}'.format(
len(documents_migration_failed[provider]), provider, [e['_id'] for e in documents_migration_failed[provider]]
))
if dry_run:
raise RuntimeError('Dry run, transaction rolled back.')
def main():
dry_run = False
if '--dry' in sys.argv:
dry_run = True
if not dry_run:
script_utils.add_file_logger(logger, __file__)
init_app(set_backends=True, routes=False)
with TokuTransaction():
migrate(dry_run=dry_run)
if __name__ == "__main__":
main()
|
Add migration script for [citations]nodesettings
<provider>_list_id --> list_idimport logging
import sys
from modularodm import Q
from framework.mongo import database
from framework.transactions.context import TokuTransaction
from website.app import init_app
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
PROVIDERS = ['mendeley', 'zotero']
def migrate_list_id_field(document, provider):
try:
database['{}nodesettings'.format(provider)].find_and_modify(
{'_id': document['_id']},
{
'$set': {
'list_id': document['{}_list_id'.format(provider)]
}
}
)
database['{}nodesettings'.format(provider)].find_and_modify(
{'_id': document['_id']},
{
'$unset': {
'{}_list_id'.format(provider): ''
}
}
)
except Exception:
return False
return True
def verify_node_settings_document(document, provider):
try:
assert('_id' in document)
assert('{}_list_id'.format(provider) in document)
except AssertionError:
return False
return True
def migrate(dry_run=True):
documents_no_list_id = {}
documents_migration_failed = {}
documents_migrated = {}
for provider in PROVIDERS:
documents_migrated[provider] = []
documents_migration_failed[provider] = []
documents_no_list_id[provider] = []
for document in database['{}nodesettings'.format(provider)].find():
if verify_node_settings_document(document, provider):
if migrate_list_id_field(document, provider):
documents_migrated[provider].append(document)
else:
documents_migration_failed[provider].append(document)
else:
documents_no_list_id[provider].append(document)
for provider in PROVIDERS:
if documents_migrated[provider]:
logger.info('Successfully migrated {0} {1} node settings documents:\n{2}'.format(
len(documents_migrated[provider]), provider, [e['_id'] for e in documents_migrated[provider]]
))
if documents_no_list_id[provider]:
logger.error('Failed to migrate {0} {1} node settings documents due to no {1}_list_id field:\n{2}'.format(
len(documents_no_list_id[provider]), provider, [e['_id'] for e in documents_no_list_id[provider]]
))
if documents_migration_failed[provider]:
logger.error('Failed to migrate {0} {1} node settings documents for unknown reason:\n{2}'.format(
len(documents_migration_failed[provider]), provider, [e['_id'] for e in documents_migration_failed[provider]]
))
if dry_run:
raise RuntimeError('Dry run, transaction rolled back.')
def main():
dry_run = False
if '--dry' in sys.argv:
dry_run = True
if not dry_run:
script_utils.add_file_logger(logger, __file__)
init_app(set_backends=True, routes=False)
with TokuTransaction():
migrate(dry_run=dry_run)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add migration script for [citations]nodesettings
<provider>_list_id --> list_id<commit_after>import logging
import sys
from modularodm import Q
from framework.mongo import database
from framework.transactions.context import TokuTransaction
from website.app import init_app
from scripts import utils as script_utils
logger = logging.getLogger(__name__)
PROVIDERS = ['mendeley', 'zotero']
def migrate_list_id_field(document, provider):
try:
database['{}nodesettings'.format(provider)].find_and_modify(
{'_id': document['_id']},
{
'$set': {
'list_id': document['{}_list_id'.format(provider)]
}
}
)
database['{}nodesettings'.format(provider)].find_and_modify(
{'_id': document['_id']},
{
'$unset': {
'{}_list_id'.format(provider): ''
}
}
)
except Exception:
return False
return True
def verify_node_settings_document(document, provider):
try:
assert('_id' in document)
assert('{}_list_id'.format(provider) in document)
except AssertionError:
return False
return True
def migrate(dry_run=True):
documents_no_list_id = {}
documents_migration_failed = {}
documents_migrated = {}
for provider in PROVIDERS:
documents_migrated[provider] = []
documents_migration_failed[provider] = []
documents_no_list_id[provider] = []
for document in database['{}nodesettings'.format(provider)].find():
if verify_node_settings_document(document, provider):
if migrate_list_id_field(document, provider):
documents_migrated[provider].append(document)
else:
documents_migration_failed[provider].append(document)
else:
documents_no_list_id[provider].append(document)
for provider in PROVIDERS:
if documents_migrated[provider]:
logger.info('Successfully migrated {0} {1} node settings documents:\n{2}'.format(
len(documents_migrated[provider]), provider, [e['_id'] for e in documents_migrated[provider]]
))
if documents_no_list_id[provider]:
logger.error('Failed to migrate {0} {1} node settings documents due to no {1}_list_id field:\n{2}'.format(
len(documents_no_list_id[provider]), provider, [e['_id'] for e in documents_no_list_id[provider]]
))
if documents_migration_failed[provider]:
logger.error('Failed to migrate {0} {1} node settings documents for unknown reason:\n{2}'.format(
len(documents_migration_failed[provider]), provider, [e['_id'] for e in documents_migration_failed[provider]]
))
if dry_run:
raise RuntimeError('Dry run, transaction rolled back.')
def main():
dry_run = False
if '--dry' in sys.argv:
dry_run = True
if not dry_run:
script_utils.add_file_logger(logger, __file__)
init_app(set_backends=True, routes=False)
with TokuTransaction():
migrate(dry_run=dry_run)
if __name__ == "__main__":
main()
|
|
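A side note on the $set/$unset pattern in the migration above: MongoDB's $rename update operator performs the same field move in a single atomic update. A minimal sketch, assuming the legacy pymongo update() API implied by the script's use of find_and_modify (the helper name is illustrative, not part of the commit):

def migrate_list_id_field_rename(document, provider):
    # Same effect as the $set + $unset pair above, in one atomic update.
    try:
        database['{}nodesettings'.format(provider)].update(
            {'_id': document['_id']},
            {'$rename': {'{}_list_id'.format(provider): 'list_id'}},
        )
    except Exception:
        return False
    return True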
361ca05d2ba9662551b688b1d7867edbec8d9705
|
unittests/test_roles.py
|
unittests/test_roles.py
|
import pytest
@pytest.fixture
def mockctl(testdir):
testdir.makepyfile(mockctl="""
import mock
pytest_plugins = 'sangoma.lab.roles'
def pytest_lab_addroles(rolemanager):
rolemanager.register('mock', mock.Mock())
""")
def test_manage_location(testdir):
testdir.makeconftest("""
pytest_plugins = 'sangoma.lab.roles'
""")
testdir.makepyfile("""
import pytest
FACTS = {'testing': True}
def test_manage():
localhost = pytest.env.manage('localhost', facts=FACTS)
assert localhost.hostname == 'localhost'
assert localhost.facts == FACTS
""")
    # Needs a subprocess because of plumbum's atexit hooks
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
def test_add_role(mockctl, testdir):
testdir.makeconftest("""
import pytest
pytest_plugins = 'mockctl'
@pytest.fixture
def localhost():
return pytest.env.manage('localhost')
def pytest_lab_register_role(config, ctl):
if ctl.name == 'mock':
ctl.setup()
def pytest_lab_delete_role(config, ctl):
if ctl.name == 'mock':
ctl.teardown()
""")
testdir.makepyfile("""
def test_add_role(localhost):
role = localhost.role('mock')
assert role.name == 'mock'
role.setup.assert_called_once_with()
localhost.destroy(role)
role.teardown.assert_called_once_with()
""")
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
def test_add_unknown_role(mockctl, testdir):
testdir.makeconftest("""
import pytest
pytest_plugins = 'mockctl'
@pytest.fixture
def localhost():
return pytest.env.manage('localhost')
""")
testdir.makepyfile("""
import pytest
def test_add_role(localhost):
with pytest.raises(KeyError):
localhost.role('tyler')
""")
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
|
Add unit tests for basic role stuff
|
Add unit tests for basic role stuff
|
Python
|
mpl-2.0
|
sangoma/pytestlab
|
Add unit tests for basic role stuff
|
import pytest
@pytest.fixture
def mockctl(testdir):
testdir.makepyfile(mockctl="""
import mock
pytest_plugins = 'sangoma.lab.roles'
def pytest_lab_addroles(rolemanager):
rolemanager.register('mock', mock.Mock())
""")
def test_manage_location(testdir):
testdir.makeconftest("""
pytest_plugins = 'sangoma.lab.roles'
""")
testdir.makepyfile("""
import pytest
FACTS = {'testing': True}
def test_manage():
localhost = pytest.env.manage('localhost', facts=FACTS)
assert localhost.hostname == 'localhost'
assert localhost.facts == FACTS
""")
    # Needs a subprocess because of plumbum's atexit hooks
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
def test_add_role(mockctl, testdir):
testdir.makeconftest("""
import pytest
pytest_plugins = 'mockctl'
@pytest.fixture
def localhost():
return pytest.env.manage('localhost')
def pytest_lab_register_role(config, ctl):
if ctl.name == 'mock':
ctl.setup()
def pytest_lab_delete_role(config, ctl):
if ctl.name == 'mock':
ctl.teardown()
""")
testdir.makepyfile("""
def test_add_role(localhost):
role = localhost.role('mock')
assert role.name == 'mock'
role.setup.assert_called_once_with()
localhost.destroy(role)
role.teardown.assert_called_once_with()
""")
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
def test_add_unknown_role(mockctl, testdir):
testdir.makeconftest("""
import pytest
pytest_plugins = 'mockctl'
@pytest.fixture
def localhost():
return pytest.env.manage('localhost')
""")
testdir.makepyfile("""
import pytest
def test_add_role(localhost):
with pytest.raises(KeyError):
localhost.role('tyler')
""")
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
|
<commit_before><commit_msg>Add unit tests for basic role stuff<commit_after>
|
import pytest
@pytest.fixture
def mockctl(testdir):
testdir.makepyfile(mockctl="""
import mock
pytest_plugins = 'sangoma.lab.roles'
def pytest_lab_addroles(rolemanager):
rolemanager.register('mock', mock.Mock())
""")
def test_manage_location(testdir):
testdir.makeconftest("""
pytest_plugins = 'sangoma.lab.roles'
""")
testdir.makepyfile("""
import pytest
FACTS = {'testing': True}
def test_manage():
localhost = pytest.env.manage('localhost', facts=FACTS)
assert localhost.hostname == 'localhost'
assert localhost.facts == FACTS
""")
    # Needs a subprocess because of plumbum's atexit hooks
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
def test_add_role(mockctl, testdir):
testdir.makeconftest("""
import pytest
pytest_plugins = 'mockctl'
@pytest.fixture
def localhost():
return pytest.env.manage('localhost')
def pytest_lab_register_role(config, ctl):
if ctl.name == 'mock':
ctl.setup()
def pytest_lab_delete_role(config, ctl):
if ctl.name == 'mock':
ctl.teardown()
""")
testdir.makepyfile("""
def test_add_role(localhost):
role = localhost.role('mock')
assert role.name == 'mock'
role.setup.assert_called_once_with()
localhost.destroy(role)
role.teardown.assert_called_once_with()
""")
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
def test_add_unknown_role(mockctl, testdir):
testdir.makeconftest("""
import pytest
pytest_plugins = 'mockctl'
@pytest.fixture
def localhost():
return pytest.env.manage('localhost')
""")
testdir.makepyfile("""
import pytest
def test_add_role(localhost):
with pytest.raises(KeyError):
localhost.role('tyler')
""")
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
|
Add unit tests for basic role stuffimport pytest
@pytest.fixture
def mockctl(testdir):
testdir.makepyfile(mockctl="""
import mock
pytest_plugins = 'sangoma.lab.roles'
def pytest_lab_addroles(rolemanager):
rolemanager.register('mock', mock.Mock())
""")
def test_manage_location(testdir):
testdir.makeconftest("""
pytest_plugins = 'sangoma.lab.roles'
""")
testdir.makepyfile("""
import pytest
FACTS = {'testing': True}
def test_manage():
localhost = pytest.env.manage('localhost', facts=FACTS)
assert localhost.hostname == 'localhost'
assert localhost.facts == FACTS
""")
    # Needs a subprocess because of plumbum's atexit hooks
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
def test_add_role(mockctl, testdir):
testdir.makeconftest("""
import pytest
pytest_plugins = 'mockctl'
@pytest.fixture
def localhost():
return pytest.env.manage('localhost')
def pytest_lab_register_role(config, ctl):
if ctl.name == 'mock':
ctl.setup()
def pytest_lab_delete_role(config, ctl):
if ctl.name == 'mock':
ctl.teardown()
""")
testdir.makepyfile("""
def test_add_role(localhost):
role = localhost.role('mock')
assert role.name == 'mock'
role.setup.assert_called_once_with()
localhost.destroy(role)
role.teardown.assert_called_once_with()
""")
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
def test_add_unknown_role(mockctl, testdir):
testdir.makeconftest("""
import pytest
pytest_plugins = 'mockctl'
@pytest.fixture
def localhost():
return pytest.env.manage('localhost')
""")
testdir.makepyfile("""
import pytest
def test_add_role(localhost):
with pytest.raises(KeyError):
localhost.role('tyler')
""")
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
|
<commit_before><commit_msg>Add unit tests for basic role stuff<commit_after>import pytest
@pytest.fixture
def mockctl(testdir):
testdir.makepyfile(mockctl="""
import mock
pytest_plugins = 'sangoma.lab.roles'
def pytest_lab_addroles(rolemanager):
rolemanager.register('mock', mock.Mock())
""")
def test_manage_location(testdir):
testdir.makeconftest("""
pytest_plugins = 'sangoma.lab.roles'
""")
testdir.makepyfile("""
import pytest
FACTS = {'testing': True}
def test_manage():
localhost = pytest.env.manage('localhost', facts=FACTS)
assert localhost.hostname == 'localhost'
assert localhost.facts == FACTS
""")
    # Needs a subprocess because of plumbum's atexit hooks
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
def test_add_role(mockctl, testdir):
testdir.makeconftest("""
import pytest
pytest_plugins = 'mockctl'
@pytest.fixture
def localhost():
return pytest.env.manage('localhost')
def pytest_lab_register_role(config, ctl):
if ctl.name == 'mock':
ctl.setup()
def pytest_lab_delete_role(config, ctl):
if ctl.name == 'mock':
ctl.teardown()
""")
testdir.makepyfile("""
def test_add_role(localhost):
role = localhost.role('mock')
assert role.name == 'mock'
role.setup.assert_called_once_with()
localhost.destroy(role)
role.teardown.assert_called_once_with()
""")
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
def test_add_unknown_role(mockctl, testdir):
testdir.makeconftest("""
import pytest
pytest_plugins = 'mockctl'
@pytest.fixture
def localhost():
return pytest.env.manage('localhost')
""")
testdir.makepyfile("""
import pytest
def test_add_role(localhost):
with pytest.raises(KeyError):
localhost.role('tyler')
""")
result = testdir.runpytest_subprocess('--env', 'mock')
assert result.ret == 0
|
|
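The mockctl fixture above registers a bare mock.Mock() through the pytest_lab_addroles hook; a real plugin would register an actual role object the same way. A hedged sketch with a hypothetical SSHRole class (the class, its methods, and the 'ssh' key are illustrative assumptions, not part of pytestlab's shipped API):

# conftest.py (illustrative only)
pytest_plugins = 'sangoma.lab.roles'

class SSHRole(object):
    def setup(self):
        pass  # e.g. open a connection to the managed location

    def teardown(self):
        pass  # e.g. tear the connection down

def pytest_lab_addroles(rolemanager):
    # Mirrors the mock registration in the tests above.
    rolemanager.register('ssh', SSHRole())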
e6866b5e186600d88a61684c8f35c59c4fcc228f
|
utils/indirect_calls.py
|
utils/indirect_calls.py
|
#!/usr/bin/env python
"""A tool for looking for indirect jumps and calls in x86 binaries.
Helpful to verify whether or not retpoline mitigations are catching
all of the indirect branches in a binary and telling you which
functions the remaining ones are in (assembly, etc).
Depends on llvm-objdump being in your path and is tied to the
dump format.
"""
import os
import sys
import re
import subprocess
import optparse
# Look for indirect calls/jmps in a binary. re: (call|jmp).*\*
def look_for_indirect(file):
args = ['llvm-objdump']
args.extend(["-d"])
args.extend([file])
p = subprocess.Popen(args=args, stdin=None, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
(stdout,stderr) = p.communicate()
function = ""
for line in stdout.splitlines():
if line.startswith(' ') == False:
function = line
result = re.search('(call|jmp).*\*', line)
if result != None:
# TODO: Perhaps use cxxfilt to demangle functions?
print function
print line
return
# Compare the debug info between two files.
def main(args):
# No options currently other than the binary.
parser = optparse.OptionParser("%prog [options] <binary>")
(opts, args) = parser.parse_args(args)
if len(args) != 2:
parser.error("invalid number of arguments: %s" % len(args))
look_for_indirect(args[1])
if __name__ == '__main__':
main(sys.argv)
|
Add a wrapper around llvm-objdump to look for indirect calls/jmps in x86 assembly.
|
Add a wrapper around llvm-objdump to look for indirect calls/jmps in x86 assembly.
Useful when looking for indirect calls/jmps that need mitigation
via retpoline or other mitigations for Spectre v2.
Feedback, extension, additional patches welcome.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@329050 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
llvm-mirror/llvm,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm,apple/swift-llvm,GPUOpen-Drivers/llvm,llvm-mirror/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,GPUOpen-Drivers/llvm,apple/swift-llvm,llvm-mirror/llvm,llvm-mirror/llvm,apple/swift-llvm,apple/swift-llvm,llvm-mirror/llvm,llvm-mirror/llvm,llvm-mirror/llvm
|
Add a wrapper around llvm-objdump to look for indirect calls/jmps in x86 assembly.
Useful when looking for indirect calls/jmps that need mitigation
via retpoline or other mitigations for Spectre v2.
Feedback, extension, additional patches welcome.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@329050 91177308-0d34-0410-b5e6-96231b3b80d8
|
#!/usr/bin/env python
"""A tool for looking for indirect jumps and calls in x86 binaries.
Helpful to verify whether or not retpoline mitigations are catching
all of the indirect branches in a binary and telling you which
functions the remaining ones are in (assembly, etc).
Depends on llvm-objdump being in your path and is tied to the
dump format.
"""
import os
import sys
import re
import subprocess
import optparse
# Look for indirect calls/jmps in a binary. re: (call|jmp).*\*
def look_for_indirect(file):
args = ['llvm-objdump']
args.extend(["-d"])
args.extend([file])
p = subprocess.Popen(args=args, stdin=None, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
(stdout,stderr) = p.communicate()
function = ""
for line in stdout.splitlines():
if line.startswith(' ') == False:
function = line
result = re.search('(call|jmp).*\*', line)
if result != None:
# TODO: Perhaps use cxxfilt to demangle functions?
print function
print line
return
# Compare the debug info between two files.
def main(args):
# No options currently other than the binary.
parser = optparse.OptionParser("%prog [options] <binary>")
(opts, args) = parser.parse_args(args)
if len(args) != 2:
parser.error("invalid number of arguments: %s" % len(args))
look_for_indirect(args[1])
if __name__ == '__main__':
main(sys.argv)
|
<commit_before><commit_msg>Add a wrapper around llvm-objdump to look for indirect calls/jmps in x86 assembly.
Useful when looking for indirect calls/jmps that need mitigation
via retpoline or other mitigations for Spectre v2.
Feedback, extension, additional patches welcome.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@329050 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
#!/usr/bin/env python
"""A tool for looking for indirect jumps and calls in x86 binaries.
Helpful to verify whether or not retpoline mitigations are catching
all of the indirect branches in a binary and telling you which
functions the remaining ones are in (assembly, etc).
Depends on llvm-objdump being in your path and is tied to the
dump format.
"""
import os
import sys
import re
import subprocess
import optparse
# Look for indirect calls/jmps in a binary. re: (call|jmp).*\*
def look_for_indirect(file):
args = ['llvm-objdump']
args.extend(["-d"])
args.extend([file])
p = subprocess.Popen(args=args, stdin=None, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
(stdout,stderr) = p.communicate()
function = ""
for line in stdout.splitlines():
if line.startswith(' ') == False:
function = line
result = re.search('(call|jmp).*\*', line)
if result != None:
# TODO: Perhaps use cxxfilt to demangle functions?
print function
print line
return
# Compare the debug info between two files.
def main(args):
# No options currently other than the binary.
parser = optparse.OptionParser("%prog [options] <binary>")
(opts, args) = parser.parse_args(args)
if len(args) != 2:
parser.error("invalid number of arguments: %s" % len(args))
look_for_indirect(args[1])
if __name__ == '__main__':
main(sys.argv)
|
Add a wrapper around llvm-objdump to look for indirect calls/jmps in x86 assembly.
Useful when looking for indirect calls/jmps that need mitigation
via retpoline or other mitigations for Spectre v2.
Feedback, extension, additional patches welcome.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@329050 91177308-0d34-0410-b5e6-96231b3b80d8#!/usr/bin/env python
"""A tool for looking for indirect jumps and calls in x86 binaries.
Helpful to verify whether or not retpoline mitigations are catching
all of the indirect branches in a binary and telling you which
functions the remaining ones are in (assembly, etc).
Depends on llvm-objdump being in your path and is tied to the
dump format.
"""
import os
import sys
import re
import subprocess
import optparse
# Look for indirect calls/jmps in a binary. re: (call|jmp).*\*
def look_for_indirect(file):
args = ['llvm-objdump']
args.extend(["-d"])
args.extend([file])
p = subprocess.Popen(args=args, stdin=None, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
(stdout,stderr) = p.communicate()
function = ""
for line in stdout.splitlines():
if line.startswith(' ') == False:
function = line
result = re.search('(call|jmp).*\*', line)
if result != None:
# TODO: Perhaps use cxxfilt to demangle functions?
print function
print line
return
# Compare the debug info between two files.
def main(args):
# No options currently other than the binary.
parser = optparse.OptionParser("%prog [options] <binary>")
(opts, args) = parser.parse_args(args)
if len(args) != 2:
parser.error("invalid number of arguments: %s" % len(args))
look_for_indirect(args[1])
if __name__ == '__main__':
main(sys.argv)
|
<commit_before><commit_msg>Add a wrapper around llvm-objdump to look for indirect calls/jmps in x86 assembly.
Useful when looking for indirect calls/jmps that need mitigation
via retpoline or other mitigations for Spectre v2.
Feedback, extension, additional patches welcome.
git-svn-id: 0ff597fd157e6f4fc38580e8d64ab130330d2411@329050 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>#!/usr/bin/env python
"""A tool for looking for indirect jumps and calls in x86 binaries.
Helpful to verify whether or not retpoline mitigations are catching
all of the indirect branches in a binary and telling you which
functions the remaining ones are in (assembly, etc).
Depends on llvm-objdump being in your path and is tied to the
dump format.
"""
import os
import sys
import re
import subprocess
import optparse
# Look for indirect calls/jmps in a binary. re: (call|jmp).*\*
def look_for_indirect(file):
args = ['llvm-objdump']
args.extend(["-d"])
args.extend([file])
p = subprocess.Popen(args=args, stdin=None, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
(stdout,stderr) = p.communicate()
function = ""
for line in stdout.splitlines():
if line.startswith(' ') == False:
function = line
result = re.search('(call|jmp).*\*', line)
if result != None:
# TODO: Perhaps use cxxfilt to demangle functions?
print function
print line
return
# Compare the debug info between two files.
def main(args):
# No options currently other than the binary.
parser = optparse.OptionParser("%prog [options] <binary>")
(opts, args) = parser.parse_args(args)
if len(args) != 2:
parser.error("invalid number of arguments: %s" % len(args))
look_for_indirect(args[1])
if __name__ == '__main__':
main(sys.argv)
|
|
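The indirect_calls.py script above is Python 2 (bare print statements, implicit byte strings). A rough Python 3 port of the core routine, assuming llvm-objdump's disassembly output decodes cleanly as text, might read:

#!/usr/bin/env python3
# Illustrative Python 3 port of look_for_indirect(); not part of the commit.
import re
import subprocess

def look_for_indirect(path):
    out = subprocess.run(['llvm-objdump', '-d', path],
                         capture_output=True, text=True).stdout
    function = ''
    for line in out.splitlines():
        if not line.startswith(' '):
            function = line  # objdump prints function labels unindented
        if re.search(r'(call|jmp).*\*', line):
            print(function)
            print(line)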
34e875d1d876f05b3d5bb50f38ae61a7c48ea42b
|
server/migrations/0080_auto_20190202_1020.py
|
server/migrations/0080_auto_20190202_1020.py
|
# Generated by Django 2.1.4 on 2019-02-02 15:20
from django.db import migrations, models
import django.db.models.deletion
import ulid2
class Migration(migrations.Migration):
dependencies = [
('server', '0079_auto_20181211_1028'),
]
operations = [
migrations.CreateModel(
name='ManagedItem',
fields=[
('id', models.UUIDField(default=ulid2.generate_ulid_as_uuid, primary_key=True, serialize=False)),
('name', models.CharField(max_length=255, unique=True)),
('date_managed', models.DateTimeField()),
('status', models.CharField(choices=[('PRESENT', 'Present'), ('ABSENT', 'Absent'), ('PENDING', 'Pending'), ('ERROR', 'Error'), ('UNKNOWN', 'Unknown')], default='UNKNOWN', max_length=7)),
('data', models.TextField(null=True)),
('machine', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='server.Machine')),
],
),
migrations.CreateModel(
name='ManagementSource',
fields=[
('id', models.UUIDField(default=ulid2.generate_ulid_as_uuid, primary_key=True, serialize=False)),
('name', models.CharField(max_length=255, unique=True)),
],
),
migrations.AddField(
model_name='manageditem',
name='management_source',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='server.ManagementSource'),
),
]
|
Add in migration for ManagedItem, ManagementSource.
|
Add in migration for ManagedItem, ManagementSource.
|
Python
|
apache-2.0
|
sheagcraig/sal,salopensource/sal,sheagcraig/sal,salopensource/sal,sheagcraig/sal,salopensource/sal,sheagcraig/sal,salopensource/sal
|
Add in migration for ManagedItem, ManagementSource.
|
# Generated by Django 2.1.4 on 2019-02-02 15:20
from django.db import migrations, models
import django.db.models.deletion
import ulid2
class Migration(migrations.Migration):
dependencies = [
('server', '0079_auto_20181211_1028'),
]
operations = [
migrations.CreateModel(
name='ManagedItem',
fields=[
('id', models.UUIDField(default=ulid2.generate_ulid_as_uuid, primary_key=True, serialize=False)),
('name', models.CharField(max_length=255, unique=True)),
('date_managed', models.DateTimeField()),
('status', models.CharField(choices=[('PRESENT', 'Present'), ('ABSENT', 'Absent'), ('PENDING', 'Pending'), ('ERROR', 'Error'), ('UNKNOWN', 'Unknown')], default='UNKNOWN', max_length=7)),
('data', models.TextField(null=True)),
('machine', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='server.Machine')),
],
),
migrations.CreateModel(
name='ManagementSource',
fields=[
('id', models.UUIDField(default=ulid2.generate_ulid_as_uuid, primary_key=True, serialize=False)),
('name', models.CharField(max_length=255, unique=True)),
],
),
migrations.AddField(
model_name='manageditem',
name='management_source',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='server.ManagementSource'),
),
]
|
<commit_before><commit_msg>Add in migration for ManagedItem, ManagementSource.<commit_after>
|
# Generated by Django 2.1.4 on 2019-02-02 15:20
from django.db import migrations, models
import django.db.models.deletion
import ulid2
class Migration(migrations.Migration):
dependencies = [
('server', '0079_auto_20181211_1028'),
]
operations = [
migrations.CreateModel(
name='ManagedItem',
fields=[
('id', models.UUIDField(default=ulid2.generate_ulid_as_uuid, primary_key=True, serialize=False)),
('name', models.CharField(max_length=255, unique=True)),
('date_managed', models.DateTimeField()),
('status', models.CharField(choices=[('PRESENT', 'Present'), ('ABSENT', 'Absent'), ('PENDING', 'Pending'), ('ERROR', 'Error'), ('UNKNOWN', 'Unknown')], default='UNKNOWN', max_length=7)),
('data', models.TextField(null=True)),
('machine', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='server.Machine')),
],
),
migrations.CreateModel(
name='ManagementSource',
fields=[
('id', models.UUIDField(default=ulid2.generate_ulid_as_uuid, primary_key=True, serialize=False)),
('name', models.CharField(max_length=255, unique=True)),
],
),
migrations.AddField(
model_name='manageditem',
name='management_source',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='server.ManagementSource'),
),
]
|
Add in migration for ManagedItem, ManagementSource.# Generated by Django 2.1.4 on 2019-02-02 15:20
from django.db import migrations, models
import django.db.models.deletion
import ulid2
class Migration(migrations.Migration):
dependencies = [
('server', '0079_auto_20181211_1028'),
]
operations = [
migrations.CreateModel(
name='ManagedItem',
fields=[
('id', models.UUIDField(default=ulid2.generate_ulid_as_uuid, primary_key=True, serialize=False)),
('name', models.CharField(max_length=255, unique=True)),
('date_managed', models.DateTimeField()),
('status', models.CharField(choices=[('PRESENT', 'Present'), ('ABSENT', 'Absent'), ('PENDING', 'Pending'), ('ERROR', 'Error'), ('UNKNOWN', 'Unknown')], default='UNKNOWN', max_length=7)),
('data', models.TextField(null=True)),
('machine', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='server.Machine')),
],
),
migrations.CreateModel(
name='ManagementSource',
fields=[
('id', models.UUIDField(default=ulid2.generate_ulid_as_uuid, primary_key=True, serialize=False)),
('name', models.CharField(max_length=255, unique=True)),
],
),
migrations.AddField(
model_name='manageditem',
name='management_source',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='server.ManagementSource'),
),
]
|
<commit_before><commit_msg>Add in migration for ManagedItem, ManagementSource.<commit_after># Generated by Django 2.1.4 on 2019-02-02 15:20
from django.db import migrations, models
import django.db.models.deletion
import ulid2
class Migration(migrations.Migration):
dependencies = [
('server', '0079_auto_20181211_1028'),
]
operations = [
migrations.CreateModel(
name='ManagedItem',
fields=[
('id', models.UUIDField(default=ulid2.generate_ulid_as_uuid, primary_key=True, serialize=False)),
('name', models.CharField(max_length=255, unique=True)),
('date_managed', models.DateTimeField()),
('status', models.CharField(choices=[('PRESENT', 'Present'), ('ABSENT', 'Absent'), ('PENDING', 'Pending'), ('ERROR', 'Error'), ('UNKNOWN', 'Unknown')], default='UNKNOWN', max_length=7)),
('data', models.TextField(null=True)),
('machine', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='server.Machine')),
],
),
migrations.CreateModel(
name='ManagementSource',
fields=[
('id', models.UUIDField(default=ulid2.generate_ulid_as_uuid, primary_key=True, serialize=False)),
('name', models.CharField(max_length=255, unique=True)),
],
),
migrations.AddField(
model_name='manageditem',
name='management_source',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='server.ManagementSource'),
),
]
|
|
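For readers who think in model syntax rather than migration operations, the migration above corresponds roughly to definitions like the following. This is a reconstruction derived from the migration itself, not the actual server/models.py:

import ulid2
from django.db import models

class ManagementSource(models.Model):
    id = models.UUIDField(default=ulid2.generate_ulid_as_uuid, primary_key=True)
    name = models.CharField(max_length=255, unique=True)

class ManagedItem(models.Model):
    STATUS_CHOICES = (
        ('PRESENT', 'Present'), ('ABSENT', 'Absent'), ('PENDING', 'Pending'),
        ('ERROR', 'Error'), ('UNKNOWN', 'Unknown'))
    id = models.UUIDField(default=ulid2.generate_ulid_as_uuid, primary_key=True)
    name = models.CharField(max_length=255, unique=True)
    date_managed = models.DateTimeField()
    status = models.CharField(max_length=7, choices=STATUS_CHOICES, default='UNKNOWN')
    data = models.TextField(null=True)
    machine = models.ForeignKey('server.Machine', on_delete=models.CASCADE)
    management_source = models.ForeignKey(ManagementSource, on_delete=models.CASCADE)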
f96c393611e07b31fe31bb44f7dc1729172483f0
|
test_inclination.py
|
test_inclination.py
|
import numpy as np
from astropy import units as u
from poliastro.bodies import Earth
from poliastro.twobody import State
from poliastro.twobody.propagation import cowell
from poliastro.util import norm
def test_inclination():
a_0 = 7000.0
i_0 = (28.5 * u.deg).to(u.rad).value
f = 3.5e-7
k = Earth.k.decompose([u.km, u.s]).value
def a_d(t0, u_, _):
r, v = u_[:3], u_[3:]
beta = np.pi / 2 * np.sign(r[0]) # Change with out-of-plane velocity
#beta = np.pi / 2 * np.sign(r[1]) # Change at node crossing
# DEBUG
ss = State.from_vectors(Earth, r * u.km, v * u.km / u.s)
print(beta, ss.inc.to("deg"))
# END DEBUG
w_ = np.cross(r, v) / norm(np.cross(r, v))
accel_v = f * np.sin(beta) * w_
return accel_v
# Retrieve r and v from initial orbit
s0 = State.circular(Earth, a_0 * u.km - Earth.R, i_0 * u.rad)
r0, v0 = s0.rv()
tf = 150 * s0.period
# Propagate orbit
r, v = cowell(k,
r0.to(u.km).value,
v0.to(u.km / u.s).value,
tf.to(u.s).value,
ad=a_d,
nsteps=100000)
sf = State.from_vectors(Earth,
r * u.km,
v * u.km / u.s,
s0.epoch + tf)
print(sf.a.to(u.km))
print(sf.ecc)
print(sf.inc.to("deg"))
|
Add test to study the change in inclination only
|
Add test to study the change in inclination only
|
Python
|
mit
|
Juanlu001/pfc-uc3m
|
Add test to study the change in inclination only
|
import numpy as np
from astropy import units as u
from poliastro.bodies import Earth
from poliastro.twobody import State
from poliastro.twobody.propagation import cowell
from poliastro.util import norm
def test_inclination():
a_0 = 7000.0
i_0 = (28.5 * u.deg).to(u.rad).value
f = 3.5e-7
k = Earth.k.decompose([u.km, u.s]).value
def a_d(t0, u_, _):
r, v = u_[:3], u_[3:]
beta = np.pi / 2 * np.sign(r[0]) # Change with out-of-plane velocity
#beta = np.pi / 2 * np.sign(r[1]) # Change at node crossing
# DEBUG
ss = State.from_vectors(Earth, r * u.km, v * u.km / u.s)
print(beta, ss.inc.to("deg"))
# END DEBUG
w_ = np.cross(r, v) / norm(np.cross(r, v))
accel_v = f * np.sin(beta) * w_
return accel_v
# Retrieve r and v from initial orbit
s0 = State.circular(Earth, a_0 * u.km - Earth.R, i_0 * u.rad)
r0, v0 = s0.rv()
tf = 150 * s0.period
# Propagate orbit
r, v = cowell(k,
r0.to(u.km).value,
v0.to(u.km / u.s).value,
tf.to(u.s).value,
ad=a_d,
nsteps=100000)
sf = State.from_vectors(Earth,
r * u.km,
v * u.km / u.s,
s0.epoch + tf)
print(sf.a.to(u.km))
print(sf.ecc)
print(sf.inc.to("deg"))
|
<commit_before><commit_msg>Add test to study the change in inclination only<commit_after>
|
import numpy as np
from astropy import units as u
from poliastro.bodies import Earth
from poliastro.twobody import State
from poliastro.twobody.propagation import cowell
from poliastro.util import norm
def test_inclination():
a_0 = 7000.0
i_0 = (28.5 * u.deg).to(u.rad).value
f = 3.5e-7
k = Earth.k.decompose([u.km, u.s]).value
def a_d(t0, u_, _):
r, v = u_[:3], u_[3:]
beta = np.pi / 2 * np.sign(r[0]) # Change with out-of-plane velocity
#beta = np.pi / 2 * np.sign(r[1]) # Change at node crossing
# DEBUG
ss = State.from_vectors(Earth, r * u.km, v * u.km / u.s)
print(beta, ss.inc.to("deg"))
# END DEBUG
w_ = np.cross(r, v) / norm(np.cross(r, v))
accel_v = f * np.sin(beta) * w_
return accel_v
# Retrieve r and v from initial orbit
s0 = State.circular(Earth, a_0 * u.km - Earth.R, i_0 * u.rad)
r0, v0 = s0.rv()
tf = 150 * s0.period
# Propagate orbit
r, v = cowell(k,
r0.to(u.km).value,
v0.to(u.km / u.s).value,
tf.to(u.s).value,
ad=a_d,
nsteps=100000)
sf = State.from_vectors(Earth,
r * u.km,
v * u.km / u.s,
s0.epoch + tf)
print(sf.a.to(u.km))
print(sf.ecc)
print(sf.inc.to("deg"))
|
Add test to study the change in inclination onlyimport numpy as np
from astropy import units as u
from poliastro.bodies import Earth
from poliastro.twobody import State
from poliastro.twobody.propagation import cowell
from poliastro.util import norm
def test_inclination():
a_0 = 7000.0
i_0 = (28.5 * u.deg).to(u.rad).value
f = 3.5e-7
k = Earth.k.decompose([u.km, u.s]).value
def a_d(t0, u_, _):
r, v = u_[:3], u_[3:]
beta = np.pi / 2 * np.sign(r[0]) # Change with out-of-plane velocity
#beta = np.pi / 2 * np.sign(r[1]) # Change at node crossing
# DEBUG
ss = State.from_vectors(Earth, r * u.km, v * u.km / u.s)
print(beta, ss.inc.to("deg"))
# END DEBUG
w_ = np.cross(r, v) / norm(np.cross(r, v))
accel_v = f * np.sin(beta) * w_
return accel_v
# Retrieve r and v from initial orbit
s0 = State.circular(Earth, a_0 * u.km - Earth.R, i_0 * u.rad)
r0, v0 = s0.rv()
tf = 150 * s0.period
# Propagate orbit
r, v = cowell(k,
r0.to(u.km).value,
v0.to(u.km / u.s).value,
tf.to(u.s).value,
ad=a_d,
nsteps=100000)
sf = State.from_vectors(Earth,
r * u.km,
v * u.km / u.s,
s0.epoch + tf)
print(sf.a.to(u.km))
print(sf.ecc)
print(sf.inc.to("deg"))
|
<commit_before><commit_msg>Add test to study the change in inclination only<commit_after>import numpy as np
from astropy import units as u
from poliastro.bodies import Earth
from poliastro.twobody import State
from poliastro.twobody.propagation import cowell
from poliastro.util import norm
def test_inclination():
a_0 = 7000.0
i_0 = (28.5 * u.deg).to(u.rad).value
f = 3.5e-7
k = Earth.k.decompose([u.km, u.s]).value
def a_d(t0, u_, _):
r, v = u_[:3], u_[3:]
beta = np.pi / 2 * np.sign(r[0]) # Change with out-of-plane velocity
#beta = np.pi / 2 * np.sign(r[1]) # Change at node crossing
# DEBUG
ss = State.from_vectors(Earth, r * u.km, v * u.km / u.s)
print(beta, ss.inc.to("deg"))
# END DEBUG
w_ = np.cross(r, v) / norm(np.cross(r, v))
accel_v = f * np.sin(beta) * w_
return accel_v
# Retrieve r and v from initial orbit
s0 = State.circular(Earth, a_0 * u.km - Earth.R, i_0 * u.rad)
r0, v0 = s0.rv()
tf = 150 * s0.period
# Propagate orbit
r, v = cowell(k,
r0.to(u.km).value,
v0.to(u.km / u.s).value,
tf.to(u.s).value,
ad=a_d,
nsteps=100000)
sf = State.from_vectors(Earth,
r * u.km,
v * u.km / u.s,
s0.epoch + tf)
print(sf.a.to(u.km))
print(sf.ecc)
print(sf.inc.to("deg"))
|
|
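A back-of-envelope check for the inclination test above: the Gauss variational equation di/dt = (r cos u / h) * a_N, with an out-of-plane acceleration of magnitude f flipping sign with cos u on a circular orbit, integrates to di ~= 4 f a^2 / mu per revolution. This is my own derivation, not part of the test, and it assumes raan = 0 so that sign(r[0]) tracks sign(cos u) as in the active branch of a_d:

# Rough expected inclination change for the test's numbers.
import numpy as np

a = 7000.0      # km
f = 3.5e-7      # km / s**2
mu = 398600.4   # km**3 / s**2, Earth

di_per_orbit = 4 * f * a**2 / mu        # rad per revolution
print(np.degrees(di_per_orbit))         # ~0.0099 deg per orbit
print(np.degrees(di_per_orbit) * 150)   # ~1.48 deg over the 150 periods propagated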
3287e9d1814cdc9fec61d5ad5888168f08d44e70
|
pyp2c.py
|
pyp2c.py
|
#!/usr/bin/env python
import sys
import argparse
def _description():
return 'A C preprocessor that interprets python'
""" The usage
We do override the usage since the one given by `argparse` puts the positional
arguments at the end of it.
"""
def _usage():
return 'usage: pyp2c.py [-h] FILES ... --then THEN [THEN ...]'
def _parse_opts(args=sys.argv[1:]):
parser = argparse.ArgumentParser(description=_description(), usage=_usage())
parser.add_argument('files', nargs='+')
parser.add_argument('--then', dest='then', nargs='+', required=True)
return vars(parser.parse_args(args))
def pyp2c():
opts = _parse_opts()
print(opts)
pass
if __name__ == "__main__":
pyp2c()
|
Add very basic handling of options
|
Add very basic handling of options
|
Python
|
mit
|
ccharly/pyaspp,ccharly/pyaspp,lisqlql/pyaspp,lisqlql/pyaspp
|
Add very basic handling of options
|
#!/usr/bin/env python
import sys
import argparse
def _description():
return 'A C preprocessor that interprets python'
""" The usage
We do override the usage since the one given by `argparse` puts the positional
arguments at the end of it.
"""
def _usage():
return 'usage: pyp2c.py [-h] FILES ... --then THEN [THEN ...]'
def _parse_opts(args=sys.argv[1:]):
parser = argparse.ArgumentParser(description=_description(), usage=_usage())
parser.add_argument('files', nargs='+')
parser.add_argument('--then', dest='then', nargs='+', required=True)
return vars(parser.parse_args(args))
def pyp2c():
opts = _parse_opts()
print(opts)
pass
if __name__ == "__main__":
pyp2c()
|
<commit_before><commit_msg>Add very basic handling of options<commit_after>
|
#!/usr/bin/env python
import sys
import argparse
def _description():
return 'A C preprocessor that interprets python'
""" The usage
We do override the usage since the one given by `argparse` puts the positional
arguments at the end of it.
"""
def _usage():
return 'usage: pyp2c.py [-h] FILES ... --then THEN [THEN ...]'
def _parse_opts(args=sys.argv[1:]):
parser = argparse.ArgumentParser(description=_description(), usage=_usage())
parser.add_argument('files', nargs='+')
parser.add_argument('--then', dest='then', nargs='+', required=True)
return vars(parser.parse_args(args))
def pyp2c():
opts = _parse_opts()
print(opts)
pass
if __name__ == "__main__":
pyp2c()
|
Add very basic handling of options#!/usr/bin/env python
import sys
import argparse
def _description():
return 'A C preprocessor that interprets python'
""" The usage
We do override the usage since the one given by `argparse` puts the positional
arguments at the end of it.
"""
def _usage():
return 'usage: pyp2c.py [-h] FILES ... --then THEN [THEN ...]'
def _parse_opts(args=sys.argv[1:]):
parser = argparse.ArgumentParser(description=_description(), usage=_usage())
parser.add_argument('files', nargs='+')
parser.add_argument('--then', dest='then', nargs='+', required=True)
return vars(parser.parse_args(args))
def pyp2c():
opts = _parse_opts()
print(opts)
pass
if __name__ == "__main__":
pyp2c()
|
<commit_before><commit_msg>Add very basic handling of options<commit_after>#!/usr/bin/env python
import sys
import argparse
def _description():
return 'A C preprocessor that interprets python'
""" The usage
We do override the usage since the one given by `argparse` puts the positional
arguments at the end of it.
"""
def _usage():
return 'usage: pyp2c.py [-h] FILES ... --then THEN [THEN ...]'
def _parse_opts(args=sys.argv[1:]):
parser = argparse.ArgumentParser(description=_description(), usage=_usage())
parser.add_argument('files', nargs='+')
parser.add_argument('--then', dest='then', nargs='+', required=True)
return vars(parser.parse_args(args))
def pyp2c():
opts = _parse_opts()
print(opts)
pass
if __name__ == "__main__":
pyp2c()
|
|
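Since _parse_opts above accepts an explicit argv list, the option handling can be exercised without touching sys.argv. A small illustrative call (the file names are made up):

from pyp2c import _parse_opts

opts = _parse_opts(['a.c', 'b.c', '--then', 'gcc', 'clang'])
print(opts)  # {'files': ['a.c', 'b.c'], 'then': ['gcc', 'clang']}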
a8e3209663a5268b029214964e2db7faf2d8cf36
|
scripts/calculate_cell_areas.py
|
scripts/calculate_cell_areas.py
|
"""Script to calculate cell areas from a curated image of cell outlines.
Outputs csv file with cell areas as well as an image where the colour
of each cell represents its area.
"""
import os
import logging
import argparse
from jicbioimage.core.image import Image
from jicbioimage.core.io import AutoName, AutoWrite
from utils import (
identity,
invert,
logical_and,
erode_binary,
connected_components,
watershed_with_seeds,
)
__version__ = "0.3.0"
AutoName.prefix_format = "{:03d}_"
def analyse_file(image_fpath, mask_fpath, output_directory):
"""Analyse a single file."""
logging.info("Analysing file: {}".format(image_fpath))
image = Image.from_file(image_fpath)
logging.info("Mask file: {}".format(mask_fpath))
mask = Image.from_file(mask_fpath)[:, :, 0]
mask = identity(mask)
seeds = invert(image)
seeds = erode_binary(seeds.view(bool))
seeds = logical_and(seeds, mask)
seeds = connected_components(seeds, connectivity=1)
segmentation = watershed_with_seeds(-image, seeds, mask)
def main():
# Parse the command line arguments.
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("input_file", help="Input file")
parser.add_argument("mask_file", help="Mask file")
parser.add_argument("output_dir", help="Output directory")
parser.add_argument("--debug", default=False, action="store_true",
help="Write out intermediate images")
args = parser.parse_args()
if not os.path.isfile(args.input_file):
parser.error("{} not a file".format(args.input_file))
if not os.path.isfile(args.mask_file):
parser.error("{} not a file".format(args.mask_file))
# Create the output directory if it does not exist.
if not os.path.isdir(args.output_dir):
os.mkdir(args.output_dir)
AutoName.directory = args.output_dir
# Only write out intermediate images in debug mode.
AutoWrite.on = args.debug
# Setup a logger for the script.
log_fname = "audit.log"
log_fpath = os.path.join(args.output_dir, log_fname)
logging_level = logging.INFO
if args.debug:
logging_level = logging.DEBUG
logging.basicConfig(filename=log_fpath, level=logging_level)
# Log some basic information about the script that is running.
logging.info("Script name: {}".format(__file__))
logging.info("Script version: {}".format(__version__))
# Run the analysis.
analyse_file(args.input_file, args.mask_file, args.output_dir)
if __name__ == "__main__":
main()
|
Add initial script for calculating cell areas
|
Add initial script for calculating cell areas
|
Python
|
mit
|
JIC-Image-Analysis/semi-automated-leaf-segmentation,JIC-Image-Analysis/semi-automated-leaf-segmentation
|
Add initial script for calculating cell areas
|
"""Script to calculate cell areas from a curated image of cell outlines.
Outputs csv file with cell areas as well as an image where the colour
of each cell represents its area.
"""
import os
import logging
import argparse
from jicbioimage.core.image import Image
from jicbioimage.core.io import AutoName, AutoWrite
from utils import (
identity,
invert,
logical_and,
erode_binary,
connected_components,
watershed_with_seeds,
)
__version__ = "0.3.0"
AutoName.prefix_format = "{:03d}_"
def analyse_file(image_fpath, mask_fpath, output_directory):
"""Analyse a single file."""
logging.info("Analysing file: {}".format(image_fpath))
image = Image.from_file(image_fpath)
logging.info("Mask file: {}".format(mask_fpath))
mask = Image.from_file(mask_fpath)[:, :, 0]
mask = identity(mask)
seeds = invert(image)
seeds = erode_binary(seeds.view(bool))
seeds = logical_and(seeds, mask)
seeds = connected_components(seeds, connectivity=1)
segmentation = watershed_with_seeds(-image, seeds, mask)
def main():
# Parse the command line arguments.
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("input_file", help="Input file")
parser.add_argument("mask_file", help="Mask file")
parser.add_argument("output_dir", help="Output directory")
parser.add_argument("--debug", default=False, action="store_true",
help="Write out intermediate images")
args = parser.parse_args()
if not os.path.isfile(args.input_file):
parser.error("{} not a file".format(args.input_file))
if not os.path.isfile(args.mask_file):
parser.error("{} not a file".format(args.mask_file))
# Create the output directory if it does not exist.
if not os.path.isdir(args.output_dir):
os.mkdir(args.output_dir)
AutoName.directory = args.output_dir
# Only write out intermediate images in debug mode.
AutoWrite.on = args.debug
# Setup a logger for the script.
log_fname = "audit.log"
log_fpath = os.path.join(args.output_dir, log_fname)
logging_level = logging.INFO
if args.debug:
logging_level = logging.DEBUG
logging.basicConfig(filename=log_fpath, level=logging_level)
# Log some basic information about the script that is running.
logging.info("Script name: {}".format(__file__))
logging.info("Script version: {}".format(__version__))
# Run the analysis.
analyse_file(args.input_file, args.mask_file, args.output_dir)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add initial script for calculating cell areas<commit_after>
|
"""Script to calculate cell areas from a curated image of cell outlines.
Outputs csv file with cell areas as well as an image where the colour
of each cell represents its area.
"""
import os
import logging
import argparse
from jicbioimage.core.image import Image
from jicbioimage.core.io import AutoName, AutoWrite
from utils import (
identity,
invert,
logical_and,
erode_binary,
connected_components,
watershed_with_seeds,
)
__version__ = "0.3.0"
AutoName.prefix_format = "{:03d}_"
def analyse_file(image_fpath, mask_fpath, output_directory):
"""Analyse a single file."""
logging.info("Analysing file: {}".format(image_fpath))
image = Image.from_file(image_fpath)
logging.info("Mask file: {}".format(mask_fpath))
mask = Image.from_file(mask_fpath)[:, :, 0]
mask = identity(mask)
seeds = invert(image)
seeds = erode_binary(seeds.view(bool))
seeds = logical_and(seeds, mask)
seeds = connected_components(seeds, connectivity=1)
segmentation = watershed_with_seeds(-image, seeds, mask)
def main():
# Parse the command line arguments.
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("input_file", help="Input file")
parser.add_argument("mask_file", help="Mask file")
parser.add_argument("output_dir", help="Output directory")
parser.add_argument("--debug", default=False, action="store_true",
help="Write out intermediate images")
args = parser.parse_args()
if not os.path.isfile(args.input_file):
parser.error("{} not a file".format(args.input_file))
if not os.path.isfile(args.mask_file):
parser.error("{} not a file".format(args.mask_file))
# Create the output directory if it does not exist.
if not os.path.isdir(args.output_dir):
os.mkdir(args.output_dir)
AutoName.directory = args.output_dir
# Only write out intermediate images in debug mode.
AutoWrite.on = args.debug
# Setup a logger for the script.
log_fname = "audit.log"
log_fpath = os.path.join(args.output_dir, log_fname)
logging_level = logging.INFO
if args.debug:
logging_level = logging.DEBUG
logging.basicConfig(filename=log_fpath, level=logging_level)
# Log some basic information about the script that is running.
logging.info("Script name: {}".format(__file__))
logging.info("Script version: {}".format(__version__))
# Run the analysis.
analyse_file(args.input_file, args.mask_file, args.output_dir)
if __name__ == "__main__":
main()
|
Add initial script for calculating cell areas"""Script to calculate cell areas from a curated image of cell outlines.
Outputs csv file with cell areas as well as an image where the colour
of each cell represents its area.
"""
import os
import logging
import argparse
from jicbioimage.core.image import Image
from jicbioimage.core.io import AutoName, AutoWrite
from utils import (
identity,
invert,
logical_and,
erode_binary,
connected_components,
watershed_with_seeds,
)
__version__ = "0.3.0"
AutoName.prefix_format = "{:03d}_"
def analyse_file(image_fpath, mask_fpath, output_directory):
"""Analyse a single file."""
logging.info("Analysing file: {}".format(image_fpath))
image = Image.from_file(image_fpath)
logging.info("Mask file: {}".format(mask_fpath))
mask = Image.from_file(mask_fpath)[:, :, 0]
mask = identity(mask)
seeds = invert(image)
seeds = erode_binary(seeds.view(bool))
seeds = logical_and(seeds, mask)
seeds = connected_components(seeds, connectivity=1)
segmentation = watershed_with_seeds(-image, seeds, mask)
def main():
# Parse the command line arguments.
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("input_file", help="Input file")
parser.add_argument("mask_file", help="Mask file")
parser.add_argument("output_dir", help="Output directory")
parser.add_argument("--debug", default=False, action="store_true",
help="Write out intermediate images")
args = parser.parse_args()
if not os.path.isfile(args.input_file):
parser.error("{} not a file".format(args.input_file))
if not os.path.isfile(args.mask_file):
parser.error("{} not a file".format(args.mask_file))
# Create the output directory if it does not exist.
if not os.path.isdir(args.output_dir):
os.mkdir(args.output_dir)
AutoName.directory = args.output_dir
# Only write out intermediate images in debug mode.
AutoWrite.on = args.debug
# Setup a logger for the script.
log_fname = "audit.log"
log_fpath = os.path.join(args.output_dir, log_fname)
logging_level = logging.INFO
if args.debug:
logging_level = logging.DEBUG
logging.basicConfig(filename=log_fpath, level=logging_level)
# Log some basic information about the script that is running.
logging.info("Script name: {}".format(__file__))
logging.info("Script version: {}".format(__version__))
# Run the analysis.
analyse_file(args.input_file, args.mask_file, args.output_dir)
if __name__ == "__main__":
main()
|
<commit_before><commit_msg>Add initial script for calculating cell areas<commit_after>"""Script to calculate cell areas from a curated image of cell outlines.
Outputs csv file with cell areas as well as an image where the colour
of each cell represents its area.
"""
import os
import logging
import argparse
from jicbioimage.core.image import Image
from jicbioimage.core.io import AutoName, AutoWrite
from utils import (
identity,
invert,
logical_and,
erode_binary,
connected_components,
watershed_with_seeds,
)
__version__ = "0.3.0"
AutoName.prefix_format = "{:03d}_"
def analyse_file(image_fpath, mask_fpath, output_directory):
"""Analyse a single file."""
logging.info("Analysing file: {}".format(image_fpath))
image = Image.from_file(image_fpath)
logging.info("Mask file: {}".format(mask_fpath))
mask = Image.from_file(mask_fpath)[:, :, 0]
mask = identity(mask)
seeds = invert(image)
seeds = erode_binary(seeds.view(bool))
seeds = logical_and(seeds, mask)
seeds = connected_components(seeds, connectivity=1)
segmentation = watershed_with_seeds(-image, seeds, mask)
def main():
# Parse the command line arguments.
parser = argparse.ArgumentParser(description=__doc__)
parser.add_argument("input_file", help="Input file")
parser.add_argument("mask_file", help="Mask file")
parser.add_argument("output_dir", help="Output directory")
parser.add_argument("--debug", default=False, action="store_true",
help="Write out intermediate images")
args = parser.parse_args()
if not os.path.isfile(args.input_file):
parser.error("{} not a file".format(args.input_file))
if not os.path.isfile(args.mask_file):
parser.error("{} not a file".format(args.mask_file))
# Create the output directory if it does not exist.
if not os.path.isdir(args.output_dir):
os.mkdir(args.output_dir)
AutoName.directory = args.output_dir
# Only write out intermediate images in debug mode.
AutoWrite.on = args.debug
# Setup a logger for the script.
log_fname = "audit.log"
log_fpath = os.path.join(args.output_dir, log_fname)
logging_level = logging.INFO
if args.debug:
logging_level = logging.DEBUG
logging.basicConfig(filename=log_fpath, level=logging_level)
# Log some basic information about the script that is running.
logging.info("Script name: {}".format(__file__))
logging.info("Script version: {}".format(__version__))
# Run the analysis.
analyse_file(args.input_file, args.mask_file, args.output_dir)
if __name__ == "__main__":
main()
|
|
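analyse_file above stops after the watershed step, so the per-cell areas promised by the docstring are not computed yet. A minimal sketch of the missing piece, assuming segmentation behaves like a labeled integer array with 0 as background (the helper name and CSV layout are illustrative):

import csv
import numpy as np

def write_cell_areas(segmentation, csv_fpath):
    # Pixel counts per nonzero label are the cell areas in pixels.
    labels = np.asarray(segmentation, dtype=int)
    ids, counts = np.unique(labels[labels > 0], return_counts=True)
    with open(csv_fpath, "w") as fh:
        writer = csv.writer(fh)
        writer.writerow(["cell_id", "area_px"])
        for cell_id, area in zip(ids, counts):
            writer.writerow([int(cell_id), int(area)])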
17da1f5053a66e35ed1955a1cf716fac667f5859
|
py/postResultsTest.py
|
py/postResultsTest.py
|
import urllib
import urllib2
url = 'http://app.dev.inowas.com/api/results.json'
params = urllib.urlencode({
'id': '1',
'propertyType': 'gwHead',
'width': 4,
'height': 4,
'upperLeftX': 0.005,
'upperLeftY': 0.005,
'scaleX': 1,
'scaleY': 1,
'skewX': 0,
'skewY': 0,
'srid': 4326,
'bandPixelType': '\'32BF\'::text',
'bandInitValue': 200,
'bandNoDataVal': -9999,
'data': [[0,1,2,3],[0,1,2,3],[0,1,2,3],[0,1,2,3]],
'date': '2016-02-23 12:32:12'
})
response = urllib2.urlopen(url, params).read()
print response
|
Add python folder with a rest-test-file
|
Add python folder with a rest-test-file
|
Python
|
mit
|
inowas/inowas,inowas/inowas_api,inowas/inowas,inowas/inowas_api,inowas/inowas_api,inowas/inowas,inowas/inowas,inowas/inowas_api
|
Add python folder with a rest-test-file
|
import urllib
import urllib2
url = 'http://app.dev.inowas.com/api/results.json'
params = urllib.urlencode({
'id': '1',
'propertyType': 'gwHead',
'width': 4,
'height': 4,
'upperLeftX': 0.005,
'upperLeftY': 0.005,
'scaleX': 1,
'scaleY': 1,
'skewX': 0,
'skewY': 0,
'srid': 4326,
'bandPixelType': '\'32BF\'::text',
'bandInitValue': 200,
'bandNoDataVal': -9999,
'data': [[0,1,2,3],[0,1,2,3],[0,1,2,3],[0,1,2,3]],
'date': '2016-02-23 12:32:12'
})
response = urllib2.urlopen(url, params).read()
print response
|
<commit_before><commit_msg>Add python folder with a rest-test-file<commit_after>
|
import urllib
import urllib2
url = 'http://app.dev.inowas.com/api/results.json'
params = urllib.urlencode({
'id': '1',
'propertyType': 'gwHead',
'width': 4,
'height': 4,
'upperLeftX': 0.005,
'upperLeftY': 0.005,
'scaleX': 1,
'scaleY': 1,
'skewX': 0,
'skewY': 0,
'srid': 4326,
'bandPixelType': '\'32BF\'::text',
'bandInitValue': 200,
'bandNoDataVal': -9999,
'data': [[0,1,2,3],[0,1,2,3],[0,1,2,3],[0,1,2,3]],
'date': '2016-02-23 12:32:12'
})
response = urllib2.urlopen(url, params).read()
print response
|
Add python folder with a rest-test-fileimport urllib
import urllib2
url = 'http://app.dev.inowas.com/api/results.json'
params = urllib.urlencode({
'id': '1',
'propertyType': 'gwHead',
'width': 4,
'height': 4,
'upperLeftX': 0.005,
'upperLeftY': 0.005,
'scaleX': 1,
'scaleY': 1,
'skewX': 0,
'skewY': 0,
'srid': 4326,
'bandPixelType': '\'32BF\'::text',
'bandInitValue': 200,
'bandNoDataVal': -9999,
'data': [[0,1,2,3],[0,1,2,3],[0,1,2,3],[0,1,2,3]],
'date': '2016-02-23 12:32:12'
})
response = urllib2.urlopen(url, params).read()
print response
|
<commit_before><commit_msg>Add python folder with a rest-test-file<commit_after>import urllib
import urllib2
url = 'http://app.dev.inowas.com/api/results.json'
params = urllib.urlencode({
'id': '1',
'propertyType': 'gwHead',
'width': 4,
'height': 4,
'upperLeftX': 0.005,
'upperLeftY': 0.005,
'scaleX': 1,
'scaleY': 1,
'skewX': 0,
'skewY': 0,
'srid': 4326,
'bandPixelType': '\'32BF\'::text',
'bandInitValue': 200,
'bandNoDataVal': -9999,
'data': [[0,1,2,3],[0,1,2,3],[0,1,2,3],[0,1,2,3]],
'date': '2016-02-23 12:32:12'
})
response = urllib2.urlopen(url, params).read()
print response
|
|
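The REST test above is Python 2 only (urllib2, print statement). An equivalent Python 3 sketch using the standard library, with the payload abbreviated to the fields shown in the original:

import urllib.parse
import urllib.request

url = 'http://app.dev.inowas.com/api/results.json'
params = urllib.parse.urlencode({
    'id': '1',
    'propertyType': 'gwHead',
    # ... remaining fields exactly as in the original dict ...
    'date': '2016-02-23 12:32:12',
})
# Passing bytes as data makes urlopen issue a POST, as urllib2 did.
response = urllib.request.urlopen(url, params.encode('ascii')).read()
print(response.decode())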
b16ed06e1b0c5fb1b189e1992e13726bda99fd68
|
examples/plot_pmt_time_slewing.py
|
examples/plot_pmt_time_slewing.py
|
# -*- coding: utf-8 -*-
"""
==================
PMT Time Slewing
==================
Show different variants of PMT time slewing calculations.
Variant 3 is currently (as of 2020-10-16) what's also used in Jpp.
"""
# Author: Tamas Gal <tgal@km3net.de>
# License: BSD-3
import km3pipe as kp
import numpy as np
import matplotlib.pyplot as plt
kp.style.use()
tots = np.arange(256)
slews = {variant: kp.calib.slew(tots, variant=variant) for variant in (1, 2, 3)}
fig, ax = plt.subplots()
for variant, slew in slews.items():
ax.plot(tots, slew, label=f"Variant {variant}")
ax.set_xlabel("ToT / ns")
ax.set_ylabel("time slewing / ns")
ax.legend()
fig.tight_layout()
plt.show()
|
Add PMT time slewing example
|
Add PMT time slewing example
|
Python
|
mit
|
tamasgal/km3pipe,tamasgal/km3pipe
|
Add PMT time slewing example
|
# -*- coding: utf-8 -*-
"""
==================
PMT Time Slewing
==================
Show different variants of PMT time slewing calculations.
Variant 3 is currently (as of 2020-10-16) what's also used in Jpp.
"""
# Author: Tamas Gal <tgal@km3net.de>
# License: BSD-3
import km3pipe as kp
import numpy as np
import matplotlib.pyplot as plt
kp.style.use()
tots = np.arange(256)
slews = {variant: kp.calib.slew(tots, variant=variant) for variant in (1, 2, 3)}
fig, ax = plt.subplots()
for variant, slew in slews.items():
ax.plot(tots, slew, label=f"Variant {variant}")
ax.set_xlabel("ToT / ns")
ax.set_ylabel("time slewing / ns")
ax.legend()
fig.tight_layout()
plt.show()
|
<commit_before><commit_msg>Add PMT time slewing example<commit_after>
|
# -*- coding: utf-8 -*-
"""
==================
PMT Time Slewing
==================
Show different variants of PMT time slewing calculations.
Variant 3 is currently (as of 2020-10-16) what's also used in Jpp.
"""
# Author: Tamas Gal <tgal@km3net.de>
# License: BSD-3
import km3pipe as kp
import numpy as np
import matplotlib.pyplot as plt
kp.style.use()
tots = np.arange(256)
slews = {variant: kp.calib.slew(tots, variant=variant) for variant in (1, 2, 3)}
fig, ax = plt.subplots()
for variant, slew in slews.items():
ax.plot(tots, slew, label=f"Variant {variant}")
ax.set_xlabel("ToT / ns")
ax.set_ylabel("time slewing / ns")
ax.legend()
fig.tight_layout()
plt.show()
|
Add PMT time slewing example# -*- coding: utf-8 -*-
"""
==================
PMT Time Slewing
==================
Show different variants of PMT time slewing calculations.
Variant 3 is currently (as of 2020-10-16) what's also used in Jpp.
"""
# Author: Tamas Gal <tgal@km3net.de>
# License: BSD-3
import km3pipe as kp
import numpy as np
import matplotlib.pyplot as plt
kp.style.use()
tots = np.arange(256)
slews = {variant: kp.calib.slew(tots, variant=variant) for variant in (1, 2, 3)}
fig, ax = plt.subplots()
for variant, slew in slews.items():
ax.plot(tots, slew, label=f"Variant {variant}")
ax.set_xlabel("ToT / ns")
ax.set_ylabel("time slewing / ns")
ax.legend()
fig.tight_layout()
plt.show()
|
<commit_before><commit_msg>Add PMT time slewing example<commit_after># -*- coding: utf-8 -*-
"""
==================
PMT Time Slewing
==================
Show different variants of PMT time slewing calculations.
Variant 3 is currently (as of 2020-10-16) what's also used in Jpp.
"""
# Author: Tamas Gal <tgal@km3net.de>
# License: BSD-3
import km3pipe as kp
import numpy as np
import matplotlib.pyplot as plt
kp.style.use()
tots = np.arange(256)
slews = {variant: kp.calib.slew(tots, variant=variant) for variant in (1, 2, 3)}
fig, ax = plt.subplots()
for variant, slew in slews.items():
ax.plot(tots, slew, label=f"Variant {variant}")
ax.set_xlabel("ToT / ns")
ax.set_ylabel("time slewing / ns")
ax.legend()
fig.tight_layout()
plt.show()
|
|
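A usage note for the slewing plot above: in hit calibration the slewing value is typically subtracted from the raw hit time. A hedged sketch, assuming kp.calib.slew accepts arrays as in the example and assuming the subtract convention (the hit values are made up):

import numpy as np
import km3pipe as kp

hit_times = np.array([102.3, 250.1, 511.7])  # ns, illustrative
hit_tots = np.array([26, 75, 180])           # ns, illustrative

# Correct each hit time with the variant-3 slewing used by Jpp.
t_corrected = hit_times - kp.calib.slew(hit_tots, variant=3)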
81c6d9664d08c72347ff8e1c7550ef53987dd8a0
|
olympiad/sum.py
|
olympiad/sum.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2014 Fabian M.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Read integers from the standard input (format: "1 2 3 4...\n") and print
# the sum of those integers.
if __name__ == "__main__":
print(sum(map(lambda x: int(x), input().split())))
|
Add solution for test problem
|
Add solution for test problem
|
Python
|
apache-2.0
|
fabianm/olympiad,fabianm/olympiad,fabianm/olympiad
|
Add solution for test problem
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2014 Fabian M.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Read integers from the standard input (format: "1 2 3 4...\n") and print
# the sum of those integers.
if __name__ == "__main__":
print(sum(map(lambda x: int(x), input().split())))
|
<commit_before><commit_msg>Add solution for test problem<commit_after>
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2014 Fabian M.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Read integers from the standard input (format: "1 2 3 4...\n") and print
# the sum of those integers.
if __name__ == "__main__":
print(sum(map(lambda x: int(x), input().split())))
|
Add solution for test problem#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2014 Fabian M.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Read integers from the standard input (format: "1 2 3 4...\n") and print
# the sum of those integers.
if __name__ == "__main__":
print(sum(map(lambda x: int(x), input().split())))
|
<commit_before><commit_msg>Add solution for test problem<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2014 Fabian M.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Read integers from the standard input (format: "1 2 3 4...\n") and print
# the sum of those integers.
if __name__ == "__main__":
print(sum(map(lambda x: int(x), input().split())))
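
A small idiom note as a sketch, separate from the commit: int is itself callable, so the lambda is redundant; both statements below print the same sum.
#!/usr/bin/env python3
# Editorial sketch only; a literal stands in for input() so it runs as-is.
line = "1 2 3 4"
print(sum(map(lambda x: int(x), line.split())))  # style used in the commit
print(sum(map(int, line.split())))               # equivalent, more idiomatic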
|
|
fb2f00f0780ce447b985aedf5d13a36c4fcff4b1
|
dipy/core/tests/test_propagation.py
|
dipy/core/tests/test_propagation.py
|
import numpy as np
import dipy as dp
import nibabel as ni
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
def test_fact():
pass
|
TEST - new test file added for propagation
|
TEST - new test file added for propagation
|
Python
|
bsd-3-clause
|
rfdougherty/dipy,samuelstjean/dipy,jyeatman/dipy,beni55/dipy,StongeEtienne/dipy,oesteban/dipy,beni55/dipy,demianw/dipy,maurozucchelli/dipy,maurozucchelli/dipy,matthieudumont/dipy,FrancoisRheaultUS/dipy,Messaoud-Boudjada/dipy,maurozucchelli/dipy,JohnGriffiths/dipy,oesteban/dipy,StongeEtienne/dipy,samuelstjean/dipy,mdesco/dipy,samuelstjean/dipy,mdesco/dipy,villalonreina/dipy,JohnGriffiths/dipy,nilgoyyou/dipy,villalonreina/dipy,sinkpoint/dipy,matthieudumont/dipy,sinkpoint/dipy,Messaoud-Boudjada/dipy,demianw/dipy,nilgoyyou/dipy,FrancoisRheaultUS/dipy,rfdougherty/dipy,jyeatman/dipy
|
TEST - new test file added for propagation
|
import numpy as np
import dipy as dp
import nibabel as ni
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
def test_fact():
pass
|
<commit_before><commit_msg>TEST - new test file added for propagation<commit_after>
|
import numpy as np
import dipy as dp
import nibabel as ni
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
def test_fact():
pass
|
TEST - new test file added for propagationimport numpy as np
import dipy as dp
import nibabel as ni
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
def test_fact():
pass
|
<commit_before><commit_msg>TEST - new test file added for propagation<commit_after>import numpy as np
import dipy as dp
import nibabel as ni
from nose.tools import assert_true, assert_false, \
assert_equal, assert_raises
from numpy.testing import assert_array_equal, assert_array_almost_equal
def test_fact():
pass
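
The stub imports nose and numpy assertion helpers without using them yet; a filled-in test in the same style could look like this sketch (the data is invented and does not reflect any actual dipy propagation result).
import numpy as np
from numpy.testing import assert_array_almost_equal
def test_fact_sketch():
    # Invented values purely to show the assertion style the stub imports.
    a = np.array([1.0, 2.0, 3.0])
    assert_array_almost_equal(a * 2, np.array([2.0, 4.0, 6.0]))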
|
|
0e6fc9de3aaeec9d82bc6087b2ad1d8ef17d3c67
|
contrib/inventory/lxc_inventory.py
|
contrib/inventory/lxc_inventory.py
|
#!/usr/bin/env python
#
# (c) 2015-16 Florian Haas, hastexo Professional Services GmbH
# <florian@hastexo.com>
# Based in part on:
# libvirt_lxc.py, (c) 2013, Michael Scherer <misc@zarb.org>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
Ansible inventory script for LXC containers. Requires Python
bindings for LXC API.
In LXC, containers can be grouped by setting the lxc.group option,
which may be found more than once in a container's
configuration. So, we enumerate all containers, fetch their list
of groups, and then build the dictionary in the way Ansible expects
it.
"""
from __future__ import print_function
import sys
import lxc
import json
def build_dict():
"""Returns a dictionary keyed to the defined LXC groups. All
containers, including the ones not in any group, are included in the
"all" group."""
# Enumerate all containers, and list the groups they are in. Also,
# implicitly add every container to the 'all' group.
containers = dict([(c,
['all'] +
(lxc.Container(c).get_config_item('lxc.group') or []))
for c in lxc.list_containers()])
# Extract the groups, flatten the list, and remove duplicates
groups = set(sum([g for g in containers.values()], []))
    # Create a dictionary for each group (including the 'all' group)
return dict([(g, {'hosts': [k for k, v in containers.items() if g in v],
'vars': {'ansible_connection':'lxc'}}) for g in groups])
def main(argv):
"""Returns a JSON dictionary as expected by Ansible"""
result = build_dict()
if len(argv) == 2 and argv[1] == '--list':
json.dump(result, sys.stdout)
elif len(argv) == 3 and argv[1] == '--host':
json.dump({'ansible_connection': 'lxc'}, sys.stdout)
else:
print("Need an argument, either --list or --host <host>", file=sys.stderr)
if __name__ == '__main__':
main(sys.argv)
|
Add inventory script for LXC containers
|
Add inventory script for LXC containers
Enables an LXC server's configuration as an inventory source for LXC
containers.
In LXC, containers can be defined with an "lxc.group" configuration
option that is normally used with lxc-autostart -g. Here, we are using
the same option to build Ansible inventory groups.
In addition to being grouped according to their lxc.group entry (or
entries, as LXC allows a single container to be in multiple groups),
we also add all containers (including those with no lxc.group entry)
to the "all" group.
|
Python
|
mit
|
thaim/ansible,thaim/ansible
|
Add inventory script for LXC containers
Enables an LXC server's configuration as an inventory source for LXC
containers.
In LXC, containers can be defined with an "lxc.group" configuration
option that is normally used with lxc-autostart -g. Here, we are using
the same option to build Ansible inventory groups.
In addition to being grouped according to their lxc.group entry (or
entries, as LXC allows a single container to be in multiple groups),
we also add all containers (including those with no lxc.group entry)
to the "all" group.
|
#!/usr/bin/env python
#
# (c) 2015-16 Florian Haas, hastexo Professional Services GmbH
# <florian@hastexo.com>
# Based in part on:
# libvirt_lxc.py, (c) 2013, Michael Scherer <misc@zarb.org>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
Ansible inventory script for LXC containers. Requires Python
bindings for LXC API.
In LXC, containers can be grouped by setting the lxc.group option,
which may be found more than once in a container's
configuration. So, we enumerate all containers, fetch their list
of groups, and then build the dictionary in the way Ansible expects
it.
"""
from __future__ import print_function
import sys
import lxc
import json
def build_dict():
"""Returns a dictionary keyed to the defined LXC groups. All
containers, including the ones not in any group, are included in the
"all" group."""
# Enumerate all containers, and list the groups they are in. Also,
# implicitly add every container to the 'all' group.
containers = dict([(c,
['all'] +
(lxc.Container(c).get_config_item('lxc.group') or []))
for c in lxc.list_containers()])
# Extract the groups, flatten the list, and remove duplicates
groups = set(sum([g for g in containers.values()], []))
    # Create a dictionary for each group (including the 'all' group)
return dict([(g, {'hosts': [k for k, v in containers.items() if g in v],
'vars': {'ansible_connection':'lxc'}}) for g in groups])
def main(argv):
"""Returns a JSON dictionary as expected by Ansible"""
result = build_dict()
if len(argv) == 2 and argv[1] == '--list':
json.dump(result, sys.stdout)
elif len(argv) == 3 and argv[1] == '--host':
json.dump({'ansible_connection': 'lxc'}, sys.stdout)
else:
print("Need an argument, either --list or --host <host>", file=sys.stderr)
if __name__ == '__main__':
main(sys.argv)
|
<commit_before><commit_msg>Add inventory script for LXC containers
Enables an LXC server's configuration as an inventory source for LXC
containers.
In LXC, containers can be defined with an "lxc.group" configuration
option that is normally used with lxc-autostart -g. Here, we are using
the same option to build Ansible inventory groups.
In addition to being grouped according to their lxc.group entry (or
entries, as LXC allows a single container to be in multiple groups),
we also add all containers (including those with no lxc.group entry)
to the "all" group.<commit_after>
|
#!/usr/bin/env python
#
# (c) 2015-16 Florian Haas, hastexo Professional Services GmbH
# <florian@hastexo.com>
# Based in part on:
# libvirt_lxc.py, (c) 2013, Michael Scherer <misc@zarb.org>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
Ansible inventory script for LXC containers. Requires Python
bindings for LXC API.
In LXC, containers can be grouped by setting the lxc.group option,
which may be found more than once in a container's
configuration. So, we enumerate all containers, fetch their list
of groups, and then build the dictionary in the way Ansible expects
it.
"""
from __future__ import print_function
import sys
import lxc
import json
def build_dict():
"""Returns a dictionary keyed to the defined LXC groups. All
containers, including the ones not in any group, are included in the
"all" group."""
# Enumerate all containers, and list the groups they are in. Also,
# implicitly add every container to the 'all' group.
containers = dict([(c,
['all'] +
(lxc.Container(c).get_config_item('lxc.group') or []))
for c in lxc.list_containers()])
# Extract the groups, flatten the list, and remove duplicates
groups = set(sum([g for g in containers.values()], []))
    # Create a dictionary for each group (including the 'all' group)
return dict([(g, {'hosts': [k for k, v in containers.items() if g in v],
'vars': {'ansible_connection':'lxc'}}) for g in groups])
def main(argv):
"""Returns a JSON dictionary as expected by Ansible"""
result = build_dict()
if len(argv) == 2 and argv[1] == '--list':
json.dump(result, sys.stdout)
elif len(argv) == 3 and argv[1] == '--host':
json.dump({'ansible_connection': 'lxc'}, sys.stdout)
else:
print("Need an argument, either --list or --host <host>", file=sys.stderr)
if __name__ == '__main__':
main(sys.argv)
|
Add inventory script for LXC containers
Enables an LXC server's configuration as an inventory source for LXC
containers.
In LXC, containers can be defined with an "lxc.group" configuration
option that is normally used with lxc-autostart -g. Here, we are using
the same option to build Ansible inventory groups.
In addition to being grouped according to their lxc.group entry (or
entries, as LXC allows a single container to be in multiple groups),
we also add all containers (including those with no lxc.group entry)
to the "all" group.#!/usr/bin/env python
#
# (c) 2015-16 Florian Haas, hastexo Professional Services GmbH
# <florian@hastexo.com>
# Based in part on:
# libvirt_lxc.py, (c) 2013, Michael Scherer <misc@zarb.org>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
Ansible inventory script for LXC containers. Requires Python
bindings for LXC API.
In LXC, containers can be grouped by setting the lxc.group option,
which may be found more than once in a container's
configuration. So, we enumerate all containers, fetch their list
of groups, and then build the dictionary in the way Ansible expects
it.
"""
from __future__ import print_function
import sys
import lxc
import json
def build_dict():
"""Returns a dictionary keyed to the defined LXC groups. All
containers, including the ones not in any group, are included in the
"all" group."""
# Enumerate all containers, and list the groups they are in. Also,
# implicitly add every container to the 'all' group.
containers = dict([(c,
['all'] +
(lxc.Container(c).get_config_item('lxc.group') or []))
for c in lxc.list_containers()])
# Extract the groups, flatten the list, and remove duplicates
groups = set(sum([g for g in containers.values()], []))
    # Create a dictionary for each group (including the 'all' group)
return dict([(g, {'hosts': [k for k, v in containers.items() if g in v],
'vars': {'ansible_connection':'lxc'}}) for g in groups])
def main(argv):
"""Returns a JSON dictionary as expected by Ansible"""
result = build_dict()
if len(argv) == 2 and argv[1] == '--list':
json.dump(result, sys.stdout)
elif len(argv) == 3 and argv[1] == '--host':
json.dump({'ansible_connection': 'lxc'}, sys.stdout)
else:
print("Need an argument, either --list or --host <host>", file=sys.stderr)
if __name__ == '__main__':
main(sys.argv)
|
<commit_before><commit_msg>Add inventory script for LXC containers
Enables an LXC server's configuration as an inventory source for LXC
containers.
In LXC, containers can be defined with an "lxc.group" configuration
option that is normally used with lxc-autostart -g. Here, we are using
the same option to build Ansible inventory groups.
In addition to being grouped according to their lxc.group entry (or
entries, as LXC allows a single container to be in multiple groups),
we also add all containers (including those with no lxc.group entry)
to the "all" group.<commit_after>#!/usr/bin/env python
#
# (c) 2015-16 Florian Haas, hastexo Professional Services GmbH
# <florian@hastexo.com>
# Based in part on:
# libvirt_lxc.py, (c) 2013, Michael Scherer <misc@zarb.org>
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""
Ansible inventory script for LXC containers. Requires Python
bindings for LXC API.
In LXC, containers can be grouped by setting the lxc.group option,
which may be found more than once in a container's
configuration. So, we enumerate all containers, fetch their list
of groups, and then build the dictionary in the way Ansible expects
it.
"""
from __future__ import print_function
import sys
import lxc
import json
def build_dict():
"""Returns a dictionary keyed to the defined LXC groups. All
containers, including the ones not in any group, are included in the
"all" group."""
# Enumerate all containers, and list the groups they are in. Also,
# implicitly add every container to the 'all' group.
containers = dict([(c,
['all'] +
(lxc.Container(c).get_config_item('lxc.group') or []))
for c in lxc.list_containers()])
# Extract the groups, flatten the list, and remove duplicates
groups = set(sum([g for g in containers.values()], []))
    # Create a dictionary for each group (including the 'all' group)
return dict([(g, {'hosts': [k for k, v in containers.items() if g in v],
'vars': {'ansible_connection':'lxc'}}) for g in groups])
def main(argv):
"""Returns a JSON dictionary as expected by Ansible"""
result = build_dict()
if len(argv) == 2 and argv[1] == '--list':
json.dump(result, sys.stdout)
elif len(argv) == 3 and argv[1] == '--host':
json.dump({'ansible_connection': 'lxc'}, sys.stdout)
else:
print("Need an argument, either --list or --host <host>", file=sys.stderr)
if __name__ == '__main__':
main(sys.argv)
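
For orientation, a sketch of the JSON shape build_dict() should return, with invented container and group names: "web1" carrying lxc.group "web" and "db1" carrying no lxc.group entry (key order may differ in practice).
import json
# Hypothetical --list output for the two invented containers above.
expected = {
    "all": {"hosts": ["web1", "db1"], "vars": {"ansible_connection": "lxc"}},
    "web": {"hosts": ["web1"], "vars": {"ansible_connection": "lxc"}},
}
print(json.dumps(expected))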
|
|
b0648969f03dfa1cd55cf1f201883ec82afd97be
|
test/test_command_parser.py
|
test/test_command_parser.py
|
from string import ascii_letters
import pytest
from nex.codes import CatCode
from nex.instructions import Instructions
from nex.instructioner import (Instructioner,
make_unexpanded_control_sequence_instruction,
char_cat_instr_tok)
from nex.utils import ascii_characters
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from common import ITok
test_char_to_cat = {}
for c in ascii_characters:
test_char_to_cat[c] = CatCode.other
for c in ascii_letters:
test_char_to_cat[c] = CatCode.letter
test_char_to_cat.update({
'$': CatCode.escape,
' ': CatCode.space,
'[': CatCode.begin_group,
']': CatCode.end_group,
'\n': CatCode.end_of_line,
})
class DummyCodes:
def __init__(self, char_to_cat):
if char_to_cat is None:
self.char_to_cat = test_char_to_cat.copy()
else:
self.char_to_cat = char_to_cat
def get_cat_code(self, char):
return self.char_to_cat[char]
def get_lower_case_code(self, c):
return c.lower()
def get_upper_case_code(self, c):
return c.upper()
class DummyRouter:
def __init__(self, cs_map):
self.cs_map = cs_map
def lookup_control_sequence(self, name, *args, **kwargs):
canon_token = self.cs_map[name]
return canon_token.copy(*args, **kwargs)
class DummyParameters:
def __init__(self, param_map):
self.param_map = param_map
def get(self, name, *args, **kwargs):
return self.param_map[name]
class DummyState:
def __init__(self, char_to_cat, cs_map, param_map=None):
self.router = DummyRouter(cs_map)
self.parameters = DummyParameters(param_map)
self.codes = DummyCodes(char_to_cat)
def string_to_banisher(s, cs_map, char_to_cat=None, param_map=None):
state = DummyState(cs_map=cs_map,
param_map=param_map, char_to_cat=char_to_cat)
instrs = Instructioner.from_string(s, get_cat_code_func=state.codes.get_cat_code)
return Banisher(instrs, state, instrs.lexer.reader)
def test_h_rule():
cs_map = {
'hRule': ITok(Instructions.h_rule),
}
b = string_to_banisher('$hRule height 20pt width 10pt depth 30pt', cs_map)
command = command_parser.parse(b.advance_to_end())
print(command)
|
Add test for command parsing, without executing
|
Add test for command parsing, without executing
|
Python
|
mit
|
eddiejessup/nex
|
Add test for command parsing, without executing
|
from string import ascii_letters
import pytest
from nex.codes import CatCode
from nex.instructions import Instructions
from nex.instructioner import (Instructioner,
make_unexpanded_control_sequence_instruction,
char_cat_instr_tok)
from nex.utils import ascii_characters
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from common import ITok
test_char_to_cat = {}
for c in ascii_characters:
test_char_to_cat[c] = CatCode.other
for c in ascii_letters:
test_char_to_cat[c] = CatCode.letter
test_char_to_cat.update({
'$': CatCode.escape,
' ': CatCode.space,
'[': CatCode.begin_group,
']': CatCode.end_group,
'\n': CatCode.end_of_line,
})
class DummyCodes:
def __init__(self, char_to_cat):
if char_to_cat is None:
self.char_to_cat = test_char_to_cat.copy()
else:
self.char_to_cat = char_to_cat
def get_cat_code(self, char):
return self.char_to_cat[char]
def get_lower_case_code(self, c):
return c.lower()
def get_upper_case_code(self, c):
return c.upper()
class DummyRouter:
def __init__(self, cs_map):
self.cs_map = cs_map
def lookup_control_sequence(self, name, *args, **kwargs):
canon_token = self.cs_map[name]
return canon_token.copy(*args, **kwargs)
class DummyParameters:
def __init__(self, param_map):
self.param_map = param_map
def get(self, name, *args, **kwargs):
return self.param_map[name]
class DummyState:
def __init__(self, char_to_cat, cs_map, param_map=None):
self.router = DummyRouter(cs_map)
self.parameters = DummyParameters(param_map)
self.codes = DummyCodes(char_to_cat)
def string_to_banisher(s, cs_map, char_to_cat=None, param_map=None):
state = DummyState(cs_map=cs_map,
param_map=param_map, char_to_cat=char_to_cat)
instrs = Instructioner.from_string(s, get_cat_code_func=state.codes.get_cat_code)
return Banisher(instrs, state, instrs.lexer.reader)
def test_h_rule():
cs_map = {
'hRule': ITok(Instructions.h_rule),
}
b = string_to_banisher('$hRule height 20pt width 10pt depth 30pt', cs_map)
command = command_parser.parse(b.advance_to_end())
print(command)
|
<commit_before><commit_msg>Add test for command parsing, without executing<commit_after>
|
from string import ascii_letters
import pytest
from nex.codes import CatCode
from nex.instructions import Instructions
from nex.instructioner import (Instructioner,
make_unexpanded_control_sequence_instruction,
char_cat_instr_tok)
from nex.utils import ascii_characters
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from common import ITok
test_char_to_cat = {}
for c in ascii_characters:
test_char_to_cat[c] = CatCode.other
for c in ascii_letters:
test_char_to_cat[c] = CatCode.letter
test_char_to_cat.update({
'$': CatCode.escape,
' ': CatCode.space,
'[': CatCode.begin_group,
']': CatCode.end_group,
'\n': CatCode.end_of_line,
})
class DummyCodes:
def __init__(self, char_to_cat):
if char_to_cat is None:
self.char_to_cat = test_char_to_cat.copy()
else:
self.char_to_cat = char_to_cat
def get_cat_code(self, char):
return self.char_to_cat[char]
def get_lower_case_code(self, c):
return c.lower()
def get_upper_case_code(self, c):
return c.upper()
class DummyRouter:
def __init__(self, cs_map):
self.cs_map = cs_map
def lookup_control_sequence(self, name, *args, **kwargs):
canon_token = self.cs_map[name]
return canon_token.copy(*args, **kwargs)
class DummyParameters:
def __init__(self, param_map):
self.param_map = param_map
def get(self, name, *args, **kwargs):
return self.param_map[name]
class DummyState:
def __init__(self, char_to_cat, cs_map, param_map=None):
self.router = DummyRouter(cs_map)
self.parameters = DummyParameters(param_map)
self.codes = DummyCodes(char_to_cat)
def string_to_banisher(s, cs_map, char_to_cat=None, param_map=None):
state = DummyState(cs_map=cs_map,
param_map=param_map, char_to_cat=char_to_cat)
instrs = Instructioner.from_string(s, get_cat_code_func=state.codes.get_cat_code)
return Banisher(instrs, state, instrs.lexer.reader)
def test_h_rule():
cs_map = {
'hRule': ITok(Instructions.h_rule),
}
b = string_to_banisher('$hRule height 20pt width 10pt depth 30pt', cs_map)
command = command_parser.parse(b.advance_to_end())
print(command)
|
Add test for command parsing, without executingfrom string import ascii_letters
import pytest
from nex.codes import CatCode
from nex.instructions import Instructions
from nex.instructioner import (Instructioner,
make_unexpanded_control_sequence_instruction,
char_cat_instr_tok)
from nex.utils import ascii_characters
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from common import ITok
test_char_to_cat = {}
for c in ascii_characters:
test_char_to_cat[c] = CatCode.other
for c in ascii_letters:
test_char_to_cat[c] = CatCode.letter
test_char_to_cat.update({
'$': CatCode.escape,
' ': CatCode.space,
'[': CatCode.begin_group,
']': CatCode.end_group,
'\n': CatCode.end_of_line,
})
class DummyCodes:
def __init__(self, char_to_cat):
if char_to_cat is None:
self.char_to_cat = test_char_to_cat.copy()
else:
self.char_to_cat = char_to_cat
def get_cat_code(self, char):
return self.char_to_cat[char]
def get_lower_case_code(self, c):
return c.lower()
def get_upper_case_code(self, c):
return c.upper()
class DummyRouter:
def __init__(self, cs_map):
self.cs_map = cs_map
def lookup_control_sequence(self, name, *args, **kwargs):
canon_token = self.cs_map[name]
return canon_token.copy(*args, **kwargs)
class DummyParameters:
def __init__(self, param_map):
self.param_map = param_map
def get(self, name, *args, **kwargs):
return self.param_map[name]
class DummyState:
def __init__(self, char_to_cat, cs_map, param_map=None):
self.router = DummyRouter(cs_map)
self.parameters = DummyParameters(param_map)
self.codes = DummyCodes(char_to_cat)
def string_to_banisher(s, cs_map, char_to_cat=None, param_map=None):
state = DummyState(cs_map=cs_map,
param_map=param_map, char_to_cat=char_to_cat)
instrs = Instructioner.from_string(s, get_cat_code_func=state.codes.get_cat_code)
return Banisher(instrs, state, instrs.lexer.reader)
def test_h_rule():
cs_map = {
'hRule': ITok(Instructions.h_rule),
}
b = string_to_banisher('$hRule height 20pt width 10pt depth 30pt', cs_map)
command = command_parser.parse(b.advance_to_end())
print(command)
|
<commit_before><commit_msg>Add test for command parsing, without executing<commit_after>from string import ascii_letters
import pytest
from nex.codes import CatCode
from nex.instructions import Instructions
from nex.instructioner import (Instructioner,
make_unexpanded_control_sequence_instruction,
char_cat_instr_tok)
from nex.utils import ascii_characters
from nex.banisher import Banisher
from nex.parsing.command_parser import command_parser
from common import ITok
test_char_to_cat = {}
for c in ascii_characters:
test_char_to_cat[c] = CatCode.other
for c in ascii_letters:
test_char_to_cat[c] = CatCode.letter
test_char_to_cat.update({
'$': CatCode.escape,
' ': CatCode.space,
'[': CatCode.begin_group,
']': CatCode.end_group,
'\n': CatCode.end_of_line,
})
class DummyCodes:
def __init__(self, char_to_cat):
if char_to_cat is None:
self.char_to_cat = test_char_to_cat.copy()
else:
self.char_to_cat = char_to_cat
def get_cat_code(self, char):
return self.char_to_cat[char]
def get_lower_case_code(self, c):
return c.lower()
def get_upper_case_code(self, c):
return c.upper()
class DummyRouter:
def __init__(self, cs_map):
self.cs_map = cs_map
def lookup_control_sequence(self, name, *args, **kwargs):
canon_token = self.cs_map[name]
return canon_token.copy(*args, **kwargs)
class DummyParameters:
def __init__(self, param_map):
self.param_map = param_map
def get(self, name, *args, **kwargs):
return self.param_map[name]
class DummyState:
def __init__(self, char_to_cat, cs_map, param_map=None):
self.router = DummyRouter(cs_map)
self.parameters = DummyParameters(param_map)
self.codes = DummyCodes(char_to_cat)
def string_to_banisher(s, cs_map, char_to_cat=None, param_map=None):
state = DummyState(cs_map=cs_map,
param_map=param_map, char_to_cat=char_to_cat)
instrs = Instructioner.from_string(s, get_cat_code_func=state.codes.get_cat_code)
return Banisher(instrs, state, instrs.lexer.reader)
def test_h_rule():
cs_map = {
'hRule': ITok(Instructions.h_rule),
}
b = string_to_banisher('$hRule height 20pt width 10pt depth 30pt', cs_map)
command = command_parser.parse(b.advance_to_end())
print(command)
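
A hedged variation assuming the same imports and helpers as the test module above; only the dimension specification differs, and whether the parser accepts a width-only rule is an untested assumption.
def test_h_rule_width_only():
    # Reuses cs_map/string_to_banisher from the module above (assumption).
    cs_map = {'hRule': ITok(Instructions.h_rule)}
    b = string_to_banisher('$hRule width 5pt', cs_map)
    command = command_parser.parse(b.advance_to_end())
    print(command)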
|
|
5c66a9d8c7b53338d37ef9e959e55e14511f7c84
|
packages/Python/lldbsuite/test/commands/gui/invalid-args/TestInvalidArgsGui.py
|
packages/Python/lldbsuite/test/commands/gui/invalid-args/TestInvalidArgsGui.py
|
import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
class GuiTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
@no_debug_info_test
def test_reproducer_generate_invalid_invocation(self):
self.expect("gui blub", error=True,
substrs=["the gui command takes no arguments."])
|
Add test for invalid gui command
|
[lldb][NFC] Add test for invalid gui command
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@370647 91177308-0d34-0410-b5e6-96231b3b80d8
|
Python
|
apache-2.0
|
llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,apple/swift-lldb,llvm-mirror/lldb,apple/swift-lldb,llvm-mirror/lldb
|
[lldb][NFC] Add test for invalid gui command
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@370647 91177308-0d34-0410-b5e6-96231b3b80d8
|
import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
class GuiTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
@no_debug_info_test
def test_reproducer_generate_invalid_invocation(self):
self.expect("gui blub", error=True,
substrs=["the gui command takes no arguments."])
|
<commit_before><commit_msg>[lldb][NFC] Add test for invalid gui command
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@370647 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>
|
import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
class GuiTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
@no_debug_info_test
def test_reproducer_generate_invalid_invocation(self):
self.expect("gui blub", error=True,
substrs=["the gui command takes no arguments."])
|
[lldb][NFC] Add test for invalid gui command
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@370647 91177308-0d34-0410-b5e6-96231b3b80d8import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
class GuiTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
@no_debug_info_test
def test_reproducer_generate_invalid_invocation(self):
self.expect("gui blub", error=True,
substrs=["the gui command takes no arguments."])
|
<commit_before><commit_msg>[lldb][NFC] Add test for invalid gui command
git-svn-id: 4c4cc70b1ef44ba2b7963015e681894188cea27e@370647 91177308-0d34-0410-b5e6-96231b3b80d8<commit_after>import lldb
from lldbsuite.test.lldbtest import *
from lldbsuite.test.decorators import *
class GuiTestCase(TestBase):
mydir = TestBase.compute_mydir(__file__)
def setUp(self):
TestBase.setUp(self)
@no_debug_info_test
def test_reproducer_generate_invalid_invocation(self):
self.expect("gui blub", error=True,
substrs=["the gui command takes no arguments."])
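
A toy model, separate from the commit, of what expect(..., error=True, substrs=[...]) verifies: the command fails and every listed substring appears in its output.
# Standalone illustration; the output string mirrors the test above.
output = "error: the gui command takes no arguments."
substrs = ["the gui command takes no arguments."]
print(all(s in output for s in substrs))  # True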
|
|
5056586586becb94bba265bdd90e46f2e2366534
|
factory/checkFactory.py
|
factory/checkFactory.py
|
#!/bin/env python
#
# Description:
# Check if a glideinFactory is running
#
# Arguments:
# $1 = glidein submit_dir (i.e. factory dir)
#
# Author:
# Igor Sfiligoi Jul 9th 2008
#
import sys
import glideFactoryPidLib
try:
startup_dir=sys.argv[1]
factory_pid=glideFactoryPidLib.get_gfactory_pid(startup_dir)
except:
print "Not running"
sys.exit(1)
print "Running"
sys.exit(0)
|
Check if a glideinFactory is running
|
Check if a glideinFactory is running
|
Python
|
bsd-3-clause
|
holzman/glideinwms-old,bbockelm/glideinWMS,bbockelm/glideinWMS,bbockelm/glideinWMS,holzman/glideinwms-old,holzman/glideinwms-old,bbockelm/glideinWMS
|
Check if a glideinFactory is running
|
#!/bin/env python
#
# Description:
# Check if a glideinFactory is running
#
# Arguments:
# $1 = glidein submit_dir (i.e. factory dir)
#
# Author:
# Igor Sfiligoi Jul 9th 2008
#
import sys
import glideFactoryPidLib
try:
startup_dir=sys.argv[1]
factory_pid=glideFactoryPidLib.get_gfactory_pid(startup_dir)
except:
print "Not running"
sys.exit(1)
print "Running"
sys.exit(0)
|
<commit_before><commit_msg>Check if a glideinFactory is running<commit_after>
|
#!/bin/env python
#
# Description:
# Check if a glideinFactory is running
#
# Arguments:
# $1 = glidein submit_dir (i.e. factory dir)
#
# Author:
# Igor Sfiligoi Jul 9th 2008
#
import sys
import glideFactoryPidLib
try:
startup_dir=sys.argv[1]
factory_pid=glideFactoryPidLib.get_gfactory_pid(startup_dir)
except:
print "Not running"
sys.exit(1)
print "Running"
sys.exit(0)
|
Check if a glideinFactory is running#!/bin/env python
#
# Description:
# Check if a glideinFactory is running
#
# Arguments:
# $1 = glidein submit_dir (i.e. factory dir)
#
# Author:
# Igor Sfiligoi Jul 9th 2008
#
import sys
import glideFactoryPidLib
try:
startup_dir=sys.argv[1]
factory_pid=glideFactoryPidLib.get_gfactory_pid(startup_dir)
except:
print "Not running"
sys.exit(1)
print "Running"
sys.exit(0)
|
<commit_before><commit_msg>Check if a glideinFactory is running<commit_after>#!/bin/env python
#
# Description:
# Check if a glideinFactory is running
#
# Arguments:
# $1 = glidein submit_dir (i.e. factory dir)
#
# Author:
# Igor Sfiligoi Jul 9th 2008
#
import sys
import glideFactoryPidLib
try:
startup_dir=sys.argv[1]
factory_pid=glideFactoryPidLib.get_gfactory_pid(startup_dir)
except:
print "Not running"
sys.exit(1)
print "Running"
sys.exit(0)
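
Since the script reports via exit status (0 when running, 1 otherwise), a caller can branch on the return code; a sketch with an invented submit_dir path:
import subprocess
# Hypothetical invocation; the path below does not come from the commit.
rc = subprocess.call(["python", "checkFactory.py", "/var/glidein/submit"])
print("factory running" if rc == 0 else "factory not running")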
|
|
e7cf50d3f1b751e695f6d7bd1ca1bbe4a4e067cc
|
scripts/odom_trace.py
|
scripts/odom_trace.py
|
#!/usr/bin/env python
'''
Copyright 2016 William Baskin
/*****************************************
LICENSE SUMMARY
This package is licensed under the
MIT License. Please see the LICENSE.md
file in the root folder for the
complete license.
*****************************************/
Odom Trace
Records all published odom/ground truth poses and performs a rolling publish of
the received positions to /replay.
Interface:
msg/replay - publishes the past Odometry positions
'''
import rospy
from nav_msgs.msg import Odometry
class OdomTrace(object):
def __init__(self):
rospy.init_node('odom_trace')
rospy.loginfo('Trace Running.')
self.rcvd = []
self.index = 0
self.odom = rospy.Subscriber('/odom', Odometry, self.process_position)
self.replay = rospy.Publisher('/replay', Odometry, queue_size=1)
sleep_rate = rospy.Rate(50)
while not rospy.is_shutdown():
for position in self.rcvd:
self.replay.publish(position)
sleep_rate.sleep()
sleep_rate.sleep()
def process_position(self, msg):
self.rcvd.append(msg)
if __name__ == '__main__':
ot = OdomTrace()
|
Create a ros node for storing, replaying a robot's position
|
Create a ros node for storing, replaying a robot's position
|
Python
|
mit
|
buckbaskin/drive_stack,buckbaskin/drive_stack,buckbaskin/drive_stack
|
Create a ros node for storing, replaying a robot's position
|
#!/usr/bin/env python
'''
Copyright 2016 William Baskin
/*****************************************
LICENSE SUMMARY
This package is licensed under the
MIT License. Please see the LICENSE.md
file in the root folder for the
complete license.
*****************************************/
Odom Trace
Records all published odom/ground truth poses and performs a rolling publish of
the received positions to /replay.
Interface:
msg/replay - publishes the past Odometry positions
'''
import rospy
from nav_msgs.msg import Odometry
class OdomTrace(object):
def __init__(self):
rospy.init_node('odom_trace')
rospy.loginfo('Trace Running.')
self.rcvd = []
self.index = 0
self.odom = rospy.Subscriber('/odom', Odometry, self.process_position)
self.replay = rospy.Publisher('/replay', Odometry, queue_size=1)
sleep_rate = rospy.Rate(50)
while not rospy.is_shutdown():
for position in self.rcvd:
self.replay.publish(position)
sleep_rate.sleep()
sleep_rate.sleep()
def process_position(self, msg):
self.rcvd.append(msg)
if __name__ == '__main__':
ot = OdomTrace()
|
<commit_before><commit_msg>Create a ros node for storing, replaying a robot's position<commit_after>
|
#!/usr/bin/env python
'''
Copyright 2016 William Baskin
/*****************************************
LICENSE SUMMARY
This package is licensed under the
MIT License. Please see the LICENSE.md
file in the root folder for the
complete license.
*****************************************/
Odom Trace
Records all published odom/ground truth poses and performs a rolling publish of
the received positions to /replay.
Interface:
msg/replay - publishes the past Odometry positions
'''
import rospy
from nav_msgs.msg import Odometry
class OdomTrace(object):
def __init__(self):
rospy.init_node('odom_trace')
rospy.loginfo('Trace Running.')
self.rcvd = []
self.index = 0
self.odom = rospy.Subscriber('/odom', Odometry, self.process_position)
self.replay = rospy.Publisher('/replay', Odometry, queue_size=1)
sleep_rate = rospy.Rate(50)
while not rospy.is_shutdown():
for position in self.rcvd:
self.replay.publish(position)
sleep_rate.sleep()
sleep_rate.sleep()
def process_position(self, msg):
self.rcvd.append(msg)
if __name__ == '__main__':
ot = OdomTrace()
|
Create a ros node for storing, replaying a robot's position#!/usr/bin/env python
'''
Copyright 2016 William Baskin
/*****************************************
LICENSE SUMMARY
This package is licensed under the
MIT License. Please see the LICENSE.md
file in the root folder for the
complete license.
*****************************************/
Odom Trace
Records all published odom/ground truth poses and performs a rolling publish of
the received positions to /replay.
Interface:
msg/replay - publishes the past Odometry positions
'''
import rospy
from nav_msgs.msg import Odometry
class OdomTrace(object):
def __init__(self):
rospy.init_node('odom_trace')
rospy.loginfo('Trace Running.')
self.rcvd = []
self.index = 0
self.odom = rospy.Subscriber('/odom', Odometry, self.process_position)
self.replay = rospy.Publisher('/replay', Odometry, queue_size=1)
sleep_rate = rospy.Rate(50)
while not rospy.is_shutdown():
for position in self.rcvd:
self.replay.publish(position)
sleep_rate.sleep()
sleep_rate.sleep()
def process_position(self, msg):
self.rcvd.append(msg)
if __name__ == '__main__':
ot = OdomTrace()
|
<commit_before><commit_msg>Create a ros node for storing, replaying a robot's position<commit_after>#!/usr/bin/env python
'''
Copyright 2016 William Baskin
/*****************************************
LICENSE SUMMARY
This package is licensed under the
MIT License. Please see the LICENSE.md
file in the root folder for the
complete license.
*****************************************/
Odom Trace
Records all published odom/ground truth poses and performs a rolling publish of
the received positions to /replay.
Interface:
msg/replay - publishes the past Odometry positions
'''
import rospy
from nav_msgs.msg import Odometry
class OdomTrace(object):
def __init__(self):
rospy.init_node('odom_trace')
rospy.loginfo('Trace Running.')
self.rcvd = []
self.index = 0
self.odom = rospy.Subscriber('/odom', Odometry, self.process_position)
self.replay = rospy.Publisher('/replay', Odometry, queue_size=1)
sleep_rate = rospy.Rate(50)
while not rospy.is_shutdown():
for position in self.rcvd:
self.replay.publish(position)
sleep_rate.sleep()
sleep_rate.sleep()
def process_position(self, msg):
self.rcvd.append(msg)
if __name__ == '__main__':
ot = OdomTrace()
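
One property worth noting: self.rcvd grows without bound and the full history is republished every cycle, so each pass gets slower over time. If a bounded trace suffices, collections.deque is a drop-in sketch; the window size below is an arbitrary choice, not from the commit.
from collections import deque
# Bounded variant of the trace buffer: keeps only the newest 500 entries.
rcvd = deque(maxlen=500)
for i in range(1000):
    rcvd.append(i)         # stands in for process_position appending a msg
print(len(rcvd), rcvd[0])  # 500 500 -> the oldest half was dropped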
|
|
7bdc7e6002d837f7362f58f13e53e504e9de77e0
|
examples/python/download_package.py
|
examples/python/download_package.py
|
#!/usr/bin/env python
"""
librepo - download a package
"""
import os
import sys
import shutil
from pprint import pprint
import librepo
DESTDIR = "downloaded_metadata"
PROGRESSBAR_LEN = 40
finished = False
def callback(data, total_to_download, downloaded):
"""Progress callback"""
global finished
if total_to_download != downloaded:
finished = False
    if total_to_download <= 0 or finished:
return
completed = int(downloaded / (total_to_download / PROGRESSBAR_LEN))
print "%30s: [%s%s] %8s/%8s\r" % (data, '#'*completed, '-'*(PROGRESSBAR_LEN-completed), int(downloaded), int(total_to_download)),
sys.stdout.flush()
if total_to_download == downloaded and not finished:
print
finished = True
return
if __name__ == "__main__":
pkgs = [
("ImageMagick-djvu", "Packages/i/ImageMagick-djvu-6.7.5.6-3.fc17.i686.rpm"),
("i2c-tools-eepromer", "Packages/i/i2c-tools-eepromer-3.1.0-1.fc17.i686.rpm")
]
h = librepo.Handle()
h.setopt(librepo.LRO_URL, "http://ftp.linux.ncsu.edu/pub/fedora/linux/releases/17/Everything/i386/os/")
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_PROGRESSCB, callback)
h.setopt(librepo.LRO_PROGRESSDATA, "")
for pkg_name, pkg_url in pkgs:
h.progressdata = pkg_name
h.download(pkg_url)
|
Add example of downloading a package
|
example: Add example of downloading a package
|
Python
|
lgpl-2.1
|
cgwalters/librepo,Conan-Kudo/librepo,Tojaj/librepo,cgwalters/librepo,Conan-Kudo/librepo,rpm-software-management/librepo,rpm-software-management/librepo,rpm-software-management/librepo,bgamari/librepo,Conan-Kudo/librepo,rholy/librepo,Tojaj/librepo,bgamari/librepo,rholy/librepo,rholy/librepo,bgamari/librepo,Tojaj/librepo,rholy/librepo,cgwalters/librepo,cgwalters/librepo
|
example: Add example of downloading a package
|
#!/usr/bin/env python
"""
librepo - download a package
"""
import os
import sys
import shutil
from pprint import pprint
import librepo
DESTDIR = "downloaded_metadata"
PROGRESSBAR_LEN = 40
finished = False
def callback(data, total_to_download, downloaded):
"""Progress callback"""
global finished
if total_to_download != downloaded:
finished = False
    if total_to_download <= 0 or finished:
return
completed = int(downloaded / (total_to_download / PROGRESSBAR_LEN))
print "%30s: [%s%s] %8s/%8s\r" % (data, '#'*completed, '-'*(PROGRESSBAR_LEN-completed), int(downloaded), int(total_to_download)),
sys.stdout.flush()
if total_to_download == downloaded and not finished:
print
finished = True
return
if __name__ == "__main__":
pkgs = [
("ImageMagick-djvu", "Packages/i/ImageMagick-djvu-6.7.5.6-3.fc17.i686.rpm"),
("i2c-tools-eepromer", "Packages/i/i2c-tools-eepromer-3.1.0-1.fc17.i686.rpm")
]
h = librepo.Handle()
h.setopt(librepo.LRO_URL, "http://ftp.linux.ncsu.edu/pub/fedora/linux/releases/17/Everything/i386/os/")
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_PROGRESSCB, callback)
h.setopt(librepo.LRO_PROGRESSDATA, "")
for pkg_name, pkg_url in pkgs:
h.progressdata = pkg_name
h.download(pkg_url)
|
<commit_before><commit_msg>example: Add example of downloading a package<commit_after>
|
#!/usr/bin/env python
"""
librepo - download a package
"""
import os
import sys
import shutil
from pprint import pprint
import librepo
DESTDIR = "downloaded_metadata"
PROGRESSBAR_LEN = 40
finished = False
def callback(data, total_to_download, downloaded):
"""Progress callback"""
global finished
if total_to_download != downloaded:
finished = False
    if total_to_download <= 0 or finished:
return
completed = int(downloaded / (total_to_download / PROGRESSBAR_LEN))
print "%30s: [%s%s] %8s/%8s\r" % (data, '#'*completed, '-'*(PROGRESSBAR_LEN-completed), int(downloaded), int(total_to_download)),
sys.stdout.flush()
if total_to_download == downloaded and not finished:
print
finished = True
return
if __name__ == "__main__":
pkgs = [
("ImageMagick-djvu", "Packages/i/ImageMagick-djvu-6.7.5.6-3.fc17.i686.rpm"),
("i2c-tools-eepromer", "Packages/i/i2c-tools-eepromer-3.1.0-1.fc17.i686.rpm")
]
h = librepo.Handle()
h.setopt(librepo.LRO_URL, "http://ftp.linux.ncsu.edu/pub/fedora/linux/releases/17/Everything/i386/os/")
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_PROGRESSCB, callback)
h.setopt(librepo.LRO_PROGRESSDATA, "")
for pkg_name, pkg_url in pkgs:
h.progressdata = pkg_name
h.download(pkg_url)
|
example: Add example of downloading a package#!/usr/bin/env python
"""
librepo - download a package
"""
import os
import sys
import shutil
from pprint import pprint
import librepo
DESTDIR = "downloaded_metadata"
PROGRESSBAR_LEN = 40
finished = False
def callback(data, total_to_download, downloaded):
"""Progress callback"""
global finished
if total_to_download != downloaded:
finished = False
    if total_to_download <= 0 or finished:
return
completed = int(downloaded / (total_to_download / PROGRESSBAR_LEN))
print "%30s: [%s%s] %8s/%8s\r" % (data, '#'*completed, '-'*(PROGRESSBAR_LEN-completed), int(downloaded), int(total_to_download)),
sys.stdout.flush()
if total_to_download == downloaded and not finished:
print
finished = True
return
if __name__ == "__main__":
pkgs = [
("ImageMagick-djvu", "Packages/i/ImageMagick-djvu-6.7.5.6-3.fc17.i686.rpm"),
("i2c-tools-eepromer", "Packages/i/i2c-tools-eepromer-3.1.0-1.fc17.i686.rpm")
]
h = librepo.Handle()
h.setopt(librepo.LRO_URL, "http://ftp.linux.ncsu.edu/pub/fedora/linux/releases/17/Everything/i386/os/")
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_PROGRESSCB, callback)
h.setopt(librepo.LRO_PROGRESSDATA, "")
for pkg_name, pkg_url in pkgs:
h.progressdata = pkg_name
h.download(pkg_url)
|
<commit_before><commit_msg>example: Add example of downloading a package<commit_after>#!/usr/bin/env python
"""
librepo - download a package
"""
import os
import sys
import shutil
from pprint import pprint
import librepo
DESTDIR = "downloaded_metadata"
PROGRESSBAR_LEN = 40
finished = False
def callback(data, total_to_download, downloaded):
"""Progress callback"""
global finished
if total_to_download != downloaded:
finished = False
    if total_to_download <= 0 or finished:
return
completed = int(downloaded / (total_to_download / PROGRESSBAR_LEN))
print "%30s: [%s%s] %8s/%8s\r" % (data, '#'*completed, '-'*(PROGRESSBAR_LEN-completed), int(downloaded), int(total_to_download)),
sys.stdout.flush()
if total_to_download == downloaded and not finished:
print
finished = True
return
if __name__ == "__main__":
pkgs = [
("ImageMagick-djvu", "Packages/i/ImageMagick-djvu-6.7.5.6-3.fc17.i686.rpm"),
("i2c-tools-eepromer", "Packages/i/i2c-tools-eepromer-3.1.0-1.fc17.i686.rpm")
]
h = librepo.Handle()
h.setopt(librepo.LRO_URL, "http://ftp.linux.ncsu.edu/pub/fedora/linux/releases/17/Everything/i386/os/")
h.setopt(librepo.LRO_REPOTYPE, librepo.LR_YUMREPO)
h.setopt(librepo.LRO_PROGRESSCB, callback)
h.setopt(librepo.LRO_PROGRESSDATA, "")
for pkg_name, pkg_url in pkgs:
h.progressdata = pkg_name
h.download(pkg_url)
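
The progress-bar arithmetic in the callback can be checked in isolation; the byte counts are invented and the sketch is written for Python 3 even though the original targets Python 2.
PROGRESSBAR_LEN = 40
downloaded, total = 3000000.0, 12000000.0
completed = int(downloaded / (total / PROGRESSBAR_LEN))  # 10 of 40 cells
bar = '#' * completed + '-' * (PROGRESSBAR_LEN - completed)
print("[%s] %d/%d" % (bar, downloaded, total))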
|
|
1d50c5f10faeb17f8c7a1b7290c7d80f52220ad9
|
src/ggrc/migrations/versions/20161020125620_1db61b597d2d_remove_bad_ca_names.py
|
src/ggrc/migrations/versions/20161020125620_1db61b597d2d_remove_bad_ca_names.py
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Remove bad CA names
Create Date: 2016-10-20 12:56:20.500665
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column, select
# revision identifiers, used by Alembic.
revision = "1db61b597d2d"
down_revision = "53206b20c12b"
MAX_TRIES = 100
CAD = table(
"custom_attribute_definitions",
column("id", sa.Integer),
column("title", sa.String),
column("definition_id", sa.Integer),
column("definition_type", sa.String),
)
def _update_at_cad_titles(old_titles, new_titles):
"""Update Assessment template CAD titles."""
for old, new in zip(old_titles, new_titles):
op.execute(
CAD.update()
.where(CAD.c.title == old)
.where(CAD.c.definition_type == "assessment_template")
.values(title=new)
)
def upgrade():
"""Upgrade database schema and/or data, creating a new revision.
  The global CAD titles to be replaced can be found with the SQL statement:
SELECT title
FROM custom_attribute_definitions
WHERE definition_type = "assessment_template"
AND title IN (
SELECT DISTINCT(title)
FROM custom_attribute_definitions
WHERE definition_type = "assessment"
AND definition_id IS NULL
)
"""
connection = op.get_bind()
assessment_global_titles = select([CAD.c.title])\
.where(CAD.c.definition_type == "assessment")\
.where(CAD.c.definition_id.is_(None))
bad_rows_sql = select([CAD.c.title])\
.where(CAD.c.definition_type == "assessment_template")\
.where(CAD.c.title.in_(assessment_global_titles))
bad_rows = connection.execute(bad_rows_sql).fetchall()
bad_titles = [row.title for row in bad_rows]
if not bad_titles:
return
for counter in range(1, MAX_TRIES):
new_titles = [u"{} ({})".format(title, counter) for title in bad_titles]
collisions = connection.execute(
CAD.select().where(CAD.c.title.in_(new_titles))
).fetchall()
if not collisions:
_update_at_cad_titles(bad_titles, new_titles)
break
def downgrade():
"""Ignore downgrade function.
Fixing title names can't be reversed so there is nothing to do here.
"""
|
Add migration for fixing bad CA names
|
Add migration for fixing bad CA names
This migration removes all assessment template custom attribute
definition names that match any of the assessment global custom
attributes.
Resolves: GGRC-26, GGRC-111
|
Python
|
apache-2.0
|
AleksNeStu/ggrc-core,selahssea/ggrc-core,j0gurt/ggrc-core,josthkko/ggrc-core,AleksNeStu/ggrc-core,AleksNeStu/ggrc-core,josthkko/ggrc-core,VinnieJohns/ggrc-core,selahssea/ggrc-core,selahssea/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,VinnieJohns/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core,plamut/ggrc-core,AleksNeStu/ggrc-core,selahssea/ggrc-core,plamut/ggrc-core,j0gurt/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,VinnieJohns/ggrc-core,andrei-karalionak/ggrc-core,plamut/ggrc-core,andrei-karalionak/ggrc-core,josthkko/ggrc-core,j0gurt/ggrc-core
|
Add migration for fixing bad CA names
This migration removes all assessment template custom attribute
definition names that match any of the assessment global custom
attributes.
Resolves: GGRC-26, GGRC-111
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Remove bad CA names
Create Date: 2016-10-20 12:56:20.500665
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column, select
# revision identifiers, used by Alembic.
revision = "1db61b597d2d"
down_revision = "53206b20c12b"
MAX_TRIES = 100
CAD = table(
"custom_attribute_definitions",
column("id", sa.Integer),
column("title", sa.String),
column("definition_id", sa.Integer),
column("definition_type", sa.String),
)
def _update_at_cad_titles(old_titles, new_titles):
"""Update Assessment template CAD titles."""
for old, new in zip(old_titles, new_titles):
op.execute(
CAD.update()
.where(CAD.c.title == old)
.where(CAD.c.definition_type == "assessment_template")
.values(title=new)
)
def upgrade():
"""Upgrade database schema and/or data, creating a new revision.
  The global CAD titles to be replaced can be found with the SQL statement:
SELECT title
FROM custom_attribute_definitions
WHERE definition_type = "assessment_template"
AND title IN (
SELECT DISTINCT(title)
FROM custom_attribute_definitions
WHERE definition_type = "assessment"
AND definition_id IS NULL
)
"""
connection = op.get_bind()
assessment_global_titles = select([CAD.c.title])\
.where(CAD.c.definition_type == "assessment")\
.where(CAD.c.definition_id.is_(None))
bad_rows_sql = select([CAD.c.title])\
.where(CAD.c.definition_type == "assessment_template")\
.where(CAD.c.title.in_(assessment_global_titles))
bad_rows = connection.execute(bad_rows_sql).fetchall()
bad_titles = [row.title for row in bad_rows]
if not bad_titles:
return
for counter in range(1, MAX_TRIES):
new_titles = [u"{} ({})".format(title, counter) for title in bad_titles]
collisions = connection.execute(
CAD.select().where(CAD.c.title.in_(new_titles))
).fetchall()
if not collisions:
_update_at_cad_titles(bad_titles, new_titles)
break
def downgrade():
"""Ignore downgrade function.
Fixing title names can't be reversed so there is nothing to do here.
"""
|
<commit_before><commit_msg>Add migration for fixing bad CA names
This migration removes all assessment template custom attribute
definition names that match any of the assessment global custom
attributes.
Resolves: GGRC-26, GGRC-111<commit_after>
|
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Remove bad CA names
Create Date: 2016-10-20 12:56:20.500665
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column, select
# revision identifiers, used by Alembic.
revision = "1db61b597d2d"
down_revision = "53206b20c12b"
MAX_TRIES = 100
CAD = table(
"custom_attribute_definitions",
column("id", sa.Integer),
column("title", sa.String),
column("definition_id", sa.Integer),
column("definition_type", sa.String),
)
def _update_at_cad_titles(old_titles, new_titles):
"""Update Assessment template CAD titles."""
for old, new in zip(old_titles, new_titles):
op.execute(
CAD.update()
.where(CAD.c.title == old)
.where(CAD.c.definition_type == "assessment_template")
.values(title=new)
)
def upgrade():
"""Upgrade database schema and/or data, creating a new revision.
  The global CAD titles to be replaced can be found with the SQL statement:
SELECT title
FROM custom_attribute_definitions
WHERE definition_type = "assessment_template"
AND title IN (
SELECT DISTINCT(title)
FROM custom_attribute_definitions
WHERE definition_type = "assessment"
AND definition_id IS NULL
)
"""
connection = op.get_bind()
assessment_global_titles = select([CAD.c.title])\
.where(CAD.c.definition_type == "assessment")\
.where(CAD.c.definition_id.is_(None))
bad_rows_sql = select([CAD.c.title])\
.where(CAD.c.definition_type == "assessment_template")\
.where(CAD.c.title.in_(assessment_global_titles))
bad_rows = connection.execute(bad_rows_sql).fetchall()
bad_titles = [row.title for row in bad_rows]
if not bad_titles:
return
for counter in range(1, MAX_TRIES):
new_titles = [u"{} ({})".format(title, counter) for title in bad_titles]
collisions = connection.execute(
CAD.select().where(CAD.c.title.in_(new_titles))
).fetchall()
if not collisions:
_update_at_cad_titles(bad_titles, new_titles)
break
def downgrade():
"""Ignore downgrade function.
Fixing title names can't be reversed so there is nothing to do here.
"""
|
Add migration for fixing bad CA names
This migration removes all assessment template custom attribute
definition names that match any of the assessment global custom
attributes.
Resolves: GGRC-26, GGRC-111
# Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Remove bad CA names
Create Date: 2016-10-20 12:56:20.500665
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column, select
# revision identifiers, used by Alembic.
revision = "1db61b597d2d"
down_revision = "53206b20c12b"
MAX_TRIES = 100
CAD = table(
"custom_attribute_definitions",
column("id", sa.Integer),
column("title", sa.String),
column("definition_id", sa.Integer),
column("definition_type", sa.String),
)
def _update_at_cad_titles(old_titles, new_titles):
"""Update Assessment template CAD titles."""
for old, new in zip(old_titles, new_titles):
op.execute(
CAD.update()
.where(CAD.c.title == old)
.where(CAD.c.definition_type == "assessment_template")
.values(title=new)
)
def upgrade():
"""Upgrade database schema and/or data, creating a new revision.
The global CAD titles to be replaced can be found with the sql statement:
SELECT title
FROM custom_attribute_definitions
WHERE definition_type = "assessment_template"
AND title IN (
SELECT DISTINCT(title)
FROM custom_attribute_definitions
WHERE definition_type = "assessment"
AND definition_id IS NULL
)
"""
connection = op.get_bind()
assessment_global_titles = select([CAD.c.title])\
.where(CAD.c.definition_type == "assessment")\
.where(CAD.c.definition_id.is_(None))
bad_rows_sql = select([CAD.c.title])\
.where(CAD.c.definition_type == "assessment_template")\
.where(CAD.c.title.in_(assessment_global_titles))
bad_rows = connection.execute(bad_rows_sql).fetchall()
bad_titles = [row.title for row in bad_rows]
if not bad_titles:
return
for counter in range(1, MAX_TRIES):
new_titles = [u"{} ({})".format(title, counter) for title in bad_titles]
collisions = connection.execute(
CAD.select().where(CAD.c.title.in_(new_titles))
).fetchall()
if not collisions:
_update_at_cad_titles(bad_titles, new_titles)
break
def downgrade():
"""Ignore downgrade function.
Fixing title names can't be reversed so there is nothing to do here.
"""
|
<commit_before><commit_msg>Add migration for fixing bad CA names
This migration removes all assessment template custom attribute
definition names that match any of the assessment global custom
attributes.
Resolves: GGRC-26, GGRC-111<commit_after># Copyright (C) 2016 Google Inc.
# Licensed under http://www.apache.org/licenses/LICENSE-2.0 <see LICENSE file>
"""
Remove bad CA names
Create Date: 2016-10-20 12:56:20.500665
"""
# disable Invalid constant name pylint warning for mandatory Alembic variables.
# pylint: disable=invalid-name
from alembic import op
import sqlalchemy as sa
from sqlalchemy.sql import table, column, select
# revision identifiers, used by Alembic.
revision = "1db61b597d2d"
down_revision = "53206b20c12b"
MAX_TRIES = 100
CAD = table(
"custom_attribute_definitions",
column("id", sa.Integer),
column("title", sa.String),
column("definition_id", sa.Integer),
column("definition_type", sa.String),
)
def _update_at_cad_titles(old_titles, new_titles):
"""Update Assessment template CAD titles."""
for old, new in zip(old_titles, new_titles):
op.execute(
CAD.update()
.where(CAD.c.title == old)
.where(CAD.c.definition_type == "assessment_template")
.values(title=new)
)
def upgrade():
"""Upgrade database schema and/or data, creating a new revision.
The global CAD titles to be replaced can be found with the sql statement:
SELECT title
FROM custom_attribute_definitions
WHERE definition_type = "assessment_template"
AND title IN (
SELECT DISTINCT(title)
FROM custom_attribute_definitions
WHERE definition_type = "assessment"
AND definition_id IS NULL
)
"""
connection = op.get_bind()
assessment_global_titles = select([CAD.c.title])\
.where(CAD.c.definition_type == "assessment")\
.where(CAD.c.definition_id.is_(None))
bad_rows_sql = select([CAD.c.title])\
.where(CAD.c.definition_type == "assessment_template")\
.where(CAD.c.title.in_(assessment_global_titles))
bad_rows = connection.execute(bad_rows_sql).fetchall()
bad_titles = [row.title for row in bad_rows]
if not bad_titles:
return
for counter in range(1, MAX_TRIES):
new_titles = [u"{} ({})".format(title, counter) for title in bad_titles]
collisions = connection.execute(
CAD.select().where(CAD.c.title.in_(new_titles))
).fetchall()
if not collisions:
_update_at_cad_titles(bad_titles, new_titles)
break
def downgrade():
"""Ignore downgrade function.
Fixing title names can't be reversed so there is nothing to do here.
"""
|
|
c618a65f95b3c0486dd62bba4b1255ad95bbe9d2
|
quran_tafseer/tests/test_views.py
|
quran_tafseer/tests/test_views.py
|
from django.test import TestCase
from django.urls import reverse
from model_mommy import mommy
class TestTafsserViews(TestCase):
def setUp(self):
self.sura = mommy.make('quran_text.sura', name='Al-Fateha', index=1)
self.ayah = mommy.make('quran_text.ayah', number=1, sura=self.sura,
text='بسم الله الرحمن الرحيم')
self.tafseer = mommy.make('quran_tafseer.Tafseer', name='simple')
self.tafseer_text = mommy.make('quran_tafseer.TafseerText',
ayah=self.ayah, tafseer=self.tafseer,
text='بسم الله الرحمن الرحيم')
def test_tafseer_view(self):
tafseer_url = reverse('tafseer-list')
response = self.client.get(tafseer_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode(), '[{"id":1,"name":"simple"}]')
def test_tafseer_text_view(self):
tafseer_text_url = reverse('ayah-tafseer', kwargs={'tafseer_id':1,
'sura_index':1,
'ayah_number':1})
response = self.client.get(tafseer_text_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode(), '{"tafseer_id":1,"tafseer_name":"simple",'
'"ayah_url":"/quran/1/1","ayah_number":1,'
'"text":"بسم الله الرحمن الرحيم"}')
|
Add tests for Tafseer Views
|
Add tests for Tafseer Views
|
Python
|
mit
|
EmadMokhtar/tafseer_api
|
Add tests for Tafseer Views
|
from django.test import TestCase
from django.urls import reverse
from model_mommy import mommy
class TestTafsserViews(TestCase):
def setUp(self):
self.sura = mommy.make('quran_text.sura', name='Al-Fateha', index=1)
self.ayah = mommy.make('quran_text.ayah', number=1, sura=self.sura,
text='بسم الله الرحمن الرحيم')
self.tafseer = mommy.make('quran_tafseer.Tafseer', name='simple')
self.tafseer_text = mommy.make('quran_tafseer.TafseerText',
ayah=self.ayah, tafseer=self.tafseer,
text='بسم الله الرحمن الرحيم')
def test_tafseer_view(self):
tafseer_url = reverse('tafseer-list')
response = self.client.get(tafseer_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode(), '[{"id":1,"name":"simple"}]')
def test_tafseer_text_view(self):
tafseer_text_url = reverse('ayah-tafseer', kwargs={'tafseer_id':1,
'sura_index':1,
'ayah_number':1})
response = self.client.get(tafseer_text_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode(), '{"tafseer_id":1,"tafseer_name":"simple",'
'"ayah_url":"/quran/1/1","ayah_number":1,'
'"text":"بسم الله الرحمن الرحيم"}')
|
<commit_before><commit_msg>Add tests for Tafseer Views<commit_after>
|
from django.test import TestCase
from django.urls import reverse
from model_mommy import mommy
class TestTafsserViews(TestCase):
def setUp(self):
self.sura = mommy.make('quran_text.sura', name='Al-Fateha', index=1)
self.ayah = mommy.make('quran_text.ayah', number=1, sura=self.sura,
text='بسم الله الرحمن الرحيم')
self.tafseer = mommy.make('quran_tafseer.Tafseer', name='simple')
self.tafseer_text = mommy.make('quran_tafseer.TafseerText',
ayah=self.ayah, tafseer=self.tafseer,
text='بسم الله الرحمن الرحيم')
def test_tafseer_view(self):
tafseer_url = reverse('tafseer-list')
response = self.client.get(tafseer_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode(), '[{"id":1,"name":"simple"}]')
def test_tafseer_text_view(self):
tafseer_text_url = reverse('ayah-tafseer', kwargs={'tafseer_id':1,
'sura_index':1,
'ayah_number':1})
response = self.client.get(tafseer_text_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode(), '{"tafseer_id":1,"tafseer_name":"simple",'
'"ayah_url":"/quran/1/1","ayah_number":1,'
'"text":"بسم الله الرحمن الرحيم"}')
|
Add tests for Tafseer Views
from django.test import TestCase
from django.urls import reverse
from model_mommy import mommy
class TestTafsserViews(TestCase):
def setUp(self):
self.sura = mommy.make('quran_text.sura', name='Al-Fateha', index=1)
self.ayah = mommy.make('quran_text.ayah', number=1, sura=self.sura,
text='بسم الله الرحمن الرحيم')
self.tafseer = mommy.make('quran_tafseer.Tafseer', name='simple')
self.tafseer_text = mommy.make('quran_tafseer.TafseerText',
ayah=self.ayah, tafseer=self.tafseer,
text='بسم الله الرحمن الرحيم')
def test_tafseer_view(self):
tafseer_url = reverse('tafseer-list')
response = self.client.get(tafseer_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode(), '[{"id":1,"name":"simple"}]')
def test_tafseer_text_view(self):
tafseer_text_url = reverse('ayah-tafseer', kwargs={'tafseer_id':1,
'sura_index':1,
'ayah_number':1})
response = self.client.get(tafseer_text_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode(), '{"tafseer_id":1,"tafseer_name":"simple",'
'"ayah_url":"/quran/1/1","ayah_number":1,'
'"text":"بسم الله الرحمن الرحيم"}')
|
<commit_before><commit_msg>Add tests for Tafseer Views<commit_after>from django.test import TestCase
from django.urls import reverse
from model_mommy import mommy
class TestTafsserViews(TestCase):
def setUp(self):
self.sura = mommy.make('quran_text.sura', name='Al-Fateha', index=1)
self.ayah = mommy.make('quran_text.ayah', number=1, sura=self.sura,
text='بسم الله الرحمن الرحيم')
self.tafseer = mommy.make('quran_tafseer.Tafseer', name='simple')
self.tafseer_text = mommy.make('quran_tafseer.TafseerText',
ayah=self.ayah, tafseer=self.tafseer,
text='بسم الله الرحمن الرحيم')
def test_tafseer_view(self):
tafseer_url = reverse('tafseer-list')
response = self.client.get(tafseer_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode(), '[{"id":1,"name":"simple"}]')
def test_tafseer_text_view(self):
tafseer_text_url = reverse('ayah-tafseer', kwargs={'tafseer_id':1,
'sura_index':1,
'ayah_number':1})
response = self.client.get(tafseer_text_url)
self.assertEqual(response.status_code, 200)
self.assertEqual(response.content.decode(), '{"tafseer_id":1,"tafseer_name":"simple",'
'"ayah_url":"/quran/1/1","ayah_number":1,'
'"text":"بسم الله الرحمن الرحيم"}')
|
|
03b0e88c909a5e8f0eeb8116222f166891e45b28
|
scale_perf/vertica_queries.py
|
scale_perf/vertica_queries.py
|
import sys
import argparse
from subprocess import Popen, PIPE
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("--vertica_password",
help="Vertica password for disk.sh and alarm_transitions.sh", default='password',
required=False)
return parser.parse_args()
def split_result(results):
result_dict = []
results = results.split("\n")
column_names = results.pop(0).split(",")
for result in results:
result_dict.append(zip(column_names, result.split(",")))
return result_dict
def run_query(vertica_base_query, vertica_query):
vertica_base_query.append(vertica_query)
query = Popen(vertica_base_query, stdout=PIPE, stderr=PIPE)
result, error_output = query.communicate()
return result
def parse_projection_stats(stats):
print stats
def parse_resource_rejections(rejections):
print rejections
def parse_resource_usage(usage):
print usage
def main():
args = parse_args()
# vertica_base_query =["/opt/vertica/bin/vsql", "-U", "dbadmin", "-w", args.vertica_password, "-A", "-F", ",", "-c"]
vertica_base_query =["/opt/vertica/bin/vsql", "-U", "dbadmin", "-w", args.vertica_password, "-c"]
parse_projection_stats(run_query(list(vertica_base_query), "select node_name, projection_name, projection_schema, "
"wos_used_bytes, ros_used_bytes, ros_count from "
"projection_storage"))
parse_resource_rejections(run_query(list(vertica_base_query), "select * from resource_rejections"))
parse_resource_usage(run_query(list(vertica_base_query), "select node_name, request_queue_depth, "
"active_thread_count, open_file_handle_count, "
"wos_used_bytes, ros_used_bytes, "
"resource_request_reject_count, "
"resource_request_timeout_count, "
"disk_space_request_reject_count, "
"failed_volume_reject_count from resource_usage"))
if __name__ == "__main__":
sys.exit(main())
|
Add script to query vertica stats
|
Add script to query vertica stats
|
Python
|
apache-2.0
|
hpcloud-mon/monasca-perf,hpcloud-mon/monasca-perf
|
Add script to query vertica stats
|
import sys
import argparse
from subprocess import Popen, PIPE
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("--vertica_password",
help="Vertica password for disk.sh and alarm_transitions.sh", default='password',
required=False)
return parser.parse_args()
def split_result(results):
result_dict = []
results = results.split("\n")
column_names = results.pop(0).split(",")
for result in results:
result_dict.append(zip(column_names, result.split(",")))
return result_dict
def run_query(vertica_base_query, vertica_query):
vertica_base_query.append(vertica_query)
query = Popen(vertica_base_query, stdout=PIPE, stderr=PIPE)
result, error_output = query.communicate()
return result
def parse_projection_stats(stats):
print stats
def parse_resource_rejections(rejections):
print rejections
def parse_resource_usage(usage):
print usage
def main():
args = parse_args()
# vertica_base_query =["/opt/vertica/bin/vsql", "-U", "dbadmin", "-w", args.vertica_password, "-A", "-F", ",", "-c"]
vertica_base_query =["/opt/vertica/bin/vsql", "-U", "dbadmin", "-w", args.vertica_password, "-c"]
parse_projection_stats(run_query(list(vertica_base_query), "select node_name, projection_name, projection_schema, "
"wos_used_bytes, ros_used_bytes, ros_count from "
"projection_storage"))
parse_resource_rejections(run_query(list(vertica_base_query), "select * from resource_rejections"))
parse_resource_usage(run_query(list(vertica_base_query), "select node_name, request_queue_depth, "
"active_thread_count, open_file_handle_count, "
"wos_used_bytes, ros_used_bytes, "
"resource_request_reject_count, "
"resource_request_timeout_count, "
"disk_space_request_reject_count, "
"failed_volume_reject_count from resource_usage"))
if __name__ == "__main__":
sys.exit(main())
|
<commit_before><commit_msg>Add script to query vertica stats<commit_after>
|
import sys
import argparse
from subprocess import Popen, PIPE
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("--vertica_password",
help="Vertica password for disk.sh and alarm_transitions.sh", default='password',
required=False)
return parser.parse_args()
def split_result(results):
result_dict = []
results = results.split("\n")
column_names = results.pop(0).split(",")
for result in results:
result_dict.append(zip(column_names, result.split(",")))
return result_dict
def run_query(vertica_base_query, vertica_query):
vertica_base_query.append(vertica_query)
query = Popen(vertica_base_query, stdout=PIPE, stderr=PIPE)
result, error_output = query.communicate()
return result
def parse_projection_stats(stats):
print stats
def parse_resource_rejections(rejections):
print rejections
def parse_resource_usage(usage):
print usage
def main():
args = parse_args()
# vertica_base_query =["/opt/vertica/bin/vsql", "-U", "dbadmin", "-w", args.vertica_password, "-A", "-F", ",", "-c"]
vertica_base_query =["/opt/vertica/bin/vsql", "-U", "dbadmin", "-w", args.vertica_password, "-c"]
parse_projection_stats(run_query(list(vertica_base_query), "select node_name, projection_name, projection_schema, "
"wos_used_bytes, ros_used_bytes, ros_count from "
"projection_storage"))
parse_resource_rejections(run_query(list(vertica_base_query), "select * from resource_rejections"))
parse_resource_usage(run_query(list(vertica_base_query), "select node_name, request_queue_depth, "
"active_thread_count, open_file_handle_count, "
"wos_used_bytes, ros_used_bytes, "
"resource_request_reject_count, "
"resource_request_timeout_count, "
"disk_space_request_reject_count, "
"failed_volume_reject_count from resource_usage"))
if __name__ == "__main__":
sys.exit(main())
|
Add script to query vertica stats
import sys
import argparse
from subprocess import Popen, PIPE
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("--vertica_password",
help="Vertica password for disk.sh and alarm_transitions.sh", default='password',
required=False)
return parser.parse_args()
def split_result(results):
result_dict = []
results = results.split("\n")
column_names = results.pop(0).split(",")
for result in results:
result_dict.append(zip(column_names, result.split(",")))
return result_dict
def run_query(vertica_base_query, vertica_query):
vertica_base_query.append(vertica_query)
query = Popen(vertica_base_query, stdout=PIPE, stderr=PIPE)
result, error_output = query.communicate()
return result
def parse_projection_stats(stats):
print stats
def parse_resource_rejections(rejections):
print rejections
def parse_resource_usage(usage):
print usage
def main():
args = parse_args()
# vertica_base_query =["/opt/vertica/bin/vsql", "-U", "dbadmin", "-w", args.vertica_password, "-A", "-F", ",", "-c"]
vertica_base_query =["/opt/vertica/bin/vsql", "-U", "dbadmin", "-w", args.vertica_password, "-c"]
parse_projection_stats(run_query(list(vertica_base_query), "select node_name, projection_name, projection_schema, "
"wos_used_bytes, ros_used_bytes, ros_count from "
"projection_storage"))
parse_resource_rejections(run_query(list(vertica_base_query), "select * from resource_rejections"))
parse_resource_usage(run_query(list(vertica_base_query), "select node_name, request_queue_depth, "
"active_thread_count, open_file_handle_count, "
"wos_used_bytes, ros_used_bytes, "
"resource_request_reject_count, "
"resource_request_timeout_count, "
"disk_space_request_reject_count, "
"failed_volume_reject_count from resource_usage"))
if __name__ == "__main__":
sys.exit(main())
|
<commit_before><commit_msg>Add script to query vertica stats<commit_after>import sys
import argparse
from subprocess import Popen, PIPE
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("--vertica_password",
help="Vertica password for disk.sh and alarm_transitions.sh", default='password',
required=False)
return parser.parse_args()
def split_result(results):
result_dict = []
results = results.split("\n")
column_names = results.pop(0).split(",")
for result in results:
result_dict.append(zip(column_names, result.split(",")))
return result_dict
def run_query(vertica_base_query, vertica_query):
vertica_base_query.append(vertica_query)
query = Popen(vertica_base_query, stdout=PIPE, stderr=PIPE)
result, error_output = query.communicate()
return result
def parse_projection_stats(stats):
print stats
def parse_resource_rejections(rejections):
print rejections
def parse_resource_usage(usage):
print usage
def main():
args = parse_args()
# vertica_base_query =["/opt/vertica/bin/vsql", "-U", "dbadmin", "-w", args.vertica_password, "-A", "-F", ",", "-c"]
vertica_base_query =["/opt/vertica/bin/vsql", "-U", "dbadmin", "-w", args.vertica_password, "-c"]
parse_projection_stats(run_query(list(vertica_base_query), "select node_name, projection_name, projection_schema, "
"wos_used_bytes, ros_used_bytes, ros_count from "
"projection_storage"))
parse_resource_rejections(run_query(list(vertica_base_query), "select * from resource_rejections"))
parse_resource_usage(run_query(list(vertica_base_query), "select node_name, request_queue_depth, "
"active_thread_count, open_file_handle_count, "
"wos_used_bytes, ros_used_bytes, "
"resource_request_reject_count, "
"resource_request_timeout_count, "
"disk_space_request_reject_count, "
"failed_volume_reject_count from resource_usage"))
if __name__ == "__main__":
sys.exit(main())
|
|
38e570e50976ee863fddd00129ce2d1782b06ca5
|
examples/manual_stats_reporting.py
|
examples/manual_stats_reporting.py
|
"""
Example of a manual_report() function that can be used either as a context manager
(with statement), or a decorator, to manually add entries to Locust's statistics.
Usage as a context manager:
with manual_report("stats entry name"):
# Run time of this block will be reported under a stats entry called "stats entry name"
# do stuff here, if an Exception is raised, it'll be reported as a failure
Usage as a decorator:
@task
@manual_report
def my_task(self):
# The run time of this task will be reported under a stats entry called "my task" (type "manual").
# If an Exception is raised, it'll be reported as a failure
"""
import random
from contextlib import contextmanager, ContextDecorator
from time import time, sleep
from locust import User, task, constant, events
@contextmanager
def _manual_report(name):
start_time = time()
try:
yield
except Exception as e:
events.request_failure.fire(
request_type="manual",
name=name,
response_time=(time() - start_time) * 1000,
response_length=0,
exception=e,
)
raise
else:
events.request_success.fire(
request_type="manual",
name=name,
response_time=(time() - start_time) * 1000,
response_length=0,
)
def manual_report(name_or_func):
if callable(name_or_func):
# used as decorator without name argument specified
return _manual_report(name_or_func.__name__)(name_or_func)
else:
return _manual_report(name_or_func)
class MyUser(User):
wait_time = constant(1)
@task
def successful_task(self):
with manual_report("successful_task"):
sleep(random.random())
@task
@manual_report
def decorator_test(self):
if random.random() > 0.5:
raise Exception("decorator_task failed")
sleep(random.random())
@task
def failing_task(self):
with manual_report("failing_task"):
sleep(random.random())
raise Exception("Oh nooes!")
|
Add example with a manual_report decorator/context manager that can be used to easily measure the time of functions/code blocks and manually report them as stats entries to the Locust statistics
|
Add example with a manual_report decorator/context manager that can be used to easily measure the time of functions/code blocks and manually report them as stats entries to the Locust statistics
|
Python
|
mit
|
mbeacom/locust,mbeacom/locust,mbeacom/locust,locustio/locust,locustio/locust,mbeacom/locust,locustio/locust,locustio/locust
|
Add example with a manual_report decorator/context manager that can be used to easily measure the time of functions/code blocks and manually report them as stats entries to the Locust statistics
|
"""
Example of a manual_report() function that can be used either as a context manager
(with statement), or a decorator, to manually add entries to Locust's statistics.
Usage as a context manager:
with manual_report("stats entry name"):
# Run time of this block will be reported under a stats entry called "stats entry name"
# do stuff here, if an Exception is raised, it'll be reported as a failure
Usage as a decorator:
@task
@manual_report
def my_task(self):
# The run time of this task will be reported under a stats entry called "my task" (type "manual").
# If an Exception is raised, it'll be reported as a failure
"""
import random
from contextlib import contextmanager, ContextDecorator
from time import time, sleep
from locust import User, task, constant, events
@contextmanager
def _manual_report(name):
start_time = time()
try:
yield
except Exception as e:
events.request_failure.fire(
request_type="manual",
name=name,
response_time=(time() - start_time) * 1000,
response_length=0,
exception=e,
)
raise
else:
events.request_success.fire(
request_type="manual",
name=name,
response_time=(time() - start_time) * 1000,
response_length=0,
)
def manual_report(name_or_func):
if callable(name_or_func):
# used as decorator without name argument specified
return _manual_report(name_or_func.__name__)(name_or_func)
else:
return _manual_report(name_or_func)
class MyUser(User):
wait_time = constant(1)
@task
def successful_task(self):
with manual_report("successful_task"):
sleep(random.random())
@task
@manual_report
def decorator_test(self):
if random.random() > 0.5:
raise Exception("decorator_task failed")
sleep(random.random())
@task
def failing_task(self):
with manual_report("failing_task"):
sleep(random.random())
raise Exception("Oh nooes!")
|
<commit_before><commit_msg>Add example with a manual_report decorator/context manager that can be used to easily measure the time of functions/code blocks and manually report them as stats entries to the Locust statistics<commit_after>
|
"""
Example of a manual_report() function that can be used either as a context manager
(with statement), or a decorator, to manually add entries to Locust's statistics.
Usage as a context manager:
with manual_report("stats entry name"):
# Run time of this block will be reported under a stats entry called "stats entry name"
# do stuff here, if an Exception is raised, it'll be reported as a failure
Usage as a decorator:
@task
@manual_report
def my_task(self):
# The run time of this task will be reported under a stats entry called "my task" (type "manual").
# If an Exception is raised, it'll be reported as a failure
"""
import random
from contextlib import contextmanager, ContextDecorator
from time import time, sleep
from locust import User, task, constant, events
@contextmanager
def _manual_report(name):
start_time = time()
try:
yield
except Exception as e:
events.request_failure.fire(
request_type="manual",
name=name,
response_time=(time() - start_time) * 1000,
response_length=0,
exception=e,
)
raise
else:
events.request_success.fire(
request_type="manual",
name=name,
response_time=(time() - start_time) * 1000,
response_length=0,
)
def manual_report(name_or_func):
if callable(name_or_func):
# used as decorator without name argument specified
return _manual_report(name_or_func.__name__)(name_or_func)
else:
return _manual_report(name_or_func)
class MyUser(User):
wait_time = constant(1)
@task
def successful_task(self):
with manual_report("successful_task"):
sleep(random.random())
@task
@manual_report
def decorator_test(self):
if random.random() > 0.5:
raise Exception("decorator_task failed")
sleep(random.random())
@task
def failing_task(self):
with manual_report("failing_task"):
sleep(random.random())
raise Exception("Oh nooes!")
|
Add example with a manual_report decorator/context manager that can be used to easily measure the time of functions/code blocks and manually report them as stats entries to the Locust statistics
"""
Example of a manual_report() function that can be used either as a context manager
(with statement), or a decorator, to manually add entries to Locust's statistics.
Usage as a context manager:
with manual_report("stats entry name"):
# Run time of this block will be reported under a stats entry called "stats entry name"
# do stuff here, if an Exception is raised, it'll be reported as a failure
Usage as a decorator:
@task
@manual_report
def my_task(self):
# The run time of this task will be reported under a stats entry called "my task" (type "manual").
# If an Exception is raised, it'll be reported as a failure
"""
import random
from contextlib import contextmanager, ContextDecorator
from time import time, sleep
from locust import User, task, constant, events
@contextmanager
def _manual_report(name):
start_time = time()
try:
yield
except Exception as e:
events.request_failure.fire(
request_type="manual",
name=name,
response_time=(time() - start_time) * 1000,
response_length=0,
exception=e,
)
raise
else:
events.request_success.fire(
request_type="manual",
name=name,
response_time=(time() - start_time) * 1000,
response_length=0,
)
def manual_report(name_or_func):
if callable(name_or_func):
# used as decorator without name argument specified
return _manual_report(name_or_func.__name__)(name_or_func)
else:
return _manual_report(name_or_func)
class MyUser(User):
wait_time = constant(1)
@task
def successful_task(self):
with manual_report("successful_task"):
sleep(random.random())
@task
@manual_report
def decorator_test(self):
if random.random() > 0.5:
raise Exception("decorator_task failed")
sleep(random.random())
@task
def failing_task(self):
with manual_report("failing_task"):
sleep(random.random())
raise Exception("Oh nooes!")
|
<commit_before><commit_msg>Add example with a manual_report decorator/context manager that can be used to easily measure the time of functions/code blocks and manually report them as stats entries to the Locust statistics<commit_after>"""
Example of a manual_report() function that can be used either as a context manager
(with statement), or a decorator, to manually add entries to Locust's statistics.
Usage as a context manager:
with manual_report("stats entry name"):
# Run time of this block will be reported under a stats entry called "stats entry name"
# do stuff here, if an Exception is raised, it'll be reported as a failure
Usage as a decorator:
@task
@manual_report
def my_task(self):
# The run time of this task will be reported under a stats entry called "my task" (type "manual").
# If an Exception is raised, it'll be reported as a failure
"""
import random
from contextlib import contextmanager, ContextDecorator
from time import time, sleep
from locust import User, task, constant, events
@contextmanager
def _manual_report(name):
start_time = time()
try:
yield
except Exception as e:
events.request_failure.fire(
request_type="manual",
name=name,
response_time=(time() - start_time) * 1000,
response_length=0,
exception=e,
)
raise
else:
events.request_success.fire(
request_type="manual",
name=name,
response_time=(time() - start_time) * 1000,
response_length=0,
)
def manual_report(name_or_func):
if callable(name_or_func):
# used as decorator without name argument specified
return _manual_report(name_or_func.__name__)(name_or_func)
else:
return _manual_report(name_or_func)
class MyUser(User):
wait_time = constant(1)
@task
def successful_task(self):
with manual_report("successful_task"):
sleep(random.random())
@task
@manual_report
def decorator_test(self):
if random.random() > 0.5:
raise Exception("decorator_task failed")
sleep(random.random())
@task
def failing_task(self):
with manual_report("failing_task"):
sleep(random.random())
raise Exception("Oh nooes!")
|
|
811eab1329a02a36244b7a8f79c23bf5bfce4dc6
|
spotpy/unittests/test_objectivefunctions.py
|
spotpy/unittests/test_objectivefunctions.py
|
import unittest
from spotpy import objectivefunctions as of
import numpy as np
#https://docs.python.org/3/library/unittest.html
class TestObjectiveFunctions(unittest.TestCase):
# How many digits to match in case of floating point answers
tolerance = 10
def setUp(self):
np.random.seed(42)
self.simulation = np.random.randn(10)
self.evaluation = np.random.randn(10)
print(self.simulation)
print(self.evaluation)
def test_bias(self):
res = of.bias(self.evaluation, self.simulation)
self.assertAlmostEqual(res, 1.2387193462811703, self.tolerance)
def test_length_mismatch_return_nan(self):
all_funcs = of._all_functions
for func in all_funcs:
res = func([0], [0, 1])
self.assertIs(res, np.nan, "Expected np.nan in length mismatch, Got {}".format(res))
if __name__ == '__main__':
unittest.main()
|
Add tests for bias and length mismatch
|
Add tests for bias and length mismatch
|
Python
|
mit
|
thouska/spotpy,thouska/spotpy,bees4ever/spotpy,thouska/spotpy,bees4ever/spotpy,bees4ever/spotpy
|
Add tests for bias and length mismatch
|
import unittest
from spotpy import objectivefunctions as of
import numpy as np
#https://docs.python.org/3/library/unittest.html
class TestObjectiveFunctions(unittest.TestCase):
# How many digits to match in case of floating point answers
tolerance = 10
def setUp(self):
np.random.seed(42)
self.simulation = np.random.randn(10)
self.evaluation = np.random.randn(10)
print(self.simulation)
print(self.evaluation)
def test_bias(self):
res = of.bias(self.evaluation, self.simulation)
self.assertAlmostEqual(res, 1.2387193462811703, self.tolerance)
def test_length_mismatch_return_nan(self):
all_funcs = of._all_functions
for func in all_funcs:
res = func([0], [0, 1])
self.assertIs(res, np.nan, "Expected np.nan in length mismatch, Got {}".format(res))
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add tests for bias and length mismatch<commit_after>
|
import unittest
from spotpy import objectivefunctions as of
import numpy as np
#https://docs.python.org/3/library/unittest.html
class TestObjectiveFunctions(unittest.TestCase):
# How many digits to match in case of floating point answers
tolerance = 10
def setUp(self):
np.random.seed(42)
self.simulation = np.random.randn(10)
self.evaluation = np.random.randn(10)
print(self.simulation)
print(self.evaluation)
def test_bias(self):
res = of.bias(self.evaluation, self.simulation)
self.assertAlmostEqual(res, 1.2387193462811703, self.tolerance)
def test_length_mismatch_return_nan(self):
all_funcs = of._all_functions
for func in all_funcs:
res = func([0], [0, 1])
self.assertIs(res, np.nan, "Expected np.nan in length mismatch, Got {}".format(res))
if __name__ == '__main__':
unittest.main()
|
Add tests for bias and length mismatch
import unittest
from spotpy import objectivefunctions as of
import numpy as np
#https://docs.python.org/3/library/unittest.html
class TestObjectiveFunctions(unittest.TestCase):
# How many digits to match in case of floating point answers
tolerance = 10
def setUp(self):
np.random.seed(42)
self.simulation = np.random.randn(10)
self.evaluation = np.random.randn(10)
print(self.simulation)
print(self.evaluation)
def test_bias(self):
res = of.bias(self.evaluation, self.simulation)
self.assertAlmostEqual(res, 1.2387193462811703, self.tolerance)
def test_length_mismatch_return_nan(self):
all_funcs = of._all_functions
for func in all_funcs:
res = func([0], [0, 1])
self.assertIs(res, np.nan, "Expected np.nan in length mismatch, Got {}".format(res))
if __name__ == '__main__':
unittest.main()
|
<commit_before><commit_msg>Add tests for bias and length mismatch<commit_after>import unittest
from spotpy import objectivefunctions as of
import numpy as np
#https://docs.python.org/3/library/unittest.html
class TestObjectiveFunctions(unittest.TestCase):
# How many digits to match in case of floating point answers
tolerance = 10
def setUp(self):
np.random.seed(42)
self.simulation = np.random.randn(10)
self.evaluation = np.random.randn(10)
print(self.simulation)
print(self.evaluation)
def test_bias(self):
res = of.bias(self.evaluation, self.simulation)
self.assertAlmostEqual(res, 1.2387193462811703, self.tolerance)
def test_length_mismatch_return_nan(self):
all_funcs = of._all_functions
for func in all_funcs:
res = func([0], [0, 1])
self.assertIs(res, np.nan, "Expected np.nan in length mismatch, Got {}".format(res))
if __name__ == '__main__':
unittest.main()
|
|
ee35ffbaef1990dfbedbb50d3d94eb9e4ae4a298
|
migrations/versions/1f8e3cf51fbc_.py
|
migrations/versions/1f8e3cf51fbc_.py
|
"""Add a creator_id to streams
Revision ID: 1f8e3cf51fbc
Revises: 70c7d046881
Create Date: 2013-12-08 16:55:14.142000
"""
# revision identifiers, used by Alembic.
revision = '1f8e3cf51fbc'
down_revision = '70c7d046881'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('stream', sa.Column('creator_id', sa.Integer(), nullable=False))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('stream', 'creator_id')
### end Alembic commands ###
|
Add creator_id migration for streams
|
Add creator_id migration for streams
|
Python
|
mit
|
streamr/marvin,streamr/marvin,streamr/marvin
|
Add creator_id migration for streams
|
"""Add a creator_id to streams
Revision ID: 1f8e3cf51fbc
Revises: 70c7d046881
Create Date: 2013-12-08 16:55:14.142000
"""
# revision identifiers, used by Alembic.
revision = '1f8e3cf51fbc'
down_revision = '70c7d046881'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('stream', sa.Column('creator_id', sa.Integer(), nullable=False))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('stream', 'creator_id')
### end Alembic commands ###
|
<commit_before><commit_msg>Add creator_id migration for streams<commit_after>
|
"""Add a creator_id to streams
Revision ID: 1f8e3cf51fbc
Revises: 70c7d046881
Create Date: 2013-12-08 16:55:14.142000
"""
# revision identifiers, used by Alembic.
revision = '1f8e3cf51fbc'
down_revision = '70c7d046881'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('stream', sa.Column('creator_id', sa.Integer(), nullable=False))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('stream', 'creator_id')
### end Alembic commands ###
|
Add creator_id migration for streams"""Add a creator_id to streams
Revision ID: 1f8e3cf51fbc
Revises: 70c7d046881
Create Date: 2013-12-08 16:55:14.142000
"""
# revision identifiers, used by Alembic.
revision = '1f8e3cf51fbc'
down_revision = '70c7d046881'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('stream', sa.Column('creator_id', sa.Integer(), nullable=False))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('stream', 'creator_id')
### end Alembic commands ###
|
<commit_before><commit_msg>Add creator_id migration for streams<commit_after>"""Add a creator_id to streams
Revision ID: 1f8e3cf51fbc
Revises: 70c7d046881
Create Date: 2013-12-08 16:55:14.142000
"""
# revision identifiers, used by Alembic.
revision = '1f8e3cf51fbc'
down_revision = '70c7d046881'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('stream', sa.Column('creator_id', sa.Integer(), nullable=False))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('stream', 'creator_id')
### end Alembic commands ###
|
|
019a520ae3672e2bac791d1606e5d905700e21c0
|
comics/crawlers/yafgc.py
|
comics/crawlers/yafgc.py
|
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Yet Another Fantasy Gamer Comic'
language = 'en'
url = 'http://yafgc.shipsinker.com/'
start_date = '2006-05-29'
history_capable_date = '2006-05-29'
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = -8
rights = 'Rich Morris'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.url = 'http://yafgc.shipsinker.com/istrip_files/strips/%(date)s.jpg' % {
'date': self.pub_date.strftime('%Y%m%d'),
}
|
Add crawler for 'Yet Another Fantasy Gamer Comic'
|
Add crawler for 'Yet Another Fantasy Gamer Comic'
|
Python
|
agpl-3.0
|
jodal/comics,klette/comics,datagutten/comics,datagutten/comics,jodal/comics,datagutten/comics,jodal/comics,klette/comics,datagutten/comics,klette/comics,jodal/comics
|
Add crawler for 'Yet Another Fantasy Gamer Comic'
|
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Yet Another Fantasy Gamer Comic'
language = 'en'
url = 'http://yafgc.shipsinker.com/'
start_date = '2006-05-29'
history_capable_date = '2006-05-29'
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = -8
rights = 'Rich Morris'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.url = 'http://yafgc.shipsinker.com/istrip_files/strips/%(date)s.jpg' % {
'date': self.pub_date.strftime('%Y%m%d'),
}
|
<commit_before><commit_msg>Add crawler for 'Yet Another Fantasy Gamer Comic'<commit_after>
|
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Yet Another Fantasy Gamer Comic'
language = 'en'
url = 'http://yafgc.shipsinker.com/'
start_date = '2006-05-29'
history_capable_date = '2006-05-29'
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = -8
rights = 'Rich Morris'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.url = 'http://yafgc.shipsinker.com/istrip_files/strips/%(date)s.jpg' % {
'date': self.pub_date.strftime('%Y%m%d'),
}
|
Add crawler for 'Yet Another Fantasy Gamer Comic'
from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Yet Another Fantasy Gamer Comic'
language = 'en'
url = 'http://yafgc.shipsinker.com/'
start_date = '2006-05-29'
history_capable_date = '2006-05-29'
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = -8
rights = 'Rich Morris'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.url = 'http://yafgc.shipsinker.com/istrip_files/strips/%(date)s.jpg' % {
'date': self.pub_date.strftime('%Y%m%d'),
}
|
<commit_before><commit_msg>Add crawler for 'Yet Another Fantasy Gamer Comic'<commit_after>from comics.crawler.base import BaseComicCrawler
from comics.crawler.meta import BaseComicMeta
class ComicMeta(BaseComicMeta):
name = 'Yet Another Fantasy Gamer Comic'
language = 'en'
url = 'http://yafgc.shipsinker.com/'
start_date = '2006-05-29'
history_capable_date = '2006-05-29'
schedule = 'Mo,Tu,We,Th,Fr,Sa,Su'
time_zone = -8
rights = 'Rich Morris'
class ComicCrawler(BaseComicCrawler):
def _get_url(self):
self.url = 'http://yafgc.shipsinker.com/istrip_files/strips/%(date)s.jpg' % {
'date': self.pub_date.strftime('%Y%m%d'),
}
|