commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e85d0b100bd907ddf82f4c5e132690000d8cb4a0 | smart_open/__init__.py | smart_open/__init__.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
logger = logging.getLogger(__name__)
if len(logger.handlers) == 0:
logger.addHandler(logging.NullHandler())
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
| Revert "Configure logging handlers before submodule imports" | Revert "Configure logging handlers before submodule imports"
This reverts commit d9ce6cc440019ecfc73f1c82e41da4e9ce02a234.
| Python | mit | RaRe-Technologies/smart_open,RaRe-Technologies/smart_open,piskvorky/smart_open | # -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
logger = logging.getLogger(__name__)
if len(logger.handlers) == 0:
logger.addHandler(logging.NullHandler())
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
Revert "Configure logging handlers before submodule imports"
This reverts commit d9ce6cc440019ecfc73f1c82e41da4e9ce02a234. | # -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
logger = logging.getLogger(__name__)
if len(logger.handlers) == 0:
logger.addHandler(logging.NullHandler())
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
<commit_msg>Revert "Configure logging handlers before submodule imports"
This reverts commit d9ce6cc440019ecfc73f1c82e41da4e9ce02a234.<commit_after> | # -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
logger = logging.getLogger(__name__)
if len(logger.handlers) == 0:
logger.addHandler(logging.NullHandler())
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
Revert "Configure logging handlers before submodule imports"
This reverts commit d9ce6cc440019ecfc73f1c82e41da4e9ce02a234.# -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
logger = logging.getLogger(__name__)
if len(logger.handlers) == 0:
logger.addHandler(logging.NullHandler())
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
<commit_msg>Revert "Configure logging handlers before submodule imports"
This reverts commit d9ce6cc440019ecfc73f1c82e41da4e9ce02a234.<commit_after># -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
|
4bc34f0b8adfc22037475cbdafbb149b7ea88421 | tests/test_utils.py | tests/test_utils.py | import sys
from girder_worker.utils import TeeStdOutCustomWrite, TeeStdErrCustomWrite, JobManager
def test_TeeStdOutCustomeWrite(capfd):
_nonlocal = {'data': ''}
def _append_to_data(message, **kwargs):
_nonlocal['data'] += message
with TeeStdOutCustomWrite(_append_to_data):
sys.stdout.write('Test String')
sys.stdout.flush()
assert _nonlocal['data'] == 'Test String'
| import sys
from girder_worker.utils import TeeStdOutCustomWrite
def test_TeeStdOutCustomWrite(capfd):
nonlocal_ = {'data': ''}
def _append_to_data(message, **kwargs):
nonlocal_['data'] += message
with TeeStdOutCustomWrite(_append_to_data):
sys.stdout.write('Test String')
sys.stdout.flush()
assert nonlocal_['data'] == 'Test String'
out, err = capfd.readouterr()
assert out == 'Test String'
| Test TeeStdOutCustomWrite writes through to stdout | Test TeeStdOutCustomWrite writes through to stdout
| Python | apache-2.0 | girder/girder_worker,girder/girder_worker,girder/girder_worker | import sys
from girder_worker.utils import TeeStdOutCustomWrite, TeeStdErrCustomWrite, JobManager
def test_TeeStdOutCustomeWrite(capfd):
_nonlocal = {'data': ''}
def _append_to_data(message, **kwargs):
_nonlocal['data'] += message
with TeeStdOutCustomWrite(_append_to_data):
sys.stdout.write('Test String')
sys.stdout.flush()
assert _nonlocal['data'] == 'Test String'
Test TeeStdOutCustomWrite writes through to stdout | import sys
from girder_worker.utils import TeeStdOutCustomWrite
def test_TeeStdOutCustomWrite(capfd):
nonlocal_ = {'data': ''}
def _append_to_data(message, **kwargs):
nonlocal_['data'] += message
with TeeStdOutCustomWrite(_append_to_data):
sys.stdout.write('Test String')
sys.stdout.flush()
assert nonlocal_['data'] == 'Test String'
out, err = capfd.readouterr()
assert out == 'Test String'
| <commit_before>import sys
from girder_worker.utils import TeeStdOutCustomWrite, TeeStdErrCustomWrite, JobManager
def test_TeeStdOutCustomeWrite(capfd):
_nonlocal = {'data': ''}
def _append_to_data(message, **kwargs):
_nonlocal['data'] += message
with TeeStdOutCustomWrite(_append_to_data):
sys.stdout.write('Test String')
sys.stdout.flush()
assert _nonlocal['data'] == 'Test String'
<commit_msg>Test TeeStdOutCustomWrite writes through to stdout<commit_after> | import sys
from girder_worker.utils import TeeStdOutCustomWrite
def test_TeeStdOutCustomWrite(capfd):
nonlocal_ = {'data': ''}
def _append_to_data(message, **kwargs):
nonlocal_['data'] += message
with TeeStdOutCustomWrite(_append_to_data):
sys.stdout.write('Test String')
sys.stdout.flush()
assert nonlocal_['data'] == 'Test String'
out, err = capfd.readouterr()
assert out == 'Test String'
| import sys
from girder_worker.utils import TeeStdOutCustomWrite, TeeStdErrCustomWrite, JobManager
def test_TeeStdOutCustomeWrite(capfd):
_nonlocal = {'data': ''}
def _append_to_data(message, **kwargs):
_nonlocal['data'] += message
with TeeStdOutCustomWrite(_append_to_data):
sys.stdout.write('Test String')
sys.stdout.flush()
assert _nonlocal['data'] == 'Test String'
Test TeeStdOutCustomWrite writes through to stdoutimport sys
from girder_worker.utils import TeeStdOutCustomWrite
def test_TeeStdOutCustomWrite(capfd):
nonlocal_ = {'data': ''}
def _append_to_data(message, **kwargs):
nonlocal_['data'] += message
with TeeStdOutCustomWrite(_append_to_data):
sys.stdout.write('Test String')
sys.stdout.flush()
assert nonlocal_['data'] == 'Test String'
out, err = capfd.readouterr()
assert out == 'Test String'
| <commit_before>import sys
from girder_worker.utils import TeeStdOutCustomWrite, TeeStdErrCustomWrite, JobManager
def test_TeeStdOutCustomeWrite(capfd):
_nonlocal = {'data': ''}
def _append_to_data(message, **kwargs):
_nonlocal['data'] += message
with TeeStdOutCustomWrite(_append_to_data):
sys.stdout.write('Test String')
sys.stdout.flush()
assert _nonlocal['data'] == 'Test String'
<commit_msg>Test TeeStdOutCustomWrite writes through to stdout<commit_after>import sys
from girder_worker.utils import TeeStdOutCustomWrite
def test_TeeStdOutCustomWrite(capfd):
nonlocal_ = {'data': ''}
def _append_to_data(message, **kwargs):
nonlocal_['data'] += message
with TeeStdOutCustomWrite(_append_to_data):
sys.stdout.write('Test String')
sys.stdout.flush()
assert nonlocal_['data'] == 'Test String'
out, err = capfd.readouterr()
assert out == 'Test String'
|
08116d65cad849a2b031f960ef5da29e7802f4b7 | tests/unit_tests.py | tests/unit_tests.py | import os
import sys
import unittest
import tempfile
sys.path.insert(0, os.environ.get('BLOG_PATH'))
from blog.blog import *
class BlogUnitTestCase(unittest.TestCase):
def test_connect_db(self):
db = connect_db()
assert isinstance(db, sqlite3.Connection)
def test_get_db(self):
self.db, app.config['DATABASE'] = tempfile.mkstemp()
app.config['TESTING'] = True
self.app = app.test_client()
with app.app_context():
db = get_db()
assert isinstance(db, sqlite3.Connection)
def schema():
return db.execute("SELECT * FROM sqlite_master").fetchall()
assert len(schema()) == 0
init = migrate_db()
assert len(schema()) == 11
if __name__ == '__main__':
unittest.main()
| import os
import sys
import unittest
import tempfile
sys.path.insert(0, os.environ.get('BLOG_PATH'))
from blog.blog import *
class BlogUnitTestCase(unittest.TestCase):
def test_connect_db(self):
db = connect_db()
assert isinstance(db, sqlite3.Connection)
def test_get_db(self):
self.db, app.config['DATABASE'] = tempfile.mkstemp()
app.config['TESTING'] = True
self.app = app.test_client()
with app.app_context():
db = get_db()
assert isinstance(db, sqlite3.Connection)
def schema():
return db.execute("SELECT * FROM sqlite_master").fetchall()
assert len(schema()) == 0
init = migrate_db()
assert len(schema()) == 21
if __name__ == '__main__':
unittest.main()
| Update count of things in DB | Update count of things in DB
| Python | agpl-3.0 | levlaz/blog,levlaz/blog,levlaz/blog,levlaz/blog | import os
import sys
import unittest
import tempfile
sys.path.insert(0, os.environ.get('BLOG_PATH'))
from blog.blog import *
class BlogUnitTestCase(unittest.TestCase):
def test_connect_db(self):
db = connect_db()
assert isinstance(db, sqlite3.Connection)
def test_get_db(self):
self.db, app.config['DATABASE'] = tempfile.mkstemp()
app.config['TESTING'] = True
self.app = app.test_client()
with app.app_context():
db = get_db()
assert isinstance(db, sqlite3.Connection)
def schema():
return db.execute("SELECT * FROM sqlite_master").fetchall()
assert len(schema()) == 0
init = migrate_db()
assert len(schema()) == 11
if __name__ == '__main__':
unittest.main()
Update count of things in DB | import os
import sys
import unittest
import tempfile
sys.path.insert(0, os.environ.get('BLOG_PATH'))
from blog.blog import *
class BlogUnitTestCase(unittest.TestCase):
def test_connect_db(self):
db = connect_db()
assert isinstance(db, sqlite3.Connection)
def test_get_db(self):
self.db, app.config['DATABASE'] = tempfile.mkstemp()
app.config['TESTING'] = True
self.app = app.test_client()
with app.app_context():
db = get_db()
assert isinstance(db, sqlite3.Connection)
def schema():
return db.execute("SELECT * FROM sqlite_master").fetchall()
assert len(schema()) == 0
init = migrate_db()
assert len(schema()) == 21
if __name__ == '__main__':
unittest.main()
| <commit_before>import os
import sys
import unittest
import tempfile
sys.path.insert(0, os.environ.get('BLOG_PATH'))
from blog.blog import *
class BlogUnitTestCase(unittest.TestCase):
def test_connect_db(self):
db = connect_db()
assert isinstance(db, sqlite3.Connection)
def test_get_db(self):
self.db, app.config['DATABASE'] = tempfile.mkstemp()
app.config['TESTING'] = True
self.app = app.test_client()
with app.app_context():
db = get_db()
assert isinstance(db, sqlite3.Connection)
def schema():
return db.execute("SELECT * FROM sqlite_master").fetchall()
assert len(schema()) == 0
init = migrate_db()
assert len(schema()) == 11
if __name__ == '__main__':
unittest.main()
<commit_msg>Update count of things in DB<commit_after> | import os
import sys
import unittest
import tempfile
sys.path.insert(0, os.environ.get('BLOG_PATH'))
from blog.blog import *
class BlogUnitTestCase(unittest.TestCase):
def test_connect_db(self):
db = connect_db()
assert isinstance(db, sqlite3.Connection)
def test_get_db(self):
self.db, app.config['DATABASE'] = tempfile.mkstemp()
app.config['TESTING'] = True
self.app = app.test_client()
with app.app_context():
db = get_db()
assert isinstance(db, sqlite3.Connection)
def schema():
return db.execute("SELECT * FROM sqlite_master").fetchall()
assert len(schema()) == 0
init = migrate_db()
assert len(schema()) == 21
if __name__ == '__main__':
unittest.main()
| import os
import sys
import unittest
import tempfile
sys.path.insert(0, os.environ.get('BLOG_PATH'))
from blog.blog import *
class BlogUnitTestCase(unittest.TestCase):
def test_connect_db(self):
db = connect_db()
assert isinstance(db, sqlite3.Connection)
def test_get_db(self):
self.db, app.config['DATABASE'] = tempfile.mkstemp()
app.config['TESTING'] = True
self.app = app.test_client()
with app.app_context():
db = get_db()
assert isinstance(db, sqlite3.Connection)
def schema():
return db.execute("SELECT * FROM sqlite_master").fetchall()
assert len(schema()) == 0
init = migrate_db()
assert len(schema()) == 11
if __name__ == '__main__':
unittest.main()
Update count of things in DBimport os
import sys
import unittest
import tempfile
sys.path.insert(0, os.environ.get('BLOG_PATH'))
from blog.blog import *
class BlogUnitTestCase(unittest.TestCase):
def test_connect_db(self):
db = connect_db()
assert isinstance(db, sqlite3.Connection)
def test_get_db(self):
self.db, app.config['DATABASE'] = tempfile.mkstemp()
app.config['TESTING'] = True
self.app = app.test_client()
with app.app_context():
db = get_db()
assert isinstance(db, sqlite3.Connection)
def schema():
return db.execute("SELECT * FROM sqlite_master").fetchall()
assert len(schema()) == 0
init = migrate_db()
assert len(schema()) == 21
if __name__ == '__main__':
unittest.main()
| <commit_before>import os
import sys
import unittest
import tempfile
sys.path.insert(0, os.environ.get('BLOG_PATH'))
from blog.blog import *
class BlogUnitTestCase(unittest.TestCase):
def test_connect_db(self):
db = connect_db()
assert isinstance(db, sqlite3.Connection)
def test_get_db(self):
self.db, app.config['DATABASE'] = tempfile.mkstemp()
app.config['TESTING'] = True
self.app = app.test_client()
with app.app_context():
db = get_db()
assert isinstance(db, sqlite3.Connection)
def schema():
return db.execute("SELECT * FROM sqlite_master").fetchall()
assert len(schema()) == 0
init = migrate_db()
assert len(schema()) == 11
if __name__ == '__main__':
unittest.main()
<commit_msg>Update count of things in DB<commit_after>import os
import sys
import unittest
import tempfile
sys.path.insert(0, os.environ.get('BLOG_PATH'))
from blog.blog import *
class BlogUnitTestCase(unittest.TestCase):
def test_connect_db(self):
db = connect_db()
assert isinstance(db, sqlite3.Connection)
def test_get_db(self):
self.db, app.config['DATABASE'] = tempfile.mkstemp()
app.config['TESTING'] = True
self.app = app.test_client()
with app.app_context():
db = get_db()
assert isinstance(db, sqlite3.Connection)
def schema():
return db.execute("SELECT * FROM sqlite_master").fetchall()
assert len(schema()) == 0
init = migrate_db()
assert len(schema()) == 21
if __name__ == '__main__':
unittest.main()
|
ff6502fd8ecc4ee28cb05cb7ee8f11f75240ce47 | mini_project.py | mini_project.py | # Load the machine
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
# print(BPMS[BPM].pv())
print caget(BPMS[BPM].pv())
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
| # Load the machine
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
pvs = BPMS[BPM].pv()
print('PV names: {} PV values: {}'.format(pvs, caget(pvs)))
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
| Make the difference between PV names and values clearer. | Make the difference between PV names and values clearer.
| Python | apache-2.0 | razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects,razvanvasile/Work-Mini-Projects | # Load the machine
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
# print(BPMS[BPM].pv())
print caget(BPMS[BPM].pv())
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
Make the difference between PV names and values clearer. | # Load the machine
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
pvs = BPMS[BPM].pv()
print('PV names: {} PV values: {}'.format(pvs, caget(pvs)))
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
| <commit_before># Load the machine
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
# print(BPMS[BPM].pv())
print caget(BPMS[BPM].pv())
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
<commit_msg>Make the difference between PV names and values clearer.<commit_after> | # Load the machine
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
pvs = BPMS[BPM].pv()
print('PV names: {} PV values: {}'.format(pvs, caget(pvs)))
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
| # Load the machine
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
# print(BPMS[BPM].pv())
print caget(BPMS[BPM].pv())
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
Make the difference between PV names and values clearer.# Load the machine
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
pvs = BPMS[BPM].pv()
print('PV names: {} PV values: {}'.format(pvs, caget(pvs)))
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
| <commit_before># Load the machine
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
# print(BPMS[BPM].pv())
print caget(BPMS[BPM].pv())
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
<commit_msg>Make the difference between PV names and values clearer.<commit_after># Load the machine
import pkg_resources
pkg_resources.require('aphla')
import aphla as ap
# Other stuff
from cothread.catools import caget, caput, ca_nothing
# Load the machine
ap.machines.load('SRI21')
# First task
BPMS = ap.getElements('BPM')
print('There are {} BPM elements in the machine.'.format(len(BPMS)))
# Second task
print('A list of all the PV names for all BPMS')
for BPM in range(len(BPMS)):
pvs = BPMS[BPM].pv()
print('PV names: {} PV values: {}'.format(pvs, caget(pvs)))
# Third task
QUADS = ap.getElements('QUAD')
print('String values for the setpoint currents')
#for QUAD in range(len(QUADS)):
# print(caget(QUADS[QUAD].pv(handle='readback')))
|
c1c66caa31506190944c8bd37df6fe056b60c0e2 | vote.py | vote.py | import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = dict()
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
options[k] = e.task_runs_df[t.id]['msgid'][k]
t.info['msgid_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
| import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = []
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
option = dict(task_run_id=None, msgid=None)
option['task_run_id'] = k
option['msgid'] = e.task_runs_df[t.id]['msgid'][k]
options.append(option)
t.info['msgid_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
| Save the tasks in the proper way. | Save the tasks in the proper way.
| Python | agpl-3.0 | PyBossa/app-translations | import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = dict()
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
options[k] = e.task_runs_df[t.id]['msgid'][k]
t.info['msgid_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
Save the tasks in the proper way. | import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = []
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
option = dict(task_run_id=None, msgid=None)
option['task_run_id'] = k
option['msgid'] = e.task_runs_df[t.id]['msgid'][k]
options.append(option)
t.info['msgid_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
| <commit_before>import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = dict()
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
options[k] = e.task_runs_df[t.id]['msgid'][k]
t.info['msgid_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
<commit_msg>Save the tasks in the proper way.<commit_after> | import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = []
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
option = dict(task_run_id=None, msgid=None)
option['task_run_id'] = k
option['msgid'] = e.task_runs_df[t.id]['msgid'][k]
options.append(option)
t.info['msgid_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
| import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = dict()
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
options[k] = e.task_runs_df[t.id]['msgid'][k]
t.info['msgid_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
Save the tasks in the proper way.import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = []
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
option = dict(task_run_id=None, msgid=None)
option['task_run_id'] = k
option['msgid'] = e.task_runs_df[t.id]['msgid'][k]
options.append(option)
t.info['msgid_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
| <commit_before>import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = dict()
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
options[k] = e.task_runs_df[t.id]['msgid'][k]
t.info['msgid_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
<commit_msg>Save the tasks in the proper way.<commit_after>import enki
import json
e = enki.Enki('key', 'http://localhost:5001', 'translations')
e.get_all()
tasks = []
for t in e.tasks:
options = []
i = 0
for k in e.task_runs_df[t.id]['msgid'].keys():
option = dict(task_run_id=None, msgid=None)
option['task_run_id'] = k
option['msgid'] = e.task_runs_df[t.id]['msgid'][k]
options.append(option)
t.info['msgid_options'] = options
tasks.append(t.info)
file = open('/tmp/translations_voting_tasks.json', 'w')
file.write(json.dumps(tasks))
file.close()
|
01c5a8d0f7e6540995817fecfee124f1859fa448 | mistraldashboard/executions/views.py | mistraldashboard/executions/views.py | # -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from horizon import tables
from mistraldashboard import api
from mistraldashboard.executions.tables import ExecutionsTable
from mistraldashboard.executions.tables import TaskTable
class IndexView(tables.DataTableView):
table_class = ExecutionsTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.execution_list(self.request)
class TaskView(tables.DataTableView):
table_class = TaskTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.task_list(self.kwargs['execution_id'])
| # -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from horizon import tables
from mistraldashboard import api
from mistraldashboard.executions.tables import ExecutionsTable
from mistraldashboard.executions.tables import TaskTable
class IndexView(tables.DataTableView):
table_class = ExecutionsTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.execution_list(self.request)
class TaskView(tables.DataTableView):
table_class = TaskTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.task_list(self.request, self.kwargs['execution_id'])
| Fix errors when user click execution id | Fix errors when user click execution id
Missing request argument when calling task list
Change-Id: Idc282c267fc99513dd96a80f6f4a745bfefec2c8
Closes-Bug: #1471778
| Python | apache-2.0 | openstack/mistral-dashboard,openstack/mistral-dashboard,openstack/mistral-dashboard | # -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from horizon import tables
from mistraldashboard import api
from mistraldashboard.executions.tables import ExecutionsTable
from mistraldashboard.executions.tables import TaskTable
class IndexView(tables.DataTableView):
table_class = ExecutionsTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.execution_list(self.request)
class TaskView(tables.DataTableView):
table_class = TaskTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.task_list(self.kwargs['execution_id'])
Fix errors when user click execution id
Missing request argument when calling task list
Change-Id: Idc282c267fc99513dd96a80f6f4a745bfefec2c8
Closes-Bug: #1471778 | # -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from horizon import tables
from mistraldashboard import api
from mistraldashboard.executions.tables import ExecutionsTable
from mistraldashboard.executions.tables import TaskTable
class IndexView(tables.DataTableView):
table_class = ExecutionsTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.execution_list(self.request)
class TaskView(tables.DataTableView):
table_class = TaskTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.task_list(self.request, self.kwargs['execution_id'])
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from horizon import tables
from mistraldashboard import api
from mistraldashboard.executions.tables import ExecutionsTable
from mistraldashboard.executions.tables import TaskTable
class IndexView(tables.DataTableView):
table_class = ExecutionsTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.execution_list(self.request)
class TaskView(tables.DataTableView):
table_class = TaskTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.task_list(self.kwargs['execution_id'])
<commit_msg>Fix errors when user click execution id
Missing request argument when calling task list
Change-Id: Idc282c267fc99513dd96a80f6f4a745bfefec2c8
Closes-Bug: #1471778<commit_after> | # -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from horizon import tables
from mistraldashboard import api
from mistraldashboard.executions.tables import ExecutionsTable
from mistraldashboard.executions.tables import TaskTable
class IndexView(tables.DataTableView):
table_class = ExecutionsTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.execution_list(self.request)
class TaskView(tables.DataTableView):
table_class = TaskTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.task_list(self.request, self.kwargs['execution_id'])
| # -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from horizon import tables
from mistraldashboard import api
from mistraldashboard.executions.tables import ExecutionsTable
from mistraldashboard.executions.tables import TaskTable
class IndexView(tables.DataTableView):
table_class = ExecutionsTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.execution_list(self.request)
class TaskView(tables.DataTableView):
table_class = TaskTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.task_list(self.kwargs['execution_id'])
Fix errors when user click execution id
Missing request argument when calling task list
Change-Id: Idc282c267fc99513dd96a80f6f4a745bfefec2c8
Closes-Bug: #1471778# -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from horizon import tables
from mistraldashboard import api
from mistraldashboard.executions.tables import ExecutionsTable
from mistraldashboard.executions.tables import TaskTable
class IndexView(tables.DataTableView):
table_class = ExecutionsTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.execution_list(self.request)
class TaskView(tables.DataTableView):
table_class = TaskTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.task_list(self.request, self.kwargs['execution_id'])
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from horizon import tables
from mistraldashboard import api
from mistraldashboard.executions.tables import ExecutionsTable
from mistraldashboard.executions.tables import TaskTable
class IndexView(tables.DataTableView):
table_class = ExecutionsTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.execution_list(self.request)
class TaskView(tables.DataTableView):
table_class = TaskTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.task_list(self.kwargs['execution_id'])
<commit_msg>Fix errors when user click execution id
Missing request argument when calling task list
Change-Id: Idc282c267fc99513dd96a80f6f4a745bfefec2c8
Closes-Bug: #1471778<commit_after># -*- coding: utf-8 -*-
#
# Copyright 2014 - StackStorm, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from horizon import tables
from mistraldashboard import api
from mistraldashboard.executions.tables import ExecutionsTable
from mistraldashboard.executions.tables import TaskTable
class IndexView(tables.DataTableView):
table_class = ExecutionsTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.execution_list(self.request)
class TaskView(tables.DataTableView):
table_class = TaskTable
template_name = 'mistral/executions/index.html'
def get_data(self):
return api.task_list(self.request, self.kwargs['execution_id'])
|
10360cc4d956faac194c58eb3b52fae2b348b356 | links/functions.py | links/functions.py | from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
print url
req = BasicHttp(url)
print req
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['headers']['Content-Type'],
'file_size': res['headers']['Content-Length']
}
return data
| from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
req = BasicHttp(url)
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['header']['Content-Type'],
'file_size': res['header']['Content-Length']
}
return data
| Remove of debug prints. Fix of invalid key name. | Remove of debug prints.
Fix of invalid key name.
| Python | bsd-3-clause | nachopro/followlink,nachopro/followlink | from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
print url
req = BasicHttp(url)
print req
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['headers']['Content-Type'],
'file_size': res['headers']['Content-Length']
}
return data
Remove of debug prints.
Fix of invalid key name. | from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
req = BasicHttp(url)
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['header']['Content-Type'],
'file_size': res['header']['Content-Length']
}
return data
| <commit_before>from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
print url
req = BasicHttp(url)
print req
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['headers']['Content-Type'],
'file_size': res['headers']['Content-Length']
}
return data
<commit_msg>Remove of debug prints.
Fix of invalid key name.<commit_after> | from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
req = BasicHttp(url)
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['header']['Content-Type'],
'file_size': res['header']['Content-Length']
}
return data
| from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
print url
req = BasicHttp(url)
print req
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['headers']['Content-Type'],
'file_size': res['headers']['Content-Length']
}
return data
Remove of debug prints.
Fix of invalid key name.from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
req = BasicHttp(url)
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['header']['Content-Type'],
'file_size': res['header']['Content-Length']
}
return data
| <commit_before>from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
print url
req = BasicHttp(url)
print req
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['headers']['Content-Type'],
'file_size': res['headers']['Content-Length']
}
return data
<commit_msg>Remove of debug prints.
Fix of invalid key name.<commit_after>from uuid import uuid4
from random import randint
from basic_http import BasicHttp
def random_pk(length=8):
s = uuid4().hex
f = randint(0, (len(s) - length))
t = f + length
return s[f:t]
def get_url_info(url):
req = BasicHttp(url)
res = req.HEAD()
data = {
'file_name': url.split('/')[-1],
'content_type': res['header']['Content-Type'],
'file_size': res['header']['Content-Length']
}
return data
|
72384b3f06d4c68a94805e101f6cf4f820157834 | process.py | process.py | # encoding: utf-8
import sys
lines = sys.stdin.readlines()
# Contact details are expected to begin on the fourth line, following the
# header and a blank line, and extend until the next blank line. Lines with
# bullets (•) will be split into separate lines.
contact_lines = []
for line in lines[3:]:
lines.remove(line)
parts = [x.strip() for x in line.split("•")]
if parts == ['']:
break
contact_lines.extend(parts)
if '--tex' in sys.argv:
lines.insert(0, "\\begin{nospace}\\begin{flushright}\n" +
"\n\n".join(contact_lines) +
"\n\\end{flushright}\\end{nospace}\n")
print "".join(lines)
if '--html' in sys.argv:
lines.insert(0, "<div id='container'><div id='contact'>%s</div>\n" %
"<br>".join(contact_lines))
lines.insert(1, "<div>")
lines.append("</div>")
print "".join(lines)
| # encoding: utf-8
import sys
lines = sys.stdin.readlines()
# Contact details are expected to begin on the fourth line, following the
# header and a blank line, and extend until the next blank line. Lines with
# bullets (•) will be split into separate lines.
contact_lines = []
for line in lines[3:]:
lines.remove(line)
parts = [x.strip() for x in line.split("•")]
if parts == ['']:
break
contact_lines.extend(parts)
if '--tex' in sys.argv:
lines.insert(0, "\\begin{nospace}\\begin{flushright}\n" +
"\n\n".join(contact_lines) +
"\n\\end{flushright}\\end{nospace}\n")
print "".join(lines).replace('~', '$\sim$')
if '--html' in sys.argv:
lines.insert(0, "<div id='container'><div id='contact'>%s</div>\n" %
"<br>".join(contact_lines))
lines.insert(1, "<div>")
lines.append("</div>")
print "".join(lines)
| Fix display of tildes in PDF output. | Fix display of tildes in PDF output. | Python | apache-2.0 | davidbradway/resume,davidbradway/resume | # encoding: utf-8
import sys
lines = sys.stdin.readlines()
# Contact details are expected to begin on the fourth line, following the
# header and a blank line, and extend until the next blank line. Lines with
# bullets (•) will be split into separate lines.
contact_lines = []
for line in lines[3:]:
lines.remove(line)
parts = [x.strip() for x in line.split("•")]
if parts == ['']:
break
contact_lines.extend(parts)
if '--tex' in sys.argv:
lines.insert(0, "\\begin{nospace}\\begin{flushright}\n" +
"\n\n".join(contact_lines) +
"\n\\end{flushright}\\end{nospace}\n")
print "".join(lines)
if '--html' in sys.argv:
lines.insert(0, "<div id='container'><div id='contact'>%s</div>\n" %
"<br>".join(contact_lines))
lines.insert(1, "<div>")
lines.append("</div>")
print "".join(lines)
Fix display of tildes in PDF output. | # encoding: utf-8
import sys
lines = sys.stdin.readlines()
# Contact details are expected to begin on the fourth line, following the
# header and a blank line, and extend until the next blank line. Lines with
# bullets (•) will be split into separate lines.
contact_lines = []
for line in lines[3:]:
lines.remove(line)
parts = [x.strip() for x in line.split("•")]
if parts == ['']:
break
contact_lines.extend(parts)
if '--tex' in sys.argv:
lines.insert(0, "\\begin{nospace}\\begin{flushright}\n" +
"\n\n".join(contact_lines) +
"\n\\end{flushright}\\end{nospace}\n")
print "".join(lines).replace('~', '$\sim$')
if '--html' in sys.argv:
lines.insert(0, "<div id='container'><div id='contact'>%s</div>\n" %
"<br>".join(contact_lines))
lines.insert(1, "<div>")
lines.append("</div>")
print "".join(lines)
| <commit_before># encoding: utf-8
import sys
lines = sys.stdin.readlines()
# Contact details are expected to begin on the fourth line, following the
# header and a blank line, and extend until the next blank line. Lines with
# bullets (•) will be split into separate lines.
contact_lines = []
for line in lines[3:]:
lines.remove(line)
parts = [x.strip() for x in line.split("•")]
if parts == ['']:
break
contact_lines.extend(parts)
if '--tex' in sys.argv:
lines.insert(0, "\\begin{nospace}\\begin{flushright}\n" +
"\n\n".join(contact_lines) +
"\n\\end{flushright}\\end{nospace}\n")
print "".join(lines)
if '--html' in sys.argv:
lines.insert(0, "<div id='container'><div id='contact'>%s</div>\n" %
"<br>".join(contact_lines))
lines.insert(1, "<div>")
lines.append("</div>")
print "".join(lines)
<commit_msg>Fix display of tildes in PDF output.<commit_after> | # encoding: utf-8
import sys
lines = sys.stdin.readlines()
# Contact details are expected to begin on the fourth line, following the
# header and a blank line, and extend until the next blank line. Lines with
# bullets (•) will be split into separate lines.
contact_lines = []
for line in lines[3:]:
lines.remove(line)
parts = [x.strip() for x in line.split("•")]
if parts == ['']:
break
contact_lines.extend(parts)
if '--tex' in sys.argv:
lines.insert(0, "\\begin{nospace}\\begin{flushright}\n" +
"\n\n".join(contact_lines) +
"\n\\end{flushright}\\end{nospace}\n")
print "".join(lines).replace('~', '$\sim$')
if '--html' in sys.argv:
lines.insert(0, "<div id='container'><div id='contact'>%s</div>\n" %
"<br>".join(contact_lines))
lines.insert(1, "<div>")
lines.append("</div>")
print "".join(lines)
| # encoding: utf-8
import sys
lines = sys.stdin.readlines()
# Contact details are expected to begin on the fourth line, following the
# header and a blank line, and extend until the next blank line. Lines with
# bullets (•) will be split into separate lines.
contact_lines = []
for line in lines[3:]:
lines.remove(line)
parts = [x.strip() for x in line.split("•")]
if parts == ['']:
break
contact_lines.extend(parts)
if '--tex' in sys.argv:
lines.insert(0, "\\begin{nospace}\\begin{flushright}\n" +
"\n\n".join(contact_lines) +
"\n\\end{flushright}\\end{nospace}\n")
print "".join(lines)
if '--html' in sys.argv:
lines.insert(0, "<div id='container'><div id='contact'>%s</div>\n" %
"<br>".join(contact_lines))
lines.insert(1, "<div>")
lines.append("</div>")
print "".join(lines)
Fix display of tildes in PDF output.# encoding: utf-8
import sys
lines = sys.stdin.readlines()
# Contact details are expected to begin on the fourth line, following the
# header and a blank line, and extend until the next blank line. Lines with
# bullets (•) will be split into separate lines.
contact_lines = []
for line in lines[3:]:
lines.remove(line)
parts = [x.strip() for x in line.split("•")]
if parts == ['']:
break
contact_lines.extend(parts)
if '--tex' in sys.argv:
lines.insert(0, "\\begin{nospace}\\begin{flushright}\n" +
"\n\n".join(contact_lines) +
"\n\\end{flushright}\\end{nospace}\n")
print "".join(lines).replace('~', '$\sim$')
if '--html' in sys.argv:
lines.insert(0, "<div id='container'><div id='contact'>%s</div>\n" %
"<br>".join(contact_lines))
lines.insert(1, "<div>")
lines.append("</div>")
print "".join(lines)
| <commit_before># encoding: utf-8
import sys
lines = sys.stdin.readlines()
# Contact details are expected to begin on the fourth line, following the
# header and a blank line, and extend until the next blank line. Lines with
# bullets (•) will be split into separate lines.
contact_lines = []
for line in lines[3:]:
lines.remove(line)
parts = [x.strip() for x in line.split("•")]
if parts == ['']:
break
contact_lines.extend(parts)
if '--tex' in sys.argv:
lines.insert(0, "\\begin{nospace}\\begin{flushright}\n" +
"\n\n".join(contact_lines) +
"\n\\end{flushright}\\end{nospace}\n")
print "".join(lines)
if '--html' in sys.argv:
lines.insert(0, "<div id='container'><div id='contact'>%s</div>\n" %
"<br>".join(contact_lines))
lines.insert(1, "<div>")
lines.append("</div>")
print "".join(lines)
<commit_msg>Fix display of tildes in PDF output.<commit_after># encoding: utf-8
import sys
lines = sys.stdin.readlines()
# Contact details are expected to begin on the fourth line, following the
# header and a blank line, and extend until the next blank line. Lines with
# bullets (•) will be split into separate lines.
contact_lines = []
for line in lines[3:]:
lines.remove(line)
parts = [x.strip() for x in line.split("•")]
if parts == ['']:
break
contact_lines.extend(parts)
if '--tex' in sys.argv:
lines.insert(0, "\\begin{nospace}\\begin{flushright}\n" +
"\n\n".join(contact_lines) +
"\n\\end{flushright}\\end{nospace}\n")
print "".join(lines).replace('~', '$\sim$')
if '--html' in sys.argv:
lines.insert(0, "<div id='container'><div id='contact'>%s</div>\n" %
"<br>".join(contact_lines))
lines.insert(1, "<div>")
lines.append("</div>")
print "".join(lines)
|
4581f05e937800b0541c219fd82390fdfef7644f | opal/tests/test_updates_from_dict.py | opal/tests/test_updates_from_dict.py | from django.test import TestCase
from django.db import models as djangomodels
from opal.models import UpdatesFromDictMixin
class UpdatesFromDictMixinTest(TestCase):
class TestDiagnosis(UpdatesFromDictMixin, djangomodels.Model):
condition = djangomodels.CharField(max_length=255, blank=True, null=True)
provisional = djangomodels.BooleanField()
details = djangomodels.CharField(max_length=255, blank=True)
date_of_diagnosis = djangomodels.DateField(blank=True, null=True)
def test_get_fieldnames_to_serialise(self):
names = self.TestDiagnosis._get_fieldnames_to_serialize()
expected = ['id', 'condition', 'provisional', 'details', 'date_of_diagnosis']
self.assertEqual(expected, names)
| from django.test import TestCase
from django.db import models as djangomodels
from opal.models import UpdatesFromDictMixin
class UpdatesFromDictMixinTest(TestCase):
class TestDiagnosis(UpdatesFromDictMixin, djangomodels.Model):
condition = djangomodels.CharField(max_length=255, blank=True, null=True)
provisional = djangomodels.BooleanField()
details = djangomodels.CharField(max_length=255, blank=True)
date_of_diagnosis = djangomodels.DateField(blank=True, null=True)
def test_get_fieldnames_to_serialise(self):
names = self.TestDiagnosis._get_fieldnames_to_serialize()
expected = ['id', 'condition', 'provisional', 'details', 'date_of_diagnosis']
self.assertEqual(expected, names)
def test_get_named_foreign_key_fields(self):
for name in ['patient_id', 'episode_id', 'gp_id', 'nurse_id']:
self.assertEqual(djangomodels.ForeignKey,
self.TestDiagnosis._get_field_type(name))
| Add a specific test for the updatesfromdict hard coded foreignkey fields | Add a specific test for the updatesfromdict hard coded foreignkey fields
| Python | agpl-3.0 | khchine5/opal,khchine5/opal,khchine5/opal | from django.test import TestCase
from django.db import models as djangomodels
from opal.models import UpdatesFromDictMixin
class UpdatesFromDictMixinTest(TestCase):
class TestDiagnosis(UpdatesFromDictMixin, djangomodels.Model):
condition = djangomodels.CharField(max_length=255, blank=True, null=True)
provisional = djangomodels.BooleanField()
details = djangomodels.CharField(max_length=255, blank=True)
date_of_diagnosis = djangomodels.DateField(blank=True, null=True)
def test_get_fieldnames_to_serialise(self):
names = self.TestDiagnosis._get_fieldnames_to_serialize()
expected = ['id', 'condition', 'provisional', 'details', 'date_of_diagnosis']
self.assertEqual(expected, names)
Add a specific test for the updatesfromdict hard coded foreignkey fields | from django.test import TestCase
from django.db import models as djangomodels
from opal.models import UpdatesFromDictMixin
class UpdatesFromDictMixinTest(TestCase):
class TestDiagnosis(UpdatesFromDictMixin, djangomodels.Model):
condition = djangomodels.CharField(max_length=255, blank=True, null=True)
provisional = djangomodels.BooleanField()
details = djangomodels.CharField(max_length=255, blank=True)
date_of_diagnosis = djangomodels.DateField(blank=True, null=True)
def test_get_fieldnames_to_serialise(self):
names = self.TestDiagnosis._get_fieldnames_to_serialize()
expected = ['id', 'condition', 'provisional', 'details', 'date_of_diagnosis']
self.assertEqual(expected, names)
def test_get_named_foreign_key_fields(self):
for name in ['patient_id', 'episode_id', 'gp_id', 'nurse_id']:
self.assertEqual(djangomodels.ForeignKey,
self.TestDiagnosis._get_field_type(name))
| <commit_before>from django.test import TestCase
from django.db import models as djangomodels
from opal.models import UpdatesFromDictMixin
class UpdatesFromDictMixinTest(TestCase):
class TestDiagnosis(UpdatesFromDictMixin, djangomodels.Model):
condition = djangomodels.CharField(max_length=255, blank=True, null=True)
provisional = djangomodels.BooleanField()
details = djangomodels.CharField(max_length=255, blank=True)
date_of_diagnosis = djangomodels.DateField(blank=True, null=True)
def test_get_fieldnames_to_serialise(self):
names = self.TestDiagnosis._get_fieldnames_to_serialize()
expected = ['id', 'condition', 'provisional', 'details', 'date_of_diagnosis']
self.assertEqual(expected, names)
<commit_msg>Add a specific test for the updatesfromdict hard coded foreignkey fields<commit_after> | from django.test import TestCase
from django.db import models as djangomodels
from opal.models import UpdatesFromDictMixin
class UpdatesFromDictMixinTest(TestCase):
class TestDiagnosis(UpdatesFromDictMixin, djangomodels.Model):
condition = djangomodels.CharField(max_length=255, blank=True, null=True)
provisional = djangomodels.BooleanField()
details = djangomodels.CharField(max_length=255, blank=True)
date_of_diagnosis = djangomodels.DateField(blank=True, null=True)
def test_get_fieldnames_to_serialise(self):
names = self.TestDiagnosis._get_fieldnames_to_serialize()
expected = ['id', 'condition', 'provisional', 'details', 'date_of_diagnosis']
self.assertEqual(expected, names)
def test_get_named_foreign_key_fields(self):
for name in ['patient_id', 'episode_id', 'gp_id', 'nurse_id']:
self.assertEqual(djangomodels.ForeignKey,
self.TestDiagnosis._get_field_type(name))
| from django.test import TestCase
from django.db import models as djangomodels
from opal.models import UpdatesFromDictMixin
class UpdatesFromDictMixinTest(TestCase):
class TestDiagnosis(UpdatesFromDictMixin, djangomodels.Model):
condition = djangomodels.CharField(max_length=255, blank=True, null=True)
provisional = djangomodels.BooleanField()
details = djangomodels.CharField(max_length=255, blank=True)
date_of_diagnosis = djangomodels.DateField(blank=True, null=True)
def test_get_fieldnames_to_serialise(self):
names = self.TestDiagnosis._get_fieldnames_to_serialize()
expected = ['id', 'condition', 'provisional', 'details', 'date_of_diagnosis']
self.assertEqual(expected, names)
Add a specific test for the updatesfromdict hard coded foreignkey fieldsfrom django.test import TestCase
from django.db import models as djangomodels
from opal.models import UpdatesFromDictMixin
class UpdatesFromDictMixinTest(TestCase):
class TestDiagnosis(UpdatesFromDictMixin, djangomodels.Model):
condition = djangomodels.CharField(max_length=255, blank=True, null=True)
provisional = djangomodels.BooleanField()
details = djangomodels.CharField(max_length=255, blank=True)
date_of_diagnosis = djangomodels.DateField(blank=True, null=True)
def test_get_fieldnames_to_serialise(self):
names = self.TestDiagnosis._get_fieldnames_to_serialize()
expected = ['id', 'condition', 'provisional', 'details', 'date_of_diagnosis']
self.assertEqual(expected, names)
def test_get_named_foreign_key_fields(self):
for name in ['patient_id', 'episode_id', 'gp_id', 'nurse_id']:
self.assertEqual(djangomodels.ForeignKey,
self.TestDiagnosis._get_field_type(name))
| <commit_before>from django.test import TestCase
from django.db import models as djangomodels
from opal.models import UpdatesFromDictMixin
class UpdatesFromDictMixinTest(TestCase):
class TestDiagnosis(UpdatesFromDictMixin, djangomodels.Model):
condition = djangomodels.CharField(max_length=255, blank=True, null=True)
provisional = djangomodels.BooleanField()
details = djangomodels.CharField(max_length=255, blank=True)
date_of_diagnosis = djangomodels.DateField(blank=True, null=True)
def test_get_fieldnames_to_serialise(self):
names = self.TestDiagnosis._get_fieldnames_to_serialize()
expected = ['id', 'condition', 'provisional', 'details', 'date_of_diagnosis']
self.assertEqual(expected, names)
<commit_msg>Add a specific test for the updatesfromdict hard coded foreignkey fields<commit_after>from django.test import TestCase
from django.db import models as djangomodels
from opal.models import UpdatesFromDictMixin
class UpdatesFromDictMixinTest(TestCase):
class TestDiagnosis(UpdatesFromDictMixin, djangomodels.Model):
condition = djangomodels.CharField(max_length=255, blank=True, null=True)
provisional = djangomodels.BooleanField()
details = djangomodels.CharField(max_length=255, blank=True)
date_of_diagnosis = djangomodels.DateField(blank=True, null=True)
def test_get_fieldnames_to_serialise(self):
names = self.TestDiagnosis._get_fieldnames_to_serialize()
expected = ['id', 'condition', 'provisional', 'details', 'date_of_diagnosis']
self.assertEqual(expected, names)
def test_get_named_foreign_key_fields(self):
for name in ['patient_id', 'episode_id', 'gp_id', 'nurse_id']:
self.assertEqual(djangomodels.ForeignKey,
self.TestDiagnosis._get_field_type(name))
|
3520217e38849ad18b11245c6cac51d79db8422d | pytablereader/loadermanager/_base.py | pytablereader/loadermanager/_base.py | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def table_name(self):
return self.__loader.table_name
@table_name.setter
def table_name(self, value):
self.__loader.table_name = value
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
| Add an interface to change table_name | Add an interface to change table_name
| Python | mit | thombashi/pytablereader,thombashi/pytablereader,thombashi/pytablereader | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
Add an interface to change table_name | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def table_name(self):
return self.__loader.table_name
@table_name.setter
def table_name(self, value):
self.__loader.table_name = value
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
| <commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
<commit_msg>Add an interface to change table_name<commit_after> | # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def table_name(self):
return self.__loader.table_name
@table_name.setter
def table_name(self, value):
self.__loader.table_name = value
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
| # encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
Add an interface to change table_name# encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def table_name(self):
return self.__loader.table_name
@table_name.setter
def table_name(self, value):
self.__loader.table_name = value
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
| <commit_before># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
<commit_msg>Add an interface to change table_name<commit_after># encoding: utf-8
"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
from __future__ import absolute_import
from ..interface import TableLoaderInterface
class TableLoaderManager(TableLoaderInterface):
def __init__(self, loader):
self.__loader = loader
@property
def loader(self):
return self.__loader
@property
def format_name(self):
return self.__loader.format_name
@property
def source_type(self):
return self.__loader.source_type
@property
def table_name(self):
return self.__loader.table_name
@table_name.setter
def table_name(self, value):
self.__loader.table_name = value
@property
def encoding(self):
try:
return self.__loader.encoding
except AttributeError:
return None
@encoding.setter
def encoding(self, codec_name):
self.__loader.encoding = codec_name
def load(self):
return self.__loader.load()
def inc_table_count(self):
self.__loader.inc_table_count()
|
ec1058fc1576b7a47af2d00b5b9e60e71dc6dc6b | kraller/kraller.py | kraller/kraller.py | """
Kraller
An application to allow signups for accounts on a server with a key.
"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello from ACM@CWRU'
if __name__ == '__main__':
app.run()
| """
Kraller
An application to allow signups for accounts on a server with a key.
"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello from ACM@CWRU'
"""
Don't invoke this directly in production. This is for development only.
Use a WSGI webserver such as Gunicorn to serve this in production.
"""
if __name__ == '__main__':
app.run(debug=True)
| Add debug mode to the development version | Add debug mode to the development version
| Python | bsd-2-clause | Aaronneyer/kraller,Aaronneyer/kraller,bis12/kraller,hashbrowncipher/kraller,hacsoc/kraller,hashbrowncipher/kraller,hacsoc/kraller,cwruacm/kraller,bis12/kraller,cwruacm/kraller | """
Kraller
An application to allow signups for accounts on a server with a key.
"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello from ACM@CWRU'
if __name__ == '__main__':
app.run()
Add debug mode to the development version | """
Kraller
An application to allow signups for accounts on a server with a key.
"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello from ACM@CWRU'
"""
Don't invoke this directly in production. This is for development only.
Use a WSGI webserver such as Gunicorn to serve this in production.
"""
if __name__ == '__main__':
app.run(debug=True)
| <commit_before>"""
Kraller
An application to allow signups for accounts on a server with a key.
"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello from ACM@CWRU'
if __name__ == '__main__':
app.run()
<commit_msg>Add debug mode to the development version<commit_after> | """
Kraller
An application to allow signups for accounts on a server with a key.
"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello from ACM@CWRU'
"""
Don't invoke this directly in production. This is for development only.
Use a WSGI webserver such as Gunicorn to serve this in production.
"""
if __name__ == '__main__':
app.run(debug=True)
| """
Kraller
An application to allow signups for accounts on a server with a key.
"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello from ACM@CWRU'
if __name__ == '__main__':
app.run()
Add debug mode to the development version"""
Kraller
An application to allow signups for accounts on a server with a key.
"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello from ACM@CWRU'
"""
Don't invoke this directly in production. This is for development only.
Use a WSGI webserver such as Gunicorn to serve this in production.
"""
if __name__ == '__main__':
app.run(debug=True)
| <commit_before>"""
Kraller
An application to allow signups for accounts on a server with a key.
"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello from ACM@CWRU'
if __name__ == '__main__':
app.run()
<commit_msg>Add debug mode to the development version<commit_after>"""
Kraller
An application to allow signups for accounts on a server with a key.
"""
from flask import Flask
app = Flask(__name__)
@app.route('/')
def index():
return 'Hello from ACM@CWRU'
"""
Don't invoke this directly in production. This is for development only.
Use a WSGI webserver such as Gunicorn to serve this in production.
"""
if __name__ == '__main__':
app.run(debug=True)
|
d518c4f8fa7b657c735d5a2b4f653d5c0cad529e | gnotty/templatetags/gnotty_tags.py | gnotty/templatetags/gnotty_tags.py |
from django import template
from django.db.models import Min, Max
from gnotty.models import IRCMessage
from gnotty.conf import settings
register = template.Library()
@register.inclusion_tag("gnotty/includes/nav.html", takes_context=True)
def gnotty_nav(context):
min_max = IRCMessage.objects.aggregate(Min("message_time"),
Max("message_time"))
if min_max:
years = range(min_max["message_time__max"].year,
min_max["message_time__min"].year - 1, -1)
else:
years = []
context["IRC_CHANNEL"] = settings.IRC_CHANNEL
context["years"] = years
return context
|
from django import template
from django.db.models import Min, Max
from gnotty.models import IRCMessage
from gnotty.conf import settings
register = template.Library()
@register.inclusion_tag("gnotty/includes/nav.html", takes_context=True)
def gnotty_nav(context):
min_max = IRCMessage.objects.aggregate(Min("message_time"),
Max("message_time"))
if min_max.values()[0]:
years = range(min_max["message_time__max"].year,
min_max["message_time__min"].year - 1, -1)
else:
years = []
context["IRC_CHANNEL"] = settings.IRC_CHANNEL
context["years"] = years
return context
| Fix check for empty min/max years. | Fix check for empty min/max years.
| Python | bsd-2-clause | spaceone/gnotty,stephenmcd/gnotty,stephenmcd/gnotty,spaceone/gnotty,stephenmcd/gnotty,spaceone/gnotty |
from django import template
from django.db.models import Min, Max
from gnotty.models import IRCMessage
from gnotty.conf import settings
register = template.Library()
@register.inclusion_tag("gnotty/includes/nav.html", takes_context=True)
def gnotty_nav(context):
min_max = IRCMessage.objects.aggregate(Min("message_time"),
Max("message_time"))
if min_max:
years = range(min_max["message_time__max"].year,
min_max["message_time__min"].year - 1, -1)
else:
years = []
context["IRC_CHANNEL"] = settings.IRC_CHANNEL
context["years"] = years
return context
Fix check for empty min/max years. |
from django import template
from django.db.models import Min, Max
from gnotty.models import IRCMessage
from gnotty.conf import settings
register = template.Library()
@register.inclusion_tag("gnotty/includes/nav.html", takes_context=True)
def gnotty_nav(context):
min_max = IRCMessage.objects.aggregate(Min("message_time"),
Max("message_time"))
if min_max.values()[0]:
years = range(min_max["message_time__max"].year,
min_max["message_time__min"].year - 1, -1)
else:
years = []
context["IRC_CHANNEL"] = settings.IRC_CHANNEL
context["years"] = years
return context
| <commit_before>
from django import template
from django.db.models import Min, Max
from gnotty.models import IRCMessage
from gnotty.conf import settings
register = template.Library()
@register.inclusion_tag("gnotty/includes/nav.html", takes_context=True)
def gnotty_nav(context):
min_max = IRCMessage.objects.aggregate(Min("message_time"),
Max("message_time"))
if min_max:
years = range(min_max["message_time__max"].year,
min_max["message_time__min"].year - 1, -1)
else:
years = []
context["IRC_CHANNEL"] = settings.IRC_CHANNEL
context["years"] = years
return context
<commit_msg>Fix check for empty min/max years.<commit_after> |
from django import template
from django.db.models import Min, Max
from gnotty.models import IRCMessage
from gnotty.conf import settings
register = template.Library()
@register.inclusion_tag("gnotty/includes/nav.html", takes_context=True)
def gnotty_nav(context):
min_max = IRCMessage.objects.aggregate(Min("message_time"),
Max("message_time"))
if min_max.values()[0]:
years = range(min_max["message_time__max"].year,
min_max["message_time__min"].year - 1, -1)
else:
years = []
context["IRC_CHANNEL"] = settings.IRC_CHANNEL
context["years"] = years
return context
|
from django import template
from django.db.models import Min, Max
from gnotty.models import IRCMessage
from gnotty.conf import settings
register = template.Library()
@register.inclusion_tag("gnotty/includes/nav.html", takes_context=True)
def gnotty_nav(context):
min_max = IRCMessage.objects.aggregate(Min("message_time"),
Max("message_time"))
if min_max:
years = range(min_max["message_time__max"].year,
min_max["message_time__min"].year - 1, -1)
else:
years = []
context["IRC_CHANNEL"] = settings.IRC_CHANNEL
context["years"] = years
return context
Fix check for empty min/max years.
from django import template
from django.db.models import Min, Max
from gnotty.models import IRCMessage
from gnotty.conf import settings
register = template.Library()
@register.inclusion_tag("gnotty/includes/nav.html", takes_context=True)
def gnotty_nav(context):
min_max = IRCMessage.objects.aggregate(Min("message_time"),
Max("message_time"))
if min_max.values()[0]:
years = range(min_max["message_time__max"].year,
min_max["message_time__min"].year - 1, -1)
else:
years = []
context["IRC_CHANNEL"] = settings.IRC_CHANNEL
context["years"] = years
return context
| <commit_before>
from django import template
from django.db.models import Min, Max
from gnotty.models import IRCMessage
from gnotty.conf import settings
register = template.Library()
@register.inclusion_tag("gnotty/includes/nav.html", takes_context=True)
def gnotty_nav(context):
min_max = IRCMessage.objects.aggregate(Min("message_time"),
Max("message_time"))
if min_max:
years = range(min_max["message_time__max"].year,
min_max["message_time__min"].year - 1, -1)
else:
years = []
context["IRC_CHANNEL"] = settings.IRC_CHANNEL
context["years"] = years
return context
<commit_msg>Fix check for empty min/max years.<commit_after>
from django import template
from django.db.models import Min, Max
from gnotty.models import IRCMessage
from gnotty.conf import settings
register = template.Library()
@register.inclusion_tag("gnotty/includes/nav.html", takes_context=True)
def gnotty_nav(context):
min_max = IRCMessage.objects.aggregate(Min("message_time"),
Max("message_time"))
if min_max.values()[0]:
years = range(min_max["message_time__max"].year,
min_max["message_time__min"].year - 1, -1)
else:
years = []
context["IRC_CHANNEL"] = settings.IRC_CHANNEL
context["years"] = years
return context
|
087b6a623e4a48b76fa3ce62a14298d9744afd2a | go/apps/bulk_message/definition.py | go/apps/bulk_message/definition.py | from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=action_data['delivery_class'],
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
| from go.scheduler.models import Task
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message now'
action_schedule_verb = 'Schedule'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
if action_data['scheduled_datetime'] is None:
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=action_data['delivery_class'],
dedupe=action_data['dedupe'])
else:
task = Task.objects.create(
account_id=self._conv.user_api.user_account_key,
label='Bulk Message Send',
task_type=Task.TYPE_CONVERSATION_ACTION,
task_data={
'action_name': 'bulk_send',
'action_kwargs': {
'batch_id': self._conv.batch.key,
'msg_options': {},
'content': action_data['message'],
'delivery_class': action_data['delivery_class'],
'dedupe': action_data['dedupe'],
},
},
scheduled_for=action_data['scheduled_datetime'])
task.save()
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
| Add handling for creating scheduled action for bulk send | Add handling for creating scheduled action for bulk send
| Python | bsd-3-clause | praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go,praekelt/vumi-go | from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=action_data['delivery_class'],
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
Add handling for creating scheduled action for bulk send | from go.scheduler.models import Task
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message now'
action_schedule_verb = 'Schedule'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
if action_data['scheduled_datetime'] is None:
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=action_data['delivery_class'],
dedupe=action_data['dedupe'])
else:
task = Task.objects.create(
account_id=self._conv.user_api.user_account_key,
label='Bulk Message Send',
task_type=Task.TYPE_CONVERSATION_ACTION,
task_data={
'action_name': 'bulk_send',
'action_kwargs': {
'batch_id': self._conv.batch.key,
'msg_options': {},
'content': action_data['message'],
'delivery_class': action_data['delivery_class'],
'dedupe': action_data['dedupe'],
},
},
scheduled_for=action_data['scheduled_datetime'])
task.save()
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
| <commit_before>from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=action_data['delivery_class'],
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
<commit_msg>Add handling for creating scheduled action for bulk send<commit_after> | from go.scheduler.models import Task
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message now'
action_schedule_verb = 'Schedule'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
if action_data['scheduled_datetime'] is None:
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=action_data['delivery_class'],
dedupe=action_data['dedupe'])
else:
task = Task.objects.create(
account_id=self._conv.user_api.user_account_key,
label='Bulk Message Send',
task_type=Task.TYPE_CONVERSATION_ACTION,
task_data={
'action_name': 'bulk_send',
'action_kwargs': {
'batch_id': self._conv.batch.key,
'msg_options': {},
'content': action_data['message'],
'delivery_class': action_data['delivery_class'],
'dedupe': action_data['dedupe'],
},
},
scheduled_for=action_data['scheduled_datetime'])
task.save()
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
| from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=action_data['delivery_class'],
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
Add handling for creating scheduled action for bulk sendfrom go.scheduler.models import Task
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message now'
action_schedule_verb = 'Schedule'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
if action_data['scheduled_datetime'] is None:
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=action_data['delivery_class'],
dedupe=action_data['dedupe'])
else:
task = Task.objects.create(
account_id=self._conv.user_api.user_account_key,
label='Bulk Message Send',
task_type=Task.TYPE_CONVERSATION_ACTION,
task_data={
'action_name': 'bulk_send',
'action_kwargs': {
'batch_id': self._conv.batch.key,
'msg_options': {},
'content': action_data['message'],
'delivery_class': action_data['delivery_class'],
'dedupe': action_data['dedupe'],
},
},
scheduled_for=action_data['scheduled_datetime'])
task.save()
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
| <commit_before>from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=action_data['delivery_class'],
dedupe=action_data['dedupe'])
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
<commit_msg>Add handling for creating scheduled action for bulk send<commit_after>from go.scheduler.models import Task
from go.vumitools.conversation.definition import (
ConversationDefinitionBase, ConversationAction)
class BulkSendAction(ConversationAction):
action_name = 'bulk_send'
action_display_name = 'Write and send bulk message'
action_display_verb = 'Send message now'
action_schedule_verb = 'Schedule'
needs_confirmation = True
needs_group = True
needs_running = True
def check_disabled(self):
if self._conv.has_channel_supporting_generic_sends():
return None
return ("This action needs channels capable of sending"
" messages attached to this conversation.")
def perform_action(self, action_data):
if action_data['scheduled_datetime'] is None:
return self.send_command(
'bulk_send', batch_id=self._conv.batch.key,
msg_options={}, content=action_data['message'],
delivery_class=action_data['delivery_class'],
dedupe=action_data['dedupe'])
else:
task = Task.objects.create(
account_id=self._conv.user_api.user_account_key,
label='Bulk Message Send',
task_type=Task.TYPE_CONVERSATION_ACTION,
task_data={
'action_name': 'bulk_send',
'action_kwargs': {
'batch_id': self._conv.batch.key,
'msg_options': {},
'content': action_data['message'],
'delivery_class': action_data['delivery_class'],
'dedupe': action_data['dedupe'],
},
},
scheduled_for=action_data['scheduled_datetime'])
task.save()
class ConversationDefinition(ConversationDefinitionBase):
conversation_type = 'bulk_message'
actions = (BulkSendAction,)
|
c38fb74a71b3471f5363cf1aa95fecdd8ac9180f | picoCTF-shell/tests/test_problems.py | picoCTF-shell/tests/test_problems.py | import pytest
from os.path import dirname, join, realpath
import hacksport.deploy
from hacksport.deploy import deploy_problem
from shell_manager.util import default_config
PATH = dirname(realpath(__file__))
hacksport.deploy.deploy_config = default_config
@pytest.mark.skip("Broken tests/not working")
class TestProblems:
"""
Regression tests for compiled problems.
"""
def test_compiled_sources(self):
deploy_problem(join(PATH, "problems/compiled_sources"), test=True)
def test_remote_compiled_makefile_template(self):
deploy_problem(
join(PATH, "problems/remote_compiled_makefile_template"), test=True)
def test_remote_no_compile(self):
deploy_problem(join(PATH, "problems/remote_no_compile"), test=True)
def test_compiled_sources_url(self):
deploy_problem(join(PATH, "problems/compiled_sources_url"), test=True)
def test_high_level_compiled_binary(self):
deploy_problem(join(PATH, "problems/local_compiled1"), test=True)
deploy_problem(join(PATH, "problems/local_compiled2"), test=True)
deploy_problem(join(PATH, "problems/remote_compiled1"), test=True)
| import pytest
from os.path import dirname, join, realpath
import hacksport.deploy
from hacksport.deploy import deploy_problem
PATH = dirname(realpath(__file__))
@pytest.mark.skip("Broken tests/not working")
class TestProblems:
"""
Regression tests for compiled problems.
"""
def test_compiled_sources(self):
deploy_problem(join(PATH, "problems/compiled_sources"), test=True)
def test_remote_compiled_makefile_template(self):
deploy_problem(
join(PATH, "problems/remote_compiled_makefile_template"), test=True)
def test_remote_no_compile(self):
deploy_problem(join(PATH, "problems/remote_no_compile"), test=True)
def test_compiled_sources_url(self):
deploy_problem(join(PATH, "problems/compiled_sources_url"), test=True)
def test_high_level_compiled_binary(self):
deploy_problem(join(PATH, "problems/local_compiled1"), test=True)
deploy_problem(join(PATH, "problems/local_compiled2"), test=True)
deploy_problem(join(PATH, "problems/remote_compiled1"), test=True)
| Remove broken import in skipped tests | Remove broken import in skipped tests
| Python | mit | royragsdale/picoCTF,picoCTF/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF,royragsdale/picoCTF,picoCTF/picoCTF | import pytest
from os.path import dirname, join, realpath
import hacksport.deploy
from hacksport.deploy import deploy_problem
from shell_manager.util import default_config
PATH = dirname(realpath(__file__))
hacksport.deploy.deploy_config = default_config
@pytest.mark.skip("Broken tests/not working")
class TestProblems:
"""
Regression tests for compiled problems.
"""
def test_compiled_sources(self):
deploy_problem(join(PATH, "problems/compiled_sources"), test=True)
def test_remote_compiled_makefile_template(self):
deploy_problem(
join(PATH, "problems/remote_compiled_makefile_template"), test=True)
def test_remote_no_compile(self):
deploy_problem(join(PATH, "problems/remote_no_compile"), test=True)
def test_compiled_sources_url(self):
deploy_problem(join(PATH, "problems/compiled_sources_url"), test=True)
def test_high_level_compiled_binary(self):
deploy_problem(join(PATH, "problems/local_compiled1"), test=True)
deploy_problem(join(PATH, "problems/local_compiled2"), test=True)
deploy_problem(join(PATH, "problems/remote_compiled1"), test=True)
Remove broken import in skipped tests | import pytest
from os.path import dirname, join, realpath
import hacksport.deploy
from hacksport.deploy import deploy_problem
PATH = dirname(realpath(__file__))
@pytest.mark.skip("Broken tests/not working")
class TestProblems:
"""
Regression tests for compiled problems.
"""
def test_compiled_sources(self):
deploy_problem(join(PATH, "problems/compiled_sources"), test=True)
def test_remote_compiled_makefile_template(self):
deploy_problem(
join(PATH, "problems/remote_compiled_makefile_template"), test=True)
def test_remote_no_compile(self):
deploy_problem(join(PATH, "problems/remote_no_compile"), test=True)
def test_compiled_sources_url(self):
deploy_problem(join(PATH, "problems/compiled_sources_url"), test=True)
def test_high_level_compiled_binary(self):
deploy_problem(join(PATH, "problems/local_compiled1"), test=True)
deploy_problem(join(PATH, "problems/local_compiled2"), test=True)
deploy_problem(join(PATH, "problems/remote_compiled1"), test=True)
| <commit_before>import pytest
from os.path import dirname, join, realpath
import hacksport.deploy
from hacksport.deploy import deploy_problem
from shell_manager.util import default_config
PATH = dirname(realpath(__file__))
hacksport.deploy.deploy_config = default_config
@pytest.mark.skip("Broken tests/not working")
class TestProblems:
"""
Regression tests for compiled problems.
"""
def test_compiled_sources(self):
deploy_problem(join(PATH, "problems/compiled_sources"), test=True)
def test_remote_compiled_makefile_template(self):
deploy_problem(
join(PATH, "problems/remote_compiled_makefile_template"), test=True)
def test_remote_no_compile(self):
deploy_problem(join(PATH, "problems/remote_no_compile"), test=True)
def test_compiled_sources_url(self):
deploy_problem(join(PATH, "problems/compiled_sources_url"), test=True)
def test_high_level_compiled_binary(self):
deploy_problem(join(PATH, "problems/local_compiled1"), test=True)
deploy_problem(join(PATH, "problems/local_compiled2"), test=True)
deploy_problem(join(PATH, "problems/remote_compiled1"), test=True)
<commit_msg>Remove broken import in skipped tests<commit_after> | import pytest
from os.path import dirname, join, realpath
import hacksport.deploy
from hacksport.deploy import deploy_problem
PATH = dirname(realpath(__file__))
@pytest.mark.skip("Broken tests/not working")
class TestProblems:
"""
Regression tests for compiled problems.
"""
def test_compiled_sources(self):
deploy_problem(join(PATH, "problems/compiled_sources"), test=True)
def test_remote_compiled_makefile_template(self):
deploy_problem(
join(PATH, "problems/remote_compiled_makefile_template"), test=True)
def test_remote_no_compile(self):
deploy_problem(join(PATH, "problems/remote_no_compile"), test=True)
def test_compiled_sources_url(self):
deploy_problem(join(PATH, "problems/compiled_sources_url"), test=True)
def test_high_level_compiled_binary(self):
deploy_problem(join(PATH, "problems/local_compiled1"), test=True)
deploy_problem(join(PATH, "problems/local_compiled2"), test=True)
deploy_problem(join(PATH, "problems/remote_compiled1"), test=True)
| import pytest
from os.path import dirname, join, realpath
import hacksport.deploy
from hacksport.deploy import deploy_problem
from shell_manager.util import default_config
PATH = dirname(realpath(__file__))
hacksport.deploy.deploy_config = default_config
@pytest.mark.skip("Broken tests/not working")
class TestProblems:
"""
Regression tests for compiled problems.
"""
def test_compiled_sources(self):
deploy_problem(join(PATH, "problems/compiled_sources"), test=True)
def test_remote_compiled_makefile_template(self):
deploy_problem(
join(PATH, "problems/remote_compiled_makefile_template"), test=True)
def test_remote_no_compile(self):
deploy_problem(join(PATH, "problems/remote_no_compile"), test=True)
def test_compiled_sources_url(self):
deploy_problem(join(PATH, "problems/compiled_sources_url"), test=True)
def test_high_level_compiled_binary(self):
deploy_problem(join(PATH, "problems/local_compiled1"), test=True)
deploy_problem(join(PATH, "problems/local_compiled2"), test=True)
deploy_problem(join(PATH, "problems/remote_compiled1"), test=True)
Remove broken import in skipped testsimport pytest
from os.path import dirname, join, realpath
import hacksport.deploy
from hacksport.deploy import deploy_problem
PATH = dirname(realpath(__file__))
@pytest.mark.skip("Broken tests/not working")
class TestProblems:
"""
Regression tests for compiled problems.
"""
def test_compiled_sources(self):
deploy_problem(join(PATH, "problems/compiled_sources"), test=True)
def test_remote_compiled_makefile_template(self):
deploy_problem(
join(PATH, "problems/remote_compiled_makefile_template"), test=True)
def test_remote_no_compile(self):
deploy_problem(join(PATH, "problems/remote_no_compile"), test=True)
def test_compiled_sources_url(self):
deploy_problem(join(PATH, "problems/compiled_sources_url"), test=True)
def test_high_level_compiled_binary(self):
deploy_problem(join(PATH, "problems/local_compiled1"), test=True)
deploy_problem(join(PATH, "problems/local_compiled2"), test=True)
deploy_problem(join(PATH, "problems/remote_compiled1"), test=True)
| <commit_before>import pytest
from os.path import dirname, join, realpath
import hacksport.deploy
from hacksport.deploy import deploy_problem
from shell_manager.util import default_config
PATH = dirname(realpath(__file__))
hacksport.deploy.deploy_config = default_config
@pytest.mark.skip("Broken tests/not working")
class TestProblems:
"""
Regression tests for compiled problems.
"""
def test_compiled_sources(self):
deploy_problem(join(PATH, "problems/compiled_sources"), test=True)
def test_remote_compiled_makefile_template(self):
deploy_problem(
join(PATH, "problems/remote_compiled_makefile_template"), test=True)
def test_remote_no_compile(self):
deploy_problem(join(PATH, "problems/remote_no_compile"), test=True)
def test_compiled_sources_url(self):
deploy_problem(join(PATH, "problems/compiled_sources_url"), test=True)
def test_high_level_compiled_binary(self):
deploy_problem(join(PATH, "problems/local_compiled1"), test=True)
deploy_problem(join(PATH, "problems/local_compiled2"), test=True)
deploy_problem(join(PATH, "problems/remote_compiled1"), test=True)
<commit_msg>Remove broken import in skipped tests<commit_after>import pytest
from os.path import dirname, join, realpath
import hacksport.deploy
from hacksport.deploy import deploy_problem
PATH = dirname(realpath(__file__))
@pytest.mark.skip("Broken tests/not working")
class TestProblems:
"""
Regression tests for compiled problems.
"""
def test_compiled_sources(self):
deploy_problem(join(PATH, "problems/compiled_sources"), test=True)
def test_remote_compiled_makefile_template(self):
deploy_problem(
join(PATH, "problems/remote_compiled_makefile_template"), test=True)
def test_remote_no_compile(self):
deploy_problem(join(PATH, "problems/remote_no_compile"), test=True)
def test_compiled_sources_url(self):
deploy_problem(join(PATH, "problems/compiled_sources_url"), test=True)
def test_high_level_compiled_binary(self):
deploy_problem(join(PATH, "problems/local_compiled1"), test=True)
deploy_problem(join(PATH, "problems/local_compiled2"), test=True)
deploy_problem(join(PATH, "problems/remote_compiled1"), test=True)
|
36f218ccab5de6799674c438546189245b444b25 | was/photo/forms.py | was/photo/forms.py | from django.forms.models import ModelForm
from .models import Photo
from artists.models import Artists
class UploadPhotoForm(ModelForm):
class Meta:
model = Photo
fields = ['picture', 'comment']
def __init__(self, *args, **kwargs):
self.request = kwargs.pop("request")
super(UploadPhotoForm, self).__init__(*args, **kwargs)
def save(self):
photo = super(UploadPhotoForm, self).save(commit=False)
artist = Artists.objects.get(user=self.request.user)
photo.artist = artist
photo.save()
return photo
| from django.forms.models import ModelForm
from .models import Photo
from artists.models import Artists
class UploadPhotoForm(ModelForm):
class Meta:
model = Photo
fields = ['picture', 'comment']
def __init__(self, *args, **kwargs):
self.request = kwargs.pop("request", None)
super(UploadPhotoForm, self).__init__(*args, **kwargs)
def save(self):
photo = super(UploadPhotoForm, self).save(commit=False)
artist = Artists.objects.get(user=self.request.user)
photo.artist = artist
photo.save()
return photo
| Add None when request arg is pop, in case we access the form in get | Add None when request arg is pop, in case we access the form in get
| Python | mit | KeserOner/where-artists-share,KeserOner/where-artists-share | from django.forms.models import ModelForm
from .models import Photo
from artists.models import Artists
class UploadPhotoForm(ModelForm):
class Meta:
model = Photo
fields = ['picture', 'comment']
def __init__(self, *args, **kwargs):
self.request = kwargs.pop("request")
super(UploadPhotoForm, self).__init__(*args, **kwargs)
def save(self):
photo = super(UploadPhotoForm, self).save(commit=False)
artist = Artists.objects.get(user=self.request.user)
photo.artist = artist
photo.save()
return photo
Add None when request arg is pop, in case we access the form in get | from django.forms.models import ModelForm
from .models import Photo
from artists.models import Artists
class UploadPhotoForm(ModelForm):
class Meta:
model = Photo
fields = ['picture', 'comment']
def __init__(self, *args, **kwargs):
self.request = kwargs.pop("request", None)
super(UploadPhotoForm, self).__init__(*args, **kwargs)
def save(self):
photo = super(UploadPhotoForm, self).save(commit=False)
artist = Artists.objects.get(user=self.request.user)
photo.artist = artist
photo.save()
return photo
| <commit_before>from django.forms.models import ModelForm
from .models import Photo
from artists.models import Artists
class UploadPhotoForm(ModelForm):
class Meta:
model = Photo
fields = ['picture', 'comment']
def __init__(self, *args, **kwargs):
self.request = kwargs.pop("request")
super(UploadPhotoForm, self).__init__(*args, **kwargs)
def save(self):
photo = super(UploadPhotoForm, self).save(commit=False)
artist = Artists.objects.get(user=self.request.user)
photo.artist = artist
photo.save()
return photo
<commit_msg>Add None when request arg is pop, in case we access the form in get<commit_after> | from django.forms.models import ModelForm
from .models import Photo
from artists.models import Artists
class UploadPhotoForm(ModelForm):
class Meta:
model = Photo
fields = ['picture', 'comment']
def __init__(self, *args, **kwargs):
self.request = kwargs.pop("request", None)
super(UploadPhotoForm, self).__init__(*args, **kwargs)
def save(self):
photo = super(UploadPhotoForm, self).save(commit=False)
artist = Artists.objects.get(user=self.request.user)
photo.artist = artist
photo.save()
return photo
| from django.forms.models import ModelForm
from .models import Photo
from artists.models import Artists
class UploadPhotoForm(ModelForm):
class Meta:
model = Photo
fields = ['picture', 'comment']
def __init__(self, *args, **kwargs):
self.request = kwargs.pop("request")
super(UploadPhotoForm, self).__init__(*args, **kwargs)
def save(self):
photo = super(UploadPhotoForm, self).save(commit=False)
artist = Artists.objects.get(user=self.request.user)
photo.artist = artist
photo.save()
return photo
Add None when request arg is pop, in case we access the form in getfrom django.forms.models import ModelForm
from .models import Photo
from artists.models import Artists
class UploadPhotoForm(ModelForm):
class Meta:
model = Photo
fields = ['picture', 'comment']
def __init__(self, *args, **kwargs):
self.request = kwargs.pop("request", None)
super(UploadPhotoForm, self).__init__(*args, **kwargs)
def save(self):
photo = super(UploadPhotoForm, self).save(commit=False)
artist = Artists.objects.get(user=self.request.user)
photo.artist = artist
photo.save()
return photo
| <commit_before>from django.forms.models import ModelForm
from .models import Photo
from artists.models import Artists
class UploadPhotoForm(ModelForm):
class Meta:
model = Photo
fields = ['picture', 'comment']
def __init__(self, *args, **kwargs):
self.request = kwargs.pop("request")
super(UploadPhotoForm, self).__init__(*args, **kwargs)
def save(self):
photo = super(UploadPhotoForm, self).save(commit=False)
artist = Artists.objects.get(user=self.request.user)
photo.artist = artist
photo.save()
return photo
<commit_msg>Add None when request arg is pop, in case we access the form in get<commit_after>from django.forms.models import ModelForm
from .models import Photo
from artists.models import Artists
class UploadPhotoForm(ModelForm):
class Meta:
model = Photo
fields = ['picture', 'comment']
def __init__(self, *args, **kwargs):
self.request = kwargs.pop("request", None)
super(UploadPhotoForm, self).__init__(*args, **kwargs)
def save(self):
photo = super(UploadPhotoForm, self).save(commit=False)
artist = Artists.objects.get(user=self.request.user)
photo.artist = artist
photo.save()
return photo
|
224478e0d69197feb19f2f89c21e884a2e5cb77d | organizer/views.py | organizer/views.py | from django.shortcuts import (
get_object_or_404, render)
from .models import Startup, Tag
def startup_detail(request, slug):
startup = get_object_or_404(
Startup, slug__iexact=slug)
return render(
request,
'organizer/startup_detail.html',
{'startup': startup})
def startup_list(request):
return render(
request,
'organizer/startup_list.html',
{'startup_list': Startup.objects.all()})
def tag_create(request):
pass
def tag_detail(request, slug):
tag = get_object_or_404(
Tag, slug__iexact=slug)
return render(
request,
'organizer/tag_detail.html',
{'tag': tag})
def tag_list(request):
return render(
request,
'organizer/tag_list.html',
{'tag_list': Tag.objects.all()})
| from django.shortcuts import (
get_object_or_404, render)
from .models import Startup, Tag
def startup_detail(request, slug):
startup = get_object_or_404(
Startup, slug__iexact=slug)
return render(
request,
'organizer/startup_detail.html',
{'startup': startup})
def startup_list(request):
return render(
request,
'organizer/startup_list.html',
{'startup_list': Startup.objects.all()})
def tag_create(request):
if request.method == 'POST':
# bind data to form
# if the data is valid:
# create new object from data
# show webpage for new object
# else: (empty data or invalid data)
# show bound HTML form (with errors)
pass
else: # request.method != 'POST'
# show unbound HTML form
pass
def tag_detail(request, slug):
tag = get_object_or_404(
Tag, slug__iexact=slug)
return render(
request,
'organizer/tag_detail.html',
{'tag': tag})
def tag_list(request):
return render(
request,
'organizer/tag_list.html',
{'tag_list': Tag.objects.all()})
| Add HTTP method condition to tag_create(). | Ch09: Add HTTP method condition to tag_create().
| Python | bsd-2-clause | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 | from django.shortcuts import (
get_object_or_404, render)
from .models import Startup, Tag
def startup_detail(request, slug):
startup = get_object_or_404(
Startup, slug__iexact=slug)
return render(
request,
'organizer/startup_detail.html',
{'startup': startup})
def startup_list(request):
return render(
request,
'organizer/startup_list.html',
{'startup_list': Startup.objects.all()})
def tag_create(request):
pass
def tag_detail(request, slug):
tag = get_object_or_404(
Tag, slug__iexact=slug)
return render(
request,
'organizer/tag_detail.html',
{'tag': tag})
def tag_list(request):
return render(
request,
'organizer/tag_list.html',
{'tag_list': Tag.objects.all()})
Ch09: Add HTTP method condition to tag_create(). | from django.shortcuts import (
get_object_or_404, render)
from .models import Startup, Tag
def startup_detail(request, slug):
startup = get_object_or_404(
Startup, slug__iexact=slug)
return render(
request,
'organizer/startup_detail.html',
{'startup': startup})
def startup_list(request):
return render(
request,
'organizer/startup_list.html',
{'startup_list': Startup.objects.all()})
def tag_create(request):
if request.method == 'POST':
# bind data to form
# if the data is valid:
# create new object from data
# show webpage for new object
# else: (empty data or invalid data)
# show bound HTML form (with errors)
pass
else: # request.method != 'POST'
# show unbound HTML form
pass
def tag_detail(request, slug):
tag = get_object_or_404(
Tag, slug__iexact=slug)
return render(
request,
'organizer/tag_detail.html',
{'tag': tag})
def tag_list(request):
return render(
request,
'organizer/tag_list.html',
{'tag_list': Tag.objects.all()})
| <commit_before>from django.shortcuts import (
get_object_or_404, render)
from .models import Startup, Tag
def startup_detail(request, slug):
startup = get_object_or_404(
Startup, slug__iexact=slug)
return render(
request,
'organizer/startup_detail.html',
{'startup': startup})
def startup_list(request):
return render(
request,
'organizer/startup_list.html',
{'startup_list': Startup.objects.all()})
def tag_create(request):
pass
def tag_detail(request, slug):
tag = get_object_or_404(
Tag, slug__iexact=slug)
return render(
request,
'organizer/tag_detail.html',
{'tag': tag})
def tag_list(request):
return render(
request,
'organizer/tag_list.html',
{'tag_list': Tag.objects.all()})
<commit_msg>Ch09: Add HTTP method condition to tag_create().<commit_after> | from django.shortcuts import (
get_object_or_404, render)
from .models import Startup, Tag
def startup_detail(request, slug):
startup = get_object_or_404(
Startup, slug__iexact=slug)
return render(
request,
'organizer/startup_detail.html',
{'startup': startup})
def startup_list(request):
return render(
request,
'organizer/startup_list.html',
{'startup_list': Startup.objects.all()})
def tag_create(request):
if request.method == 'POST':
# bind data to form
# if the data is valid:
# create new object from data
# show webpage for new object
# else: (empty data or invalid data)
# show bound HTML form (with errors)
pass
else: # request.method != 'POST'
# show unbound HTML form
pass
def tag_detail(request, slug):
tag = get_object_or_404(
Tag, slug__iexact=slug)
return render(
request,
'organizer/tag_detail.html',
{'tag': tag})
def tag_list(request):
return render(
request,
'organizer/tag_list.html',
{'tag_list': Tag.objects.all()})
| from django.shortcuts import (
get_object_or_404, render)
from .models import Startup, Tag
def startup_detail(request, slug):
startup = get_object_or_404(
Startup, slug__iexact=slug)
return render(
request,
'organizer/startup_detail.html',
{'startup': startup})
def startup_list(request):
return render(
request,
'organizer/startup_list.html',
{'startup_list': Startup.objects.all()})
def tag_create(request):
pass
def tag_detail(request, slug):
tag = get_object_or_404(
Tag, slug__iexact=slug)
return render(
request,
'organizer/tag_detail.html',
{'tag': tag})
def tag_list(request):
return render(
request,
'organizer/tag_list.html',
{'tag_list': Tag.objects.all()})
Ch09: Add HTTP method condition to tag_create().from django.shortcuts import (
get_object_or_404, render)
from .models import Startup, Tag
def startup_detail(request, slug):
startup = get_object_or_404(
Startup, slug__iexact=slug)
return render(
request,
'organizer/startup_detail.html',
{'startup': startup})
def startup_list(request):
return render(
request,
'organizer/startup_list.html',
{'startup_list': Startup.objects.all()})
def tag_create(request):
if request.method == 'POST':
# bind data to form
# if the data is valid:
# create new object from data
# show webpage for new object
# else: (empty data or invalid data)
# show bound HTML form (with errors)
pass
else: # request.method != 'POST'
# show unbound HTML form
pass
def tag_detail(request, slug):
tag = get_object_or_404(
Tag, slug__iexact=slug)
return render(
request,
'organizer/tag_detail.html',
{'tag': tag})
def tag_list(request):
return render(
request,
'organizer/tag_list.html',
{'tag_list': Tag.objects.all()})
| <commit_before>from django.shortcuts import (
get_object_or_404, render)
from .models import Startup, Tag
def startup_detail(request, slug):
startup = get_object_or_404(
Startup, slug__iexact=slug)
return render(
request,
'organizer/startup_detail.html',
{'startup': startup})
def startup_list(request):
return render(
request,
'organizer/startup_list.html',
{'startup_list': Startup.objects.all()})
def tag_create(request):
pass
def tag_detail(request, slug):
tag = get_object_or_404(
Tag, slug__iexact=slug)
return render(
request,
'organizer/tag_detail.html',
{'tag': tag})
def tag_list(request):
return render(
request,
'organizer/tag_list.html',
{'tag_list': Tag.objects.all()})
<commit_msg>Ch09: Add HTTP method condition to tag_create().<commit_after>from django.shortcuts import (
get_object_or_404, render)
from .models import Startup, Tag
def startup_detail(request, slug):
startup = get_object_or_404(
Startup, slug__iexact=slug)
return render(
request,
'organizer/startup_detail.html',
{'startup': startup})
def startup_list(request):
return render(
request,
'organizer/startup_list.html',
{'startup_list': Startup.objects.all()})
def tag_create(request):
if request.method == 'POST':
# bind data to form
# if the data is valid:
# create new object from data
# show webpage for new object
# else: (empty data or invalid data)
# show bound HTML form (with errors)
pass
else: # request.method != 'POST'
# show unbound HTML form
pass
def tag_detail(request, slug):
tag = get_object_or_404(
Tag, slug__iexact=slug)
return render(
request,
'organizer/tag_detail.html',
{'tag': tag})
def tag_list(request):
return render(
request,
'organizer/tag_list.html',
{'tag_list': Tag.objects.all()})
|
271c91a607797bbd9d1a1e179745aedf580e8209 | user/models.py | user/models.py | from django.conf import settings
from django.contrib.auth.models import (
AbstractBaseUser, PermissionsMixin)
from django.core.urlresolvers import reverse
from django.db import models
class Profile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL)
slug = models.SlugField(
max_length=30,
unique=True)
about = models.TextField()
def __str__(self):
return self.user.get_username()
def get_absolute_url(self):
return reverse(
'dj-auth:public_profile',
kwargs={'slug': self.slug})
def get_update_url(self):
return reverse('dj-auth:profile_update')
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(
'email address',
max_length=254,
unique=True)
is_staff = models.BooleanField(
'staff status',
default=False,
help_text=(
'Designates whether the user can '
'log into this admin site.'))
is_active = models.BooleanField(
'active',
default=True,
help_text=(
'Designates whether this user should '
'be treated as active. Unselect this '
'instead of deleting accounts.'))
USERNAME_FIELD = 'email'
def __str__(self):
return self.email
def get_absolute_url(self):
return self.profile.get_absolute_url()
| from django.conf import settings
from django.contrib.auth.models import (
AbstractBaseUser, PermissionsMixin)
from django.core.urlresolvers import reverse
from django.db import models
class Profile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL)
name = models.CharField(
max_length=255)
slug = models.SlugField(
max_length=30,
unique=True)
about = models.TextField()
joined = models.DateTimeField(
"Date Joined",
auto_now_add=True)
def __str__(self):
return self.user.get_username()
def get_absolute_url(self):
return reverse(
'dj-auth:public_profile',
kwargs={'slug': self.slug})
def get_update_url(self):
return reverse('dj-auth:profile_update')
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(
'email address',
max_length=254,
unique=True)
is_staff = models.BooleanField(
'staff status',
default=False,
help_text=(
'Designates whether the user can '
'log into this admin site.'))
is_active = models.BooleanField(
'active',
default=True,
help_text=(
'Designates whether this user should '
'be treated as active. Unselect this '
'instead of deleting accounts.'))
USERNAME_FIELD = 'email'
def __str__(self):
return self.email
def get_absolute_url(self):
return self.profile.get_absolute_url()
def get_full_name(self):
return self.profile.name
def get_short_name(self):
return self.profile.name
| Add name and joined date field to Profile. | Ch22: Add name and joined date field to Profile.
| Python | bsd-2-clause | jambonrose/DjangoUnleashed-1.8,jambonrose/DjangoUnleashed-1.8 | from django.conf import settings
from django.contrib.auth.models import (
AbstractBaseUser, PermissionsMixin)
from django.core.urlresolvers import reverse
from django.db import models
class Profile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL)
slug = models.SlugField(
max_length=30,
unique=True)
about = models.TextField()
def __str__(self):
return self.user.get_username()
def get_absolute_url(self):
return reverse(
'dj-auth:public_profile',
kwargs={'slug': self.slug})
def get_update_url(self):
return reverse('dj-auth:profile_update')
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(
'email address',
max_length=254,
unique=True)
is_staff = models.BooleanField(
'staff status',
default=False,
help_text=(
'Designates whether the user can '
'log into this admin site.'))
is_active = models.BooleanField(
'active',
default=True,
help_text=(
'Designates whether this user should '
'be treated as active. Unselect this '
'instead of deleting accounts.'))
USERNAME_FIELD = 'email'
def __str__(self):
return self.email
def get_absolute_url(self):
return self.profile.get_absolute_url()
Ch22: Add name and joined date field to Profile. | from django.conf import settings
from django.contrib.auth.models import (
AbstractBaseUser, PermissionsMixin)
from django.core.urlresolvers import reverse
from django.db import models
class Profile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL)
name = models.CharField(
max_length=255)
slug = models.SlugField(
max_length=30,
unique=True)
about = models.TextField()
joined = models.DateTimeField(
"Date Joined",
auto_now_add=True)
def __str__(self):
return self.user.get_username()
def get_absolute_url(self):
return reverse(
'dj-auth:public_profile',
kwargs={'slug': self.slug})
def get_update_url(self):
return reverse('dj-auth:profile_update')
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(
'email address',
max_length=254,
unique=True)
is_staff = models.BooleanField(
'staff status',
default=False,
help_text=(
'Designates whether the user can '
'log into this admin site.'))
is_active = models.BooleanField(
'active',
default=True,
help_text=(
'Designates whether this user should '
'be treated as active. Unselect this '
'instead of deleting accounts.'))
USERNAME_FIELD = 'email'
def __str__(self):
return self.email
def get_absolute_url(self):
return self.profile.get_absolute_url()
def get_full_name(self):
return self.profile.name
def get_short_name(self):
return self.profile.name
| <commit_before>from django.conf import settings
from django.contrib.auth.models import (
AbstractBaseUser, PermissionsMixin)
from django.core.urlresolvers import reverse
from django.db import models
class Profile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL)
slug = models.SlugField(
max_length=30,
unique=True)
about = models.TextField()
def __str__(self):
return self.user.get_username()
def get_absolute_url(self):
return reverse(
'dj-auth:public_profile',
kwargs={'slug': self.slug})
def get_update_url(self):
return reverse('dj-auth:profile_update')
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(
'email address',
max_length=254,
unique=True)
is_staff = models.BooleanField(
'staff status',
default=False,
help_text=(
'Designates whether the user can '
'log into this admin site.'))
is_active = models.BooleanField(
'active',
default=True,
help_text=(
'Designates whether this user should '
'be treated as active. Unselect this '
'instead of deleting accounts.'))
USERNAME_FIELD = 'email'
def __str__(self):
return self.email
def get_absolute_url(self):
return self.profile.get_absolute_url()
<commit_msg>Ch22: Add name and joined date field to Profile.<commit_after> | from django.conf import settings
from django.contrib.auth.models import (
AbstractBaseUser, PermissionsMixin)
from django.core.urlresolvers import reverse
from django.db import models
class Profile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL)
name = models.CharField(
max_length=255)
slug = models.SlugField(
max_length=30,
unique=True)
about = models.TextField()
joined = models.DateTimeField(
"Date Joined",
auto_now_add=True)
def __str__(self):
return self.user.get_username()
def get_absolute_url(self):
return reverse(
'dj-auth:public_profile',
kwargs={'slug': self.slug})
def get_update_url(self):
return reverse('dj-auth:profile_update')
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(
'email address',
max_length=254,
unique=True)
is_staff = models.BooleanField(
'staff status',
default=False,
help_text=(
'Designates whether the user can '
'log into this admin site.'))
is_active = models.BooleanField(
'active',
default=True,
help_text=(
'Designates whether this user should '
'be treated as active. Unselect this '
'instead of deleting accounts.'))
USERNAME_FIELD = 'email'
def __str__(self):
return self.email
def get_absolute_url(self):
return self.profile.get_absolute_url()
def get_full_name(self):
return self.profile.name
def get_short_name(self):
return self.profile.name
| from django.conf import settings
from django.contrib.auth.models import (
AbstractBaseUser, PermissionsMixin)
from django.core.urlresolvers import reverse
from django.db import models
class Profile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL)
slug = models.SlugField(
max_length=30,
unique=True)
about = models.TextField()
def __str__(self):
return self.user.get_username()
def get_absolute_url(self):
return reverse(
'dj-auth:public_profile',
kwargs={'slug': self.slug})
def get_update_url(self):
return reverse('dj-auth:profile_update')
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(
'email address',
max_length=254,
unique=True)
is_staff = models.BooleanField(
'staff status',
default=False,
help_text=(
'Designates whether the user can '
'log into this admin site.'))
is_active = models.BooleanField(
'active',
default=True,
help_text=(
'Designates whether this user should '
'be treated as active. Unselect this '
'instead of deleting accounts.'))
USERNAME_FIELD = 'email'
def __str__(self):
return self.email
def get_absolute_url(self):
return self.profile.get_absolute_url()
Ch22: Add name and joined date field to Profile.from django.conf import settings
from django.contrib.auth.models import (
AbstractBaseUser, PermissionsMixin)
from django.core.urlresolvers import reverse
from django.db import models
class Profile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL)
name = models.CharField(
max_length=255)
slug = models.SlugField(
max_length=30,
unique=True)
about = models.TextField()
joined = models.DateTimeField(
"Date Joined",
auto_now_add=True)
def __str__(self):
return self.user.get_username()
def get_absolute_url(self):
return reverse(
'dj-auth:public_profile',
kwargs={'slug': self.slug})
def get_update_url(self):
return reverse('dj-auth:profile_update')
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(
'email address',
max_length=254,
unique=True)
is_staff = models.BooleanField(
'staff status',
default=False,
help_text=(
'Designates whether the user can '
'log into this admin site.'))
is_active = models.BooleanField(
'active',
default=True,
help_text=(
'Designates whether this user should '
'be treated as active. Unselect this '
'instead of deleting accounts.'))
USERNAME_FIELD = 'email'
def __str__(self):
return self.email
def get_absolute_url(self):
return self.profile.get_absolute_url()
def get_full_name(self):
return self.profile.name
def get_short_name(self):
return self.profile.name
| <commit_before>from django.conf import settings
from django.contrib.auth.models import (
AbstractBaseUser, PermissionsMixin)
from django.core.urlresolvers import reverse
from django.db import models
class Profile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL)
slug = models.SlugField(
max_length=30,
unique=True)
about = models.TextField()
def __str__(self):
return self.user.get_username()
def get_absolute_url(self):
return reverse(
'dj-auth:public_profile',
kwargs={'slug': self.slug})
def get_update_url(self):
return reverse('dj-auth:profile_update')
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(
'email address',
max_length=254,
unique=True)
is_staff = models.BooleanField(
'staff status',
default=False,
help_text=(
'Designates whether the user can '
'log into this admin site.'))
is_active = models.BooleanField(
'active',
default=True,
help_text=(
'Designates whether this user should '
'be treated as active. Unselect this '
'instead of deleting accounts.'))
USERNAME_FIELD = 'email'
def __str__(self):
return self.email
def get_absolute_url(self):
return self.profile.get_absolute_url()
<commit_msg>Ch22: Add name and joined date field to Profile.<commit_after>from django.conf import settings
from django.contrib.auth.models import (
AbstractBaseUser, PermissionsMixin)
from django.core.urlresolvers import reverse
from django.db import models
class Profile(models.Model):
user = models.OneToOneField(
settings.AUTH_USER_MODEL)
name = models.CharField(
max_length=255)
slug = models.SlugField(
max_length=30,
unique=True)
about = models.TextField()
joined = models.DateTimeField(
"Date Joined",
auto_now_add=True)
def __str__(self):
return self.user.get_username()
def get_absolute_url(self):
return reverse(
'dj-auth:public_profile',
kwargs={'slug': self.slug})
def get_update_url(self):
return reverse('dj-auth:profile_update')
class User(AbstractBaseUser, PermissionsMixin):
email = models.EmailField(
'email address',
max_length=254,
unique=True)
is_staff = models.BooleanField(
'staff status',
default=False,
help_text=(
'Designates whether the user can '
'log into this admin site.'))
is_active = models.BooleanField(
'active',
default=True,
help_text=(
'Designates whether this user should '
'be treated as active. Unselect this '
'instead of deleting accounts.'))
USERNAME_FIELD = 'email'
def __str__(self):
return self.email
def get_absolute_url(self):
return self.profile.get_absolute_url()
def get_full_name(self):
return self.profile.name
def get_short_name(self):
return self.profile.name
|
1b1a40646a08e6a01927279eb7dff8399adec740 | markups/common.py | markups/common.py | # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
| # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
| Update MathJax URL to 2.7.2 | Update MathJax URL to 2.7.2
| Python | bsd-3-clause | mitya57/pymarkups,retext-project/pymarkups | # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
Update MathJax URL to 2.7.2 | # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
| <commit_before># This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
<commit_msg>Update MathJax URL to 2.7.2<commit_after> | # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
| # This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
Update MathJax URL to 2.7.2# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
| <commit_before># This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.1/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
<commit_msg>Update MathJax URL to 2.7.2<commit_after># This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2017
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdnjs.cloudflare.com/ajax/libs/mathjax/2.7.2/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
5a05b1cbe258ef18607aabbc276aaccd5f2eb14c | admin_crud/router.py | admin_crud/router.py | from django.apps import apps as dj_apps
from django.conf.urls import include, url
from django.template.response import TemplateResponse
from django.utils.text import capfirst
class Router(object):
def __init__(self):
self.registry = []
self._urls = [
url(r'^$', self.index_view, name='index')
]
def build_groups(self, request):
apps = self.registry
models = {}
for path, controller in apps:
app_label = controller.model._meta.app_label
if app_label not in models:
models[app_label] = {
'verbose_name': dj_apps.get_app_config(app_label).verbose_name,
'admins': []
}
controller_info = {
'verbose_name': capfirst(controller.model._meta.verbose_name_plural),
}
models[app_label]['admins'].append(controller_info)
return models
def index_view(self, request, *args, **kwargs):
groups = self.build_groups(request)
return TemplateResponse(
request,
template='admin_crud/index.html',
context={
'groups': groups
}
)
def register(self, path, Controller):
controller = Controller()
self.registry.append([path, controller])
self._urls += [
url(r'^%s/' % path, include(controller.get_urls()))
]
@property
def urls(self):
return [self._urls, 'admin-crud', 'admin-crud']
| from django.apps import apps as dj_apps
from django.conf.urls import include, url
from django.template.response import TemplateResponse
from django.utils.text import capfirst
class Router(object):
def __init__(self):
self.registry = []
self._urls = [
url(r'^$', self.index_view, name='index')
]
def build_groups(self, request):
apps = self.registry
groups = {}
for path, controller in apps:
app_label = controller.model._meta.app_label
if app_label not in groups:
groups[app_label] = {
'verbose_name': dj_apps.get_app_config(app_label).verbose_name,
'admins': []
}
controller_info = {
'verbose_name': capfirst(controller.model._meta.verbose_name_plural),
}
groups[app_label]['admins'].append(controller_info)
return groups
def index_view(self, request, *args, **kwargs):
groups = self.build_groups(request)
return TemplateResponse(
request,
template='admin_crud/index.html',
context={
'groups': groups
}
)
def register(self, path, Controller):
controller = Controller()
self.registry.append([path, controller])
self._urls += [
url(r'^%s/' % path, include(controller.get_urls()))
]
@property
def urls(self):
return [self._urls, 'admin-crud', 'admin-crud']
| Rename models to groups for corresponding with method name | Rename models to groups for corresponding with method name
| Python | mit | raizanshine/django-admin-crud,raizanshine/django-admin-crud | from django.apps import apps as dj_apps
from django.conf.urls import include, url
from django.template.response import TemplateResponse
from django.utils.text import capfirst
class Router(object):
def __init__(self):
self.registry = []
self._urls = [
url(r'^$', self.index_view, name='index')
]
def build_groups(self, request):
apps = self.registry
models = {}
for path, controller in apps:
app_label = controller.model._meta.app_label
if app_label not in models:
models[app_label] = {
'verbose_name': dj_apps.get_app_config(app_label).verbose_name,
'admins': []
}
controller_info = {
'verbose_name': capfirst(controller.model._meta.verbose_name_plural),
}
models[app_label]['admins'].append(controller_info)
return models
def index_view(self, request, *args, **kwargs):
groups = self.build_groups(request)
return TemplateResponse(
request,
template='admin_crud/index.html',
context={
'groups': groups
}
)
def register(self, path, Controller):
controller = Controller()
self.registry.append([path, controller])
self._urls += [
url(r'^%s/' % path, include(controller.get_urls()))
]
@property
def urls(self):
return [self._urls, 'admin-crud', 'admin-crud']
Rename models to groups for corresponding with method name | from django.apps import apps as dj_apps
from django.conf.urls import include, url
from django.template.response import TemplateResponse
from django.utils.text import capfirst
class Router(object):
def __init__(self):
self.registry = []
self._urls = [
url(r'^$', self.index_view, name='index')
]
def build_groups(self, request):
apps = self.registry
groups = {}
for path, controller in apps:
app_label = controller.model._meta.app_label
if app_label not in groups:
groups[app_label] = {
'verbose_name': dj_apps.get_app_config(app_label).verbose_name,
'admins': []
}
controller_info = {
'verbose_name': capfirst(controller.model._meta.verbose_name_plural),
}
groups[app_label]['admins'].append(controller_info)
return groups
def index_view(self, request, *args, **kwargs):
groups = self.build_groups(request)
return TemplateResponse(
request,
template='admin_crud/index.html',
context={
'groups': groups
}
)
def register(self, path, Controller):
controller = Controller()
self.registry.append([path, controller])
self._urls += [
url(r'^%s/' % path, include(controller.get_urls()))
]
@property
def urls(self):
return [self._urls, 'admin-crud', 'admin-crud']
| <commit_before>from django.apps import apps as dj_apps
from django.conf.urls import include, url
from django.template.response import TemplateResponse
from django.utils.text import capfirst
class Router(object):
def __init__(self):
self.registry = []
self._urls = [
url(r'^$', self.index_view, name='index')
]
def build_groups(self, request):
apps = self.registry
models = {}
for path, controller in apps:
app_label = controller.model._meta.app_label
if app_label not in models:
models[app_label] = {
'verbose_name': dj_apps.get_app_config(app_label).verbose_name,
'admins': []
}
controller_info = {
'verbose_name': capfirst(controller.model._meta.verbose_name_plural),
}
models[app_label]['admins'].append(controller_info)
return models
def index_view(self, request, *args, **kwargs):
groups = self.build_groups(request)
return TemplateResponse(
request,
template='admin_crud/index.html',
context={
'groups': groups
}
)
def register(self, path, Controller):
controller = Controller()
self.registry.append([path, controller])
self._urls += [
url(r'^%s/' % path, include(controller.get_urls()))
]
@property
def urls(self):
return [self._urls, 'admin-crud', 'admin-crud']
<commit_msg>Rename models to groups for corresponding with method name<commit_after> | from django.apps import apps as dj_apps
from django.conf.urls import include, url
from django.template.response import TemplateResponse
from django.utils.text import capfirst
class Router(object):
def __init__(self):
self.registry = []
self._urls = [
url(r'^$', self.index_view, name='index')
]
def build_groups(self, request):
apps = self.registry
groups = {}
for path, controller in apps:
app_label = controller.model._meta.app_label
if app_label not in groups:
groups[app_label] = {
'verbose_name': dj_apps.get_app_config(app_label).verbose_name,
'admins': []
}
controller_info = {
'verbose_name': capfirst(controller.model._meta.verbose_name_plural),
}
groups[app_label]['admins'].append(controller_info)
return groups
def index_view(self, request, *args, **kwargs):
groups = self.build_groups(request)
return TemplateResponse(
request,
template='admin_crud/index.html',
context={
'groups': groups
}
)
def register(self, path, Controller):
controller = Controller()
self.registry.append([path, controller])
self._urls += [
url(r'^%s/' % path, include(controller.get_urls()))
]
@property
def urls(self):
return [self._urls, 'admin-crud', 'admin-crud']
| from django.apps import apps as dj_apps
from django.conf.urls import include, url
from django.template.response import TemplateResponse
from django.utils.text import capfirst
class Router(object):
def __init__(self):
self.registry = []
self._urls = [
url(r'^$', self.index_view, name='index')
]
def build_groups(self, request):
apps = self.registry
models = {}
for path, controller in apps:
app_label = controller.model._meta.app_label
if app_label not in models:
models[app_label] = {
'verbose_name': dj_apps.get_app_config(app_label).verbose_name,
'admins': []
}
controller_info = {
'verbose_name': capfirst(controller.model._meta.verbose_name_plural),
}
models[app_label]['admins'].append(controller_info)
return models
def index_view(self, request, *args, **kwargs):
groups = self.build_groups(request)
return TemplateResponse(
request,
template='admin_crud/index.html',
context={
'groups': groups
}
)
def register(self, path, Controller):
controller = Controller()
self.registry.append([path, controller])
self._urls += [
url(r'^%s/' % path, include(controller.get_urls()))
]
@property
def urls(self):
return [self._urls, 'admin-crud', 'admin-crud']
Rename models to groups for corresponding with method namefrom django.apps import apps as dj_apps
from django.conf.urls import include, url
from django.template.response import TemplateResponse
from django.utils.text import capfirst
class Router(object):
def __init__(self):
self.registry = []
self._urls = [
url(r'^$', self.index_view, name='index')
]
def build_groups(self, request):
apps = self.registry
groups = {}
for path, controller in apps:
app_label = controller.model._meta.app_label
if app_label not in groups:
groups[app_label] = {
'verbose_name': dj_apps.get_app_config(app_label).verbose_name,
'admins': []
}
controller_info = {
'verbose_name': capfirst(controller.model._meta.verbose_name_plural),
}
groups[app_label]['admins'].append(controller_info)
return groups
def index_view(self, request, *args, **kwargs):
groups = self.build_groups(request)
return TemplateResponse(
request,
template='admin_crud/index.html',
context={
'groups': groups
}
)
def register(self, path, Controller):
controller = Controller()
self.registry.append([path, controller])
self._urls += [
url(r'^%s/' % path, include(controller.get_urls()))
]
@property
def urls(self):
return [self._urls, 'admin-crud', 'admin-crud']
| <commit_before>from django.apps import apps as dj_apps
from django.conf.urls import include, url
from django.template.response import TemplateResponse
from django.utils.text import capfirst
class Router(object):
def __init__(self):
self.registry = []
self._urls = [
url(r'^$', self.index_view, name='index')
]
def build_groups(self, request):
apps = self.registry
models = {}
for path, controller in apps:
app_label = controller.model._meta.app_label
if app_label not in models:
models[app_label] = {
'verbose_name': dj_apps.get_app_config(app_label).verbose_name,
'admins': []
}
controller_info = {
'verbose_name': capfirst(controller.model._meta.verbose_name_plural),
}
models[app_label]['admins'].append(controller_info)
return models
def index_view(self, request, *args, **kwargs):
groups = self.build_groups(request)
return TemplateResponse(
request,
template='admin_crud/index.html',
context={
'groups': groups
}
)
def register(self, path, Controller):
controller = Controller()
self.registry.append([path, controller])
self._urls += [
url(r'^%s/' % path, include(controller.get_urls()))
]
@property
def urls(self):
return [self._urls, 'admin-crud', 'admin-crud']
<commit_msg>Rename models to groups for corresponding with method name<commit_after>from django.apps import apps as dj_apps
from django.conf.urls import include, url
from django.template.response import TemplateResponse
from django.utils.text import capfirst
class Router(object):
def __init__(self):
self.registry = []
self._urls = [
url(r'^$', self.index_view, name='index')
]
def build_groups(self, request):
apps = self.registry
groups = {}
for path, controller in apps:
app_label = controller.model._meta.app_label
if app_label not in groups:
groups[app_label] = {
'verbose_name': dj_apps.get_app_config(app_label).verbose_name,
'admins': []
}
controller_info = {
'verbose_name': capfirst(controller.model._meta.verbose_name_plural),
}
groups[app_label]['admins'].append(controller_info)
return groups
def index_view(self, request, *args, **kwargs):
groups = self.build_groups(request)
return TemplateResponse(
request,
template='admin_crud/index.html',
context={
'groups': groups
}
)
def register(self, path, Controller):
controller = Controller()
self.registry.append([path, controller])
self._urls += [
url(r'^%s/' % path, include(controller.get_urls()))
]
@property
def urls(self):
return [self._urls, 'admin-crud', 'admin-crud']
|
e0233b0bb0a5a92c641d06648ec9921318b9a028 | knowledge_repo/converters/pdf.py | knowledge_repo/converters/pdf.py | from ..converter import KnowledgePostConverter
from .html import HTMLConverter
class PDFConverter(KnowledgePostConverter):
'''
Use this as a template for new KnowledgePostConverters.
'''
_registry_keys = ['pdf']
@property
def dependencies(self):
# Dependencies required for this converter on top of core knowledge-repo dependencies
return ['weasyprint']
def from_file(self, filename, **opts):
raise NotImplementedError
def from_string(self, filename, **opts):
raise NotImplementedError
def to_file(self, filename, **opts):
with open(filename, 'w') as f:
f.write(self.to_string())
def to_string(self, **opts):
from weasyprint import HTML
html = HTMLConverter(self.kp).to_string()
return HTML(string=html).write_pdf()
| from ..converter import KnowledgePostConverter
from .html import HTMLConverter
class PDFConverter(KnowledgePostConverter):
'''
Use this as a template for new KnowledgePostConverters.
'''
_registry_keys = ['pdf']
@property
def dependencies(self):
# Dependencies required for this converter on top of core knowledge-repo dependencies
return ['weasyprint']
def from_file(self, filename, **opts):
raise NotImplementedError
def from_string(self, filename, **opts):
raise NotImplementedError
def to_file(self, filename, **opts):
with open(filename, 'wb') as f:
f.write(self.to_string())
def to_string(self, **opts):
from weasyprint import HTML
html = HTMLConverter(self.kp).to_string()
return HTML(string=html).write_pdf()
| Fix encoding error in Python 3. | Fix encoding error in Python 3.
| Python | apache-2.0 | airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo,airbnb/knowledge-repo | from ..converter import KnowledgePostConverter
from .html import HTMLConverter
class PDFConverter(KnowledgePostConverter):
'''
Use this as a template for new KnowledgePostConverters.
'''
_registry_keys = ['pdf']
@property
def dependencies(self):
# Dependencies required for this converter on top of core knowledge-repo dependencies
return ['weasyprint']
def from_file(self, filename, **opts):
raise NotImplementedError
def from_string(self, filename, **opts):
raise NotImplementedError
def to_file(self, filename, **opts):
with open(filename, 'w') as f:
f.write(self.to_string())
def to_string(self, **opts):
from weasyprint import HTML
html = HTMLConverter(self.kp).to_string()
return HTML(string=html).write_pdf()
Fix encoding error in Python 3. | from ..converter import KnowledgePostConverter
from .html import HTMLConverter
class PDFConverter(KnowledgePostConverter):
'''
Use this as a template for new KnowledgePostConverters.
'''
_registry_keys = ['pdf']
@property
def dependencies(self):
# Dependencies required for this converter on top of core knowledge-repo dependencies
return ['weasyprint']
def from_file(self, filename, **opts):
raise NotImplementedError
def from_string(self, filename, **opts):
raise NotImplementedError
def to_file(self, filename, **opts):
with open(filename, 'wb') as f:
f.write(self.to_string())
def to_string(self, **opts):
from weasyprint import HTML
html = HTMLConverter(self.kp).to_string()
return HTML(string=html).write_pdf()
| <commit_before>from ..converter import KnowledgePostConverter
from .html import HTMLConverter
class PDFConverter(KnowledgePostConverter):
'''
Use this as a template for new KnowledgePostConverters.
'''
_registry_keys = ['pdf']
@property
def dependencies(self):
# Dependencies required for this converter on top of core knowledge-repo dependencies
return ['weasyprint']
def from_file(self, filename, **opts):
raise NotImplementedError
def from_string(self, filename, **opts):
raise NotImplementedError
def to_file(self, filename, **opts):
with open(filename, 'w') as f:
f.write(self.to_string())
def to_string(self, **opts):
from weasyprint import HTML
html = HTMLConverter(self.kp).to_string()
return HTML(string=html).write_pdf()
<commit_msg>Fix encoding error in Python 3.<commit_after> | from ..converter import KnowledgePostConverter
from .html import HTMLConverter
class PDFConverter(KnowledgePostConverter):
'''
Use this as a template for new KnowledgePostConverters.
'''
_registry_keys = ['pdf']
@property
def dependencies(self):
# Dependencies required for this converter on top of core knowledge-repo dependencies
return ['weasyprint']
def from_file(self, filename, **opts):
raise NotImplementedError
def from_string(self, filename, **opts):
raise NotImplementedError
def to_file(self, filename, **opts):
with open(filename, 'wb') as f:
f.write(self.to_string())
def to_string(self, **opts):
from weasyprint import HTML
html = HTMLConverter(self.kp).to_string()
return HTML(string=html).write_pdf()
| from ..converter import KnowledgePostConverter
from .html import HTMLConverter
class PDFConverter(KnowledgePostConverter):
'''
Use this as a template for new KnowledgePostConverters.
'''
_registry_keys = ['pdf']
@property
def dependencies(self):
# Dependencies required for this converter on top of core knowledge-repo dependencies
return ['weasyprint']
def from_file(self, filename, **opts):
raise NotImplementedError
def from_string(self, filename, **opts):
raise NotImplementedError
def to_file(self, filename, **opts):
with open(filename, 'w') as f:
f.write(self.to_string())
def to_string(self, **opts):
from weasyprint import HTML
html = HTMLConverter(self.kp).to_string()
return HTML(string=html).write_pdf()
Fix encoding error in Python 3.from ..converter import KnowledgePostConverter
from .html import HTMLConverter
class PDFConverter(KnowledgePostConverter):
'''
Use this as a template for new KnowledgePostConverters.
'''
_registry_keys = ['pdf']
@property
def dependencies(self):
# Dependencies required for this converter on top of core knowledge-repo dependencies
return ['weasyprint']
def from_file(self, filename, **opts):
raise NotImplementedError
def from_string(self, filename, **opts):
raise NotImplementedError
def to_file(self, filename, **opts):
with open(filename, 'wb') as f:
f.write(self.to_string())
def to_string(self, **opts):
from weasyprint import HTML
html = HTMLConverter(self.kp).to_string()
return HTML(string=html).write_pdf()
| <commit_before>from ..converter import KnowledgePostConverter
from .html import HTMLConverter
class PDFConverter(KnowledgePostConverter):
'''
Use this as a template for new KnowledgePostConverters.
'''
_registry_keys = ['pdf']
@property
def dependencies(self):
# Dependencies required for this converter on top of core knowledge-repo dependencies
return ['weasyprint']
def from_file(self, filename, **opts):
raise NotImplementedError
def from_string(self, filename, **opts):
raise NotImplementedError
def to_file(self, filename, **opts):
with open(filename, 'w') as f:
f.write(self.to_string())
def to_string(self, **opts):
from weasyprint import HTML
html = HTMLConverter(self.kp).to_string()
return HTML(string=html).write_pdf()
<commit_msg>Fix encoding error in Python 3.<commit_after>from ..converter import KnowledgePostConverter
from .html import HTMLConverter
class PDFConverter(KnowledgePostConverter):
'''
Use this as a template for new KnowledgePostConverters.
'''
_registry_keys = ['pdf']
@property
def dependencies(self):
# Dependencies required for this converter on top of core knowledge-repo dependencies
return ['weasyprint']
def from_file(self, filename, **opts):
raise NotImplementedError
def from_string(self, filename, **opts):
raise NotImplementedError
def to_file(self, filename, **opts):
with open(filename, 'wb') as f:
f.write(self.to_string())
def to_string(self, **opts):
from weasyprint import HTML
html = HTMLConverter(self.kp).to_string()
return HTML(string=html).write_pdf()
|
4c43e7588d8747e26901400ce67c1485ba98bb92 | l10n_br_purchase/__manifest__.py | l10n_br_purchase/__manifest__.py | # -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localization Purchase',
'license': 'AGPL-3',
'category': 'Localisation',
'author': 'Akretion, Odoo Community Association (OCA)',
'website': 'http://odoo-brasil.org',
'version': '8.0.1.0.0',
'depends': [
'l10n_br_stock_account',
'account_fiscal_position_rule_purchase',
],
'data': [
'data/l10n_br_purchase_data.xml',
'views/purchase_view.xml',
'views/res_company_view.xml',
'security/ir.model.access.csv',
'security/l10n_br_purchase_security.xml',
],
'demo': [
# FIXME
# 'test/purchase_order_demo.yml'
],
'installable': False,
'auto_install': False,
}
| # -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localization Purchase',
'license': 'AGPL-3',
'category': 'Localisation',
'author': 'Akretion, Odoo Community Association (OCA)',
'website': 'http://odoo-brasil.org',
'version': '10.0.1.0.0',
'depends': [
'l10n_br_stock_account',
'account_fiscal_position_rule_purchase',
],
'data': [
'data/l10n_br_purchase_data.xml',
'views/purchase_view.xml',
'views/res_company_view.xml',
'security/ir.model.access.csv',
'security/l10n_br_purchase_security.xml',
],
'demo': [
# FIXME
# 'test/purchase_order_demo.yml'
],
'installable': False,
'auto_install': False,
}
| Bump module version to 10.0.1.0.0 | [MIG] Bump module version to 10.0.1.0.0
| Python | agpl-3.0 | OCA/l10n-brazil,akretion/l10n-brazil,akretion/l10n-brazil,OCA/l10n-brazil,OCA/l10n-brazil,akretion/l10n-brazil | # -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localization Purchase',
'license': 'AGPL-3',
'category': 'Localisation',
'author': 'Akretion, Odoo Community Association (OCA)',
'website': 'http://odoo-brasil.org',
'version': '8.0.1.0.0',
'depends': [
'l10n_br_stock_account',
'account_fiscal_position_rule_purchase',
],
'data': [
'data/l10n_br_purchase_data.xml',
'views/purchase_view.xml',
'views/res_company_view.xml',
'security/ir.model.access.csv',
'security/l10n_br_purchase_security.xml',
],
'demo': [
# FIXME
# 'test/purchase_order_demo.yml'
],
'installable': False,
'auto_install': False,
}
[MIG] Bump module version to 10.0.1.0.0 | # -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localization Purchase',
'license': 'AGPL-3',
'category': 'Localisation',
'author': 'Akretion, Odoo Community Association (OCA)',
'website': 'http://odoo-brasil.org',
'version': '10.0.1.0.0',
'depends': [
'l10n_br_stock_account',
'account_fiscal_position_rule_purchase',
],
'data': [
'data/l10n_br_purchase_data.xml',
'views/purchase_view.xml',
'views/res_company_view.xml',
'security/ir.model.access.csv',
'security/l10n_br_purchase_security.xml',
],
'demo': [
# FIXME
# 'test/purchase_order_demo.yml'
],
'installable': False,
'auto_install': False,
}
| <commit_before># -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localization Purchase',
'license': 'AGPL-3',
'category': 'Localisation',
'author': 'Akretion, Odoo Community Association (OCA)',
'website': 'http://odoo-brasil.org',
'version': '8.0.1.0.0',
'depends': [
'l10n_br_stock_account',
'account_fiscal_position_rule_purchase',
],
'data': [
'data/l10n_br_purchase_data.xml',
'views/purchase_view.xml',
'views/res_company_view.xml',
'security/ir.model.access.csv',
'security/l10n_br_purchase_security.xml',
],
'demo': [
# FIXME
# 'test/purchase_order_demo.yml'
],
'installable': False,
'auto_install': False,
}
<commit_msg>[MIG] Bump module version to 10.0.1.0.0<commit_after> | # -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localization Purchase',
'license': 'AGPL-3',
'category': 'Localisation',
'author': 'Akretion, Odoo Community Association (OCA)',
'website': 'http://odoo-brasil.org',
'version': '10.0.1.0.0',
'depends': [
'l10n_br_stock_account',
'account_fiscal_position_rule_purchase',
],
'data': [
'data/l10n_br_purchase_data.xml',
'views/purchase_view.xml',
'views/res_company_view.xml',
'security/ir.model.access.csv',
'security/l10n_br_purchase_security.xml',
],
'demo': [
# FIXME
# 'test/purchase_order_demo.yml'
],
'installable': False,
'auto_install': False,
}
| # -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localization Purchase',
'license': 'AGPL-3',
'category': 'Localisation',
'author': 'Akretion, Odoo Community Association (OCA)',
'website': 'http://odoo-brasil.org',
'version': '8.0.1.0.0',
'depends': [
'l10n_br_stock_account',
'account_fiscal_position_rule_purchase',
],
'data': [
'data/l10n_br_purchase_data.xml',
'views/purchase_view.xml',
'views/res_company_view.xml',
'security/ir.model.access.csv',
'security/l10n_br_purchase_security.xml',
],
'demo': [
# FIXME
# 'test/purchase_order_demo.yml'
],
'installable': False,
'auto_install': False,
}
[MIG] Bump module version to 10.0.1.0.0# -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localization Purchase',
'license': 'AGPL-3',
'category': 'Localisation',
'author': 'Akretion, Odoo Community Association (OCA)',
'website': 'http://odoo-brasil.org',
'version': '10.0.1.0.0',
'depends': [
'l10n_br_stock_account',
'account_fiscal_position_rule_purchase',
],
'data': [
'data/l10n_br_purchase_data.xml',
'views/purchase_view.xml',
'views/res_company_view.xml',
'security/ir.model.access.csv',
'security/l10n_br_purchase_security.xml',
],
'demo': [
# FIXME
# 'test/purchase_order_demo.yml'
],
'installable': False,
'auto_install': False,
}
| <commit_before># -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localization Purchase',
'license': 'AGPL-3',
'category': 'Localisation',
'author': 'Akretion, Odoo Community Association (OCA)',
'website': 'http://odoo-brasil.org',
'version': '8.0.1.0.0',
'depends': [
'l10n_br_stock_account',
'account_fiscal_position_rule_purchase',
],
'data': [
'data/l10n_br_purchase_data.xml',
'views/purchase_view.xml',
'views/res_company_view.xml',
'security/ir.model.access.csv',
'security/l10n_br_purchase_security.xml',
],
'demo': [
# FIXME
# 'test/purchase_order_demo.yml'
],
'installable': False,
'auto_install': False,
}
<commit_msg>[MIG] Bump module version to 10.0.1.0.0<commit_after># -*- coding: utf-8 -*-
# Copyright (C) 2009 Renato Lima - Akretion
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
{
'name': 'Brazilian Localization Purchase',
'license': 'AGPL-3',
'category': 'Localisation',
'author': 'Akretion, Odoo Community Association (OCA)',
'website': 'http://odoo-brasil.org',
'version': '10.0.1.0.0',
'depends': [
'l10n_br_stock_account',
'account_fiscal_position_rule_purchase',
],
'data': [
'data/l10n_br_purchase_data.xml',
'views/purchase_view.xml',
'views/res_company_view.xml',
'security/ir.model.access.csv',
'security/l10n_br_purchase_security.xml',
],
'demo': [
# FIXME
# 'test/purchase_order_demo.yml'
],
'installable': False,
'auto_install': False,
}
|
6caca3259f4ec8f298b1d35f15e4492efbcff6b1 | tests/basics/dict1.py | tests/basics/dict1.py | # basic dictionary
d = {}
print(d)
d[2] = 123
print(d)
d = {1:2}
d[3] = 3
print(len(d), d[1], d[3])
d[1] = 0
print(len(d), d[1], d[3])
print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}')
x = 1
while x < 100:
d[x] = x
x += 1
print(d[50])
# equality operator on dicts of different size
print({} == {1:1})
# equality operator on dicts of same size but with different keys
print({1:1} == {2:1})
# value not found
try:
{}[0]
except KeyError:
print('KeyError')
# unsupported unary op
try:
+{}
except TypeError:
print('TypeError')
# unsupported binary op
try:
{} + {}
except TypeError:
print('TypeError')
| # basic dictionary
d = {}
print(d)
d[2] = 123
print(d)
d = {1:2}
d[3] = 3
print(len(d), d[1], d[3])
d[1] = 0
print(len(d), d[1], d[3])
print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}')
x = 1
while x < 100:
d[x] = x
x += 1
print(d[50])
# equality operator on dicts of different size
print({} == {1:1})
# equality operator on dicts of same size but with different keys
print({1:1} == {2:1})
# value not found
try:
{}[0]
except KeyError as er:
print('KeyError', er, repr(er), er.args)
# unsupported unary op
try:
+{}
except TypeError:
print('TypeError')
# unsupported binary op
try:
{} + {}
except TypeError:
print('TypeError')
| Add test to print full KeyError exc from failed dict lookup. | tests: Add test to print full KeyError exc from failed dict lookup.
| Python | mit | jmarcelino/pycom-micropython,alex-march/micropython,hiway/micropython,AriZuu/micropython,chrisdearman/micropython,kerneltask/micropython,jmarcelino/pycom-micropython,selste/micropython,tuc-osg/micropython,blazewicz/micropython,oopy/micropython,ryannathans/micropython,micropython/micropython-esp32,trezor/micropython,infinnovation/micropython,MrSurly/micropython,puuu/micropython,adafruit/micropython,torwag/micropython,pfalcon/micropython,micropython/micropython-esp32,AriZuu/micropython,ryannathans/micropython,pozetroninc/micropython,tuc-osg/micropython,pozetroninc/micropython,cwyark/micropython,dmazzella/micropython,pramasoul/micropython,tobbad/micropython,lowRISC/micropython,HenrikSolver/micropython,TDAbboud/micropython,pramasoul/micropython,infinnovation/micropython,puuu/micropython,blazewicz/micropython,SHA2017-badge/micropython-esp32,infinnovation/micropython,selste/micropython,AriZuu/micropython,adafruit/micropython,swegener/micropython,mhoffma/micropython,adafruit/circuitpython,mhoffma/micropython,oopy/micropython,deshipu/micropython,trezor/micropython,PappaPeppar/micropython,jmarcelino/pycom-micropython,pfalcon/micropython,tobbad/micropython,ryannathans/micropython,tobbad/micropython,bvernoux/micropython,chrisdearman/micropython,pozetroninc/micropython,blazewicz/micropython,HenrikSolver/micropython,hiway/micropython,torwag/micropython,ryannathans/micropython,AriZuu/micropython,henriknelson/micropython,henriknelson/micropython,mhoffma/micropython,dmazzella/micropython,PappaPeppar/micropython,Timmenem/micropython,mhoffma/micropython,blazewicz/micropython,infinnovation/micropython,oopy/micropython,tralamazza/micropython,dxxb/micropython,TDAbboud/micropython,puuu/micropython,chrisdearman/micropython,PappaPeppar/micropython,Timmenem/micropython,alex-march/micropython,pozetroninc/micropython,TDAbboud/micropython,Peetz0r/micropython-esp32,HenrikSolver/micropython,pramasoul/micropython,TDAbboud/micropython,HenrikSolver/micropython,AriZuu/micropython,oo
py/micropython,alex-march/micropython,pramasoul/micropython,tobbad/micropython,alex-robbins/micropython,kerneltask/micropython,pfalcon/micropython,henriknelson/micropython,pfalcon/micropython,adafruit/circuitpython,torwag/micropython,Timmenem/micropython,cwyark/micropython,tuc-osg/micropython,tuc-osg/micropython,MrSurly/micropython,toolmacher/micropython,SHA2017-badge/micropython-esp32,henriknelson/micropython,adafruit/circuitpython,pozetroninc/micropython,micropython/micropython-esp32,alex-robbins/micropython,alex-robbins/micropython,adafruit/micropython,SHA2017-badge/micropython-esp32,dmazzella/micropython,Peetz0r/micropython-esp32,puuu/micropython,swegener/micropython,dxxb/micropython,tuc-osg/micropython,adafruit/micropython,lowRISC/micropython,MrSurly/micropython-esp32,micropython/micropython-esp32,MrSurly/micropython-esp32,hosaka/micropython,bvernoux/micropython,selste/micropython,PappaPeppar/micropython,matthewelse/micropython,matthewelse/micropython,trezor/micropython,MrSurly/micropython-esp32,hiway/micropython,SHA2017-badge/micropython-esp32,MrSurly/micropython,adafruit/circuitpython,hiway/micropython,blazewicz/micropython,kerneltask/micropython,henriknelson/micropython,Peetz0r/micropython-esp32,selste/micropython,kerneltask/micropython,MrSurly/micropython,micropython/micropython-esp32,alex-march/micropython,pfalcon/micropython,matthewelse/micropython,alex-robbins/micropython,toolmacher/micropython,puuu/micropython,toolmacher/micropython,tralamazza/micropython,torwag/micropython,hosaka/micropython,hosaka/micropython,alex-march/micropython,trezor/micropython,Timmenem/micropython,hosaka/micropython,ryannathans/micropython,swegener/micropython,jmarcelino/pycom-micropython,mhoffma/micropython,Peetz0r/micropython-esp32,dxxb/micropython,Peetz0r/micropython-esp32,swegener/micropython,toolmacher/micropython,torwag/micropython,deshipu/micropython,deshipu/micropython,adafruit/circuitpython,dxxb/micropython,lowRISC/micropython,cwyark/micropython,Timmenem/micropython,ma
tthewelse/micropython,MrSurly/micropython-esp32,tralamazza/micropython,oopy/micropython,MrSurly/micropython,chrisdearman/micropython,dxxb/micropython,tralamazza/micropython,bvernoux/micropython,hiway/micropython,deshipu/micropython,matthewelse/micropython,toolmacher/micropython,hosaka/micropython,HenrikSolver/micropython,TDAbboud/micropython,tobbad/micropython,swegener/micropython,adafruit/circuitpython,infinnovation/micropython,cwyark/micropython,bvernoux/micropython,adafruit/micropython,trezor/micropython,MrSurly/micropython-esp32,dmazzella/micropython,lowRISC/micropython,kerneltask/micropython,SHA2017-badge/micropython-esp32,lowRISC/micropython,deshipu/micropython,chrisdearman/micropython,matthewelse/micropython,cwyark/micropython,selste/micropython,alex-robbins/micropython,PappaPeppar/micropython,jmarcelino/pycom-micropython,pramasoul/micropython,bvernoux/micropython | # basic dictionary
d = {}
print(d)
d[2] = 123
print(d)
d = {1:2}
d[3] = 3
print(len(d), d[1], d[3])
d[1] = 0
print(len(d), d[1], d[3])
print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}')
x = 1
while x < 100:
d[x] = x
x += 1
print(d[50])
# equality operator on dicts of different size
print({} == {1:1})
# equality operator on dicts of same size but with different keys
print({1:1} == {2:1})
# value not found
try:
{}[0]
except KeyError:
print('KeyError')
# unsupported unary op
try:
+{}
except TypeError:
print('TypeError')
# unsupported binary op
try:
{} + {}
except TypeError:
print('TypeError')
tests: Add test to print full KeyError exc from failed dict lookup. | # basic dictionary
d = {}
print(d)
d[2] = 123
print(d)
d = {1:2}
d[3] = 3
print(len(d), d[1], d[3])
d[1] = 0
print(len(d), d[1], d[3])
print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}')
x = 1
while x < 100:
d[x] = x
x += 1
print(d[50])
# equality operator on dicts of different size
print({} == {1:1})
# equality operator on dicts of same size but with different keys
print({1:1} == {2:1})
# value not found
try:
{}[0]
except KeyError as er:
print('KeyError', er, repr(er), er.args)
# unsupported unary op
try:
+{}
except TypeError:
print('TypeError')
# unsupported binary op
try:
{} + {}
except TypeError:
print('TypeError')
| <commit_before># basic dictionary
d = {}
print(d)
d[2] = 123
print(d)
d = {1:2}
d[3] = 3
print(len(d), d[1], d[3])
d[1] = 0
print(len(d), d[1], d[3])
print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}')
x = 1
while x < 100:
d[x] = x
x += 1
print(d[50])
# equality operator on dicts of different size
print({} == {1:1})
# equality operator on dicts of same size but with different keys
print({1:1} == {2:1})
# value not found
try:
{}[0]
except KeyError:
print('KeyError')
# unsupported unary op
try:
+{}
except TypeError:
print('TypeError')
# unsupported binary op
try:
{} + {}
except TypeError:
print('TypeError')
<commit_msg>tests: Add test to print full KeyError exc from failed dict lookup.<commit_after> | # basic dictionary
d = {}
print(d)
d[2] = 123
print(d)
d = {1:2}
d[3] = 3
print(len(d), d[1], d[3])
d[1] = 0
print(len(d), d[1], d[3])
print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}')
x = 1
while x < 100:
d[x] = x
x += 1
print(d[50])
# equality operator on dicts of different size
print({} == {1:1})
# equality operator on dicts of same size but with different keys
print({1:1} == {2:1})
# value not found
try:
{}[0]
except KeyError as er:
print('KeyError', er, repr(er), er.args)
# unsupported unary op
try:
+{}
except TypeError:
print('TypeError')
# unsupported binary op
try:
{} + {}
except TypeError:
print('TypeError')
| # basic dictionary
d = {}
print(d)
d[2] = 123
print(d)
d = {1:2}
d[3] = 3
print(len(d), d[1], d[3])
d[1] = 0
print(len(d), d[1], d[3])
print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}')
x = 1
while x < 100:
d[x] = x
x += 1
print(d[50])
# equality operator on dicts of different size
print({} == {1:1})
# equality operator on dicts of same size but with different keys
print({1:1} == {2:1})
# value not found
try:
{}[0]
except KeyError:
print('KeyError')
# unsupported unary op
try:
+{}
except TypeError:
print('TypeError')
# unsupported binary op
try:
{} + {}
except TypeError:
print('TypeError')
tests: Add test to print full KeyError exc from failed dict lookup.# basic dictionary
d = {}
print(d)
d[2] = 123
print(d)
d = {1:2}
d[3] = 3
print(len(d), d[1], d[3])
d[1] = 0
print(len(d), d[1], d[3])
print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}')
x = 1
while x < 100:
d[x] = x
x += 1
print(d[50])
# equality operator on dicts of different size
print({} == {1:1})
# equality operator on dicts of same size but with different keys
print({1:1} == {2:1})
# value not found
try:
{}[0]
except KeyError as er:
print('KeyError', er, repr(er), er.args)
# unsupported unary op
try:
+{}
except TypeError:
print('TypeError')
# unsupported binary op
try:
{} + {}
except TypeError:
print('TypeError')
| <commit_before># basic dictionary
d = {}
print(d)
d[2] = 123
print(d)
d = {1:2}
d[3] = 3
print(len(d), d[1], d[3])
d[1] = 0
print(len(d), d[1], d[3])
print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}')
x = 1
while x < 100:
d[x] = x
x += 1
print(d[50])
# equality operator on dicts of different size
print({} == {1:1})
# equality operator on dicts of same size but with different keys
print({1:1} == {2:1})
# value not found
try:
{}[0]
except KeyError:
print('KeyError')
# unsupported unary op
try:
+{}
except TypeError:
print('TypeError')
# unsupported binary op
try:
{} + {}
except TypeError:
print('TypeError')
<commit_msg>tests: Add test to print full KeyError exc from failed dict lookup.<commit_after># basic dictionary
d = {}
print(d)
d[2] = 123
print(d)
d = {1:2}
d[3] = 3
print(len(d), d[1], d[3])
d[1] = 0
print(len(d), d[1], d[3])
print(str(d) == '{1: 0, 3: 3}' or str(d) == '{3: 3, 1: 0}')
x = 1
while x < 100:
d[x] = x
x += 1
print(d[50])
# equality operator on dicts of different size
print({} == {1:1})
# equality operator on dicts of same size but with different keys
print({1:1} == {2:1})
# value not found
try:
{}[0]
except KeyError as er:
print('KeyError', er, repr(er), er.args)
# unsupported unary op
try:
+{}
except TypeError:
print('TypeError')
# unsupported binary op
try:
{} + {}
except TypeError:
print('TypeError')
|
ad346d73a27c021de131ec871dc19da2e17854ee | armstrong/dev/virtualdjango/base.py | armstrong/dev/virtualdjango/base.py | import os, sys
DEFAULT_SETTINGS = {
'DATABASE_ENGINE': 'sqlite3',
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'mydatabase'
}
},
}
class VirtualDjango(object):
def __init__(self,
caller=sys.modules['__main__'],
default_settings=DEFAULT_SETTINGS):
self.caller = caller
self.default_settings = default_settings
def configure_settings(self, customizations, reset=True):
# Django expects a `DATABASE_ENGINE` value
custom_settings = self.default_settings
custom_settings.update(customizations)
settings = self.settings
if reset:
settings._wrapped = None
settings.configure(**custom_settings)
@property
def settings(self):
from django.conf import settings
return settings
@property
def call_command(self):
from django.core.management import call_command
return call_command
def run(self, my_settings):
if hasattr(self.caller, 'setUp'):
self.caller.setUp()
self.configure_settings(my_settings)
return self.call_command
| import django
import os, sys
DEFAULT_SETTINGS = {
'DATABASE_ENGINE': 'sqlite3',
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'mydatabase'
}
},
}
class VirtualDjango(object):
def __init__(self,
caller=sys.modules['__main__'],
default_settings=DEFAULT_SETTINGS):
self.caller = caller
self.default_settings = default_settings
def configure_settings(self, customizations, reset=True):
# Django expects a `DATABASE_ENGINE` value
custom_settings = self.default_settings
custom_settings.update(customizations)
settings = self.settings
if reset:
self.reset_settings(settings)
settings.configure(**custom_settings)
def reset_settings(self, settings):
if django.VERSION[:2] == (1, 3):
settings._wrapped = None
return
# This is the way to reset settings going forward
from django.utils.functional import empty
settings._wrapped = empty
@property
def settings(self):
from django.conf import settings
return settings
@property
def call_command(self):
from django.core.management import call_command
return call_command
def run(self, my_settings):
if hasattr(self.caller, 'setUp'):
self.caller.setUp()
self.configure_settings(my_settings)
return self.call_command
| Adjust the settings reset to work with Django 1.4 | Adjust the settings reset to work with Django 1.4
Django changed the `settings._wrapped` value from `None` to the special
`empty` object. This change maintains backwards compatibility for
1.3.X, while using the new method for all other versions of Django.
| Python | apache-2.0 | armstrong/armstrong.dev | import os, sys
DEFAULT_SETTINGS = {
'DATABASE_ENGINE': 'sqlite3',
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'mydatabase'
}
},
}
class VirtualDjango(object):
def __init__(self,
caller=sys.modules['__main__'],
default_settings=DEFAULT_SETTINGS):
self.caller = caller
self.default_settings = default_settings
def configure_settings(self, customizations, reset=True):
# Django expects a `DATABASE_ENGINE` value
custom_settings = self.default_settings
custom_settings.update(customizations)
settings = self.settings
if reset:
settings._wrapped = None
settings.configure(**custom_settings)
@property
def settings(self):
from django.conf import settings
return settings
@property
def call_command(self):
from django.core.management import call_command
return call_command
def run(self, my_settings):
if hasattr(self.caller, 'setUp'):
self.caller.setUp()
self.configure_settings(my_settings)
return self.call_command
Adjust the settings reset to work with Django 1.4
Django changed the `settings._wrapped` value from `None` to the special
`empty` object. This change maintains backwards compatibility for
1.3.X, while using the new method for all other versions of Django. | import django
import os, sys
DEFAULT_SETTINGS = {
'DATABASE_ENGINE': 'sqlite3',
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'mydatabase'
}
},
}
class VirtualDjango(object):
def __init__(self,
caller=sys.modules['__main__'],
default_settings=DEFAULT_SETTINGS):
self.caller = caller
self.default_settings = default_settings
def configure_settings(self, customizations, reset=True):
# Django expects a `DATABASE_ENGINE` value
custom_settings = self.default_settings
custom_settings.update(customizations)
settings = self.settings
if reset:
self.reset_settings(settings)
settings.configure(**custom_settings)
def reset_settings(self, settings):
if django.VERSION[:2] == (1, 3):
settings._wrapped = None
return
# This is the way to reset settings going forward
from django.utils.functional import empty
settings._wrapped = empty
@property
def settings(self):
from django.conf import settings
return settings
@property
def call_command(self):
from django.core.management import call_command
return call_command
def run(self, my_settings):
if hasattr(self.caller, 'setUp'):
self.caller.setUp()
self.configure_settings(my_settings)
return self.call_command
| <commit_before>import os, sys
DEFAULT_SETTINGS = {
'DATABASE_ENGINE': 'sqlite3',
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'mydatabase'
}
},
}
class VirtualDjango(object):
def __init__(self,
caller=sys.modules['__main__'],
default_settings=DEFAULT_SETTINGS):
self.caller = caller
self.default_settings = default_settings
def configure_settings(self, customizations, reset=True):
# Django expects a `DATABASE_ENGINE` value
custom_settings = self.default_settings
custom_settings.update(customizations)
settings = self.settings
if reset:
settings._wrapped = None
settings.configure(**custom_settings)
@property
def settings(self):
from django.conf import settings
return settings
@property
def call_command(self):
from django.core.management import call_command
return call_command
def run(self, my_settings):
if hasattr(self.caller, 'setUp'):
self.caller.setUp()
self.configure_settings(my_settings)
return self.call_command
<commit_msg>Adjust the settings reset to work with Django 1.4
Django changed the `settings._wrapped` value from `None` to the special
`empty` object. This change maintains backwards compatibility for
1.3.X, while using the new method for all other versions of Django.<commit_after> | import django
import os, sys
DEFAULT_SETTINGS = {
'DATABASE_ENGINE': 'sqlite3',
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'mydatabase'
}
},
}
class VirtualDjango(object):
def __init__(self,
caller=sys.modules['__main__'],
default_settings=DEFAULT_SETTINGS):
self.caller = caller
self.default_settings = default_settings
def configure_settings(self, customizations, reset=True):
# Django expects a `DATABASE_ENGINE` value
custom_settings = self.default_settings
custom_settings.update(customizations)
settings = self.settings
if reset:
self.reset_settings(settings)
settings.configure(**custom_settings)
def reset_settings(self, settings):
if django.VERSION[:2] == (1, 3):
settings._wrapped = None
return
# This is the way to reset settings going forward
from django.utils.functional import empty
settings._wrapped = empty
@property
def settings(self):
from django.conf import settings
return settings
@property
def call_command(self):
from django.core.management import call_command
return call_command
def run(self, my_settings):
if hasattr(self.caller, 'setUp'):
self.caller.setUp()
self.configure_settings(my_settings)
return self.call_command
| import os, sys
DEFAULT_SETTINGS = {
'DATABASE_ENGINE': 'sqlite3',
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'mydatabase'
}
},
}
class VirtualDjango(object):
def __init__(self,
caller=sys.modules['__main__'],
default_settings=DEFAULT_SETTINGS):
self.caller = caller
self.default_settings = default_settings
def configure_settings(self, customizations, reset=True):
# Django expects a `DATABASE_ENGINE` value
custom_settings = self.default_settings
custom_settings.update(customizations)
settings = self.settings
if reset:
settings._wrapped = None
settings.configure(**custom_settings)
@property
def settings(self):
from django.conf import settings
return settings
@property
def call_command(self):
from django.core.management import call_command
return call_command
def run(self, my_settings):
if hasattr(self.caller, 'setUp'):
self.caller.setUp()
self.configure_settings(my_settings)
return self.call_command
Adjust the settings reset to work with Django 1.4
Django changed the `settings._wrapped` value from `None` to the special
`empty` object. This change maintains backwards compatibility for
1.3.X, while using the new method for all other versions of Django.import django
import os, sys
DEFAULT_SETTINGS = {
'DATABASE_ENGINE': 'sqlite3',
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'mydatabase'
}
},
}
class VirtualDjango(object):
def __init__(self,
caller=sys.modules['__main__'],
default_settings=DEFAULT_SETTINGS):
self.caller = caller
self.default_settings = default_settings
def configure_settings(self, customizations, reset=True):
# Django expects a `DATABASE_ENGINE` value
custom_settings = self.default_settings
custom_settings.update(customizations)
settings = self.settings
if reset:
self.reset_settings(settings)
settings.configure(**custom_settings)
def reset_settings(self, settings):
if django.VERSION[:2] == (1, 3):
settings._wrapped = None
return
# This is the way to reset settings going forward
from django.utils.functional import empty
settings._wrapped = empty
@property
def settings(self):
from django.conf import settings
return settings
@property
def call_command(self):
from django.core.management import call_command
return call_command
def run(self, my_settings):
if hasattr(self.caller, 'setUp'):
self.caller.setUp()
self.configure_settings(my_settings)
return self.call_command
| <commit_before>import os, sys
DEFAULT_SETTINGS = {
'DATABASE_ENGINE': 'sqlite3',
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'mydatabase'
}
},
}
class VirtualDjango(object):
def __init__(self,
caller=sys.modules['__main__'],
default_settings=DEFAULT_SETTINGS):
self.caller = caller
self.default_settings = default_settings
def configure_settings(self, customizations, reset=True):
# Django expects a `DATABASE_ENGINE` value
custom_settings = self.default_settings
custom_settings.update(customizations)
settings = self.settings
if reset:
settings._wrapped = None
settings.configure(**custom_settings)
@property
def settings(self):
from django.conf import settings
return settings
@property
def call_command(self):
from django.core.management import call_command
return call_command
def run(self, my_settings):
if hasattr(self.caller, 'setUp'):
self.caller.setUp()
self.configure_settings(my_settings)
return self.call_command
<commit_msg>Adjust the settings reset to work with Django 1.4
Django changed the `settings._wrapped` value from `None` to the special
`empty` object. This change maintains backwards compatibility for
1.3.X, while using the new method for all other versions of Django.<commit_after>import django
import os, sys
DEFAULT_SETTINGS = {
'DATABASE_ENGINE': 'sqlite3',
'DATABASES': {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'mydatabase'
}
},
}
class VirtualDjango(object):
def __init__(self,
caller=sys.modules['__main__'],
default_settings=DEFAULT_SETTINGS):
self.caller = caller
self.default_settings = default_settings
def configure_settings(self, customizations, reset=True):
# Django expects a `DATABASE_ENGINE` value
custom_settings = self.default_settings
custom_settings.update(customizations)
settings = self.settings
if reset:
self.reset_settings(settings)
settings.configure(**custom_settings)
def reset_settings(self, settings):
if django.VERSION[:2] == (1, 3):
settings._wrapped = None
return
# This is the way to reset settings going forward
from django.utils.functional import empty
settings._wrapped = empty
@property
def settings(self):
from django.conf import settings
return settings
@property
def call_command(self):
from django.core.management import call_command
return call_command
def run(self, my_settings):
if hasattr(self.caller, 'setUp'):
self.caller.setUp()
self.configure_settings(my_settings)
return self.call_command
|
1e4bdeeb1156c4ff65d261ba261f594be57bc30f | tests/test_appinfo.py | tests/test_appinfo.py | import io
import os
import pickle
import pytest
from steamfiles import appinfo
test_file_name = os.path.join(os.path.dirname(__file__), 'test_data/appinfo.vdf')
@pytest.yield_fixture
def vdf_data():
with open(test_file_name, 'rb') as f:
yield f.read()
@pytest.mark.usefixtures('vdf_data')
def test_load_dump(vdf_data):
with open(test_file_name, 'rb') as in_file:
out_file = io.BytesIO()
obj = appinfo.load(in_file)
appinfo.dump(obj, out_file)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == vdf_data
def test_loads_wrong_type():
with pytest.raises(TypeError):
appinfo.loads('JustTestData')
| import io
import os
import pickle
import pytest
from steamfiles import appinfo
test_file_name = os.path.join(os.path.dirname(__file__), 'test_data/appinfo.vdf')
@pytest.yield_fixture
def vdf_data():
with open(test_file_name, 'rb') as f:
yield f.read()
@pytest.mark.usefixtures('vdf_data')
def test_loads_dumps(vdf_data):
assert appinfo.dumps(appinfo.loads(vdf_data)) == vdf_data
@pytest.mark.usefixtures('vdf_data')
def test_load_dump(vdf_data):
with open(test_file_name, 'rb') as in_file:
out_file = io.BytesIO()
obj = appinfo.load(in_file)
appinfo.dump(obj, out_file)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == vdf_data
def test_loads_wrong_type():
with pytest.raises(TypeError):
appinfo.loads('JustTestData')
| Add loads-dumps test for Appinfo | Add loads-dumps test for Appinfo
| Python | mit | leovp/steamfiles | import io
import os
import pickle
import pytest
from steamfiles import appinfo
test_file_name = os.path.join(os.path.dirname(__file__), 'test_data/appinfo.vdf')
@pytest.yield_fixture
def vdf_data():
with open(test_file_name, 'rb') as f:
yield f.read()
@pytest.mark.usefixtures('vdf_data')
def test_load_dump(vdf_data):
with open(test_file_name, 'rb') as in_file:
out_file = io.BytesIO()
obj = appinfo.load(in_file)
appinfo.dump(obj, out_file)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == vdf_data
def test_loads_wrong_type():
with pytest.raises(TypeError):
appinfo.loads('JustTestData')
Add loads-dumps test for Appinfo | import io
import os
import pickle
import pytest
from steamfiles import appinfo
test_file_name = os.path.join(os.path.dirname(__file__), 'test_data/appinfo.vdf')
@pytest.yield_fixture
def vdf_data():
with open(test_file_name, 'rb') as f:
yield f.read()
@pytest.mark.usefixtures('vdf_data')
def test_loads_dumps(vdf_data):
assert appinfo.dumps(appinfo.loads(vdf_data)) == vdf_data
@pytest.mark.usefixtures('vdf_data')
def test_load_dump(vdf_data):
with open(test_file_name, 'rb') as in_file:
out_file = io.BytesIO()
obj = appinfo.load(in_file)
appinfo.dump(obj, out_file)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == vdf_data
def test_loads_wrong_type():
with pytest.raises(TypeError):
appinfo.loads('JustTestData')
| <commit_before>import io
import os
import pickle
import pytest
from steamfiles import appinfo
test_file_name = os.path.join(os.path.dirname(__file__), 'test_data/appinfo.vdf')
@pytest.yield_fixture
def vdf_data():
with open(test_file_name, 'rb') as f:
yield f.read()
@pytest.mark.usefixtures('vdf_data')
def test_load_dump(vdf_data):
with open(test_file_name, 'rb') as in_file:
out_file = io.BytesIO()
obj = appinfo.load(in_file)
appinfo.dump(obj, out_file)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == vdf_data
def test_loads_wrong_type():
with pytest.raises(TypeError):
appinfo.loads('JustTestData')
<commit_msg>Add loads-dumps test for Appinfo<commit_after> | import io
import os
import pickle
import pytest
from steamfiles import appinfo
test_file_name = os.path.join(os.path.dirname(__file__), 'test_data/appinfo.vdf')
@pytest.yield_fixture
def vdf_data():
with open(test_file_name, 'rb') as f:
yield f.read()
@pytest.mark.usefixtures('vdf_data')
def test_loads_dumps(vdf_data):
assert appinfo.dumps(appinfo.loads(vdf_data)) == vdf_data
@pytest.mark.usefixtures('vdf_data')
def test_load_dump(vdf_data):
with open(test_file_name, 'rb') as in_file:
out_file = io.BytesIO()
obj = appinfo.load(in_file)
appinfo.dump(obj, out_file)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == vdf_data
def test_loads_wrong_type():
with pytest.raises(TypeError):
appinfo.loads('JustTestData')
| import io
import os
import pickle
import pytest
from steamfiles import appinfo
test_file_name = os.path.join(os.path.dirname(__file__), 'test_data/appinfo.vdf')
@pytest.yield_fixture
def vdf_data():
with open(test_file_name, 'rb') as f:
yield f.read()
@pytest.mark.usefixtures('vdf_data')
def test_load_dump(vdf_data):
with open(test_file_name, 'rb') as in_file:
out_file = io.BytesIO()
obj = appinfo.load(in_file)
appinfo.dump(obj, out_file)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == vdf_data
def test_loads_wrong_type():
with pytest.raises(TypeError):
appinfo.loads('JustTestData')
Add loads-dumps test for Appinfoimport io
import os
import pickle
import pytest
from steamfiles import appinfo
test_file_name = os.path.join(os.path.dirname(__file__), 'test_data/appinfo.vdf')
@pytest.yield_fixture
def vdf_data():
with open(test_file_name, 'rb') as f:
yield f.read()
@pytest.mark.usefixtures('vdf_data')
def test_loads_dumps(vdf_data):
assert appinfo.dumps(appinfo.loads(vdf_data)) == vdf_data
@pytest.mark.usefixtures('vdf_data')
def test_load_dump(vdf_data):
with open(test_file_name, 'rb') as in_file:
out_file = io.BytesIO()
obj = appinfo.load(in_file)
appinfo.dump(obj, out_file)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == vdf_data
def test_loads_wrong_type():
with pytest.raises(TypeError):
appinfo.loads('JustTestData')
| <commit_before>import io
import os
import pickle
import pytest
from steamfiles import appinfo
test_file_name = os.path.join(os.path.dirname(__file__), 'test_data/appinfo.vdf')
@pytest.yield_fixture
def vdf_data():
with open(test_file_name, 'rb') as f:
yield f.read()
@pytest.mark.usefixtures('vdf_data')
def test_load_dump(vdf_data):
with open(test_file_name, 'rb') as in_file:
out_file = io.BytesIO()
obj = appinfo.load(in_file)
appinfo.dump(obj, out_file)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == vdf_data
def test_loads_wrong_type():
with pytest.raises(TypeError):
appinfo.loads('JustTestData')
<commit_msg>Add loads-dumps test for Appinfo<commit_after>import io
import os
import pickle
import pytest
from steamfiles import appinfo
test_file_name = os.path.join(os.path.dirname(__file__), 'test_data/appinfo.vdf')
@pytest.yield_fixture
def vdf_data():
with open(test_file_name, 'rb') as f:
yield f.read()
@pytest.mark.usefixtures('vdf_data')
def test_loads_dumps(vdf_data):
assert appinfo.dumps(appinfo.loads(vdf_data)) == vdf_data
@pytest.mark.usefixtures('vdf_data')
def test_load_dump(vdf_data):
with open(test_file_name, 'rb') as in_file:
out_file = io.BytesIO()
obj = appinfo.load(in_file)
appinfo.dump(obj, out_file)
# Rewind to the beginning
out_file.seek(0)
assert out_file.read() == vdf_data
def test_loads_wrong_type():
with pytest.raises(TypeError):
appinfo.loads('JustTestData')
|
331f776eef9acd0509c7534040ef225869305d7f | tests/test_cookies.py | tests/test_cookies.py | # -*- coding: utf-8 -*-
def test_cookies_fixture(testdir):
"""Make sure that pytest accepts the `cookies` fixture."""
# create a temporary pytest test module
testdir.makepyfile("""
def test_valid_fixture(cookies):
assert hasattr(cookies, 'bake')
assert callable(cookies.bake)
assert cookies.error is None
assert cookies.project is None
""")
# run pytest with the following cmd args
result = testdir.runpytest('-v')
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'*::test_valid_fixture PASSED',
])
# make sure that that we get a '0' exit code for the testsuite
assert result.ret == 0
def test_help_message(testdir):
result = testdir.runpytest(
'--help',
)
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'cookies:',
])
| # -*- coding: utf-8 -*-
def test_cookies_fixture(testdir):
"""Make sure that pytest accepts the `cookies` fixture."""
# create a temporary pytest test module
testdir.makepyfile("""
def test_valid_fixture(cookies):
assert hasattr(cookies, 'bake')
assert callable(cookies.bake)
assert cookies.exception is None
assert cookies.exit_code == 0
assert cookies.project is None
""")
# run pytest with the following cmd args
result = testdir.runpytest('-v')
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'*::test_valid_fixture PASSED',
])
# make sure that that we get a '0' exit code for the testsuite
assert result.ret == 0
def test_help_message(testdir):
result = testdir.runpytest(
'--help',
)
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'cookies:',
])
| Update test for cookies fixture | Update test for cookies fixture
| Python | mit | hackebrot/pytest-cookies | # -*- coding: utf-8 -*-
def test_cookies_fixture(testdir):
"""Make sure that pytest accepts the `cookies` fixture."""
# create a temporary pytest test module
testdir.makepyfile("""
def test_valid_fixture(cookies):
assert hasattr(cookies, 'bake')
assert callable(cookies.bake)
assert cookies.error is None
assert cookies.project is None
""")
# run pytest with the following cmd args
result = testdir.runpytest('-v')
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'*::test_valid_fixture PASSED',
])
# make sure that that we get a '0' exit code for the testsuite
assert result.ret == 0
def test_help_message(testdir):
result = testdir.runpytest(
'--help',
)
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'cookies:',
])
Update test for cookies fixture | # -*- coding: utf-8 -*-
def test_cookies_fixture(testdir):
"""Make sure that pytest accepts the `cookies` fixture."""
# create a temporary pytest test module
testdir.makepyfile("""
def test_valid_fixture(cookies):
assert hasattr(cookies, 'bake')
assert callable(cookies.bake)
assert cookies.exception is None
assert cookies.exit_code == 0
assert cookies.project is None
""")
# run pytest with the following cmd args
result = testdir.runpytest('-v')
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'*::test_valid_fixture PASSED',
])
# make sure that that we get a '0' exit code for the testsuite
assert result.ret == 0
def test_help_message(testdir):
result = testdir.runpytest(
'--help',
)
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'cookies:',
])
| <commit_before># -*- coding: utf-8 -*-
def test_cookies_fixture(testdir):
"""Make sure that pytest accepts the `cookies` fixture."""
# create a temporary pytest test module
testdir.makepyfile("""
def test_valid_fixture(cookies):
assert hasattr(cookies, 'bake')
assert callable(cookies.bake)
assert cookies.error is None
assert cookies.project is None
""")
# run pytest with the following cmd args
result = testdir.runpytest('-v')
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'*::test_valid_fixture PASSED',
])
# make sure that that we get a '0' exit code for the testsuite
assert result.ret == 0
def test_help_message(testdir):
result = testdir.runpytest(
'--help',
)
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'cookies:',
])
<commit_msg>Update test for cookies fixture<commit_after> | # -*- coding: utf-8 -*-
def test_cookies_fixture(testdir):
"""Make sure that pytest accepts the `cookies` fixture."""
# create a temporary pytest test module
testdir.makepyfile("""
def test_valid_fixture(cookies):
assert hasattr(cookies, 'bake')
assert callable(cookies.bake)
assert cookies.exception is None
assert cookies.exit_code == 0
assert cookies.project is None
""")
# run pytest with the following cmd args
result = testdir.runpytest('-v')
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'*::test_valid_fixture PASSED',
])
# make sure that that we get a '0' exit code for the testsuite
assert result.ret == 0
def test_help_message(testdir):
result = testdir.runpytest(
'--help',
)
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'cookies:',
])
| # -*- coding: utf-8 -*-
def test_cookies_fixture(testdir):
"""Make sure that pytest accepts the `cookies` fixture."""
# create a temporary pytest test module
testdir.makepyfile("""
def test_valid_fixture(cookies):
assert hasattr(cookies, 'bake')
assert callable(cookies.bake)
assert cookies.error is None
assert cookies.project is None
""")
# run pytest with the following cmd args
result = testdir.runpytest('-v')
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'*::test_valid_fixture PASSED',
])
# make sure that that we get a '0' exit code for the testsuite
assert result.ret == 0
def test_help_message(testdir):
result = testdir.runpytest(
'--help',
)
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'cookies:',
])
Update test for cookies fixture# -*- coding: utf-8 -*-
def test_cookies_fixture(testdir):
"""Make sure that pytest accepts the `cookies` fixture."""
# create a temporary pytest test module
testdir.makepyfile("""
def test_valid_fixture(cookies):
assert hasattr(cookies, 'bake')
assert callable(cookies.bake)
assert cookies.exception is None
assert cookies.exit_code == 0
assert cookies.project is None
""")
# run pytest with the following cmd args
result = testdir.runpytest('-v')
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'*::test_valid_fixture PASSED',
])
# make sure that that we get a '0' exit code for the testsuite
assert result.ret == 0
def test_help_message(testdir):
result = testdir.runpytest(
'--help',
)
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'cookies:',
])
| <commit_before># -*- coding: utf-8 -*-
def test_cookies_fixture(testdir):
"""Make sure that pytest accepts the `cookies` fixture."""
# create a temporary pytest test module
testdir.makepyfile("""
def test_valid_fixture(cookies):
assert hasattr(cookies, 'bake')
assert callable(cookies.bake)
assert cookies.error is None
assert cookies.project is None
""")
# run pytest with the following cmd args
result = testdir.runpytest('-v')
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'*::test_valid_fixture PASSED',
])
# make sure that that we get a '0' exit code for the testsuite
assert result.ret == 0
def test_help_message(testdir):
result = testdir.runpytest(
'--help',
)
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'cookies:',
])
<commit_msg>Update test for cookies fixture<commit_after># -*- coding: utf-8 -*-
def test_cookies_fixture(testdir):
"""Make sure that pytest accepts the `cookies` fixture."""
# create a temporary pytest test module
testdir.makepyfile("""
def test_valid_fixture(cookies):
assert hasattr(cookies, 'bake')
assert callable(cookies.bake)
assert cookies.exception is None
assert cookies.exit_code == 0
assert cookies.project is None
""")
# run pytest with the following cmd args
result = testdir.runpytest('-v')
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'*::test_valid_fixture PASSED',
])
# make sure that that we get a '0' exit code for the testsuite
assert result.ret == 0
def test_help_message(testdir):
result = testdir.runpytest(
'--help',
)
# fnmatch_lines does an assertion internally
result.stdout.fnmatch_lines([
'cookies:',
])
|
e8bb81a8be7c76c2e1839d8315bd29f381fea4ae | enable/__init__.py | enable/__init__.py | # Copyright (c) 2007-2013 by Enthought, Inc.
# All rights reserved.
""" A multi-platform object drawing library.
Part of the Enable project of the Enthought Tool Suite.
"""
__version__ = '4.3.0'
__requires__ = [
'traitsui',
'PIL',
]
| # Copyright (c) 2007-2013 by Enthought, Inc.
# All rights reserved.
""" A multi-platform object drawing library.
Part of the Enable project of the Enthought Tool Suite.
"""
__version__ = '4.3.0'
__requires__ = [
'traitsui',
'PIL',
'casuarius',
]
| Add casuarius to the list of required packages. | Add casuarius to the list of required packages.
| Python | bsd-3-clause | tommy-u/enable,tommy-u/enable,tommy-u/enable,tommy-u/enable | # Copyright (c) 2007-2013 by Enthought, Inc.
# All rights reserved.
""" A multi-platform object drawing library.
Part of the Enable project of the Enthought Tool Suite.
"""
__version__ = '4.3.0'
__requires__ = [
'traitsui',
'PIL',
]
Add casuarius to the list of required packages. | # Copyright (c) 2007-2013 by Enthought, Inc.
# All rights reserved.
""" A multi-platform object drawing library.
Part of the Enable project of the Enthought Tool Suite.
"""
__version__ = '4.3.0'
__requires__ = [
'traitsui',
'PIL',
'casuarius',
]
| <commit_before># Copyright (c) 2007-2013 by Enthought, Inc.
# All rights reserved.
""" A multi-platform object drawing library.
Part of the Enable project of the Enthought Tool Suite.
"""
__version__ = '4.3.0'
__requires__ = [
'traitsui',
'PIL',
]
<commit_msg>Add casuarius to the list of required packages.<commit_after> | # Copyright (c) 2007-2013 by Enthought, Inc.
# All rights reserved.
""" A multi-platform object drawing library.
Part of the Enable project of the Enthought Tool Suite.
"""
__version__ = '4.3.0'
__requires__ = [
'traitsui',
'PIL',
'casuarius',
]
| # Copyright (c) 2007-2013 by Enthought, Inc.
# All rights reserved.
""" A multi-platform object drawing library.
Part of the Enable project of the Enthought Tool Suite.
"""
__version__ = '4.3.0'
__requires__ = [
'traitsui',
'PIL',
]
Add casuarius to the list of required packages.# Copyright (c) 2007-2013 by Enthought, Inc.
# All rights reserved.
""" A multi-platform object drawing library.
Part of the Enable project of the Enthought Tool Suite.
"""
__version__ = '4.3.0'
__requires__ = [
'traitsui',
'PIL',
'casuarius',
]
| <commit_before># Copyright (c) 2007-2013 by Enthought, Inc.
# All rights reserved.
""" A multi-platform object drawing library.
Part of the Enable project of the Enthought Tool Suite.
"""
__version__ = '4.3.0'
__requires__ = [
'traitsui',
'PIL',
]
<commit_msg>Add casuarius to the list of required packages.<commit_after># Copyright (c) 2007-2013 by Enthought, Inc.
# All rights reserved.
""" A multi-platform object drawing library.
Part of the Enable project of the Enthought Tool Suite.
"""
__version__ = '4.3.0'
__requires__ = [
'traitsui',
'PIL',
'casuarius',
]
|
072497f0032bae261e78e27daf6c8e6caeea68c1 | ANN.py | ANN.py | import tensorflow as tf
class ANN:
def __init__(self):
self.inputNodes = 7
self.hiddenNodes = 64
self.hiddenNodes2 = 32 # weight3
self.outputNodes = 1
self.x = tf.placeholder("float", shape=[None, self.inputNodes], name="sensor-input")
self.W1 = tf.placeholder("float", shape=[self.inputNodes, self.hiddenNodes])
self.W2 = tf.placeholder("float", shape=[self.hiddenNodes, self.hiddenNodes2]) # weight3
self.W3 = tf.placeholder("float", shape=[self.hiddenNodes2, self.outputNodes]) # weight3
self.y = tf.tanh(
tf.matmul(tf.tanh(tf.matmul(tf.tanh(tf.matmul(self.x, self.W1)), self.W2)), self.W3)) # weight3
self.session = tf.Session()
init = tf.initialize_all_variables()
self.session.run(init)
def propagate_forward(self, car, sensor_data):
return self.session.run(self.y, feed_dict={self.x: sensor_data, self.W1: car.W1,
self.W2: car.W2, self.W3: car.W3}) # weight3
| import tensorflow as tf
class ANN:
def __init__(self):
self.inputNodes = 7
self.hiddenNodes = 64
self.hiddenNodes2 = 64 # weight3
self.outputNodes = 1
self.x = tf.placeholder("float", shape=[None, self.inputNodes], name="sensor-input")
self.W1 = tf.placeholder("float", shape=[self.inputNodes, self.hiddenNodes])
self.W2 = tf.placeholder("float", shape=[self.hiddenNodes, self.hiddenNodes2]) # weight3
self.W3 = tf.placeholder("float", shape=[self.hiddenNodes2, self.outputNodes]) # weight3
self.y = tf.tanh(
tf.matmul(tf.tanh(tf.matmul(tf.tanh(tf.matmul(self.x, self.W1)), self.W2)), self.W3)) # weight3
self.session = tf.Session()
init = tf.initialize_all_variables()
self.session.run(init)
def propagate_forward(self, car, sensor_data):
return self.session.run(self.y, feed_dict={self.x: sensor_data, self.W1: car.W1,
self.W2: car.W2, self.W3: car.W3}) # weight3
| Change 2nd hidden layer size | Change 2nd hidden layer size
| Python | mit | DiamondOperators/selfdriving-car | import tensorflow as tf
class ANN:
def __init__(self):
self.inputNodes = 7
self.hiddenNodes = 64
self.hiddenNodes2 = 32 # weight3
self.outputNodes = 1
self.x = tf.placeholder("float", shape=[None, self.inputNodes], name="sensor-input")
self.W1 = tf.placeholder("float", shape=[self.inputNodes, self.hiddenNodes])
self.W2 = tf.placeholder("float", shape=[self.hiddenNodes, self.hiddenNodes2]) # weight3
self.W3 = tf.placeholder("float", shape=[self.hiddenNodes2, self.outputNodes]) # weight3
self.y = tf.tanh(
tf.matmul(tf.tanh(tf.matmul(tf.tanh(tf.matmul(self.x, self.W1)), self.W2)), self.W3)) # weight3
self.session = tf.Session()
init = tf.initialize_all_variables()
self.session.run(init)
def propagate_forward(self, car, sensor_data):
return self.session.run(self.y, feed_dict={self.x: sensor_data, self.W1: car.W1,
self.W2: car.W2, self.W3: car.W3}) # weight3
Change 2nd hidden layer size | import tensorflow as tf
class ANN:
def __init__(self):
self.inputNodes = 7
self.hiddenNodes = 64
self.hiddenNodes2 = 64 # weight3
self.outputNodes = 1
self.x = tf.placeholder("float", shape=[None, self.inputNodes], name="sensor-input")
self.W1 = tf.placeholder("float", shape=[self.inputNodes, self.hiddenNodes])
self.W2 = tf.placeholder("float", shape=[self.hiddenNodes, self.hiddenNodes2]) # weight3
self.W3 = tf.placeholder("float", shape=[self.hiddenNodes2, self.outputNodes]) # weight3
self.y = tf.tanh(
tf.matmul(tf.tanh(tf.matmul(tf.tanh(tf.matmul(self.x, self.W1)), self.W2)), self.W3)) # weight3
self.session = tf.Session()
init = tf.initialize_all_variables()
self.session.run(init)
def propagate_forward(self, car, sensor_data):
return self.session.run(self.y, feed_dict={self.x: sensor_data, self.W1: car.W1,
self.W2: car.W2, self.W3: car.W3}) # weight3
| <commit_before>import tensorflow as tf
class ANN:
def __init__(self):
self.inputNodes = 7
self.hiddenNodes = 64
self.hiddenNodes2 = 32 # weight3
self.outputNodes = 1
self.x = tf.placeholder("float", shape=[None, self.inputNodes], name="sensor-input")
self.W1 = tf.placeholder("float", shape=[self.inputNodes, self.hiddenNodes])
self.W2 = tf.placeholder("float", shape=[self.hiddenNodes, self.hiddenNodes2]) # weight3
self.W3 = tf.placeholder("float", shape=[self.hiddenNodes2, self.outputNodes]) # weight3
self.y = tf.tanh(
tf.matmul(tf.tanh(tf.matmul(tf.tanh(tf.matmul(self.x, self.W1)), self.W2)), self.W3)) # weight3
self.session = tf.Session()
init = tf.initialize_all_variables()
self.session.run(init)
def propagate_forward(self, car, sensor_data):
return self.session.run(self.y, feed_dict={self.x: sensor_data, self.W1: car.W1,
self.W2: car.W2, self.W3: car.W3}) # weight3
<commit_msg>Change 2nd hidden layer size<commit_after> | import tensorflow as tf
class ANN:
def __init__(self):
self.inputNodes = 7
self.hiddenNodes = 64
self.hiddenNodes2 = 64 # weight3
self.outputNodes = 1
self.x = tf.placeholder("float", shape=[None, self.inputNodes], name="sensor-input")
self.W1 = tf.placeholder("float", shape=[self.inputNodes, self.hiddenNodes])
self.W2 = tf.placeholder("float", shape=[self.hiddenNodes, self.hiddenNodes2]) # weight3
self.W3 = tf.placeholder("float", shape=[self.hiddenNodes2, self.outputNodes]) # weight3
self.y = tf.tanh(
tf.matmul(tf.tanh(tf.matmul(tf.tanh(tf.matmul(self.x, self.W1)), self.W2)), self.W3)) # weight3
self.session = tf.Session()
init = tf.initialize_all_variables()
self.session.run(init)
def propagate_forward(self, car, sensor_data):
return self.session.run(self.y, feed_dict={self.x: sensor_data, self.W1: car.W1,
self.W2: car.W2, self.W3: car.W3}) # weight3
| import tensorflow as tf
class ANN:
def __init__(self):
self.inputNodes = 7
self.hiddenNodes = 64
self.hiddenNodes2 = 32 # weight3
self.outputNodes = 1
self.x = tf.placeholder("float", shape=[None, self.inputNodes], name="sensor-input")
self.W1 = tf.placeholder("float", shape=[self.inputNodes, self.hiddenNodes])
self.W2 = tf.placeholder("float", shape=[self.hiddenNodes, self.hiddenNodes2]) # weight3
self.W3 = tf.placeholder("float", shape=[self.hiddenNodes2, self.outputNodes]) # weight3
self.y = tf.tanh(
tf.matmul(tf.tanh(tf.matmul(tf.tanh(tf.matmul(self.x, self.W1)), self.W2)), self.W3)) # weight3
self.session = tf.Session()
init = tf.initialize_all_variables()
self.session.run(init)
def propagate_forward(self, car, sensor_data):
return self.session.run(self.y, feed_dict={self.x: sensor_data, self.W1: car.W1,
self.W2: car.W2, self.W3: car.W3}) # weight3
Change 2nd hidden layer sizeimport tensorflow as tf
class ANN:
def __init__(self):
self.inputNodes = 7
self.hiddenNodes = 64
self.hiddenNodes2 = 64 # weight3
self.outputNodes = 1
self.x = tf.placeholder("float", shape=[None, self.inputNodes], name="sensor-input")
self.W1 = tf.placeholder("float", shape=[self.inputNodes, self.hiddenNodes])
self.W2 = tf.placeholder("float", shape=[self.hiddenNodes, self.hiddenNodes2]) # weight3
self.W3 = tf.placeholder("float", shape=[self.hiddenNodes2, self.outputNodes]) # weight3
self.y = tf.tanh(
tf.matmul(tf.tanh(tf.matmul(tf.tanh(tf.matmul(self.x, self.W1)), self.W2)), self.W3)) # weight3
self.session = tf.Session()
init = tf.initialize_all_variables()
self.session.run(init)
def propagate_forward(self, car, sensor_data):
return self.session.run(self.y, feed_dict={self.x: sensor_data, self.W1: car.W1,
self.W2: car.W2, self.W3: car.W3}) # weight3
| <commit_before>import tensorflow as tf
class ANN:
def __init__(self):
self.inputNodes = 7
self.hiddenNodes = 64
self.hiddenNodes2 = 32 # weight3
self.outputNodes = 1
self.x = tf.placeholder("float", shape=[None, self.inputNodes], name="sensor-input")
self.W1 = tf.placeholder("float", shape=[self.inputNodes, self.hiddenNodes])
self.W2 = tf.placeholder("float", shape=[self.hiddenNodes, self.hiddenNodes2]) # weight3
self.W3 = tf.placeholder("float", shape=[self.hiddenNodes2, self.outputNodes]) # weight3
self.y = tf.tanh(
tf.matmul(tf.tanh(tf.matmul(tf.tanh(tf.matmul(self.x, self.W1)), self.W2)), self.W3)) # weight3
self.session = tf.Session()
init = tf.initialize_all_variables()
self.session.run(init)
def propagate_forward(self, car, sensor_data):
return self.session.run(self.y, feed_dict={self.x: sensor_data, self.W1: car.W1,
self.W2: car.W2, self.W3: car.W3}) # weight3
<commit_msg>Change 2nd hidden layer size<commit_after>import tensorflow as tf
class ANN:
def __init__(self):
self.inputNodes = 7
self.hiddenNodes = 64
self.hiddenNodes2 = 64 # weight3
self.outputNodes = 1
self.x = tf.placeholder("float", shape=[None, self.inputNodes], name="sensor-input")
self.W1 = tf.placeholder("float", shape=[self.inputNodes, self.hiddenNodes])
self.W2 = tf.placeholder("float", shape=[self.hiddenNodes, self.hiddenNodes2]) # weight3
self.W3 = tf.placeholder("float", shape=[self.hiddenNodes2, self.outputNodes]) # weight3
self.y = tf.tanh(
tf.matmul(tf.tanh(tf.matmul(tf.tanh(tf.matmul(self.x, self.W1)), self.W2)), self.W3)) # weight3
self.session = tf.Session()
init = tf.initialize_all_variables()
self.session.run(init)
def propagate_forward(self, car, sensor_data):
return self.session.run(self.y, feed_dict={self.x: sensor_data, self.W1: car.W1,
self.W2: car.W2, self.W3: car.W3}) # weight3
|
f9ba5e64f73c3fa3fed62655c846fb4435d627cc | node/multi_var.py | node/multi_var.py |
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
self.args = max([node_1.args, node_2.args])
def prepare(self, stack):
if len(stack) == 0:
self.add_arg(stack)
@Node.is_func
def apply(self, *stack):
self.node_2.prepare(stack)
rtn = self.node_2(stack[:self.node_2.args])
self.node_1.prepare(stack)
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
|
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
def prepare(self, stack):
self.node_1.prepare(stack)
self.node_2.prepare(stack)
self.args = max([self.node_1.args,self.node_2.args])
@Node.is_func
def apply(self, *stack):
rtn = self.node_2(stack[:self.node_2.args])
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
| Fix multivar for nodes with variable lenght stacks | Fix multivar for nodes with variable lenght stacks
| Python | mit | muddyfish/PYKE,muddyfish/PYKE |
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
self.args = max([node_1.args, node_2.args])
def prepare(self, stack):
if len(stack) == 0:
self.add_arg(stack)
@Node.is_func
def apply(self, *stack):
self.node_2.prepare(stack)
rtn = self.node_2(stack[:self.node_2.args])
self.node_1.prepare(stack)
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
Fix multivar for nodes with variable lenght stacks |
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
def prepare(self, stack):
self.node_1.prepare(stack)
self.node_2.prepare(stack)
self.args = max([self.node_1.args,self.node_2.args])
@Node.is_func
def apply(self, *stack):
rtn = self.node_2(stack[:self.node_2.args])
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
| <commit_before>
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
self.args = max([node_1.args, node_2.args])
def prepare(self, stack):
if len(stack) == 0:
self.add_arg(stack)
@Node.is_func
def apply(self, *stack):
self.node_2.prepare(stack)
rtn = self.node_2(stack[:self.node_2.args])
self.node_1.prepare(stack)
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
<commit_msg>Fix multivar for nodes with variable lenght stacks<commit_after> |
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
def prepare(self, stack):
self.node_1.prepare(stack)
self.node_2.prepare(stack)
self.args = max([self.node_1.args,self.node_2.args])
@Node.is_func
def apply(self, *stack):
rtn = self.node_2(stack[:self.node_2.args])
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
|
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
self.args = max([node_1.args, node_2.args])
def prepare(self, stack):
if len(stack) == 0:
self.add_arg(stack)
@Node.is_func
def apply(self, *stack):
self.node_2.prepare(stack)
rtn = self.node_2(stack[:self.node_2.args])
self.node_1.prepare(stack)
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
Fix multivar for nodes with variable lenght stacks
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
def prepare(self, stack):
self.node_1.prepare(stack)
self.node_2.prepare(stack)
self.args = max([self.node_1.args,self.node_2.args])
@Node.is_func
def apply(self, *stack):
rtn = self.node_2(stack[:self.node_2.args])
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
| <commit_before>
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
self.args = max([node_1.args, node_2.args])
def prepare(self, stack):
if len(stack) == 0:
self.add_arg(stack)
@Node.is_func
def apply(self, *stack):
self.node_2.prepare(stack)
rtn = self.node_2(stack[:self.node_2.args])
self.node_1.prepare(stack)
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
<commit_msg>Fix multivar for nodes with variable lenght stacks<commit_after>
from nodes import Node
class MultiVar(Node):
char = "'"
args = 0
results = None
contents = -1
def __init__(self, node_1: Node.NodeSingle, node_2: Node.NodeSingle):
self.node_1 = node_1
self.node_2 = node_2
def prepare(self, stack):
self.node_1.prepare(stack)
self.node_2.prepare(stack)
self.args = max([self.node_1.args,self.node_2.args])
@Node.is_func
def apply(self, *stack):
rtn = self.node_2(stack[:self.node_2.args])
rtn.extend(self.node_1(stack[:self.node_1.args]))
return rtn
|
253a375af742699ec4a13e5d8b33e669691ed872 | mysite/urls.py | mysite/urls.py | from __future__ import print_function
from cms.sitemaps import CMSSitemap
from django.conf.urls import * # NOQA
from django.conf.urls.i18n import i18n_patterns
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)), # NOQA
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': {'cmspages': CMSSitemap}}),
url(r'^lab_members/', include('lab_members.urls', namespace='lab_members')),
url(r'^', include('cms.urls')),
)
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', # NOQA
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) + staticfiles_urlpatterns() + urlpatterns # NOQA
| from __future__ import print_function
from cms.sitemaps import CMSSitemap
from django.conf.urls import * # NOQA
from django.conf.urls.i18n import i18n_patterns
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)), # NOQA
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': {'cmspages': CMSSitemap}}),
url(r'^', include('cms.urls')),
)
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', # NOQA
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) + staticfiles_urlpatterns() + urlpatterns # NOQA
| Remove lab_members/ from project URLs | Remove lab_members/ from project URLs
| Python | bsd-3-clause | mfcovington/djangocms-lab-members,mfcovington/djangocms-lab-members | from __future__ import print_function
from cms.sitemaps import CMSSitemap
from django.conf.urls import * # NOQA
from django.conf.urls.i18n import i18n_patterns
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)), # NOQA
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': {'cmspages': CMSSitemap}}),
url(r'^lab_members/', include('lab_members.urls', namespace='lab_members')),
url(r'^', include('cms.urls')),
)
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', # NOQA
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) + staticfiles_urlpatterns() + urlpatterns # NOQA
Remove lab_members/ from project URLs | from __future__ import print_function
from cms.sitemaps import CMSSitemap
from django.conf.urls import * # NOQA
from django.conf.urls.i18n import i18n_patterns
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)), # NOQA
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': {'cmspages': CMSSitemap}}),
url(r'^', include('cms.urls')),
)
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', # NOQA
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) + staticfiles_urlpatterns() + urlpatterns # NOQA
| <commit_before>from __future__ import print_function
from cms.sitemaps import CMSSitemap
from django.conf.urls import * # NOQA
from django.conf.urls.i18n import i18n_patterns
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)), # NOQA
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': {'cmspages': CMSSitemap}}),
url(r'^lab_members/', include('lab_members.urls', namespace='lab_members')),
url(r'^', include('cms.urls')),
)
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', # NOQA
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) + staticfiles_urlpatterns() + urlpatterns # NOQA
<commit_msg>Remove lab_members/ from project URLs<commit_after> | from __future__ import print_function
from cms.sitemaps import CMSSitemap
from django.conf.urls import * # NOQA
from django.conf.urls.i18n import i18n_patterns
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)), # NOQA
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': {'cmspages': CMSSitemap}}),
url(r'^', include('cms.urls')),
)
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', # NOQA
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) + staticfiles_urlpatterns() + urlpatterns # NOQA
| from __future__ import print_function
from cms.sitemaps import CMSSitemap
from django.conf.urls import * # NOQA
from django.conf.urls.i18n import i18n_patterns
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)), # NOQA
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': {'cmspages': CMSSitemap}}),
url(r'^lab_members/', include('lab_members.urls', namespace='lab_members')),
url(r'^', include('cms.urls')),
)
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', # NOQA
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) + staticfiles_urlpatterns() + urlpatterns # NOQA
Remove lab_members/ from project URLsfrom __future__ import print_function
from cms.sitemaps import CMSSitemap
from django.conf.urls import * # NOQA
from django.conf.urls.i18n import i18n_patterns
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)), # NOQA
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': {'cmspages': CMSSitemap}}),
url(r'^', include('cms.urls')),
)
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', # NOQA
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) + staticfiles_urlpatterns() + urlpatterns # NOQA
| <commit_before>from __future__ import print_function
from cms.sitemaps import CMSSitemap
from django.conf.urls import * # NOQA
from django.conf.urls.i18n import i18n_patterns
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)), # NOQA
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': {'cmspages': CMSSitemap}}),
url(r'^lab_members/', include('lab_members.urls', namespace='lab_members')),
url(r'^', include('cms.urls')),
)
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', # NOQA
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) + staticfiles_urlpatterns() + urlpatterns # NOQA
<commit_msg>Remove lab_members/ from project URLs<commit_after>from __future__ import print_function
from cms.sitemaps import CMSSitemap
from django.conf.urls import * # NOQA
from django.conf.urls.i18n import i18n_patterns
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
from django.contrib import admin
from django.conf import settings
admin.autodiscover()
urlpatterns = i18n_patterns('',
url(r'^admin/', include(admin.site.urls)), # NOQA
url(r'^sitemap\.xml$', 'django.contrib.sitemaps.views.sitemap',
{'sitemaps': {'cmspages': CMSSitemap}}),
url(r'^', include('cms.urls')),
)
# This is only needed when using runserver.
if settings.DEBUG:
urlpatterns = patterns('',
url(r'^media/(?P<path>.*)$', 'django.views.static.serve', # NOQA
{'document_root': settings.MEDIA_ROOT, 'show_indexes': True}),
) + staticfiles_urlpatterns() + urlpatterns # NOQA
|
e178cfd7b6abba804d8bb856a0c35a6f87209771 | opentreemap/treemap/tests/ui/ui_test_urls.py | opentreemap/treemap/tests/ui/ui_test_urls.py | from django.conf.urls import patterns, include, url
from django.http import HttpResponse
from opentreemap import urls
testing_id = 1
def full_utf8_grid(request):
"""
Creates a big utf8 grid where every entry is 'turned on'
to point to whatever plot id is the currently assigned value
of testing_id
this is useful for mocking a tiler utf8 grid response so
that ui tests can click the map and simulate clicking a
rendered tree tile.
"""
global testing_id
quoted_space_line = '"' + (' ' * 64) + '"'
quoted_space_line_with_comma = quoted_space_line + ','
full_space_utf8_grid = ('{"grid":[' +
(quoted_space_line_with_comma * 63) +
quoted_space_line +
'],"keys":["1"],"data":{"1":{"the_plot_id":%s}}}'
% testing_id)
response = HttpResponse(full_space_utf8_grid)
response['Content-Type'] = 'application/json'
return response
urlpatterns = patterns(
'',
url(r'^tile/.*', full_utf8_grid),
url(r'', include(urls))
)
| from django.conf.urls import patterns, include, url
from django.http import HttpResponse
from opentreemap import urls
testing_id = 1
def full_utf8_grid(request):
"""
Creates a big utf8 grid where every entry is 'turned on'
to point to whatever plot id is the currently assigned value
of testing_id
this is useful for mocking a tiler utf8 grid response so
that ui tests can click the map and simulate clicking a
rendered tree tile.
"""
global testing_id
quoted_space_line = '"' + (' ' * 64) + '"'
quoted_space_line_with_comma = quoted_space_line + ','
full_space_utf8_grid = ('{"grid":[' +
(quoted_space_line_with_comma * 63) +
quoted_space_line +
'],"keys":["1"],"data":{"1":{"id":%s}}}'
% testing_id)
response = HttpResponse(full_space_utf8_grid)
response['Content-Type'] = 'application/json'
return response
urlpatterns = patterns(
'',
url(r'^tile/.*', full_utf8_grid),
url(r'', include(urls))
)
| Use "id" instead of "plot_id" | Use "id" instead of "plot_id"
For the fake utf grid
| Python | agpl-3.0 | clever-crow-consulting/otm-core,recklessromeo/otm-core,RickMohr/otm-core,clever-crow-consulting/otm-core,RickMohr/otm-core,recklessromeo/otm-core,recklessromeo/otm-core,recklessromeo/otm-core,maurizi/otm-core,RickMohr/otm-core,RickMohr/otm-core,maurizi/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core,maurizi/otm-core,clever-crow-consulting/otm-core | from django.conf.urls import patterns, include, url
from django.http import HttpResponse
from opentreemap import urls
testing_id = 1
def full_utf8_grid(request):
"""
Creates a big utf8 grid where every entry is 'turned on'
to point to whatever plot id is the currently assigned value
of testing_id
this is useful for mocking a tiler utf8 grid response so
that ui tests can click the map and simulate clicking a
rendered tree tile.
"""
global testing_id
quoted_space_line = '"' + (' ' * 64) + '"'
quoted_space_line_with_comma = quoted_space_line + ','
full_space_utf8_grid = ('{"grid":[' +
(quoted_space_line_with_comma * 63) +
quoted_space_line +
'],"keys":["1"],"data":{"1":{"the_plot_id":%s}}}'
% testing_id)
response = HttpResponse(full_space_utf8_grid)
response['Content-Type'] = 'application/json'
return response
urlpatterns = patterns(
'',
url(r'^tile/.*', full_utf8_grid),
url(r'', include(urls))
)
Use "id" instead of "plot_id"
For the fake utf grid | from django.conf.urls import patterns, include, url
from django.http import HttpResponse
from opentreemap import urls
testing_id = 1
def full_utf8_grid(request):
"""
Creates a big utf8 grid where every entry is 'turned on'
to point to whatever plot id is the currently assigned value
of testing_id
this is useful for mocking a tiler utf8 grid response so
that ui tests can click the map and simulate clicking a
rendered tree tile.
"""
global testing_id
quoted_space_line = '"' + (' ' * 64) + '"'
quoted_space_line_with_comma = quoted_space_line + ','
full_space_utf8_grid = ('{"grid":[' +
(quoted_space_line_with_comma * 63) +
quoted_space_line +
'],"keys":["1"],"data":{"1":{"id":%s}}}'
% testing_id)
response = HttpResponse(full_space_utf8_grid)
response['Content-Type'] = 'application/json'
return response
urlpatterns = patterns(
'',
url(r'^tile/.*', full_utf8_grid),
url(r'', include(urls))
)
| <commit_before>from django.conf.urls import patterns, include, url
from django.http import HttpResponse
from opentreemap import urls
testing_id = 1
def full_utf8_grid(request):
"""
Creates a big utf8 grid where every entry is 'turned on'
to point to whatever plot id is the currently assigned value
of testing_id
this is useful for mocking a tiler utf8 grid response so
that ui tests can click the map and simulate clicking a
rendered tree tile.
"""
global testing_id
quoted_space_line = '"' + (' ' * 64) + '"'
quoted_space_line_with_comma = quoted_space_line + ','
full_space_utf8_grid = ('{"grid":[' +
(quoted_space_line_with_comma * 63) +
quoted_space_line +
'],"keys":["1"],"data":{"1":{"the_plot_id":%s}}}'
% testing_id)
response = HttpResponse(full_space_utf8_grid)
response['Content-Type'] = 'application/json'
return response
urlpatterns = patterns(
'',
url(r'^tile/.*', full_utf8_grid),
url(r'', include(urls))
)
<commit_msg>Use "id" instead of "plot_id"
For the fake utf grid<commit_after> | from django.conf.urls import patterns, include, url
from django.http import HttpResponse
from opentreemap import urls
testing_id = 1
def full_utf8_grid(request):
"""
Creates a big utf8 grid where every entry is 'turned on'
to point to whatever plot id is the currently assigned value
of testing_id
this is useful for mocking a tiler utf8 grid response so
that ui tests can click the map and simulate clicking a
rendered tree tile.
"""
global testing_id
quoted_space_line = '"' + (' ' * 64) + '"'
quoted_space_line_with_comma = quoted_space_line + ','
full_space_utf8_grid = ('{"grid":[' +
(quoted_space_line_with_comma * 63) +
quoted_space_line +
'],"keys":["1"],"data":{"1":{"id":%s}}}'
% testing_id)
response = HttpResponse(full_space_utf8_grid)
response['Content-Type'] = 'application/json'
return response
urlpatterns = patterns(
'',
url(r'^tile/.*', full_utf8_grid),
url(r'', include(urls))
)
| from django.conf.urls import patterns, include, url
from django.http import HttpResponse
from opentreemap import urls
testing_id = 1
def full_utf8_grid(request):
"""
Creates a big utf8 grid where every entry is 'turned on'
to point to whatever plot id is the currently assigned value
of testing_id
this is useful for mocking a tiler utf8 grid response so
that ui tests can click the map and simulate clicking a
rendered tree tile.
"""
global testing_id
quoted_space_line = '"' + (' ' * 64) + '"'
quoted_space_line_with_comma = quoted_space_line + ','
full_space_utf8_grid = ('{"grid":[' +
(quoted_space_line_with_comma * 63) +
quoted_space_line +
'],"keys":["1"],"data":{"1":{"the_plot_id":%s}}}'
% testing_id)
response = HttpResponse(full_space_utf8_grid)
response['Content-Type'] = 'application/json'
return response
urlpatterns = patterns(
'',
url(r'^tile/.*', full_utf8_grid),
url(r'', include(urls))
)
Use "id" instead of "plot_id"
For the fake utf gridfrom django.conf.urls import patterns, include, url
from django.http import HttpResponse
from opentreemap import urls
testing_id = 1
def full_utf8_grid(request):
"""
Creates a big utf8 grid where every entry is 'turned on'
to point to whatever plot id is the currently assigned value
of testing_id
this is useful for mocking a tiler utf8 grid response so
that ui tests can click the map and simulate clicking a
rendered tree tile.
"""
global testing_id
quoted_space_line = '"' + (' ' * 64) + '"'
quoted_space_line_with_comma = quoted_space_line + ','
full_space_utf8_grid = ('{"grid":[' +
(quoted_space_line_with_comma * 63) +
quoted_space_line +
'],"keys":["1"],"data":{"1":{"id":%s}}}'
% testing_id)
response = HttpResponse(full_space_utf8_grid)
response['Content-Type'] = 'application/json'
return response
urlpatterns = patterns(
'',
url(r'^tile/.*', full_utf8_grid),
url(r'', include(urls))
)
| <commit_before>from django.conf.urls import patterns, include, url
from django.http import HttpResponse
from opentreemap import urls
testing_id = 1
def full_utf8_grid(request):
"""
Creates a big utf8 grid where every entry is 'turned on'
to point to whatever plot id is the currently assigned value
of testing_id
this is useful for mocking a tiler utf8 grid response so
that ui tests can click the map and simulate clicking a
rendered tree tile.
"""
global testing_id
quoted_space_line = '"' + (' ' * 64) + '"'
quoted_space_line_with_comma = quoted_space_line + ','
full_space_utf8_grid = ('{"grid":[' +
(quoted_space_line_with_comma * 63) +
quoted_space_line +
'],"keys":["1"],"data":{"1":{"the_plot_id":%s}}}'
% testing_id)
response = HttpResponse(full_space_utf8_grid)
response['Content-Type'] = 'application/json'
return response
urlpatterns = patterns(
'',
url(r'^tile/.*', full_utf8_grid),
url(r'', include(urls))
)
<commit_msg>Use "id" instead of "plot_id"
For the fake utf grid<commit_after>from django.conf.urls import patterns, include, url
from django.http import HttpResponse
from opentreemap import urls
testing_id = 1
def full_utf8_grid(request):
"""
Creates a big utf8 grid where every entry is 'turned on'
to point to whatever plot id is the currently assigned value
of testing_id
this is useful for mocking a tiler utf8 grid response so
that ui tests can click the map and simulate clicking a
rendered tree tile.
"""
global testing_id
quoted_space_line = '"' + (' ' * 64) + '"'
quoted_space_line_with_comma = quoted_space_line + ','
full_space_utf8_grid = ('{"grid":[' +
(quoted_space_line_with_comma * 63) +
quoted_space_line +
'],"keys":["1"],"data":{"1":{"id":%s}}}'
% testing_id)
response = HttpResponse(full_space_utf8_grid)
response['Content-Type'] = 'application/json'
return response
urlpatterns = patterns(
'',
url(r'^tile/.*', full_utf8_grid),
url(r'', include(urls))
)
|
1501510e37722a2b68871d44c431ac8c44060e1e | logster/scanner.py | logster/scanner.py | import os
import pymongo
from .db import connect_to_db
files_to_check = [
'/home/irvind/test.log',
'/home/irvind/test2.log'
]
def run_scanner():
_, db = connect_to_db(async=False)
while True:
try:
pass
except KeyboardInterrupt:
break
| import os
import datetime
import time
import hashlib
from datetime import datetime
import pymongo
from .db import connect_to_db
def file_mtime(path):
return datetime.fromtimestamp(os.stat(path).st_mtime)
def md5_hex(data):
# todo
pass
def run_scanner():
_, db = connect_to_db(async=False)
try:
while True:
_scanner_iter(db)
except KeyboardInterrupt:
pass
def _scanner_iter(db):
def check_log(log):
mtime, path, log_id = (
log.get('last_mtime'),
log.get('path'),
log.get('id'),
)
if mtime is not None and mtime >= file_mtime(path):
# File not changed
return
log_entries = list(db.entries.find({'log': log_id}).sort('order'))
db_lines = [ent['content'] for ent in log_entries]
with open(path, 'r') as f:
file_lines = f.readlines()
if len(file_lines) <= len(db_lines):
# todo: special case
return
old_lines = file_lines[:len(db_lines)]
new_lines = file_lines[len(db_lines):]
if md5_hex(old_lines) != md5_hex(db_lines):
# todo: special case
return
last_order = log_entries[len(db_lines)-1]['order']
new_docs = []
i = last_order + 1
for line in new_lines:
new_docs.append({
'log': log_id,
'content': line,
'order': i
})
i += 1
db.entries.insert(new_docs)
_notify_websockets(new_docs)
for log in db.logs.find():
check_log(log)
time.sleep(1)
def _notify_websockets(docs):
# todo
pass | Monitor new records and add them to the database | [wip] Monitor new records and add them to the database
| Python | mit | irvind/logster,irvind/logster,irvind/logster | import os
import pymongo
from .db import connect_to_db
files_to_check = [
'/home/irvind/test.log',
'/home/irvind/test2.log'
]
def run_scanner():
_, db = connect_to_db(async=False)
while True:
try:
pass
except KeyboardInterrupt:
break
[wip] Monitor new records and add them to the database | import os
import datetime
import time
import hashlib
from datetime import datetime
import pymongo
from .db import connect_to_db
def file_mtime(path):
return datetime.fromtimestamp(os.stat(path).st_mtime)
def md5_hex(data):
# todo
pass
def run_scanner():
_, db = connect_to_db(async=False)
try:
while True:
_scanner_iter(db)
except KeyboardInterrupt:
pass
def _scanner_iter(db):
def check_log(log):
mtime, path, log_id = (
log.get('last_mtime'),
log.get('path'),
log.get('id'),
)
if mtime is not None and mtime >= file_mtime(path):
# File not changed
return
log_entries = list(db.entries.find({'log': log_id}).sort('order'))
db_lines = [ent['content'] for ent in log_entries]
with open(path, 'r') as f:
file_lines = f.readlines()
if len(file_lines) <= len(db_lines):
# todo: special case
return
old_lines = file_lines[:len(db_lines)]
new_lines = file_lines[len(db_lines):]
if md5_hex(old_lines) != md5_hex(db_lines):
# todo: special case
return
last_order = log_entries[len(db_lines)-1]['order']
new_docs = []
i = last_order + 1
for line in new_lines:
new_docs.append({
'log': log_id,
'content': line,
'order': i
})
i += 1
db.entries.insert(new_docs)
_notify_websockets(new_docs)
for log in db.logs.find():
check_log(log)
time.sleep(1)
def _notify_websockets(docs):
# todo
pass | <commit_before>import os
import pymongo
from .db import connect_to_db
files_to_check = [
'/home/irvind/test.log',
'/home/irvind/test2.log'
]
def run_scanner():
_, db = connect_to_db(async=False)
while True:
try:
pass
except KeyboardInterrupt:
break
<commit_msg>[wip] Monitor new records and add them to the database<commit_after> | import os
import datetime
import time
import hashlib
from datetime import datetime
import pymongo
from .db import connect_to_db
def file_mtime(path):
return datetime.fromtimestamp(os.stat(path).st_mtime)
def md5_hex(data):
# todo
pass
def run_scanner():
_, db = connect_to_db(async=False)
try:
while True:
_scanner_iter(db)
except KeyboardInterrupt:
pass
def _scanner_iter(db):
def check_log(log):
mtime, path, log_id = (
log.get('last_mtime'),
log.get('path'),
log.get('id'),
)
if mtime is not None and mtime >= file_mtime(path):
# File not changed
return
log_entries = list(db.entries.find({'log': log_id}).sort('order'))
db_lines = [ent['content'] for ent in log_entries]
with open(path, 'r') as f:
file_lines = f.readlines()
if len(file_lines) <= len(db_lines):
# todo: special case
return
old_lines = file_lines[:len(db_lines)]
new_lines = file_lines[len(db_lines):]
if md5_hex(old_lines) != md5_hex(db_lines):
# todo: special case
return
last_order = log_entries[len(db_lines)-1]['order']
new_docs = []
i = last_order + 1
for line in new_lines:
new_docs.append({
'log': log_id,
'content': line,
'order': i
})
i += 1
db.entries.insert(new_docs)
_notify_websockets(new_docs)
for log in db.logs.find():
check_log(log)
time.sleep(1)
def _notify_websockets(docs):
# todo
pass | import os
import pymongo
from .db import connect_to_db
files_to_check = [
'/home/irvind/test.log',
'/home/irvind/test2.log'
]
def run_scanner():
_, db = connect_to_db(async=False)
while True:
try:
pass
except KeyboardInterrupt:
break
[wip] Monitor new records and add them to the databaseimport os
import datetime
import time
import hashlib
from datetime import datetime
import pymongo
from .db import connect_to_db
def file_mtime(path):
return datetime.fromtimestamp(os.stat(path).st_mtime)
def md5_hex(data):
# todo
pass
def run_scanner():
_, db = connect_to_db(async=False)
try:
while True:
_scanner_iter(db)
except KeyboardInterrupt:
pass
def _scanner_iter(db):
def check_log(log):
mtime, path, log_id = (
log.get('last_mtime'),
log.get('path'),
log.get('id'),
)
if mtime is not None and mtime >= file_mtime(path):
# File not changed
return
log_entries = list(db.entries.find({'log': log_id}).sort('order'))
db_lines = [ent['content'] for ent in log_entries]
with open(path, 'r') as f:
file_lines = f.readlines()
if len(file_lines) <= len(db_lines):
# todo: special case
return
old_lines = file_lines[:len(db_lines)]
new_lines = file_lines[len(db_lines):]
if md5_hex(old_lines) != md5_hex(db_lines):
# todo: special case
return
last_order = log_entries[len(db_lines)-1]['order']
new_docs = []
i = last_order + 1
for line in new_lines:
new_docs.append({
'log': log_id,
'content': line,
'order': i
})
i += 1
db.entries.insert(new_docs)
_notify_websockets(new_docs)
for log in db.logs.find():
check_log(log)
time.sleep(1)
def _notify_websockets(docs):
# todo
pass | <commit_before>import os
import pymongo
from .db import connect_to_db
files_to_check = [
'/home/irvind/test.log',
'/home/irvind/test2.log'
]
def run_scanner():
_, db = connect_to_db(async=False)
while True:
try:
pass
except KeyboardInterrupt:
break
<commit_msg>[wip] Monitor new records and add them to the database<commit_after>import os
import datetime
import time
import hashlib
from datetime import datetime
import pymongo
from .db import connect_to_db
def file_mtime(path):
return datetime.fromtimestamp(os.stat(path).st_mtime)
def md5_hex(data):
# todo
pass
def run_scanner():
_, db = connect_to_db(async=False)
try:
while True:
_scanner_iter(db)
except KeyboardInterrupt:
pass
def _scanner_iter(db):
def check_log(log):
mtime, path, log_id = (
log.get('last_mtime'),
log.get('path'),
log.get('id'),
)
if mtime is not None and mtime >= file_mtime(path):
# File not changed
return
log_entries = list(db.entries.find({'log': log_id}).sort('order'))
db_lines = [ent['content'] for ent in log_entries]
with open(path, 'r') as f:
file_lines = f.readlines()
if len(file_lines) <= len(db_lines):
# todo: special case
return
old_lines = file_lines[:len(db_lines)]
new_lines = file_lines[len(db_lines):]
if md5_hex(old_lines) != md5_hex(db_lines):
# todo: special case
return
last_order = log_entries[len(db_lines)-1]['order']
new_docs = []
i = last_order + 1
for line in new_lines:
new_docs.append({
'log': log_id,
'content': line,
'order': i
})
i += 1
db.entries.insert(new_docs)
_notify_websockets(new_docs)
for log in db.logs.find():
check_log(log)
time.sleep(1)
def _notify_websockets(docs):
# todo
pass |
5e0dde9e6bb6dc8f24c4eb47e63c5dd68ede809c | pontoon/administration/tasks.py | pontoon/administration/tasks.py |
import commonware
from celery.task import task
from pontoon.base.models import Project
from pontoon.administration.views import _update_from_repository
log = commonware.log.getLogger('pontoon')
@task()
def update_projects_from_repository():
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = project.repository_path
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
except Exception as e:
log.debug('UpdateFromRepositoryTaskError: %s' % unicode(e))
|
import commonware
import os
from celery.task import task
from django.conf import settings
from pontoon.base.models import Project
from pontoon.administration.views import _update_from_repository
log = commonware.log.getLogger('pontoon')
@task()
def update_projects_from_repository():
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
except Exception as e:
log.debug('UpdateFromRepositoryTaskError: %s' % unicode(e))
| Use master path as the argument, which is crucial for single-locale repositories | Use master path as the argument, which is crucial for single-locale repositories
| Python | bsd-3-clause | yfdyh000/pontoon,sudheesh001/pontoon,mozilla/pontoon,m8ttyB/pontoon,mathjazz/pontoon,participedia/pontoon,sudheesh001/pontoon,Jobava/mirror-pontoon,yfdyh000/pontoon,vivekanand1101/pontoon,jotes/pontoon,m8ttyB/pontoon,yfdyh000/pontoon,mozilla/pontoon,Jobava/mirror-pontoon,participedia/pontoon,Jobava/mirror-pontoon,sudheesh001/pontoon,jotes/pontoon,mathjazz/pontoon,vivekanand1101/pontoon,mastizada/pontoon,mathjazz/pontoon,mathjazz/pontoon,m8ttyB/pontoon,jotes/pontoon,Osmose/pontoon,mastizada/pontoon,m8ttyB/pontoon,mastizada/pontoon,mozilla/pontoon,sudheesh001/pontoon,mathjazz/pontoon,mastizada/pontoon,mozilla/pontoon,vivekanand1101/pontoon,Osmose/pontoon,participedia/pontoon,yfdyh000/pontoon,Jobava/mirror-pontoon,jotes/pontoon,Osmose/pontoon,participedia/pontoon,mozilla/pontoon,Osmose/pontoon,vivekanand1101/pontoon |
import commonware
from celery.task import task
from pontoon.base.models import Project
from pontoon.administration.views import _update_from_repository
log = commonware.log.getLogger('pontoon')
@task()
def update_projects_from_repository():
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = project.repository_path
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
except Exception as e:
log.debug('UpdateFromRepositoryTaskError: %s' % unicode(e))
Use master path as the argument, which is crucial for single-locale repositories |
import commonware
import os
from celery.task import task
from django.conf import settings
from pontoon.base.models import Project
from pontoon.administration.views import _update_from_repository
log = commonware.log.getLogger('pontoon')
@task()
def update_projects_from_repository():
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
except Exception as e:
log.debug('UpdateFromRepositoryTaskError: %s' % unicode(e))
| <commit_before>
import commonware
from celery.task import task
from pontoon.base.models import Project
from pontoon.administration.views import _update_from_repository
log = commonware.log.getLogger('pontoon')
@task()
def update_projects_from_repository():
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = project.repository_path
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
except Exception as e:
log.debug('UpdateFromRepositoryTaskError: %s' % unicode(e))
<commit_msg>Use master path as the argument, which is crucial for single-locale repositories<commit_after> |
import commonware
import os
from celery.task import task
from django.conf import settings
from pontoon.base.models import Project
from pontoon.administration.views import _update_from_repository
log = commonware.log.getLogger('pontoon')
@task()
def update_projects_from_repository():
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
except Exception as e:
log.debug('UpdateFromRepositoryTaskError: %s' % unicode(e))
|
import commonware
from celery.task import task
from pontoon.base.models import Project
from pontoon.administration.views import _update_from_repository
log = commonware.log.getLogger('pontoon')
@task()
def update_projects_from_repository():
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = project.repository_path
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
except Exception as e:
log.debug('UpdateFromRepositoryTaskError: %s' % unicode(e))
Use master path as the argument, which is crucial for single-locale repositories
import commonware
import os
from celery.task import task
from django.conf import settings
from pontoon.base.models import Project
from pontoon.administration.views import _update_from_repository
log = commonware.log.getLogger('pontoon')
@task()
def update_projects_from_repository():
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
except Exception as e:
log.debug('UpdateFromRepositoryTaskError: %s' % unicode(e))
| <commit_before>
import commonware
from celery.task import task
from pontoon.base.models import Project
from pontoon.administration.views import _update_from_repository
log = commonware.log.getLogger('pontoon')
@task()
def update_projects_from_repository():
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = project.repository_path
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
except Exception as e:
log.debug('UpdateFromRepositoryTaskError: %s' % unicode(e))
<commit_msg>Use master path as the argument, which is crucial for single-locale repositories<commit_after>
import commonware
import os
from celery.task import task
from django.conf import settings
from pontoon.base.models import Project
from pontoon.administration.views import _update_from_repository
log = commonware.log.getLogger('pontoon')
@task()
def update_projects_from_repository():
for project in Project.objects.all():
try:
repository_type = project.repository_type
repository_url = project.repository
repository_path_master = os.path.join(settings.MEDIA_ROOT,
repository_type, project.name)
_update_from_repository(
project, repository_type, repository_url,
repository_path_master)
except Exception as e:
log.debug('UpdateFromRepositoryTaskError: %s' % unicode(e))
|
e089a75174c003b4d22c4a2f8d65f3adb3d84def | feedback/models.py | feedback/models.py | from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext as _
class Feedback(models.Model):
site = models.ForeignKey(Site, verbose_name=_('site'))
url = models.CharField(max_length=255, verbose_name=_('url'))
urlhash = models.TextField(verbose_name=_('urlhash'), default="", null=True, blank=True)
useragent = models.TextField(verbose_name=_('useragent'), default="", null=True, blank=True)
subject = models.CharField(max_length=255, blank=True, null=True,
verbose_name=_('subject'))
email = models.EmailField(blank=True, null=True, verbose_name=_('email'))
text = models.TextField(verbose_name=_('text'))
created = models.DateTimeField(auto_now_add=True, null=True)
def __unicode__(self):
return u'{url}: {subject}'.format(url=self.url, subject=self.subject)
| from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
class Feedback(models.Model):
site = models.ForeignKey(Site, verbose_name=_('site'))
url = models.CharField(max_length=255, verbose_name=_('url'))
urlhash = models.TextField(verbose_name=_('urlhash'), default="", null=True, blank=True)
useragent = models.TextField(verbose_name=_('useragent'), default="", null=True, blank=True)
subject = models.CharField(max_length=255, blank=True, null=True,
verbose_name=_('subject'))
email = models.EmailField(blank=True, null=True, verbose_name=_('email'))
text = models.TextField(verbose_name=_('text'))
created = models.DateTimeField(auto_now_add=True, null=True)
def __unicode__(self):
return u'{url}: {subject}'.format(url=self.url, subject=self.subject)
| Use ugettext_lazy instead of ugettext to support Django 1.7 migrations | Use ugettext_lazy instead of ugettext to support Django 1.7 migrations
| Python | mit | PetrDlouhy/django-feedback,jaredly/django-feedback,jaredly/django-feedback,PetrDlouhy/django-feedback,PetrDlouhy/django-feedback,jaredly/django-feedback | from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext as _
class Feedback(models.Model):
site = models.ForeignKey(Site, verbose_name=_('site'))
url = models.CharField(max_length=255, verbose_name=_('url'))
urlhash = models.TextField(verbose_name=_('urlhash'), default="", null=True, blank=True)
useragent = models.TextField(verbose_name=_('useragent'), default="", null=True, blank=True)
subject = models.CharField(max_length=255, blank=True, null=True,
verbose_name=_('subject'))
email = models.EmailField(blank=True, null=True, verbose_name=_('email'))
text = models.TextField(verbose_name=_('text'))
created = models.DateTimeField(auto_now_add=True, null=True)
def __unicode__(self):
return u'{url}: {subject}'.format(url=self.url, subject=self.subject)
Use ugettext_lazy instead of ugettext to support Django 1.7 migrations | from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
class Feedback(models.Model):
site = models.ForeignKey(Site, verbose_name=_('site'))
url = models.CharField(max_length=255, verbose_name=_('url'))
urlhash = models.TextField(verbose_name=_('urlhash'), default="", null=True, blank=True)
useragent = models.TextField(verbose_name=_('useragent'), default="", null=True, blank=True)
subject = models.CharField(max_length=255, blank=True, null=True,
verbose_name=_('subject'))
email = models.EmailField(blank=True, null=True, verbose_name=_('email'))
text = models.TextField(verbose_name=_('text'))
created = models.DateTimeField(auto_now_add=True, null=True)
def __unicode__(self):
return u'{url}: {subject}'.format(url=self.url, subject=self.subject)
| <commit_before>from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext as _
class Feedback(models.Model):
site = models.ForeignKey(Site, verbose_name=_('site'))
url = models.CharField(max_length=255, verbose_name=_('url'))
urlhash = models.TextField(verbose_name=_('urlhash'), default="", null=True, blank=True)
useragent = models.TextField(verbose_name=_('useragent'), default="", null=True, blank=True)
subject = models.CharField(max_length=255, blank=True, null=True,
verbose_name=_('subject'))
email = models.EmailField(blank=True, null=True, verbose_name=_('email'))
text = models.TextField(verbose_name=_('text'))
created = models.DateTimeField(auto_now_add=True, null=True)
def __unicode__(self):
return u'{url}: {subject}'.format(url=self.url, subject=self.subject)
<commit_msg>Use ugettext_lazy instead of ugettext to support Django 1.7 migrations<commit_after> | from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
class Feedback(models.Model):
site = models.ForeignKey(Site, verbose_name=_('site'))
url = models.CharField(max_length=255, verbose_name=_('url'))
urlhash = models.TextField(verbose_name=_('urlhash'), default="", null=True, blank=True)
useragent = models.TextField(verbose_name=_('useragent'), default="", null=True, blank=True)
subject = models.CharField(max_length=255, blank=True, null=True,
verbose_name=_('subject'))
email = models.EmailField(blank=True, null=True, verbose_name=_('email'))
text = models.TextField(verbose_name=_('text'))
created = models.DateTimeField(auto_now_add=True, null=True)
def __unicode__(self):
return u'{url}: {subject}'.format(url=self.url, subject=self.subject)
| from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext as _
class Feedback(models.Model):
site = models.ForeignKey(Site, verbose_name=_('site'))
url = models.CharField(max_length=255, verbose_name=_('url'))
urlhash = models.TextField(verbose_name=_('urlhash'), default="", null=True, blank=True)
useragent = models.TextField(verbose_name=_('useragent'), default="", null=True, blank=True)
subject = models.CharField(max_length=255, blank=True, null=True,
verbose_name=_('subject'))
email = models.EmailField(blank=True, null=True, verbose_name=_('email'))
text = models.TextField(verbose_name=_('text'))
created = models.DateTimeField(auto_now_add=True, null=True)
def __unicode__(self):
return u'{url}: {subject}'.format(url=self.url, subject=self.subject)
Use ugettext_lazy instead of ugettext to support Django 1.7 migrationsfrom django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
class Feedback(models.Model):
site = models.ForeignKey(Site, verbose_name=_('site'))
url = models.CharField(max_length=255, verbose_name=_('url'))
urlhash = models.TextField(verbose_name=_('urlhash'), default="", null=True, blank=True)
useragent = models.TextField(verbose_name=_('useragent'), default="", null=True, blank=True)
subject = models.CharField(max_length=255, blank=True, null=True,
verbose_name=_('subject'))
email = models.EmailField(blank=True, null=True, verbose_name=_('email'))
text = models.TextField(verbose_name=_('text'))
created = models.DateTimeField(auto_now_add=True, null=True)
def __unicode__(self):
return u'{url}: {subject}'.format(url=self.url, subject=self.subject)
| <commit_before>from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext as _
class Feedback(models.Model):
site = models.ForeignKey(Site, verbose_name=_('site'))
url = models.CharField(max_length=255, verbose_name=_('url'))
urlhash = models.TextField(verbose_name=_('urlhash'), default="", null=True, blank=True)
useragent = models.TextField(verbose_name=_('useragent'), default="", null=True, blank=True)
subject = models.CharField(max_length=255, blank=True, null=True,
verbose_name=_('subject'))
email = models.EmailField(blank=True, null=True, verbose_name=_('email'))
text = models.TextField(verbose_name=_('text'))
created = models.DateTimeField(auto_now_add=True, null=True)
def __unicode__(self):
return u'{url}: {subject}'.format(url=self.url, subject=self.subject)
<commit_msg>Use ugettext_lazy instead of ugettext to support Django 1.7 migrations<commit_after>from django.db import models
from django.contrib.sites.models import Site
from django.utils.translation import ugettext_lazy as _
class Feedback(models.Model):
site = models.ForeignKey(Site, verbose_name=_('site'))
url = models.CharField(max_length=255, verbose_name=_('url'))
urlhash = models.TextField(verbose_name=_('urlhash'), default="", null=True, blank=True)
useragent = models.TextField(verbose_name=_('useragent'), default="", null=True, blank=True)
subject = models.CharField(max_length=255, blank=True, null=True,
verbose_name=_('subject'))
email = models.EmailField(blank=True, null=True, verbose_name=_('email'))
text = models.TextField(verbose_name=_('text'))
created = models.DateTimeField(auto_now_add=True, null=True)
def __unicode__(self):
return u'{url}: {subject}'.format(url=self.url, subject=self.subject)
|
fcc9a8ed53385f4cf9ca7f1bb73fdc7407b88b7f | pryvate/blueprints/simple/simple.py | pryvate/blueprints/simple/simple.py | """Simple blueprint."""
import os
from flask import Blueprint, current_app, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
with open(os.path.join(package_path, filename), 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1])
| """Simple blueprint."""
import os
from flask import Blueprint, current_app, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
with open(os.path.join(package_path, filename), 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1].lower())
| Use only lowercase letters in the source link as well | Use only lowercase letters in the source link as well
| Python | mit | Dinoshauer/pryvate,Dinoshauer/pryvate | """Simple blueprint."""
import os
from flask import Blueprint, current_app, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
with open(os.path.join(package_path, filename), 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1])
Use only lowercase letters in the source link as well | """Simple blueprint."""
import os
from flask import Blueprint, current_app, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
with open(os.path.join(package_path, filename), 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1].lower())
| <commit_before>"""Simple blueprint."""
import os
from flask import Blueprint, current_app, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
with open(os.path.join(package_path, filename), 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1])
<commit_msg>Use only lowercase letters in the source link as well<commit_after> | """Simple blueprint."""
import os
from flask import Blueprint, current_app, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
with open(os.path.join(package_path, filename), 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1].lower())
| """Simple blueprint."""
import os
from flask import Blueprint, current_app, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
with open(os.path.join(package_path, filename), 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1])
Use only lowercase letters in the source link as well"""Simple blueprint."""
import os
from flask import Blueprint, current_app, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
with open(os.path.join(package_path, filename), 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1].lower())
| <commit_before>"""Simple blueprint."""
import os
from flask import Blueprint, current_app, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
with open(os.path.join(package_path, filename), 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1])
<commit_msg>Use only lowercase letters in the source link as well<commit_after>"""Simple blueprint."""
import os
from flask import Blueprint, current_app, render_template
blueprint = Blueprint('simple', __name__, url_prefix='/simple',
template_folder='templates')
@blueprint.route('', methods=['GET'])
def get_simple():
"""List all packages."""
packages = os.listdir(current_app.config['BASEDIR'])
return render_template('simple.html', packages=packages)
@blueprint.route('/<package>', methods=['GET'])
def get_package(package):
"""List versions of a package."""
package_path = os.path.join(current_app.config['BASEDIR'],
package.lower())
files = os.listdir(package_path)
packages = []
for filename in files:
if filename.endswith('md5'):
with open(os.path.join(package_path, filename), 'r') as md5_digest:
item = {
'name': package,
'version': filename.replace('.md5', ''),
'digest': md5_digest.read()
}
packages.append(item)
return render_template('simple_package.html', packages=packages,
letter=package[:1].lower())
|
ee349a7e26579ec6dffc3153fb8d55fad3281c5b | app.py | app.py | #!/usr/bin/env python
from flask import Flask, abort, jsonify, request
from icalendar import Calendar
from urllib import urlopen
app = Flask(__name__)
@app.route('/')
def index():
return u'Please use like <code>%swww.myserver.com/path/to/file.ics' % request.host_url
@app.route('/<path:url>')
def convert_from_url(url):
url = 'http://%s' % url
uh = urlopen(url)
if uh.getcode() != 200:
abort(404)
ics = uh.read()
uh.close()
cal = Calendar.from_string(ics)
data = {}
data[cal.name] = dict(cal.items())
for component in cal.subcomponents:
if not data[cal.name].has_key(component.name):
data[cal.name][component.name] = []
comp_obj = {}
for item in component.items():
comp_obj[item[0]] = unicode(item[1])
data[cal.name][component.name].append(comp_obj)
resp = jsonify(data)
if 'callback' in request.args:
resp.data = "%s(%s);" % (request.args['callback'], resp.data)
return resp
if __name__ == '__main__':
app.run(debug=True)
| #!/usr/bin/env python
from flask import Flask, abort, jsonify, request
from icalendar import Calendar
from urllib import urlopen
app = Flask(__name__)
@app.route('/')
def index():
return u'Please use like <code>http://<script>document.write(location.host);</script><noscript>ical2json.ep.io</noscript>/www.myserver.com/path/to/file.ics</code>'
@app.route('/<path:url>')
def convert_from_url(url):
url = 'http://%s' % url
uh = urlopen(url)
if uh.getcode() != 200:
abort(404)
ics = uh.read()
uh.close()
cal = Calendar.from_string(ics)
data = {}
data[cal.name] = dict(cal.items())
for component in cal.subcomponents:
if not data[cal.name].has_key(component.name):
data[cal.name][component.name] = []
comp_obj = {}
for item in component.items():
comp_obj[item[0]] = unicode(item[1])
data[cal.name][component.name].append(comp_obj)
resp = jsonify(data)
if 'callback' in request.args:
resp.data = "%s(%s);" % (request.args['callback'], resp.data)
return resp
if __name__ == '__main__':
app.run(debug=True)
| Fix index page example URL. | Fix index page example URL.
| Python | mit | philippbosch/ical2json | #!/usr/bin/env python
from flask import Flask, abort, jsonify, request
from icalendar import Calendar
from urllib import urlopen
app = Flask(__name__)
@app.route('/')
def index():
return u'Please use like <code>%swww.myserver.com/path/to/file.ics' % request.host_url
@app.route('/<path:url>')
def convert_from_url(url):
url = 'http://%s' % url
uh = urlopen(url)
if uh.getcode() != 200:
abort(404)
ics = uh.read()
uh.close()
cal = Calendar.from_string(ics)
data = {}
data[cal.name] = dict(cal.items())
for component in cal.subcomponents:
if not data[cal.name].has_key(component.name):
data[cal.name][component.name] = []
comp_obj = {}
for item in component.items():
comp_obj[item[0]] = unicode(item[1])
data[cal.name][component.name].append(comp_obj)
resp = jsonify(data)
if 'callback' in request.args:
resp.data = "%s(%s);" % (request.args['callback'], resp.data)
return resp
if __name__ == '__main__':
app.run(debug=True)
Fix index page example URL. | #!/usr/bin/env python
from flask import Flask, abort, jsonify, request
from icalendar import Calendar
from urllib import urlopen
app = Flask(__name__)
@app.route('/')
def index():
return u'Please use like <code>http://<script>document.write(location.host);</script><noscript>ical2json.ep.io</noscript>/www.myserver.com/path/to/file.ics</code>'
@app.route('/<path:url>')
def convert_from_url(url):
url = 'http://%s' % url
uh = urlopen(url)
if uh.getcode() != 200:
abort(404)
ics = uh.read()
uh.close()
cal = Calendar.from_string(ics)
data = {}
data[cal.name] = dict(cal.items())
for component in cal.subcomponents:
if not data[cal.name].has_key(component.name):
data[cal.name][component.name] = []
comp_obj = {}
for item in component.items():
comp_obj[item[0]] = unicode(item[1])
data[cal.name][component.name].append(comp_obj)
resp = jsonify(data)
if 'callback' in request.args:
resp.data = "%s(%s);" % (request.args['callback'], resp.data)
return resp
if __name__ == '__main__':
app.run(debug=True)
| <commit_before>#!/usr/bin/env python
from flask import Flask, abort, jsonify, request
from icalendar import Calendar
from urllib import urlopen
app = Flask(__name__)
@app.route('/')
def index():
return u'Please use like <code>%swww.myserver.com/path/to/file.ics' % request.host_url
@app.route('/<path:url>')
def convert_from_url(url):
url = 'http://%s' % url
uh = urlopen(url)
if uh.getcode() != 200:
abort(404)
ics = uh.read()
uh.close()
cal = Calendar.from_string(ics)
data = {}
data[cal.name] = dict(cal.items())
for component in cal.subcomponents:
if not data[cal.name].has_key(component.name):
data[cal.name][component.name] = []
comp_obj = {}
for item in component.items():
comp_obj[item[0]] = unicode(item[1])
data[cal.name][component.name].append(comp_obj)
resp = jsonify(data)
if 'callback' in request.args:
resp.data = "%s(%s);" % (request.args['callback'], resp.data)
return resp
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Fix index page example URL.<commit_after> | #!/usr/bin/env python
from flask import Flask, abort, jsonify, request
from icalendar import Calendar
from urllib import urlopen
app = Flask(__name__)
@app.route('/')
def index():
return u'Please use like <code>http://<script>document.write(location.host);</script><noscript>ical2json.ep.io</noscript>/www.myserver.com/path/to/file.ics</code>'
@app.route('/<path:url>')
def convert_from_url(url):
url = 'http://%s' % url
uh = urlopen(url)
if uh.getcode() != 200:
abort(404)
ics = uh.read()
uh.close()
cal = Calendar.from_string(ics)
data = {}
data[cal.name] = dict(cal.items())
for component in cal.subcomponents:
if not data[cal.name].has_key(component.name):
data[cal.name][component.name] = []
comp_obj = {}
for item in component.items():
comp_obj[item[0]] = unicode(item[1])
data[cal.name][component.name].append(comp_obj)
resp = jsonify(data)
if 'callback' in request.args:
resp.data = "%s(%s);" % (request.args['callback'], resp.data)
return resp
if __name__ == '__main__':
app.run(debug=True)
| #!/usr/bin/env python
from flask import Flask, abort, jsonify, request
from icalendar import Calendar
from urllib import urlopen
app = Flask(__name__)
@app.route('/')
def index():
return u'Please use like <code>%swww.myserver.com/path/to/file.ics' % request.host_url
@app.route('/<path:url>')
def convert_from_url(url):
url = 'http://%s' % url
uh = urlopen(url)
if uh.getcode() != 200:
abort(404)
ics = uh.read()
uh.close()
cal = Calendar.from_string(ics)
data = {}
data[cal.name] = dict(cal.items())
for component in cal.subcomponents:
if not data[cal.name].has_key(component.name):
data[cal.name][component.name] = []
comp_obj = {}
for item in component.items():
comp_obj[item[0]] = unicode(item[1])
data[cal.name][component.name].append(comp_obj)
resp = jsonify(data)
if 'callback' in request.args:
resp.data = "%s(%s);" % (request.args['callback'], resp.data)
return resp
if __name__ == '__main__':
app.run(debug=True)
Fix index page example URL.#!/usr/bin/env python
from flask import Flask, abort, jsonify, request
from icalendar import Calendar
from urllib import urlopen
app = Flask(__name__)
@app.route('/')
def index():
return u'Please use like <code>http://<script>document.write(location.host);</script><noscript>ical2json.ep.io</noscript>/www.myserver.com/path/to/file.ics</code>'
@app.route('/<path:url>')
def convert_from_url(url):
url = 'http://%s' % url
uh = urlopen(url)
if uh.getcode() != 200:
abort(404)
ics = uh.read()
uh.close()
cal = Calendar.from_string(ics)
data = {}
data[cal.name] = dict(cal.items())
for component in cal.subcomponents:
if not data[cal.name].has_key(component.name):
data[cal.name][component.name] = []
comp_obj = {}
for item in component.items():
comp_obj[item[0]] = unicode(item[1])
data[cal.name][component.name].append(comp_obj)
resp = jsonify(data)
if 'callback' in request.args:
resp.data = "%s(%s);" % (request.args['callback'], resp.data)
return resp
if __name__ == '__main__':
app.run(debug=True)
| <commit_before>#!/usr/bin/env python
from flask import Flask, abort, jsonify, request
from icalendar import Calendar
from urllib import urlopen
app = Flask(__name__)
@app.route('/')
def index():
return u'Please use like <code>%swww.myserver.com/path/to/file.ics' % request.host_url
@app.route('/<path:url>')
def convert_from_url(url):
url = 'http://%s' % url
uh = urlopen(url)
if uh.getcode() != 200:
abort(404)
ics = uh.read()
uh.close()
cal = Calendar.from_string(ics)
data = {}
data[cal.name] = dict(cal.items())
for component in cal.subcomponents:
if not data[cal.name].has_key(component.name):
data[cal.name][component.name] = []
comp_obj = {}
for item in component.items():
comp_obj[item[0]] = unicode(item[1])
data[cal.name][component.name].append(comp_obj)
resp = jsonify(data)
if 'callback' in request.args:
resp.data = "%s(%s);" % (request.args['callback'], resp.data)
return resp
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Fix index page example URL.<commit_after>#!/usr/bin/env python
from flask import Flask, abort, jsonify, request
from icalendar import Calendar
from urllib import urlopen
app = Flask(__name__)
@app.route('/')
def index():
return u'Please use like <code>http://<script>document.write(location.host);</script><noscript>ical2json.ep.io</noscript>/www.myserver.com/path/to/file.ics</code>'
@app.route('/<path:url>')
def convert_from_url(url):
url = 'http://%s' % url
uh = urlopen(url)
if uh.getcode() != 200:
abort(404)
ics = uh.read()
uh.close()
cal = Calendar.from_string(ics)
data = {}
data[cal.name] = dict(cal.items())
for component in cal.subcomponents:
if not data[cal.name].has_key(component.name):
data[cal.name][component.name] = []
comp_obj = {}
for item in component.items():
comp_obj[item[0]] = unicode(item[1])
data[cal.name][component.name].append(comp_obj)
resp = jsonify(data)
if 'callback' in request.args:
resp.data = "%s(%s);" % (request.args['callback'], resp.data)
return resp
if __name__ == '__main__':
app.run(debug=True)
|
5c04957ca44fc43eae034fe389d39b879ec000ae | slideshow/event.py | slideshow/event.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import types
_subscribers = {}
class func:
    """Late-binding wrapper around an unbound method.

    Stores the target object and the raw function so that the binding via
    ``types.MethodType`` happens only when the event actually fires.

    NOTE(review): ``listener`` passes the *class* object as ``inst``, not an
    instance -- presumably the callbacks never touch per-instance state;
    confirm before relying on ``self`` inside a callback.
    """
    def __init__(self, inst, method):
        self._inst = inst  # object the method will be bound to at call time
        self._method = method  # plain function pulled from the class __dict__
    def __call__(self, *args, **kwargs):
        # bind method to class instance
        # The bound method's return value is discarded; __call__ returns None.
        types.MethodType(self._method, self._inst)(*args, **kwargs)
def listener(cls):
    """Class decorator: subscribe every method tagged by ``@callback``.

    At class-creation time the methods in ``cls.__dict__`` are still plain
    functions, so each one is wrapped in ``func`` for late binding.
    """
    for attr in cls.__dict__.values():
        if hasattr(attr, "subscribe"):
            subscribe(attr.subscribe, func(cls, attr))
    return cls
def callback(event):
    """Decorator factory: tag a function with the event name it handles.

    The tag is read later by the ``listener`` class decorator, which performs
    the actual subscription once the class body has been executed.
    """
    def mark(fn):
        # At decoration time ``fn`` is still a plain function, not a class
        # member, so we only record which event it wants.
        fn.subscribe = event
        return fn
    return mark
def trigger(event, *args, **kwargs):
    """Fire *event*, forwarding all extra arguments to every subscriber."""
    # An event nobody subscribed to is silently a no-op.
    for handler in _subscribers.get(event, ()):
        handler(*args, **kwargs)
def subscribe(event, callback):
    """Register *callback* to be invoked whenever *event* is triggered."""
    # setdefault creates the handler list on the first subscription.
    _subscribers.setdefault(event, []).append(callback)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import types
import cherrypy
_subscribers = {}
class func:
def __init__(self, inst, method, cls):
self._inst = inst
self._method = method
self._cls = cls
def __call__(self, *args, **kwargs):
# bind method to class instance
types.MethodType(self._method, self._inst)(*args, **kwargs)
def listener(cls):
for name, method in cls.__dict__.iteritems():
if hasattr(method, "subscribe"):
# methods are still not bound, so store both the instance and the
# method for late binding
subscribe(method.subscribe, func(cls, method, None))
return cls
def callback(event):
def decorate(func):
# func is not yet a class member, so just mark what event it want
func.subscribe = event
return func
return decorate
def trigger(event, *args, **kwargs):
cherrypy.engine.log('%s triggered' % event)
if event not in _subscribers:
return
for func in _subscribers[event]:
func(*args, **kwargs)
def subscribe(event, callback):
if not isinstance(event, basestring):
raise ValueError, 'event name must be string (got %s)' % event.__class__.__name__
if event not in _subscribers:
_subscribers[event] = []
_subscribers[event].append(callback)
| Add logging and some minor error handling. | Add logging and some minor error handling.
| Python | agpl-3.0 | ext/slideshow-frontend,ext/slideshow,ext/slideshow-frontend,ext/slideshow,ext/slideshow,ext/slideshow,ext/slideshow-frontend,ext/slideshow,ext/slideshow | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import types
_subscribers = {}
class func:
def __init__(self, inst, method):
self._inst = inst
self._method = method
def __call__(self, *args, **kwargs):
# bind method to class instance
types.MethodType(self._method, self._inst)(*args, **kwargs)
def listener(cls):
for name, method in cls.__dict__.iteritems():
if hasattr(method, "subscribe"):
# methods are still not bound, so store both the instance and the
# method for late binding
subscribe(method.subscribe, func(cls, method))
return cls
def callback(event):
def decorate(func):
# func is not yet a class member, so just mark what event it want
func.subscribe = event
return func
return decorate
def trigger(event, *args, **kwargs):
if event not in _subscribers:
return
for func in _subscribers[event]:
func(*args, **kwargs)
def subscribe(event, callback):
if event not in _subscribers:
_subscribers[event] = []
_subscribers[event].append(callback)
Add logging and some minor error handling. | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import types
import cherrypy
_subscribers = {}
class func:
def __init__(self, inst, method, cls):
self._inst = inst
self._method = method
self._cls = cls
def __call__(self, *args, **kwargs):
# bind method to class instance
types.MethodType(self._method, self._inst)(*args, **kwargs)
def listener(cls):
for name, method in cls.__dict__.iteritems():
if hasattr(method, "subscribe"):
# methods are still not bound, so store both the instance and the
# method for late binding
subscribe(method.subscribe, func(cls, method, None))
return cls
def callback(event):
def decorate(func):
# func is not yet a class member, so just mark what event it want
func.subscribe = event
return func
return decorate
def trigger(event, *args, **kwargs):
cherrypy.engine.log('%s triggered' % event)
if event not in _subscribers:
return
for func in _subscribers[event]:
func(*args, **kwargs)
def subscribe(event, callback):
if not isinstance(event, basestring):
raise ValueError, 'event name must be string (got %s)' % event.__class__.__name__
if event not in _subscribers:
_subscribers[event] = []
_subscribers[event].append(callback)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import types
_subscribers = {}
class func:
def __init__(self, inst, method):
self._inst = inst
self._method = method
def __call__(self, *args, **kwargs):
# bind method to class instance
types.MethodType(self._method, self._inst)(*args, **kwargs)
def listener(cls):
for name, method in cls.__dict__.iteritems():
if hasattr(method, "subscribe"):
# methods are still not bound, so store both the instance and the
# method for late binding
subscribe(method.subscribe, func(cls, method))
return cls
def callback(event):
def decorate(func):
# func is not yet a class member, so just mark what event it want
func.subscribe = event
return func
return decorate
def trigger(event, *args, **kwargs):
if event not in _subscribers:
return
for func in _subscribers[event]:
func(*args, **kwargs)
def subscribe(event, callback):
if event not in _subscribers:
_subscribers[event] = []
_subscribers[event].append(callback)
<commit_msg>Add logging and some minor error handling.<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import types
import cherrypy
_subscribers = {}
class func:
def __init__(self, inst, method, cls):
self._inst = inst
self._method = method
self._cls = cls
def __call__(self, *args, **kwargs):
# bind method to class instance
types.MethodType(self._method, self._inst)(*args, **kwargs)
def listener(cls):
for name, method in cls.__dict__.iteritems():
if hasattr(method, "subscribe"):
# methods are still not bound, so store both the instance and the
# method for late binding
subscribe(method.subscribe, func(cls, method, None))
return cls
def callback(event):
def decorate(func):
# func is not yet a class member, so just mark what event it want
func.subscribe = event
return func
return decorate
def trigger(event, *args, **kwargs):
cherrypy.engine.log('%s triggered' % event)
if event not in _subscribers:
return
for func in _subscribers[event]:
func(*args, **kwargs)
def subscribe(event, callback):
if not isinstance(event, basestring):
raise ValueError, 'event name must be string (got %s)' % event.__class__.__name__
if event not in _subscribers:
_subscribers[event] = []
_subscribers[event].append(callback)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import types
_subscribers = {}
class func:
def __init__(self, inst, method):
self._inst = inst
self._method = method
def __call__(self, *args, **kwargs):
# bind method to class instance
types.MethodType(self._method, self._inst)(*args, **kwargs)
def listener(cls):
for name, method in cls.__dict__.iteritems():
if hasattr(method, "subscribe"):
# methods are still not bound, so store both the instance and the
# method for late binding
subscribe(method.subscribe, func(cls, method))
return cls
def callback(event):
def decorate(func):
# func is not yet a class member, so just mark what event it want
func.subscribe = event
return func
return decorate
def trigger(event, *args, **kwargs):
if event not in _subscribers:
return
for func in _subscribers[event]:
func(*args, **kwargs)
def subscribe(event, callback):
if event not in _subscribers:
_subscribers[event] = []
_subscribers[event].append(callback)
Add logging and some minor error handling.#!/usr/bin/env python
# -*- coding: utf-8 -*-
import types
import cherrypy
_subscribers = {}
class func:
def __init__(self, inst, method, cls):
self._inst = inst
self._method = method
self._cls = cls
def __call__(self, *args, **kwargs):
# bind method to class instance
types.MethodType(self._method, self._inst)(*args, **kwargs)
def listener(cls):
for name, method in cls.__dict__.iteritems():
if hasattr(method, "subscribe"):
# methods are still not bound, so store both the instance and the
# method for late binding
subscribe(method.subscribe, func(cls, method, None))
return cls
def callback(event):
def decorate(func):
# func is not yet a class member, so just mark what event it want
func.subscribe = event
return func
return decorate
def trigger(event, *args, **kwargs):
cherrypy.engine.log('%s triggered' % event)
if event not in _subscribers:
return
for func in _subscribers[event]:
func(*args, **kwargs)
def subscribe(event, callback):
if not isinstance(event, basestring):
raise ValueError, 'event name must be string (got %s)' % event.__class__.__name__
if event not in _subscribers:
_subscribers[event] = []
_subscribers[event].append(callback)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import types
_subscribers = {}
class func:
def __init__(self, inst, method):
self._inst = inst
self._method = method
def __call__(self, *args, **kwargs):
# bind method to class instance
types.MethodType(self._method, self._inst)(*args, **kwargs)
def listener(cls):
for name, method in cls.__dict__.iteritems():
if hasattr(method, "subscribe"):
# methods are still not bound, so store both the instance and the
# method for late binding
subscribe(method.subscribe, func(cls, method))
return cls
def callback(event):
def decorate(func):
# func is not yet a class member, so just mark what event it want
func.subscribe = event
return func
return decorate
def trigger(event, *args, **kwargs):
if event not in _subscribers:
return
for func in _subscribers[event]:
func(*args, **kwargs)
def subscribe(event, callback):
if event not in _subscribers:
_subscribers[event] = []
_subscribers[event].append(callback)
<commit_msg>Add logging and some minor error handling.<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import types
import cherrypy
_subscribers = {}
class func:
def __init__(self, inst, method, cls):
self._inst = inst
self._method = method
self._cls = cls
def __call__(self, *args, **kwargs):
# bind method to class instance
types.MethodType(self._method, self._inst)(*args, **kwargs)
def listener(cls):
for name, method in cls.__dict__.iteritems():
if hasattr(method, "subscribe"):
# methods are still not bound, so store both the instance and the
# method for late binding
subscribe(method.subscribe, func(cls, method, None))
return cls
def callback(event):
def decorate(func):
# func is not yet a class member, so just mark what event it want
func.subscribe = event
return func
return decorate
def trigger(event, *args, **kwargs):
cherrypy.engine.log('%s triggered' % event)
if event not in _subscribers:
return
for func in _subscribers[event]:
func(*args, **kwargs)
def subscribe(event, callback):
if not isinstance(event, basestring):
raise ValueError, 'event name must be string (got %s)' % event.__class__.__name__
if event not in _subscribers:
_subscribers[event] = []
_subscribers[event].append(callback)
|
ea8f2c3b036ff62ea2fa7faea6eefc86ac5471c7 | redis_sessions_fork/settings.py | redis_sessions_fork/settings.py | import os
from django.conf import settings
SESSION_REDIS_HOST = getattr(
settings,
'SESSION_REDIS_HOST',
'127.0.0.1'
)
SESSION_REDIS_PORT = getattr(
settings,
'SESSION_REDIS_PORT',
6379
)
SESSION_REDIS_DB = getattr(
settings,
'SESSION_REDIS_DB',
0
)
SESSION_REDIS_PREFIX = getattr(
settings,
'SESSION_REDIS_PREFIX',
'django_sessions'
)
SESSION_REDIS_PASSWORD = getattr(
settings,
'SESSION_REDIS_PASSWORD',
None
)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings,
'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH',
None
)
SESSION_REDIS_URL = getattr(
settings,
'SESSION_REDIS_URL',
None
)
SESSION_REDIS_CONNECTION_POOL = getattr(
settings,
'SESSION_REDIS_CONNECTION_POOL',
None
)
if SESSION_REDIS_URL is None:
# redis clouds ENV variables
SESSION_REDIS_ENV_URLS = getattr(
settings,
'SESSION_REDIS_ENV_URLS', (
'REDISTOGO_URL',
'OPENREDIS_URL',
'REDISGREEN_URL',
'MYREDIS_URL',
'REDISCLOUD_URL'
)
)
for url in SESSION_REDIS_ENV_URLS:
redis_env_url = os.environ.get(url)
if redis_env_url:
SESSION_REDIS_URL = redis_env_url
break
| import os
from django.conf import settings
SESSION_REDIS_HOST = getattr(
settings,
'SESSION_REDIS_HOST',
'127.0.0.1'
)
SESSION_REDIS_PORT = getattr(
settings,
'SESSION_REDIS_PORT',
6379
)
SESSION_REDIS_DB = getattr(
settings,
'SESSION_REDIS_DB',
0
)
SESSION_REDIS_PREFIX = getattr(
settings,
'SESSION_REDIS_PREFIX',
'django_sessions'
)
SESSION_REDIS_PASSWORD = getattr(
settings,
'SESSION_REDIS_PASSWORD',
None
)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings,
'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH',
None
)
SESSION_REDIS_URL = getattr(
settings,
'SESSION_REDIS_URL',
None
)
SESSION_REDIS_CONNECTION_POOL = getattr(
settings,
'SESSION_REDIS_CONNECTION_POOL',
None
)
if SESSION_REDIS_URL is None:
    # No explicit URL configured: fall back to the environment variables set
    # by the common hosted-redis providers, checked in priority order.
    SESSION_REDIS_ENV_URLS = getattr(
        settings,
        'SESSION_REDIS_ENV_URLS', (
            # BUG FIX: the comma after 'REDISCLOUD_URL' was missing, so
            # implicit string-literal concatenation fused it with the next
            # entry into the nonexistent name 'REDISCLOUD_URLREDISTOGO_URL',
            # silently disabling both providers.
            'REDISCLOUD_URL',
            'REDISTOGO_URL',
            'OPENREDIS_URL',
            'REDISGREEN_URL',
            'MYREDIS_URL',
        )
    )
    for url in SESSION_REDIS_ENV_URLS:
        redis_env_url = os.environ.get(url)
        if redis_env_url:
            # First provider found wins.
            SESSION_REDIS_URL = redis_env_url
            break
| Reorder ENV urls redis providers. | Reorder ENV urls redis providers.
| Python | bsd-3-clause | ProDG/django-redis-sessions-fork,hellysmile/django-redis-sessions-fork | import os
from django.conf import settings
SESSION_REDIS_HOST = getattr(
settings,
'SESSION_REDIS_HOST',
'127.0.0.1'
)
SESSION_REDIS_PORT = getattr(
settings,
'SESSION_REDIS_PORT',
6379
)
SESSION_REDIS_DB = getattr(
settings,
'SESSION_REDIS_DB',
0
)
SESSION_REDIS_PREFIX = getattr(
settings,
'SESSION_REDIS_PREFIX',
'django_sessions'
)
SESSION_REDIS_PASSWORD = getattr(
settings,
'SESSION_REDIS_PASSWORD',
None
)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings,
'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH',
None
)
SESSION_REDIS_URL = getattr(
settings,
'SESSION_REDIS_URL',
None
)
SESSION_REDIS_CONNECTION_POOL = getattr(
settings,
'SESSION_REDIS_CONNECTION_POOL',
None
)
if SESSION_REDIS_URL is None:
# redis clouds ENV variables
SESSION_REDIS_ENV_URLS = getattr(
settings,
'SESSION_REDIS_ENV_URLS', (
'REDISTOGO_URL',
'OPENREDIS_URL',
'REDISGREEN_URL',
'MYREDIS_URL',
'REDISCLOUD_URL'
)
)
for url in SESSION_REDIS_ENV_URLS:
redis_env_url = os.environ.get(url)
if redis_env_url:
SESSION_REDIS_URL = redis_env_url
break
Reorder ENV urls redis providers. | import os
from django.conf import settings
SESSION_REDIS_HOST = getattr(
settings,
'SESSION_REDIS_HOST',
'127.0.0.1'
)
SESSION_REDIS_PORT = getattr(
settings,
'SESSION_REDIS_PORT',
6379
)
SESSION_REDIS_DB = getattr(
settings,
'SESSION_REDIS_DB',
0
)
SESSION_REDIS_PREFIX = getattr(
settings,
'SESSION_REDIS_PREFIX',
'django_sessions'
)
SESSION_REDIS_PASSWORD = getattr(
settings,
'SESSION_REDIS_PASSWORD',
None
)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings,
'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH',
None
)
SESSION_REDIS_URL = getattr(
settings,
'SESSION_REDIS_URL',
None
)
SESSION_REDIS_CONNECTION_POOL = getattr(
settings,
'SESSION_REDIS_CONNECTION_POOL',
None
)
if SESSION_REDIS_URL is None:
# redis clouds ENV variables
SESSION_REDIS_ENV_URLS = getattr(
settings,
'SESSION_REDIS_ENV_URLS', (
'REDISCLOUD_URL'
'REDISTOGO_URL',
'OPENREDIS_URL',
'REDISGREEN_URL',
'MYREDIS_URL',
)
)
for url in SESSION_REDIS_ENV_URLS:
redis_env_url = os.environ.get(url)
if redis_env_url:
SESSION_REDIS_URL = redis_env_url
break
| <commit_before>import os
from django.conf import settings
SESSION_REDIS_HOST = getattr(
settings,
'SESSION_REDIS_HOST',
'127.0.0.1'
)
SESSION_REDIS_PORT = getattr(
settings,
'SESSION_REDIS_PORT',
6379
)
SESSION_REDIS_DB = getattr(
settings,
'SESSION_REDIS_DB',
0
)
SESSION_REDIS_PREFIX = getattr(
settings,
'SESSION_REDIS_PREFIX',
'django_sessions'
)
SESSION_REDIS_PASSWORD = getattr(
settings,
'SESSION_REDIS_PASSWORD',
None
)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings,
'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH',
None
)
SESSION_REDIS_URL = getattr(
settings,
'SESSION_REDIS_URL',
None
)
SESSION_REDIS_CONNECTION_POOL = getattr(
settings,
'SESSION_REDIS_CONNECTION_POOL',
None
)
if SESSION_REDIS_URL is None:
# redis clouds ENV variables
SESSION_REDIS_ENV_URLS = getattr(
settings,
'SESSION_REDIS_ENV_URLS', (
'REDISTOGO_URL',
'OPENREDIS_URL',
'REDISGREEN_URL',
'MYREDIS_URL',
'REDISCLOUD_URL'
)
)
for url in SESSION_REDIS_ENV_URLS:
redis_env_url = os.environ.get(url)
if redis_env_url:
SESSION_REDIS_URL = redis_env_url
break
<commit_msg>Reorder ENV urls redis providers.<commit_after> | import os
from django.conf import settings
SESSION_REDIS_HOST = getattr(
settings,
'SESSION_REDIS_HOST',
'127.0.0.1'
)
SESSION_REDIS_PORT = getattr(
settings,
'SESSION_REDIS_PORT',
6379
)
SESSION_REDIS_DB = getattr(
settings,
'SESSION_REDIS_DB',
0
)
SESSION_REDIS_PREFIX = getattr(
settings,
'SESSION_REDIS_PREFIX',
'django_sessions'
)
SESSION_REDIS_PASSWORD = getattr(
settings,
'SESSION_REDIS_PASSWORD',
None
)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings,
'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH',
None
)
SESSION_REDIS_URL = getattr(
settings,
'SESSION_REDIS_URL',
None
)
SESSION_REDIS_CONNECTION_POOL = getattr(
settings,
'SESSION_REDIS_CONNECTION_POOL',
None
)
if SESSION_REDIS_URL is None:
# redis clouds ENV variables
SESSION_REDIS_ENV_URLS = getattr(
settings,
'SESSION_REDIS_ENV_URLS', (
'REDISCLOUD_URL'
'REDISTOGO_URL',
'OPENREDIS_URL',
'REDISGREEN_URL',
'MYREDIS_URL',
)
)
for url in SESSION_REDIS_ENV_URLS:
redis_env_url = os.environ.get(url)
if redis_env_url:
SESSION_REDIS_URL = redis_env_url
break
| import os
from django.conf import settings
SESSION_REDIS_HOST = getattr(
settings,
'SESSION_REDIS_HOST',
'127.0.0.1'
)
SESSION_REDIS_PORT = getattr(
settings,
'SESSION_REDIS_PORT',
6379
)
SESSION_REDIS_DB = getattr(
settings,
'SESSION_REDIS_DB',
0
)
SESSION_REDIS_PREFIX = getattr(
settings,
'SESSION_REDIS_PREFIX',
'django_sessions'
)
SESSION_REDIS_PASSWORD = getattr(
settings,
'SESSION_REDIS_PASSWORD',
None
)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings,
'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH',
None
)
SESSION_REDIS_URL = getattr(
settings,
'SESSION_REDIS_URL',
None
)
SESSION_REDIS_CONNECTION_POOL = getattr(
settings,
'SESSION_REDIS_CONNECTION_POOL',
None
)
if SESSION_REDIS_URL is None:
# redis clouds ENV variables
SESSION_REDIS_ENV_URLS = getattr(
settings,
'SESSION_REDIS_ENV_URLS', (
'REDISTOGO_URL',
'OPENREDIS_URL',
'REDISGREEN_URL',
'MYREDIS_URL',
'REDISCLOUD_URL'
)
)
for url in SESSION_REDIS_ENV_URLS:
redis_env_url = os.environ.get(url)
if redis_env_url:
SESSION_REDIS_URL = redis_env_url
break
Reorder ENV urls redis providers.import os
from django.conf import settings
SESSION_REDIS_HOST = getattr(
settings,
'SESSION_REDIS_HOST',
'127.0.0.1'
)
SESSION_REDIS_PORT = getattr(
settings,
'SESSION_REDIS_PORT',
6379
)
SESSION_REDIS_DB = getattr(
settings,
'SESSION_REDIS_DB',
0
)
SESSION_REDIS_PREFIX = getattr(
settings,
'SESSION_REDIS_PREFIX',
'django_sessions'
)
SESSION_REDIS_PASSWORD = getattr(
settings,
'SESSION_REDIS_PASSWORD',
None
)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings,
'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH',
None
)
SESSION_REDIS_URL = getattr(
settings,
'SESSION_REDIS_URL',
None
)
SESSION_REDIS_CONNECTION_POOL = getattr(
settings,
'SESSION_REDIS_CONNECTION_POOL',
None
)
if SESSION_REDIS_URL is None:
# redis clouds ENV variables
SESSION_REDIS_ENV_URLS = getattr(
settings,
'SESSION_REDIS_ENV_URLS', (
'REDISCLOUD_URL'
'REDISTOGO_URL',
'OPENREDIS_URL',
'REDISGREEN_URL',
'MYREDIS_URL',
)
)
for url in SESSION_REDIS_ENV_URLS:
redis_env_url = os.environ.get(url)
if redis_env_url:
SESSION_REDIS_URL = redis_env_url
break
| <commit_before>import os
from django.conf import settings
SESSION_REDIS_HOST = getattr(
settings,
'SESSION_REDIS_HOST',
'127.0.0.1'
)
SESSION_REDIS_PORT = getattr(
settings,
'SESSION_REDIS_PORT',
6379
)
SESSION_REDIS_DB = getattr(
settings,
'SESSION_REDIS_DB',
0
)
SESSION_REDIS_PREFIX = getattr(
settings,
'SESSION_REDIS_PREFIX',
'django_sessions'
)
SESSION_REDIS_PASSWORD = getattr(
settings,
'SESSION_REDIS_PASSWORD',
None
)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings,
'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH',
None
)
SESSION_REDIS_URL = getattr(
settings,
'SESSION_REDIS_URL',
None
)
SESSION_REDIS_CONNECTION_POOL = getattr(
settings,
'SESSION_REDIS_CONNECTION_POOL',
None
)
if SESSION_REDIS_URL is None:
# redis clouds ENV variables
SESSION_REDIS_ENV_URLS = getattr(
settings,
'SESSION_REDIS_ENV_URLS', (
'REDISTOGO_URL',
'OPENREDIS_URL',
'REDISGREEN_URL',
'MYREDIS_URL',
'REDISCLOUD_URL'
)
)
for url in SESSION_REDIS_ENV_URLS:
redis_env_url = os.environ.get(url)
if redis_env_url:
SESSION_REDIS_URL = redis_env_url
break
<commit_msg>Reorder ENV urls redis providers.<commit_after>import os
from django.conf import settings
SESSION_REDIS_HOST = getattr(
settings,
'SESSION_REDIS_HOST',
'127.0.0.1'
)
SESSION_REDIS_PORT = getattr(
settings,
'SESSION_REDIS_PORT',
6379
)
SESSION_REDIS_DB = getattr(
settings,
'SESSION_REDIS_DB',
0
)
SESSION_REDIS_PREFIX = getattr(
settings,
'SESSION_REDIS_PREFIX',
'django_sessions'
)
SESSION_REDIS_PASSWORD = getattr(
settings,
'SESSION_REDIS_PASSWORD',
None
)
SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH = getattr(
settings,
'SESSION_REDIS_UNIX_DOMAIN_SOCKET_PATH',
None
)
SESSION_REDIS_URL = getattr(
settings,
'SESSION_REDIS_URL',
None
)
SESSION_REDIS_CONNECTION_POOL = getattr(
settings,
'SESSION_REDIS_CONNECTION_POOL',
None
)
if SESSION_REDIS_URL is None:
# redis clouds ENV variables
SESSION_REDIS_ENV_URLS = getattr(
settings,
'SESSION_REDIS_ENV_URLS', (
'REDISCLOUD_URL'
'REDISTOGO_URL',
'OPENREDIS_URL',
'REDISGREEN_URL',
'MYREDIS_URL',
)
)
for url in SESSION_REDIS_ENV_URLS:
redis_env_url = os.environ.get(url)
if redis_env_url:
SESSION_REDIS_URL = redis_env_url
break
|
5082354efec86bb0ebb111e51c8e5a039ab7ae88 | pypika/dialects.py | pypika/dialects.py | from .enums import Dialects
from .queries import (
Query,
QueryBuilder,
)
class MySQLQuery(Query):
    """
    Defines a query class for use with MySQL.
    """
    @classmethod
    def _builder(cls):
        # MySQL quotes identifiers with backticks instead of double quotes.
        # Union parts are emitted unwrapped (wrap_union_queries=False) --
        # presumably because MySQL rejects parenthesised UNION sub-queries;
        # confirm against the MySQL grammar.
        return QueryBuilder(quote_char='`', dialect=Dialects.MYSQL, wrap_union_queries=False)
class VerticaQuery(Query):
"""
Defines a query class for use with Vertica.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.VERTICA)
class OracleQuery(Query):
"""
Defines a query class for use with Oracle.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.ORACLE)
class PostgreSQLQuery(Query):
"""
Defines a query class for use with PostgreSQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.POSTGRESQL)
class RedshiftQuery(Query):
"""
Defines a query class for use with Amazon Redshift.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.REDSHIFT)
class MSSQLQuery(Query):
    """
    Query subclass that produces Microsoft SQL Server-flavoured SQL.
    """

    @classmethod
    def _builder(cls):
        # Only the dialect flag differs from the QueryBuilder defaults.
        mssql_builder = QueryBuilder(dialect=Dialects.MSSQL)
        return mssql_builder
class ClickHouseQuery(Query):
    """
    Defines a query class for use with Yandex ClickHouse.
    """

    @classmethod
    def _builder(cls):
        # Disable union wrapping for ClickHouse, consistent with the MySQL
        # builder above: the dialect does not accept parenthesized operands
        # around UNION queries.
        return QueryBuilder(dialect=Dialects.CLICKHOUSE, wrap_union_queries=False)
| from .enums import Dialects
from .queries import (
Query,
QueryBuilder,
)
class MySQLQuery(Query):
"""
Defines a query class for use with MySQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(quote_char='`', dialect=Dialects.MYSQL, wrap_union_queries=False)
class VerticaQuery(Query):
"""
Defines a query class for use with Vertica.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.VERTICA)
class OracleQuery(Query):
"""
Defines a query class for use with Oracle.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.ORACLE)
class PostgreSQLQuery(Query):
"""
Defines a query class for use with PostgreSQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.POSTGRESQL)
class RedshiftQuery(Query):
"""
Defines a query class for use with Amazon Redshift.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.REDSHIFT)
class MSSQLQuery(Query):
"""
Defines a query class for use with Microsoft SQL Server.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.MSSQL)
class ClickHouseQuery(Query):
"""
Defines a query class for use with Yandex ClickHouse.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.CLICKHOUSE, wrap_union_queries=False)
| Disable union wrap for clickhouse | Disable union wrap for clickhouse
| Python | apache-2.0 | kayak/pypika | from .enums import Dialects
from .queries import (
Query,
QueryBuilder,
)
class MySQLQuery(Query):
"""
Defines a query class for use with MySQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(quote_char='`', dialect=Dialects.MYSQL, wrap_union_queries=False)
class VerticaQuery(Query):
"""
Defines a query class for use with Vertica.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.VERTICA)
class OracleQuery(Query):
"""
Defines a query class for use with Oracle.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.ORACLE)
class PostgreSQLQuery(Query):
"""
Defines a query class for use with PostgreSQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.POSTGRESQL)
class RedshiftQuery(Query):
"""
Defines a query class for use with Amazon Redshift.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.REDSHIFT)
class MSSQLQuery(Query):
"""
Defines a query class for use with Microsoft SQL Server.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.MSSQL)
class ClickHouseQuery(Query):
"""
Defines a query class for use with Yandex ClickHouse.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.CLICKHOUSE)
Disable union wrap for clickhouse | from .enums import Dialects
from .queries import (
Query,
QueryBuilder,
)
class MySQLQuery(Query):
"""
Defines a query class for use with MySQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(quote_char='`', dialect=Dialects.MYSQL, wrap_union_queries=False)
class VerticaQuery(Query):
"""
Defines a query class for use with Vertica.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.VERTICA)
class OracleQuery(Query):
"""
Defines a query class for use with Oracle.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.ORACLE)
class PostgreSQLQuery(Query):
"""
Defines a query class for use with PostgreSQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.POSTGRESQL)
class RedshiftQuery(Query):
"""
Defines a query class for use with Amazon Redshift.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.REDSHIFT)
class MSSQLQuery(Query):
"""
Defines a query class for use with Microsoft SQL Server.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.MSSQL)
class ClickHouseQuery(Query):
"""
Defines a query class for use with Yandex ClickHouse.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.CLICKHOUSE, wrap_union_queries=False)
| <commit_before>from .enums import Dialects
from .queries import (
Query,
QueryBuilder,
)
class MySQLQuery(Query):
"""
Defines a query class for use with MySQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(quote_char='`', dialect=Dialects.MYSQL, wrap_union_queries=False)
class VerticaQuery(Query):
"""
Defines a query class for use with Vertica.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.VERTICA)
class OracleQuery(Query):
"""
Defines a query class for use with Oracle.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.ORACLE)
class PostgreSQLQuery(Query):
"""
Defines a query class for use with PostgreSQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.POSTGRESQL)
class RedshiftQuery(Query):
"""
Defines a query class for use with Amazon Redshift.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.REDSHIFT)
class MSSQLQuery(Query):
"""
Defines a query class for use with Microsoft SQL Server.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.MSSQL)
class ClickHouseQuery(Query):
"""
Defines a query class for use with Yandex ClickHouse.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.CLICKHOUSE)
<commit_msg>Disable union wrap for clickhouse<commit_after> | from .enums import Dialects
from .queries import (
Query,
QueryBuilder,
)
class MySQLQuery(Query):
"""
Defines a query class for use with MySQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(quote_char='`', dialect=Dialects.MYSQL, wrap_union_queries=False)
class VerticaQuery(Query):
"""
Defines a query class for use with Vertica.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.VERTICA)
class OracleQuery(Query):
"""
Defines a query class for use with Oracle.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.ORACLE)
class PostgreSQLQuery(Query):
"""
Defines a query class for use with PostgreSQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.POSTGRESQL)
class RedshiftQuery(Query):
"""
Defines a query class for use with Amazon Redshift.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.REDSHIFT)
class MSSQLQuery(Query):
"""
Defines a query class for use with Microsoft SQL Server.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.MSSQL)
class ClickHouseQuery(Query):
"""
Defines a query class for use with Yandex ClickHouse.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.CLICKHOUSE, wrap_union_queries=False)
| from .enums import Dialects
from .queries import (
Query,
QueryBuilder,
)
class MySQLQuery(Query):
"""
Defines a query class for use with MySQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(quote_char='`', dialect=Dialects.MYSQL, wrap_union_queries=False)
class VerticaQuery(Query):
"""
Defines a query class for use with Vertica.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.VERTICA)
class OracleQuery(Query):
"""
Defines a query class for use with Oracle.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.ORACLE)
class PostgreSQLQuery(Query):
"""
Defines a query class for use with PostgreSQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.POSTGRESQL)
class RedshiftQuery(Query):
"""
Defines a query class for use with Amazon Redshift.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.REDSHIFT)
class MSSQLQuery(Query):
"""
Defines a query class for use with Microsoft SQL Server.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.MSSQL)
class ClickHouseQuery(Query):
"""
Defines a query class for use with Yandex ClickHouse.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.CLICKHOUSE)
Disable union wrap for clickhousefrom .enums import Dialects
from .queries import (
Query,
QueryBuilder,
)
class MySQLQuery(Query):
"""
Defines a query class for use with MySQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(quote_char='`', dialect=Dialects.MYSQL, wrap_union_queries=False)
class VerticaQuery(Query):
"""
Defines a query class for use with Vertica.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.VERTICA)
class OracleQuery(Query):
"""
Defines a query class for use with Oracle.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.ORACLE)
class PostgreSQLQuery(Query):
"""
Defines a query class for use with PostgreSQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.POSTGRESQL)
class RedshiftQuery(Query):
"""
Defines a query class for use with Amazon Redshift.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.REDSHIFT)
class MSSQLQuery(Query):
"""
Defines a query class for use with Microsoft SQL Server.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.MSSQL)
class ClickHouseQuery(Query):
"""
Defines a query class for use with Yandex ClickHouse.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.CLICKHOUSE, wrap_union_queries=False)
| <commit_before>from .enums import Dialects
from .queries import (
Query,
QueryBuilder,
)
class MySQLQuery(Query):
"""
Defines a query class for use with MySQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(quote_char='`', dialect=Dialects.MYSQL, wrap_union_queries=False)
class VerticaQuery(Query):
"""
Defines a query class for use with Vertica.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.VERTICA)
class OracleQuery(Query):
"""
Defines a query class for use with Oracle.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.ORACLE)
class PostgreSQLQuery(Query):
"""
Defines a query class for use with PostgreSQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.POSTGRESQL)
class RedshiftQuery(Query):
"""
Defines a query class for use with Amazon Redshift.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.REDSHIFT)
class MSSQLQuery(Query):
"""
Defines a query class for use with Microsoft SQL Server.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.MSSQL)
class ClickHouseQuery(Query):
"""
Defines a query class for use with Yandex ClickHouse.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.CLICKHOUSE)
<commit_msg>Disable union wrap for clickhouse<commit_after>from .enums import Dialects
from .queries import (
Query,
QueryBuilder,
)
class MySQLQuery(Query):
"""
Defines a query class for use with MySQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(quote_char='`', dialect=Dialects.MYSQL, wrap_union_queries=False)
class VerticaQuery(Query):
"""
Defines a query class for use with Vertica.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.VERTICA)
class OracleQuery(Query):
"""
Defines a query class for use with Oracle.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.ORACLE)
class PostgreSQLQuery(Query):
"""
Defines a query class for use with PostgreSQL.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.POSTGRESQL)
class RedshiftQuery(Query):
"""
Defines a query class for use with Amazon Redshift.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.REDSHIFT)
class MSSQLQuery(Query):
"""
Defines a query class for use with Microsoft SQL Server.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.MSSQL)
class ClickHouseQuery(Query):
"""
Defines a query class for use with Yandex ClickHouse.
"""
@classmethod
def _builder(cls):
return QueryBuilder(dialect=Dialects.CLICKHOUSE, wrap_union_queries=False)
|
b3a9a4a1e451815f15dc35c9b6ec9f7b67387260 | scipy/misc/tests/test_common.py | scipy/misc/tests/test_common.py | from __future__ import division, print_function, absolute_import
import pytest
from numpy.testing import assert_equal, assert_allclose
from scipy._lib._numpy_compat import suppress_warnings
from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram
from scipy.special import logsumexp as sc_logsumexp
def test_logsumexp():
    """logsumexp must be importable from scipy.misc and agree with scipy.special."""
    values = [0, 1]
    with suppress_warnings() as sup:
        sup.filter(DeprecationWarning, "`logsumexp` is deprecated")
        assert_allclose(logsumexp(values), sc_logsumexp(values), atol=1e-16)
def test_pade():
    """scipy.misc.pade must still exist and be callable."""
    coefficients = [1, 2]
    with suppress_warnings() as sup:
        sup.filter(DeprecationWarning, "`pade` is deprecated")
        pade(coefficients, 1)
def test_face():
    # Sanity-check the sample image returned by face(): the assertion pins
    # its shape to 768x1024 with 3 channels.
    assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
    # Sanity-check the sample image returned by ascent(): the assertion
    # pins its shape to 512x512 (single channel).
    assert_equal(ascent().shape, (512, 512))
def test_electrocardiogram():
    # Test shape, dtype and summary statistics of the signal.
    ecg = electrocardiogram()
    assert ecg.dtype == float
    assert_equal(ecg.shape, (108000,))
    # Checking mean and std pins the data content itself, not just its
    # dimensions (uses assert_allclose, already imported in this module).
    assert_allclose(ecg.mean(), -0.16510875, atol=1e-8)
    assert_allclose(ecg.std(), 0.5992473991177294, atol=1e-8)
| from __future__ import division, print_function, absolute_import
import pytest
from numpy.testing import assert_equal, assert_allclose, assert_almost_equal
from scipy._lib._numpy_compat import suppress_warnings
from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram
from scipy.special import logsumexp as sc_logsumexp
def test_logsumexp():
# make sure logsumexp can be imported from either scipy.misc or
# scipy.special
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`logsumexp` is deprecated")
assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16)
def test_pade():
# make sure scipy.misc.pade exists
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`pade` is deprecated")
pade([1, 2], 1)
def test_face():
assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
assert_equal(ascent().shape, (512, 512))
def test_electrocardiogram():
# Test shape, dtype and stats of signal
ecg = electrocardiogram()
assert ecg.dtype == float
assert_equal(ecg.shape, (108000,))
assert_almost_equal(ecg.mean(), -0.16510875)
assert_almost_equal(ecg.std(), 0.5992473991177294)
| Check mean and STD of returned ECG signal | TST: Check mean and STD of returned ECG signal
| Python | bsd-3-clause | Eric89GXL/scipy,jor-/scipy,andyfaff/scipy,ilayn/scipy,andyfaff/scipy,tylerjereddy/scipy,grlee77/scipy,aarchiba/scipy,arokem/scipy,lhilt/scipy,perimosocordiae/scipy,mdhaber/scipy,rgommers/scipy,matthew-brett/scipy,aeklant/scipy,endolith/scipy,rgommers/scipy,person142/scipy,ilayn/scipy,WarrenWeckesser/scipy,matthew-brett/scipy,scipy/scipy,nmayorov/scipy,aeklant/scipy,andyfaff/scipy,endolith/scipy,Stefan-Endres/scipy,aarchiba/scipy,person142/scipy,aarchiba/scipy,ilayn/scipy,Eric89GXL/scipy,person142/scipy,pizzathief/scipy,grlee77/scipy,vigna/scipy,ilayn/scipy,anntzer/scipy,scipy/scipy,pizzathief/scipy,jamestwebber/scipy,arokem/scipy,arokem/scipy,vigna/scipy,jor-/scipy,mdhaber/scipy,gertingold/scipy,ilayn/scipy,Stefan-Endres/scipy,andyfaff/scipy,gfyoung/scipy,rgommers/scipy,lhilt/scipy,andyfaff/scipy,e-q/scipy,perimosocordiae/scipy,pizzathief/scipy,tylerjereddy/scipy,endolith/scipy,WarrenWeckesser/scipy,gfyoung/scipy,person142/scipy,Eric89GXL/scipy,endolith/scipy,matthew-brett/scipy,grlee77/scipy,Stefan-Endres/scipy,aarchiba/scipy,WarrenWeckesser/scipy,Stefan-Endres/scipy,endolith/scipy,gertingold/scipy,perimosocordiae/scipy,e-q/scipy,scipy/scipy,vigna/scipy,gertingold/scipy,gfyoung/scipy,e-q/scipy,jor-/scipy,matthew-brett/scipy,perimosocordiae/scipy,aeklant/scipy,mdhaber/scipy,WarrenWeckesser/scipy,scipy/scipy,jamestwebber/scipy,anntzer/scipy,vigna/scipy,Eric89GXL/scipy,scipy/scipy,nmayorov/scipy,Stefan-Endres/scipy,gfyoung/scipy,lhilt/scipy,tylerjereddy/scipy,Eric89GXL/scipy,Eric89GXL/scipy,anntzer/scipy,nmayorov/scipy,WarrenWeckesser/scipy,WarrenWeckesser/scipy,pizzathief/scipy,gfyoung/scipy,zerothi/scipy,scipy/scipy,e-q/scipy,matthew-brett/scipy,aarchiba/scipy,pizzathief/scipy,tylerjereddy/scipy,aeklant/scipy,aeklant/scipy,jor-/scipy,perimosocordiae/scipy,vigna/scipy,e-q/scipy,arokem/scipy,anntzer/scipy,ilayn/scipy,jor-/scipy,zerothi/scipy,mdhaber/scipy,perimosocordiae/scipy,gertingold/scipy,Stefan-Endres/scipy,nmayorov/scipy,jamestwebber/s
cipy,anntzer/scipy,endolith/scipy,zerothi/scipy,nmayorov/scipy,person142/scipy,andyfaff/scipy,anntzer/scipy,rgommers/scipy,grlee77/scipy,rgommers/scipy,zerothi/scipy,zerothi/scipy,jamestwebber/scipy,arokem/scipy,tylerjereddy/scipy,jamestwebber/scipy,gertingold/scipy,mdhaber/scipy,zerothi/scipy,lhilt/scipy,lhilt/scipy,grlee77/scipy,mdhaber/scipy | from __future__ import division, print_function, absolute_import
import pytest
from numpy.testing import assert_equal, assert_allclose
from scipy._lib._numpy_compat import suppress_warnings
from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram
from scipy.special import logsumexp as sc_logsumexp
def test_logsumexp():
# make sure logsumexp can be imported from either scipy.misc or
# scipy.special
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`logsumexp` is deprecated")
assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16)
def test_pade():
# make sure scipy.misc.pade exists
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`pade` is deprecated")
pade([1, 2], 1)
def test_face():
assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
assert_equal(ascent().shape, (512, 512))
def test_electrocardiogram():
# Test shape and dtype of signal
ecg = electrocardiogram()
assert_equal(ecg.shape, (108000,))
assert ecg.dtype == float
TST: Check mean and STD of returned ECG signal | from __future__ import division, print_function, absolute_import
import pytest
from numpy.testing import assert_equal, assert_allclose, assert_almost_equal
from scipy._lib._numpy_compat import suppress_warnings
from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram
from scipy.special import logsumexp as sc_logsumexp
def test_logsumexp():
# make sure logsumexp can be imported from either scipy.misc or
# scipy.special
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`logsumexp` is deprecated")
assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16)
def test_pade():
# make sure scipy.misc.pade exists
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`pade` is deprecated")
pade([1, 2], 1)
def test_face():
assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
assert_equal(ascent().shape, (512, 512))
def test_electrocardiogram():
# Test shape, dtype and stats of signal
ecg = electrocardiogram()
assert ecg.dtype == float
assert_equal(ecg.shape, (108000,))
assert_almost_equal(ecg.mean(), -0.16510875)
assert_almost_equal(ecg.std(), 0.5992473991177294)
| <commit_before>from __future__ import division, print_function, absolute_import
import pytest
from numpy.testing import assert_equal, assert_allclose
from scipy._lib._numpy_compat import suppress_warnings
from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram
from scipy.special import logsumexp as sc_logsumexp
def test_logsumexp():
# make sure logsumexp can be imported from either scipy.misc or
# scipy.special
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`logsumexp` is deprecated")
assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16)
def test_pade():
# make sure scipy.misc.pade exists
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`pade` is deprecated")
pade([1, 2], 1)
def test_face():
assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
assert_equal(ascent().shape, (512, 512))
def test_electrocardiogram():
# Test shape and dtype of signal
ecg = electrocardiogram()
assert_equal(ecg.shape, (108000,))
assert ecg.dtype == float
<commit_msg>TST: Check mean and STD of returned ECG signal<commit_after> | from __future__ import division, print_function, absolute_import
import pytest
from numpy.testing import assert_equal, assert_allclose, assert_almost_equal
from scipy._lib._numpy_compat import suppress_warnings
from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram
from scipy.special import logsumexp as sc_logsumexp
def test_logsumexp():
# make sure logsumexp can be imported from either scipy.misc or
# scipy.special
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`logsumexp` is deprecated")
assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16)
def test_pade():
# make sure scipy.misc.pade exists
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`pade` is deprecated")
pade([1, 2], 1)
def test_face():
assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
assert_equal(ascent().shape, (512, 512))
def test_electrocardiogram():
# Test shape, dtype and stats of signal
ecg = electrocardiogram()
assert ecg.dtype == float
assert_equal(ecg.shape, (108000,))
assert_almost_equal(ecg.mean(), -0.16510875)
assert_almost_equal(ecg.std(), 0.5992473991177294)
| from __future__ import division, print_function, absolute_import
import pytest
from numpy.testing import assert_equal, assert_allclose
from scipy._lib._numpy_compat import suppress_warnings
from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram
from scipy.special import logsumexp as sc_logsumexp
def test_logsumexp():
# make sure logsumexp can be imported from either scipy.misc or
# scipy.special
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`logsumexp` is deprecated")
assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16)
def test_pade():
# make sure scipy.misc.pade exists
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`pade` is deprecated")
pade([1, 2], 1)
def test_face():
assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
assert_equal(ascent().shape, (512, 512))
def test_electrocardiogram():
# Test shape and dtype of signal
ecg = electrocardiogram()
assert_equal(ecg.shape, (108000,))
assert ecg.dtype == float
TST: Check mean and STD of returned ECG signalfrom __future__ import division, print_function, absolute_import
import pytest
from numpy.testing import assert_equal, assert_allclose, assert_almost_equal
from scipy._lib._numpy_compat import suppress_warnings
from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram
from scipy.special import logsumexp as sc_logsumexp
def test_logsumexp():
# make sure logsumexp can be imported from either scipy.misc or
# scipy.special
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`logsumexp` is deprecated")
assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16)
def test_pade():
# make sure scipy.misc.pade exists
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`pade` is deprecated")
pade([1, 2], 1)
def test_face():
assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
assert_equal(ascent().shape, (512, 512))
def test_electrocardiogram():
# Test shape, dtype and stats of signal
ecg = electrocardiogram()
assert ecg.dtype == float
assert_equal(ecg.shape, (108000,))
assert_almost_equal(ecg.mean(), -0.16510875)
assert_almost_equal(ecg.std(), 0.5992473991177294)
| <commit_before>from __future__ import division, print_function, absolute_import
import pytest
from numpy.testing import assert_equal, assert_allclose
from scipy._lib._numpy_compat import suppress_warnings
from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram
from scipy.special import logsumexp as sc_logsumexp
def test_logsumexp():
# make sure logsumexp can be imported from either scipy.misc or
# scipy.special
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`logsumexp` is deprecated")
assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16)
def test_pade():
# make sure scipy.misc.pade exists
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`pade` is deprecated")
pade([1, 2], 1)
def test_face():
assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
assert_equal(ascent().shape, (512, 512))
def test_electrocardiogram():
# Test shape and dtype of signal
ecg = electrocardiogram()
assert_equal(ecg.shape, (108000,))
assert ecg.dtype == float
<commit_msg>TST: Check mean and STD of returned ECG signal<commit_after>from __future__ import division, print_function, absolute_import
import pytest
from numpy.testing import assert_equal, assert_allclose, assert_almost_equal
from scipy._lib._numpy_compat import suppress_warnings
from scipy.misc import pade, logsumexp, face, ascent, electrocardiogram
from scipy.special import logsumexp as sc_logsumexp
def test_logsumexp():
# make sure logsumexp can be imported from either scipy.misc or
# scipy.special
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`logsumexp` is deprecated")
assert_allclose(logsumexp([0, 1]), sc_logsumexp([0, 1]), atol=1e-16)
def test_pade():
# make sure scipy.misc.pade exists
with suppress_warnings() as sup:
sup.filter(DeprecationWarning, "`pade` is deprecated")
pade([1, 2], 1)
def test_face():
assert_equal(face().shape, (768, 1024, 3))
def test_ascent():
assert_equal(ascent().shape, (512, 512))
def test_electrocardiogram():
# Test shape, dtype and stats of signal
ecg = electrocardiogram()
assert ecg.dtype == float
assert_equal(ecg.shape, (108000,))
assert_almost_equal(ecg.mean(), -0.16510875)
assert_almost_equal(ecg.std(), 0.5992473991177294)
|
1ee41f5439f80af139e612591d48cdac5ecfda39 | hiapi/hi.py | hiapi/hi.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
    """Root endpoint: respond to GET / with a plain 'Hi!' body."""
    return 'Hi!\n'
def parse_args(argv=None):
    """Parse command-line options.

    Parameters
    ----------
    argv : list of str, optional
        Argument vector to parse.  ``None`` (the default) makes argparse
        fall back to ``sys.argv[1:]``, so the existing no-argument call in
        ``main()`` keeps its behaviour; passing a list makes the function
        unit-testable.

    Returns
    -------
    argparse.Namespace
        With ``bind`` (default '127.0.0.1') and ``port`` (default 4000).
    """
    parser = argparse.ArgumentParser()
    parser.add_argument('-b', '--bind-address',
                        dest='bind', default='127.0.0.1')
    parser.add_argument('-p', '--port', dest='port', default=4000, type=int)
    return parser.parse_args(argv)
def main():
    """CLI entry point: parse options, then serve the Flask app."""
    options = parse_args()
    app.run(host=options.bind, port=options.port)
# Support for uWSGI
def application(env, start_response):
    """WSGI callable: always answer 200 with a plain-text 'Hi!' body."""
    headers = [('Content-Type', 'text/plain')]
    start_response('200 OK', headers)
    body = [b'Hi!\n']
    return body
if __name__ == "__main__":
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import flask
RESPONSE_CODE = 200
app = flask.Flask(__name__)
@app.route('/')
def hello():
global RESPONSE_CODE
if RESPONSE_CODE == 200:
return 'Hi!\n'
else:
flask.abort(RESPONSE_CODE)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-b', '--bind-address',
dest='bind', default='127.0.0.1')
parser.add_argument('-p', '--port', dest='port', default=4000, type=int)
parser.add_argument('-c', '--response_code', dest='code', default=200, type=int)
return parser.parse_args()
def main():
global RESPONSE_CODE
opts = parse_args()
RESPONSE_CODE = opts.code
app.run(host=opts.bind, port=opts.port)
if __name__ == "__main__":
main()
| Remove uwsgi support, add support for simple alternative responses | Remove uwsgi support, add support for simple alternative responses
| Python | apache-2.0 | GradysGhost/pyhiapi | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
return 'Hi!\n'
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-b', '--bind-address',
dest='bind', default='127.0.0.1')
parser.add_argument('-p', '--port', dest='port', default=4000, type=int)
return parser.parse_args()
def main():
opts = parse_args()
app.run(host=opts.bind, port=opts.port)
# Support for uWSGI
def application(env, start_response):
start_response('200 OK', [('Content-Type', 'text/plain')])
return [b'Hi!\n']
if __name__ == "__main__":
main()
Remove uwsgi support, add support for simple alternative responses | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import flask
RESPONSE_CODE = 200
app = flask.Flask(__name__)
@app.route('/')
def hello():
global RESPONSE_CODE
if RESPONSE_CODE == 200:
return 'Hi!\n'
else:
flask.abort(RESPONSE_CODE)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-b', '--bind-address',
dest='bind', default='127.0.0.1')
parser.add_argument('-p', '--port', dest='port', default=4000, type=int)
parser.add_argument('-c', '--response_code', dest='code', default=200, type=int)
return parser.parse_args()
def main():
global RESPONSE_CODE
opts = parse_args()
RESPONSE_CODE = opts.code
app.run(host=opts.bind, port=opts.port)
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
return 'Hi!\n'
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-b', '--bind-address',
dest='bind', default='127.0.0.1')
parser.add_argument('-p', '--port', dest='port', default=4000, type=int)
return parser.parse_args()
def main():
opts = parse_args()
app.run(host=opts.bind, port=opts.port)
# Support for uWSGI
def application(env, start_response):
start_response('200 OK', [('Content-Type', 'text/plain')])
return [b'Hi!\n']
if __name__ == "__main__":
main()
<commit_msg>Remove uwsgi support, add support for simple alternative responses<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import flask
RESPONSE_CODE = 200
app = flask.Flask(__name__)
@app.route('/')
def hello():
global RESPONSE_CODE
if RESPONSE_CODE == 200:
return 'Hi!\n'
else:
flask.abort(RESPONSE_CODE)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-b', '--bind-address',
dest='bind', default='127.0.0.1')
parser.add_argument('-p', '--port', dest='port', default=4000, type=int)
parser.add_argument('-c', '--response_code', dest='code', default=200, type=int)
return parser.parse_args()
def main():
global RESPONSE_CODE
opts = parse_args()
RESPONSE_CODE = opts.code
app.run(host=opts.bind, port=opts.port)
if __name__ == "__main__":
main()
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
return 'Hi!\n'
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-b', '--bind-address',
dest='bind', default='127.0.0.1')
parser.add_argument('-p', '--port', dest='port', default=4000, type=int)
return parser.parse_args()
def main():
opts = parse_args()
app.run(host=opts.bind, port=opts.port)
# Support for uWSGI
def application(env, start_response):
start_response('200 OK', [('Content-Type', 'text/plain')])
return [b'Hi!\n']
if __name__ == "__main__":
main()
Remove uwsgi support, add support for simple alternative responses#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import flask
RESPONSE_CODE = 200
app = flask.Flask(__name__)
@app.route('/')
def hello():
global RESPONSE_CODE
if RESPONSE_CODE == 200:
return 'Hi!\n'
else:
flask.abort(RESPONSE_CODE)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-b', '--bind-address',
dest='bind', default='127.0.0.1')
parser.add_argument('-p', '--port', dest='port', default=4000, type=int)
parser.add_argument('-c', '--response_code', dest='code', default=200, type=int)
return parser.parse_args()
def main():
global RESPONSE_CODE
opts = parse_args()
RESPONSE_CODE = opts.code
app.run(host=opts.bind, port=opts.port)
if __name__ == "__main__":
main()
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
from flask import Flask
app = Flask(__name__)
@app.route('/')
def hello():
return 'Hi!\n'
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-b', '--bind-address',
dest='bind', default='127.0.0.1')
parser.add_argument('-p', '--port', dest='port', default=4000, type=int)
return parser.parse_args()
def main():
opts = parse_args()
app.run(host=opts.bind, port=opts.port)
# Support for uWSGI
def application(env, start_response):
start_response('200 OK', [('Content-Type', 'text/plain')])
return [b'Hi!\n']
if __name__ == "__main__":
main()
<commit_msg>Remove uwsgi support, add support for simple alternative responses<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import argparse
import flask
RESPONSE_CODE = 200
app = flask.Flask(__name__)
@app.route('/')
def hello():
global RESPONSE_CODE
if RESPONSE_CODE == 200:
return 'Hi!\n'
else:
flask.abort(RESPONSE_CODE)
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('-b', '--bind-address',
dest='bind', default='127.0.0.1')
parser.add_argument('-p', '--port', dest='port', default=4000, type=int)
parser.add_argument('-c', '--response_code', dest='code', default=200, type=int)
return parser.parse_args()
def main():
global RESPONSE_CODE
opts = parse_args()
RESPONSE_CODE = opts.code
app.run(host=opts.bind, port=opts.port)
if __name__ == "__main__":
main()
|
39b4141ebd939bde649a9dfeb4f4cc832d0f4c39 | solutions/problem_9/solution.py | solutions/problem_9/solution.py | import sys
import os
sys.path.append(os.path.abspath(os.path.dirname('./utils/python')))
from python.decorators import timeit
@timeit
def solution():
n = 1000
for b in xrange(1, n):
for c in xrange(1, n):
a = n - b - c
if (a**2) + (b**2) == (c**2):
return a * b * c
solution()
| import sys
import os
sys.path.append(os.path.abspath(os.path.dirname('./utils/python')))
from python.utils import timeit
@timeit
def solution():
n = 1000
for b in xrange(1, n):
for c in xrange(1, n):
a = n - b - c
if (a**2) + (b**2) == (c**2):
return a * b * c
solution()
| Fix python import for problem 9 | Fix python import for problem 9
| Python | mit | mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler,mdsrosa/project_euler | import sys
import os
sys.path.append(os.path.abspath(os.path.dirname('./utils/python')))
from python.decorators import timeit
@timeit
def solution():
n = 1000
for b in xrange(1, n):
for c in xrange(1, n):
a = n - b - c
if (a**2) + (b**2) == (c**2):
return a * b * c
solution()
Fix python import for problem 9 | import sys
import os
sys.path.append(os.path.abspath(os.path.dirname('./utils/python')))
from python.utils import timeit
@timeit
def solution():
n = 1000
for b in xrange(1, n):
for c in xrange(1, n):
a = n - b - c
if (a**2) + (b**2) == (c**2):
return a * b * c
solution()
| <commit_before>import sys
import os
sys.path.append(os.path.abspath(os.path.dirname('./utils/python')))
from python.decorators import timeit
@timeit
def solution():
n = 1000
for b in xrange(1, n):
for c in xrange(1, n):
a = n - b - c
if (a**2) + (b**2) == (c**2):
return a * b * c
solution()
<commit_msg>Fix python import for problem 9<commit_after> | import sys
import os
sys.path.append(os.path.abspath(os.path.dirname('./utils/python')))
from python.utils import timeit
@timeit
def solution():
n = 1000
for b in xrange(1, n):
for c in xrange(1, n):
a = n - b - c
if (a**2) + (b**2) == (c**2):
return a * b * c
solution()
| import sys
import os
sys.path.append(os.path.abspath(os.path.dirname('./utils/python')))
from python.decorators import timeit
@timeit
def solution():
n = 1000
for b in xrange(1, n):
for c in xrange(1, n):
a = n - b - c
if (a**2) + (b**2) == (c**2):
return a * b * c
solution()
Fix python import for problem 9import sys
import os
sys.path.append(os.path.abspath(os.path.dirname('./utils/python')))
from python.utils import timeit
@timeit
def solution():
n = 1000
for b in xrange(1, n):
for c in xrange(1, n):
a = n - b - c
if (a**2) + (b**2) == (c**2):
return a * b * c
solution()
| <commit_before>import sys
import os
sys.path.append(os.path.abspath(os.path.dirname('./utils/python')))
from python.decorators import timeit
@timeit
def solution():
n = 1000
for b in xrange(1, n):
for c in xrange(1, n):
a = n - b - c
if (a**2) + (b**2) == (c**2):
return a * b * c
solution()
<commit_msg>Fix python import for problem 9<commit_after>import sys
import os
sys.path.append(os.path.abspath(os.path.dirname('./utils/python')))
from python.utils import timeit
@timeit
def solution():
n = 1000
for b in xrange(1, n):
for c in xrange(1, n):
a = n - b - c
if (a**2) + (b**2) == (c**2):
return a * b * c
solution()
|
403bd1cdea0a8d1fae25710a48dc3148fc21ddd9 | bell.py | bell.py | #!/usr/bin/env python
from time import sleep
import subprocess
import httplib, urllib
import RPi.GPIO as GPIO
import config
import logger
GPIO.setmode(GPIO.BCM)
GPIO.setup(config.bell_pin, GPIO.IN)
LOW_PRIORITY = -1
MEDIUM_PRIORITY = 0
HIGH_PRIORITY = 1
log = logger.get(__name__)
def notifyPhones(message, priority=MEDIUM_PRIORITY):
log.debug('Sending pushover message "'
+ message
+ '" with priority '
+ str(priority))
conn = httplib.HTTPSConnection("api.pushover.net:443")
conn.request("POST", "/1/messages.json",
urllib.urlencode({
"token": config.application_token,
"user": config.user_token,
"title": config.message_title,
"message": message,
"url": config.message_url,
"priority": priority,
}), { "Content-type": "application/x-www-form-urlencoded" })
response = conn.getresponse()
log.debug('Got response: '
+ str(response.status)
+ ' ' + response.reason
+ ': ' + response.read())
conn.close()
notifyPhones('Listener started', LOW_PRIORITY)
log.info('Doorbell listener Started')
while True:
if (GPIO.input(23) == False):
subprocess.Popen(["ogg123","-q","dingdong.ogg"])
notifyPhones(config.message_text)
log.info('Doorbell pressed')
sleep(3); | #!/usr/bin/env python
import logging
import RPi.GPIO as GPIO
from application import logsetup, button, pushover
GPIO.setmode(GPIO.BCM)
GPIO.setup(config.bell_pin, GPIO.IN)
log = logging.getLogger(__name__)
log.info('Doorbell listener Started')
pushover.send('Listener started', pushover.LOW_PRIORITY)
while True:
if (GPIO.input(23) == False):
button.pressed() | Convert Pi version to use application modules | Convert Pi version to use application modules
| Python | mit | viv/pibell | #!/usr/bin/env python
from time import sleep
import subprocess
import httplib, urllib
import RPi.GPIO as GPIO
import config
import logger
GPIO.setmode(GPIO.BCM)
GPIO.setup(config.bell_pin, GPIO.IN)
LOW_PRIORITY = -1
MEDIUM_PRIORITY = 0
HIGH_PRIORITY = 1
log = logger.get(__name__)
def notifyPhones(message, priority=MEDIUM_PRIORITY):
log.debug('Sending pushover message "'
+ message
+ '" with priority '
+ str(priority))
conn = httplib.HTTPSConnection("api.pushover.net:443")
conn.request("POST", "/1/messages.json",
urllib.urlencode({
"token": config.application_token,
"user": config.user_token,
"title": config.message_title,
"message": message,
"url": config.message_url,
"priority": priority,
}), { "Content-type": "application/x-www-form-urlencoded" })
response = conn.getresponse()
log.debug('Got response: '
+ str(response.status)
+ ' ' + response.reason
+ ': ' + response.read())
conn.close()
notifyPhones('Listener started', LOW_PRIORITY)
log.info('Doorbell listener Started')
while True:
if (GPIO.input(23) == False):
subprocess.Popen(["ogg123","-q","dingdong.ogg"])
notifyPhones(config.message_text)
log.info('Doorbell pressed')
sleep(3);Convert Pi version to use application modules | #!/usr/bin/env python
import logging
import RPi.GPIO as GPIO
from application import logsetup, button, pushover
GPIO.setmode(GPIO.BCM)
GPIO.setup(config.bell_pin, GPIO.IN)
log = logging.getLogger(__name__)
log.info('Doorbell listener Started')
pushover.send('Listener started', pushover.LOW_PRIORITY)
while True:
if (GPIO.input(23) == False):
button.pressed() | <commit_before>#!/usr/bin/env python
from time import sleep
import subprocess
import httplib, urllib
import RPi.GPIO as GPIO
import config
import logger
GPIO.setmode(GPIO.BCM)
GPIO.setup(config.bell_pin, GPIO.IN)
LOW_PRIORITY = -1
MEDIUM_PRIORITY = 0
HIGH_PRIORITY = 1
log = logger.get(__name__)
def notifyPhones(message, priority=MEDIUM_PRIORITY):
log.debug('Sending pushover message "'
+ message
+ '" with priority '
+ str(priority))
conn = httplib.HTTPSConnection("api.pushover.net:443")
conn.request("POST", "/1/messages.json",
urllib.urlencode({
"token": config.application_token,
"user": config.user_token,
"title": config.message_title,
"message": message,
"url": config.message_url,
"priority": priority,
}), { "Content-type": "application/x-www-form-urlencoded" })
response = conn.getresponse()
log.debug('Got response: '
+ str(response.status)
+ ' ' + response.reason
+ ': ' + response.read())
conn.close()
notifyPhones('Listener started', LOW_PRIORITY)
log.info('Doorbell listener Started')
while True:
if (GPIO.input(23) == False):
subprocess.Popen(["ogg123","-q","dingdong.ogg"])
notifyPhones(config.message_text)
log.info('Doorbell pressed')
sleep(3);<commit_msg>Convert Pi version to use application modules<commit_after> | #!/usr/bin/env python
import logging
import RPi.GPIO as GPIO
from application import logsetup, button, pushover
GPIO.setmode(GPIO.BCM)
GPIO.setup(config.bell_pin, GPIO.IN)
log = logging.getLogger(__name__)
log.info('Doorbell listener Started')
pushover.send('Listener started', pushover.LOW_PRIORITY)
while True:
if (GPIO.input(23) == False):
button.pressed() | #!/usr/bin/env python
from time import sleep
import subprocess
import httplib, urllib
import RPi.GPIO as GPIO
import config
import logger
GPIO.setmode(GPIO.BCM)
GPIO.setup(config.bell_pin, GPIO.IN)
LOW_PRIORITY = -1
MEDIUM_PRIORITY = 0
HIGH_PRIORITY = 1
log = logger.get(__name__)
def notifyPhones(message, priority=MEDIUM_PRIORITY):
log.debug('Sending pushover message "'
+ message
+ '" with priority '
+ str(priority))
conn = httplib.HTTPSConnection("api.pushover.net:443")
conn.request("POST", "/1/messages.json",
urllib.urlencode({
"token": config.application_token,
"user": config.user_token,
"title": config.message_title,
"message": message,
"url": config.message_url,
"priority": priority,
}), { "Content-type": "application/x-www-form-urlencoded" })
response = conn.getresponse()
log.debug('Got response: '
+ str(response.status)
+ ' ' + response.reason
+ ': ' + response.read())
conn.close()
notifyPhones('Listener started', LOW_PRIORITY)
log.info('Doorbell listener Started')
while True:
if (GPIO.input(23) == False):
subprocess.Popen(["ogg123","-q","dingdong.ogg"])
notifyPhones(config.message_text)
log.info('Doorbell pressed')
sleep(3);Convert Pi version to use application modules#!/usr/bin/env python
import logging
import RPi.GPIO as GPIO
from application import logsetup, button, pushover
GPIO.setmode(GPIO.BCM)
GPIO.setup(config.bell_pin, GPIO.IN)
log = logging.getLogger(__name__)
log.info('Doorbell listener Started')
pushover.send('Listener started', pushover.LOW_PRIORITY)
while True:
if (GPIO.input(23) == False):
button.pressed() | <commit_before>#!/usr/bin/env python
from time import sleep
import subprocess
import httplib, urllib
import RPi.GPIO as GPIO
import config
import logger
GPIO.setmode(GPIO.BCM)
GPIO.setup(config.bell_pin, GPIO.IN)
LOW_PRIORITY = -1
MEDIUM_PRIORITY = 0
HIGH_PRIORITY = 1
log = logger.get(__name__)
def notifyPhones(message, priority=MEDIUM_PRIORITY):
log.debug('Sending pushover message "'
+ message
+ '" with priority '
+ str(priority))
conn = httplib.HTTPSConnection("api.pushover.net:443")
conn.request("POST", "/1/messages.json",
urllib.urlencode({
"token": config.application_token,
"user": config.user_token,
"title": config.message_title,
"message": message,
"url": config.message_url,
"priority": priority,
}), { "Content-type": "application/x-www-form-urlencoded" })
response = conn.getresponse()
log.debug('Got response: '
+ str(response.status)
+ ' ' + response.reason
+ ': ' + response.read())
conn.close()
notifyPhones('Listener started', LOW_PRIORITY)
log.info('Doorbell listener Started')
while True:
if (GPIO.input(23) == False):
subprocess.Popen(["ogg123","-q","dingdong.ogg"])
notifyPhones(config.message_text)
log.info('Doorbell pressed')
sleep(3);<commit_msg>Convert Pi version to use application modules<commit_after>#!/usr/bin/env python
import logging
import RPi.GPIO as GPIO
from application import logsetup, button, pushover
GPIO.setmode(GPIO.BCM)
GPIO.setup(config.bell_pin, GPIO.IN)
log = logging.getLogger(__name__)
log.info('Doorbell listener Started')
pushover.send('Listener started', pushover.LOW_PRIORITY)
while True:
if (GPIO.input(23) == False):
button.pressed() |
dde6e451a9e434b980d1ebac84626ec7515485c5 | instruments/bbn.py | instruments/bbn.py | from .instrument import Instrument, VisaInterface
from types import MethodType
class Attenuator(Instrument):
NUM_CHANNELS = 3
"""BBN 3 Channel Instrument"""
def __init__(self, name, resource_name):
super(Attenuator, self).__init__(name, resource_name, interface_type="VISA")
self.interface._resource.baud_rate = 115200
self.interface._resource.read_termination = u"\r\n"
self.interface._resource.write_termination = u"\n"
#Clear "unknown command" from connect
#TODO: where the heck does this come from
# self.interface.read()
# self.interface.read()
#Override query to look for ``end``
def query(self, query_string):
val = self._resource.query(query_string)
assert self.read() == "END"
return val
self.interface.query = MethodType(query, self.interface, VisaInterface)
def get_attenuation(self, chan):
return float(self.interface.query("GET {:d}".format(chan)))
def set_attenuation(self, chan, val):
self.interface.write("SET {:d} {:.1f}".format(chan, val))
assert self.interface.read() == "Setting channel {:d} to {:.2f}".format(chan, val)
assert self.interface.read() == "END"
| from .instrument import Instrument, VisaInterface
from types import MethodType
class Attenuator(Instrument):
"""BBN 3 Channel Instrument"""
NUM_CHANNELS = 3
def __init__(self, name, resource_name):
super(Attenuator, self).__init__(name, resource_name, interface_type="VISA")
self.interface._resource.baud_rate = 115200
self.interface._resource.read_termination = u"\r\n"
self.interface._resource.write_termination = u"\n"
#Override query to look for ``end``
def query(self, query_string):
val = self._resource.query(query_string)
assert self.read() == "END"
return val
self.interface.query = MethodType(query, self.interface, VisaInterface)
@classmethod
def channel_check(cls, chan):
""" Assert the channel requested is feasbile """
assert chan > 0 and chan <= cls.NUM_CHANNELS, "Invalid channel requested: channel ({:d}) must be between 1 and {:d}".format(chan, cls.NUM_CHANNELS)
def get_attenuation(self, chan):
Attenuator.channel_check(chan)
return float(self.interface.query("GET {:d}".format(chan)))
def set_attenuation(self, chan, val):
Attenuator.channel_check(chan)
self.interface.write("SET {:d} {:.1f}".format(chan, val))
assert self.interface.read() == "Setting channel {:d} to {:.2f}".format(chan, val)
assert self.interface.read() == "END"
| Add some channel validity checking to digital attenuator | Add some channel validity checking to digital attenuator
--CAR
| Python | apache-2.0 | BBN-Q/Auspex,BBN-Q/Auspex,BBN-Q/Auspex,BBN-Q/Auspex | from .instrument import Instrument, VisaInterface
from types import MethodType
class Attenuator(Instrument):
NUM_CHANNELS = 3
"""BBN 3 Channel Instrument"""
def __init__(self, name, resource_name):
super(Attenuator, self).__init__(name, resource_name, interface_type="VISA")
self.interface._resource.baud_rate = 115200
self.interface._resource.read_termination = u"\r\n"
self.interface._resource.write_termination = u"\n"
#Clear "unknown command" from connect
#TODO: where the heck does this come from
# self.interface.read()
# self.interface.read()
#Override query to look for ``end``
def query(self, query_string):
val = self._resource.query(query_string)
assert self.read() == "END"
return val
self.interface.query = MethodType(query, self.interface, VisaInterface)
def get_attenuation(self, chan):
return float(self.interface.query("GET {:d}".format(chan)))
def set_attenuation(self, chan, val):
self.interface.write("SET {:d} {:.1f}".format(chan, val))
assert self.interface.read() == "Setting channel {:d} to {:.2f}".format(chan, val)
assert self.interface.read() == "END"
Add some channel validity checking to digital attenuator
--CAR | from .instrument import Instrument, VisaInterface
from types import MethodType
class Attenuator(Instrument):
"""BBN 3 Channel Instrument"""
NUM_CHANNELS = 3
def __init__(self, name, resource_name):
super(Attenuator, self).__init__(name, resource_name, interface_type="VISA")
self.interface._resource.baud_rate = 115200
self.interface._resource.read_termination = u"\r\n"
self.interface._resource.write_termination = u"\n"
#Override query to look for ``end``
def query(self, query_string):
val = self._resource.query(query_string)
assert self.read() == "END"
return val
self.interface.query = MethodType(query, self.interface, VisaInterface)
@classmethod
def channel_check(cls, chan):
""" Assert the channel requested is feasbile """
assert chan > 0 and chan <= cls.NUM_CHANNELS, "Invalid channel requested: channel ({:d}) must be between 1 and {:d}".format(chan, cls.NUM_CHANNELS)
def get_attenuation(self, chan):
Attenuator.channel_check(chan)
return float(self.interface.query("GET {:d}".format(chan)))
def set_attenuation(self, chan, val):
Attenuator.channel_check(chan)
self.interface.write("SET {:d} {:.1f}".format(chan, val))
assert self.interface.read() == "Setting channel {:d} to {:.2f}".format(chan, val)
assert self.interface.read() == "END"
| <commit_before>from .instrument import Instrument, VisaInterface
from types import MethodType
class Attenuator(Instrument):
NUM_CHANNELS = 3
"""BBN 3 Channel Instrument"""
def __init__(self, name, resource_name):
super(Attenuator, self).__init__(name, resource_name, interface_type="VISA")
self.interface._resource.baud_rate = 115200
self.interface._resource.read_termination = u"\r\n"
self.interface._resource.write_termination = u"\n"
#Clear "unknown command" from connect
#TODO: where the heck does this come from
# self.interface.read()
# self.interface.read()
#Override query to look for ``end``
def query(self, query_string):
val = self._resource.query(query_string)
assert self.read() == "END"
return val
self.interface.query = MethodType(query, self.interface, VisaInterface)
def get_attenuation(self, chan):
return float(self.interface.query("GET {:d}".format(chan)))
def set_attenuation(self, chan, val):
self.interface.write("SET {:d} {:.1f}".format(chan, val))
assert self.interface.read() == "Setting channel {:d} to {:.2f}".format(chan, val)
assert self.interface.read() == "END"
<commit_msg>Add some channel validity checking to digital attenuator
--CAR<commit_after> | from .instrument import Instrument, VisaInterface
from types import MethodType
class Attenuator(Instrument):
"""BBN 3 Channel Instrument"""
NUM_CHANNELS = 3
def __init__(self, name, resource_name):
super(Attenuator, self).__init__(name, resource_name, interface_type="VISA")
self.interface._resource.baud_rate = 115200
self.interface._resource.read_termination = u"\r\n"
self.interface._resource.write_termination = u"\n"
#Override query to look for ``end``
def query(self, query_string):
val = self._resource.query(query_string)
assert self.read() == "END"
return val
self.interface.query = MethodType(query, self.interface, VisaInterface)
@classmethod
def channel_check(cls, chan):
""" Assert the channel requested is feasbile """
assert chan > 0 and chan <= cls.NUM_CHANNELS, "Invalid channel requested: channel ({:d}) must be between 1 and {:d}".format(chan, cls.NUM_CHANNELS)
def get_attenuation(self, chan):
Attenuator.channel_check(chan)
return float(self.interface.query("GET {:d}".format(chan)))
def set_attenuation(self, chan, val):
Attenuator.channel_check(chan)
self.interface.write("SET {:d} {:.1f}".format(chan, val))
assert self.interface.read() == "Setting channel {:d} to {:.2f}".format(chan, val)
assert self.interface.read() == "END"
| from .instrument import Instrument, VisaInterface
from types import MethodType
class Attenuator(Instrument):
NUM_CHANNELS = 3
"""BBN 3 Channel Instrument"""
def __init__(self, name, resource_name):
super(Attenuator, self).__init__(name, resource_name, interface_type="VISA")
self.interface._resource.baud_rate = 115200
self.interface._resource.read_termination = u"\r\n"
self.interface._resource.write_termination = u"\n"
#Clear "unknown command" from connect
#TODO: where the heck does this come from
# self.interface.read()
# self.interface.read()
#Override query to look for ``end``
def query(self, query_string):
val = self._resource.query(query_string)
assert self.read() == "END"
return val
self.interface.query = MethodType(query, self.interface, VisaInterface)
def get_attenuation(self, chan):
return float(self.interface.query("GET {:d}".format(chan)))
def set_attenuation(self, chan, val):
self.interface.write("SET {:d} {:.1f}".format(chan, val))
assert self.interface.read() == "Setting channel {:d} to {:.2f}".format(chan, val)
assert self.interface.read() == "END"
Add some channel validity checking to digital attenuator
--CARfrom .instrument import Instrument, VisaInterface
from types import MethodType
class Attenuator(Instrument):
"""BBN 3 Channel Instrument"""
NUM_CHANNELS = 3
def __init__(self, name, resource_name):
super(Attenuator, self).__init__(name, resource_name, interface_type="VISA")
self.interface._resource.baud_rate = 115200
self.interface._resource.read_termination = u"\r\n"
self.interface._resource.write_termination = u"\n"
#Override query to look for ``end``
def query(self, query_string):
val = self._resource.query(query_string)
assert self.read() == "END"
return val
self.interface.query = MethodType(query, self.interface, VisaInterface)
@classmethod
def channel_check(cls, chan):
""" Assert the channel requested is feasbile """
assert chan > 0 and chan <= cls.NUM_CHANNELS, "Invalid channel requested: channel ({:d}) must be between 1 and {:d}".format(chan, cls.NUM_CHANNELS)
def get_attenuation(self, chan):
Attenuator.channel_check(chan)
return float(self.interface.query("GET {:d}".format(chan)))
def set_attenuation(self, chan, val):
Attenuator.channel_check(chan)
self.interface.write("SET {:d} {:.1f}".format(chan, val))
assert self.interface.read() == "Setting channel {:d} to {:.2f}".format(chan, val)
assert self.interface.read() == "END"
| <commit_before>from .instrument import Instrument, VisaInterface
from types import MethodType
class Attenuator(Instrument):
NUM_CHANNELS = 3
"""BBN 3 Channel Instrument"""
def __init__(self, name, resource_name):
super(Attenuator, self).__init__(name, resource_name, interface_type="VISA")
self.interface._resource.baud_rate = 115200
self.interface._resource.read_termination = u"\r\n"
self.interface._resource.write_termination = u"\n"
#Clear "unknown command" from connect
#TODO: where the heck does this come from
# self.interface.read()
# self.interface.read()
#Override query to look for ``end``
def query(self, query_string):
val = self._resource.query(query_string)
assert self.read() == "END"
return val
self.interface.query = MethodType(query, self.interface, VisaInterface)
def get_attenuation(self, chan):
return float(self.interface.query("GET {:d}".format(chan)))
def set_attenuation(self, chan, val):
self.interface.write("SET {:d} {:.1f}".format(chan, val))
assert self.interface.read() == "Setting channel {:d} to {:.2f}".format(chan, val)
assert self.interface.read() == "END"
<commit_msg>Add some channel validity checking to digital attenuator
--CAR<commit_after>from .instrument import Instrument, VisaInterface
from types import MethodType
class Attenuator(Instrument):
"""BBN 3 Channel Instrument"""
NUM_CHANNELS = 3
def __init__(self, name, resource_name):
super(Attenuator, self).__init__(name, resource_name, interface_type="VISA")
self.interface._resource.baud_rate = 115200
self.interface._resource.read_termination = u"\r\n"
self.interface._resource.write_termination = u"\n"
#Override query to look for ``end``
def query(self, query_string):
val = self._resource.query(query_string)
assert self.read() == "END"
return val
self.interface.query = MethodType(query, self.interface, VisaInterface)
@classmethod
def channel_check(cls, chan):
""" Assert the channel requested is feasbile """
assert chan > 0 and chan <= cls.NUM_CHANNELS, "Invalid channel requested: channel ({:d}) must be between 1 and {:d}".format(chan, cls.NUM_CHANNELS)
def get_attenuation(self, chan):
Attenuator.channel_check(chan)
return float(self.interface.query("GET {:d}".format(chan)))
def set_attenuation(self, chan, val):
Attenuator.channel_check(chan)
self.interface.write("SET {:d} {:.1f}".format(chan, val))
assert self.interface.read() == "Setting channel {:d} to {:.2f}".format(chan, val)
assert self.interface.read() == "END"
|
cbd39a43d8da9b9fa81582d7e9aaf299631022ae | pytoon/main.py | pytoon/main.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from pytoon.connection import BrickConnection
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
db = SQLAlchemy(app)
print(db.get_app())
print('starting database')
db.create_all()
print('database created')
@app.route('/')
def index():
timestamps = Electricity.query.all()
data = '<table><tbody>'
for t in timestamps:
data += '<tr><td>{}</td></tr>'.format(t.timestamp)
data += '</tbody></table>'
return data
class PyToon(object):
def __init__(self, database):
host = "192.168.178.35"
port = 4223
BrickConnection(host, port, database)
class Electricity(db.Model):
timestamp = db.Column(db.DateTime, primary_key=True)
def __repr__(self):
return '<Timestamp {}>'.format(self.timestamp)
if __name__ == '__main__':
pt = PyToon(db)
app.run(debug=True, use_reloader=False)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from pytoon.connection import BrickConnection
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/app.db'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
db = SQLAlchemy(app)
print(db.get_app())
print('starting database')
print('database created')
@app.route('/')
def index():
timestamps = Electricity.query.all()
data = '<table><tbody>'
for t in timestamps:
data += '<tr><td>{}</td></tr>'.format(t.timestamp)
data += '</tbody></table>'
return data
class PyToon(object):
def __init__(self, database):
host = "192.168.178.35"
port = 4223
BrickConnection(host, port, database)
class Electricity(db.Model):
timestamp = db.Column(db.DateTime, primary_key=True)
def __init__(self, timestamp):
self.timestamp = timestamp
def __repr__(self):
return '<Timestamp {}>'.format(self.timestamp)
db.create_all()
if __name__ == '__main__':
pt = PyToon(db)
app.run(debug=True, use_reloader=False)
| Fix problem with creating tables | Fix problem with creating tables
| Python | bsd-3-clause | marcofinalist/pytoon,marcofinalist/pytoon,marcoplaisier/pytoon,marcoplaisier/pytoon | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from pytoon.connection import BrickConnection
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
db = SQLAlchemy(app)
print(db.get_app())
print('starting database')
db.create_all()
print('database created')
@app.route('/')
def index():
timestamps = Electricity.query.all()
data = '<table><tbody>'
for t in timestamps:
data += '<tr><td>{}</td></tr>'.format(t.timestamp)
data += '</tbody></table>'
return data
class PyToon(object):
def __init__(self, database):
host = "192.168.178.35"
port = 4223
BrickConnection(host, port, database)
class Electricity(db.Model):
timestamp = db.Column(db.DateTime, primary_key=True)
def __repr__(self):
return '<Timestamp {}>'.format(self.timestamp)
if __name__ == '__main__':
pt = PyToon(db)
app.run(debug=True, use_reloader=False)
Fix problem with creating tables | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from pytoon.connection import BrickConnection
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/app.db'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
db = SQLAlchemy(app)
print(db.get_app())
print('starting database')
print('database created')
@app.route('/')
def index():
timestamps = Electricity.query.all()
data = '<table><tbody>'
for t in timestamps:
data += '<tr><td>{}</td></tr>'.format(t.timestamp)
data += '</tbody></table>'
return data
class PyToon(object):
def __init__(self, database):
host = "192.168.178.35"
port = 4223
BrickConnection(host, port, database)
class Electricity(db.Model):
timestamp = db.Column(db.DateTime, primary_key=True)
def __init__(self, timestamp):
self.timestamp = timestamp
def __repr__(self):
return '<Timestamp {}>'.format(self.timestamp)
db.create_all()
if __name__ == '__main__':
pt = PyToon(db)
app.run(debug=True, use_reloader=False)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from pytoon.connection import BrickConnection
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
db = SQLAlchemy(app)
print(db.get_app())
print('starting database')
db.create_all()
print('database created')
@app.route('/')
def index():
timestamps = Electricity.query.all()
data = '<table><tbody>'
for t in timestamps:
data += '<tr><td>{}</td></tr>'.format(t.timestamp)
data += '</tbody></table>'
return data
class PyToon(object):
def __init__(self, database):
host = "192.168.178.35"
port = 4223
BrickConnection(host, port, database)
class Electricity(db.Model):
timestamp = db.Column(db.DateTime, primary_key=True)
def __repr__(self):
return '<Timestamp {}>'.format(self.timestamp)
if __name__ == '__main__':
pt = PyToon(db)
app.run(debug=True, use_reloader=False)
<commit_msg>Fix problem with creating tables<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from pytoon.connection import BrickConnection
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/app.db'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
db = SQLAlchemy(app)
print(db.get_app())
print('starting database')
print('database created')
@app.route('/')
def index():
timestamps = Electricity.query.all()
data = '<table><tbody>'
for t in timestamps:
data += '<tr><td>{}</td></tr>'.format(t.timestamp)
data += '</tbody></table>'
return data
class PyToon(object):
def __init__(self, database):
host = "192.168.178.35"
port = 4223
BrickConnection(host, port, database)
class Electricity(db.Model):
timestamp = db.Column(db.DateTime, primary_key=True)
def __init__(self, timestamp):
self.timestamp = timestamp
def __repr__(self):
return '<Timestamp {}>'.format(self.timestamp)
db.create_all()
if __name__ == '__main__':
pt = PyToon(db)
app.run(debug=True, use_reloader=False)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from pytoon.connection import BrickConnection
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
db = SQLAlchemy(app)
print(db.get_app())
print('starting database')
db.create_all()
print('database created')
@app.route('/')
def index():
timestamps = Electricity.query.all()
data = '<table><tbody>'
for t in timestamps:
data += '<tr><td>{}</td></tr>'.format(t.timestamp)
data += '</tbody></table>'
return data
class PyToon(object):
def __init__(self, database):
host = "192.168.178.35"
port = 4223
BrickConnection(host, port, database)
class Electricity(db.Model):
timestamp = db.Column(db.DateTime, primary_key=True)
def __repr__(self):
return '<Timestamp {}>'.format(self.timestamp)
if __name__ == '__main__':
pt = PyToon(db)
app.run(debug=True, use_reloader=False)
Fix problem with creating tables#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from pytoon.connection import BrickConnection
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/app.db'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
db = SQLAlchemy(app)
print(db.get_app())
print('starting database')
print('database created')
@app.route('/')
def index():
timestamps = Electricity.query.all()
data = '<table><tbody>'
for t in timestamps:
data += '<tr><td>{}</td></tr>'.format(t.timestamp)
data += '</tbody></table>'
return data
class PyToon(object):
def __init__(self, database):
host = "192.168.178.35"
port = 4223
BrickConnection(host, port, database)
class Electricity(db.Model):
timestamp = db.Column(db.DateTime, primary_key=True)
def __init__(self, timestamp):
self.timestamp = timestamp
def __repr__(self):
return '<Timestamp {}>'.format(self.timestamp)
db.create_all()
if __name__ == '__main__':
pt = PyToon(db)
app.run(debug=True, use_reloader=False)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from pytoon.connection import BrickConnection
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///app.db'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
db = SQLAlchemy(app)
print(db.get_app())
print('starting database')
db.create_all()
print('database created')
@app.route('/')
def index():
timestamps = Electricity.query.all()
data = '<table><tbody>'
for t in timestamps:
data += '<tr><td>{}</td></tr>'.format(t.timestamp)
data += '</tbody></table>'
return data
class PyToon(object):
def __init__(self, database):
host = "192.168.178.35"
port = 4223
BrickConnection(host, port, database)
class Electricity(db.Model):
timestamp = db.Column(db.DateTime, primary_key=True)
def __repr__(self):
return '<Timestamp {}>'.format(self.timestamp)
if __name__ == '__main__':
pt = PyToon(db)
app.run(debug=True, use_reloader=False)
<commit_msg>Fix problem with creating tables<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import os
from flask import Flask
from flask.ext.sqlalchemy import SQLAlchemy
from pytoon.connection import BrickConnection
app = Flask(__name__)
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:////tmp/app.db'
app.config['SQLALCHEMY_COMMIT_ON_TEARDOWN'] = True
db = SQLAlchemy(app)
print(db.get_app())
print('starting database')
print('database created')
@app.route('/')
def index():
timestamps = Electricity.query.all()
data = '<table><tbody>'
for t in timestamps:
data += '<tr><td>{}</td></tr>'.format(t.timestamp)
data += '</tbody></table>'
return data
class PyToon(object):
def __init__(self, database):
host = "192.168.178.35"
port = 4223
BrickConnection(host, port, database)
class Electricity(db.Model):
timestamp = db.Column(db.DateTime, primary_key=True)
def __init__(self, timestamp):
self.timestamp = timestamp
def __repr__(self):
return '<Timestamp {}>'.format(self.timestamp)
db.create_all()
if __name__ == '__main__':
pt = PyToon(db)
app.run(debug=True, use_reloader=False)
|
3a21d7d174dd8a9cbf76cbf149666337a8352c61 | h5py/_hl/compat.py | h5py/_hl/compat.py | """
Compatibility module for high-level h5py
"""
import sys
from os import fspath, fsencode, fsdecode
WINDOWS_ENCODING = "mbcs"
def filename_encode(filename):
"""
Encode filename for use in the HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed to the HDF5 library. See the documentation on
filenames in h5py for more information.
"""
filename = fspath(filename)
if sys.platform == "win32":
if isinstance(filename, str):
return filename.encode(WINDOWS_ENCODING, "strict")
return filename
return fsencode(filename)
def filename_decode(filename):
"""
Decode filename used by HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed from the HDF5 library. See the documentation
on filenames in h5py for more information.
"""
if sys.platform == "win32":
if isinstance(filename, bytes):
return filename.decode(WINDOWS_ENCODING, "strict")
elif isinstance(filename, str):
return filename
else:
raise TypeError("expect bytes or str, not %s" % type(filename).__name__)
return fsdecode(filename)
| """
Compatibility module for high-level h5py
"""
import sys
from os import fspath, fsencode, fsdecode
from ..version import hdf5_built_version_tuple
WINDOWS_ENCODING = "utf-8" if hdf5_built_version_tuple >= (1, 10, 6) else "mbcs"
def filename_encode(filename):
    """
    Encode a filename for handing to the HDF5 library.

    HDF5 expects bytes.  On non-Windows platforms the filesystem encoding
    is used; on Windows, str filenames are encoded with WINDOWS_ENCODING
    and bytes are passed through untouched.  See the h5py documentation on
    filenames for the full story.
    """
    filename = fspath(filename)
    if sys.platform != "win32":
        return fsencode(filename)
    if isinstance(filename, str):
        return filename.encode(WINDOWS_ENCODING, "strict")
    return filename
def filename_decode(filename):
    """
    Decode a filename as returned by the HDF5 library.

    On non-Windows platforms the filesystem encoding is used.  On Windows,
    bytes are decoded with WINDOWS_ENCODING, str passes through unchanged,
    and anything else is rejected.  See the h5py documentation on filenames.
    """
    if sys.platform != "win32":
        return fsdecode(filename)
    if isinstance(filename, bytes):
        return filename.decode(WINDOWS_ENCODING, "strict")
    if isinstance(filename, str):
        return filename
    raise TypeError("expect bytes or str, not %s" % type(filename).__name__)
| Use UTF-8 on windows where HDF5 >= 1.10.6 | Use UTF-8 on windows where HDF5 >= 1.10.6
| Python | bsd-3-clause | h5py/h5py,h5py/h5py,h5py/h5py | """
Compatibility module for high-level h5py
"""
import sys
from os import fspath, fsencode, fsdecode
WINDOWS_ENCODING = "mbcs"
def filename_encode(filename):
"""
Encode filename for use in the HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed to the HDF5 library. See the documentation on
filenames in h5py for more information.
"""
filename = fspath(filename)
if sys.platform == "win32":
if isinstance(filename, str):
return filename.encode(WINDOWS_ENCODING, "strict")
return filename
return fsencode(filename)
def filename_decode(filename):
"""
Decode filename used by HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed from the HDF5 library. See the documentation
on filenames in h5py for more information.
"""
if sys.platform == "win32":
if isinstance(filename, bytes):
return filename.decode(WINDOWS_ENCODING, "strict")
elif isinstance(filename, str):
return filename
else:
raise TypeError("expect bytes or str, not %s" % type(filename).__name__)
return fsdecode(filename)
Use UTF-8 on windows where HDF5 >= 1.10.6 | """
Compatibility module for high-level h5py
"""
import sys
from os import fspath, fsencode, fsdecode
from ..version import hdf5_built_version_tuple
WINDOWS_ENCODING = "utf-8" if hdf5_built_version_tuple >= (1, 10, 6) else "mbcs"
def filename_encode(filename):
"""
Encode filename for use in the HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed to the HDF5 library. See the documentation on
filenames in h5py for more information.
"""
filename = fspath(filename)
if sys.platform == "win32":
if isinstance(filename, str):
return filename.encode(WINDOWS_ENCODING, "strict")
return filename
return fsencode(filename)
def filename_decode(filename):
"""
Decode filename used by HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed from the HDF5 library. See the documentation
on filenames in h5py for more information.
"""
if sys.platform == "win32":
if isinstance(filename, bytes):
return filename.decode(WINDOWS_ENCODING, "strict")
elif isinstance(filename, str):
return filename
else:
raise TypeError("expect bytes or str, not %s" % type(filename).__name__)
return fsdecode(filename)
| <commit_before>"""
Compatibility module for high-level h5py
"""
import sys
from os import fspath, fsencode, fsdecode
WINDOWS_ENCODING = "mbcs"
def filename_encode(filename):
"""
Encode filename for use in the HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed to the HDF5 library. See the documentation on
filenames in h5py for more information.
"""
filename = fspath(filename)
if sys.platform == "win32":
if isinstance(filename, str):
return filename.encode(WINDOWS_ENCODING, "strict")
return filename
return fsencode(filename)
def filename_decode(filename):
"""
Decode filename used by HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed from the HDF5 library. See the documentation
on filenames in h5py for more information.
"""
if sys.platform == "win32":
if isinstance(filename, bytes):
return filename.decode(WINDOWS_ENCODING, "strict")
elif isinstance(filename, str):
return filename
else:
raise TypeError("expect bytes or str, not %s" % type(filename).__name__)
return fsdecode(filename)
<commit_msg>Use UTF-8 on windows where HDF5 >= 1.10.6<commit_after> | """
Compatibility module for high-level h5py
"""
import sys
from os import fspath, fsencode, fsdecode
from ..version import hdf5_built_version_tuple
WINDOWS_ENCODING = "utf-8" if hdf5_built_version_tuple >= (1, 10, 6) else "mbcs"
def filename_encode(filename):
"""
Encode filename for use in the HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed to the HDF5 library. See the documentation on
filenames in h5py for more information.
"""
filename = fspath(filename)
if sys.platform == "win32":
if isinstance(filename, str):
return filename.encode(WINDOWS_ENCODING, "strict")
return filename
return fsencode(filename)
def filename_decode(filename):
"""
Decode filename used by HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed from the HDF5 library. See the documentation
on filenames in h5py for more information.
"""
if sys.platform == "win32":
if isinstance(filename, bytes):
return filename.decode(WINDOWS_ENCODING, "strict")
elif isinstance(filename, str):
return filename
else:
raise TypeError("expect bytes or str, not %s" % type(filename).__name__)
return fsdecode(filename)
| """
Compatibility module for high-level h5py
"""
import sys
from os import fspath, fsencode, fsdecode
WINDOWS_ENCODING = "mbcs"
def filename_encode(filename):
"""
Encode filename for use in the HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed to the HDF5 library. See the documentation on
filenames in h5py for more information.
"""
filename = fspath(filename)
if sys.platform == "win32":
if isinstance(filename, str):
return filename.encode(WINDOWS_ENCODING, "strict")
return filename
return fsencode(filename)
def filename_decode(filename):
"""
Decode filename used by HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed from the HDF5 library. See the documentation
on filenames in h5py for more information.
"""
if sys.platform == "win32":
if isinstance(filename, bytes):
return filename.decode(WINDOWS_ENCODING, "strict")
elif isinstance(filename, str):
return filename
else:
raise TypeError("expect bytes or str, not %s" % type(filename).__name__)
return fsdecode(filename)
Use UTF-8 on windows where HDF5 >= 1.10.6"""
Compatibility module for high-level h5py
"""
import sys
from os import fspath, fsencode, fsdecode
from ..version import hdf5_built_version_tuple
WINDOWS_ENCODING = "utf-8" if hdf5_built_version_tuple >= (1, 10, 6) else "mbcs"
def filename_encode(filename):
"""
Encode filename for use in the HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed to the HDF5 library. See the documentation on
filenames in h5py for more information.
"""
filename = fspath(filename)
if sys.platform == "win32":
if isinstance(filename, str):
return filename.encode(WINDOWS_ENCODING, "strict")
return filename
return fsencode(filename)
def filename_decode(filename):
"""
Decode filename used by HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed from the HDF5 library. See the documentation
on filenames in h5py for more information.
"""
if sys.platform == "win32":
if isinstance(filename, bytes):
return filename.decode(WINDOWS_ENCODING, "strict")
elif isinstance(filename, str):
return filename
else:
raise TypeError("expect bytes or str, not %s" % type(filename).__name__)
return fsdecode(filename)
| <commit_before>"""
Compatibility module for high-level h5py
"""
import sys
from os import fspath, fsencode, fsdecode
WINDOWS_ENCODING = "mbcs"
def filename_encode(filename):
"""
Encode filename for use in the HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed to the HDF5 library. See the documentation on
filenames in h5py for more information.
"""
filename = fspath(filename)
if sys.platform == "win32":
if isinstance(filename, str):
return filename.encode(WINDOWS_ENCODING, "strict")
return filename
return fsencode(filename)
def filename_decode(filename):
"""
Decode filename used by HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed from the HDF5 library. See the documentation
on filenames in h5py for more information.
"""
if sys.platform == "win32":
if isinstance(filename, bytes):
return filename.decode(WINDOWS_ENCODING, "strict")
elif isinstance(filename, str):
return filename
else:
raise TypeError("expect bytes or str, not %s" % type(filename).__name__)
return fsdecode(filename)
<commit_msg>Use UTF-8 on windows where HDF5 >= 1.10.6<commit_after>"""
Compatibility module for high-level h5py
"""
import sys
from os import fspath, fsencode, fsdecode
from ..version import hdf5_built_version_tuple
WINDOWS_ENCODING = "utf-8" if hdf5_built_version_tuple >= (1, 10, 6) else "mbcs"
def filename_encode(filename):
"""
Encode filename for use in the HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed to the HDF5 library. See the documentation on
filenames in h5py for more information.
"""
filename = fspath(filename)
if sys.platform == "win32":
if isinstance(filename, str):
return filename.encode(WINDOWS_ENCODING, "strict")
return filename
return fsencode(filename)
def filename_decode(filename):
"""
Decode filename used by HDF5 library.
Due to how HDF5 handles filenames on different systems, this should be
called on any filenames passed from the HDF5 library. See the documentation
on filenames in h5py for more information.
"""
if sys.platform == "win32":
if isinstance(filename, bytes):
return filename.decode(WINDOWS_ENCODING, "strict")
elif isinstance(filename, str):
return filename
else:
raise TypeError("expect bytes or str, not %s" % type(filename).__name__)
return fsdecode(filename)
|
35fcaad0474df3352ccdf0545fc34cf2c431761c | tweet_s3_images.py | tweet_s3_images.py | import exifread
import os
class TweetS3Images(object):
def __init__(self, twitter, s3_client):
self._twitter = twitter
self._s3_client = s3_client
self._file = None
def send_image(self, bucket, image_name, cleanup=False):
temp_file = '/tmp/{}'.format(image_name)
self._s3_client.download_file(bucket, image_name, temp_file)
self._file = open(temp_file, 'rb')
status = 'New image {} brought to you by lambda-tweet'.format(image_name)
tags = exifread.process_file(self._file)
self._twitter.update_with_media(filename=image_name, status=status, file=self._file)
if cleanup:
self.cleanup(temp_file)
def get_file(self):
return self._file
def cleanup(self, file):
os.remove(file)
| import exifread
import os
class TweetS3Images(object):
    """Download an image from S3 and tweet it, using the image's EXIF
    ImageDescription (when present) as the tweet status text."""

    def __init__(self, twitter, s3_client):
        self._twitter = twitter      # tweepy-style API object (update_with_media)
        self._s3_client = s3_client  # boto3-style client (download_file)
        self._file = None            # handle of the most recently downloaded image

    def send_image(self, bucket, image_name, cleanup=False):
        """Download *image_name* from *bucket*, tweet it, and optionally
        remove the temporary local copy afterwards."""
        temp_file = '/tmp/{}'.format(image_name)
        self._s3_client.download_file(bucket, image_name, temp_file)
        self._file = open(temp_file, 'rb')
        tags = exifread.process_file(self._file)
        status = self.get_image_description(tags, image_name)
        # exifread has consumed part of the stream while parsing EXIF data;
        # rewind so the whole image -- not a tail of it -- gets uploaded.
        self._file.seek(0)
        self._twitter.update_with_media(filename=image_name, status=status, file=self._file)
        if cleanup:
            self.cleanup(temp_file)

    def get_file(self):
        """Return the file object of the most recently downloaded image."""
        return self._file

    @staticmethod
    def cleanup(file_to_remove):
        """Delete the given temporary file from local disk."""
        os.remove(file_to_remove)

    @staticmethod
    def get_image_description(tags, image_name):
        """Return the EXIF ImageDescription value from *tags*, or a default
        status mentioning *image_name* when the tag is absent."""
        if 'Image ImageDescription' in tags:
            status = tags['Image ImageDescription'].values
        else:
            status = 'New image {} brought to you by lambda-tweet'.format(image_name)
        return status
| Update class to get image description if available and use it as the update message. | Update class to get image description if available and use it as the update message.
| Python | mit | onema/lambda-tweet | import exifread
import os
class TweetS3Images(object):
def __init__(self, twitter, s3_client):
self._twitter = twitter
self._s3_client = s3_client
self._file = None
def send_image(self, bucket, image_name, cleanup=False):
temp_file = '/tmp/{}'.format(image_name)
self._s3_client.download_file(bucket, image_name, temp_file)
self._file = open(temp_file, 'rb')
status = 'New image {} brought to you by lambda-tweet'.format(image_name)
tags = exifread.process_file(self._file)
self._twitter.update_with_media(filename=image_name, status=status, file=self._file)
if cleanup:
self.cleanup(temp_file)
def get_file(self):
return self._file
def cleanup(self, file):
os.remove(file)
Update class to get image description if available and use it as the update message. | import exifread
import os
class TweetS3Images(object):
def __init__(self, twitter, s3_client):
self._twitter = twitter
self._s3_client = s3_client
self._file = None
def send_image(self, bucket, image_name, cleanup=False):
temp_file = '/tmp/{}'.format(image_name)
self._s3_client.download_file(bucket, image_name, temp_file)
self._file = open(temp_file, 'rb')
tags = exifread.process_file(self._file)
status = self.get_image_description(tags, image_name)
self._twitter.update_with_media(filename=image_name, status=status, file=self._file)
if cleanup:
self.cleanup(temp_file)
def get_file(self):
return self._file
@staticmethod
def cleanup(file_to_remove):
os.remove(file_to_remove)
@staticmethod
def get_image_description(tags, image_name):
if 'Image ImageDescription' in tags:
status = tags['Image ImageDescription'].values
else:
status = 'New image {} brought to you by lambda-tweet'.format(image_name)
return status
| <commit_before>import exifread
import os
class TweetS3Images(object):
def __init__(self, twitter, s3_client):
self._twitter = twitter
self._s3_client = s3_client
self._file = None
def send_image(self, bucket, image_name, cleanup=False):
temp_file = '/tmp/{}'.format(image_name)
self._s3_client.download_file(bucket, image_name, temp_file)
self._file = open(temp_file, 'rb')
status = 'New image {} brought to you by lambda-tweet'.format(image_name)
tags = exifread.process_file(self._file)
self._twitter.update_with_media(filename=image_name, status=status, file=self._file)
if cleanup:
self.cleanup(temp_file)
def get_file(self):
return self._file
def cleanup(self, file):
os.remove(file)
<commit_msg>Update class to get image description if available and use it as the update message.<commit_after> | import exifread
import os
class TweetS3Images(object):
def __init__(self, twitter, s3_client):
self._twitter = twitter
self._s3_client = s3_client
self._file = None
def send_image(self, bucket, image_name, cleanup=False):
temp_file = '/tmp/{}'.format(image_name)
self._s3_client.download_file(bucket, image_name, temp_file)
self._file = open(temp_file, 'rb')
tags = exifread.process_file(self._file)
status = self.get_image_description(tags, image_name)
self._twitter.update_with_media(filename=image_name, status=status, file=self._file)
if cleanup:
self.cleanup(temp_file)
def get_file(self):
return self._file
@staticmethod
def cleanup(file_to_remove):
os.remove(file_to_remove)
@staticmethod
def get_image_description(tags, image_name):
if 'Image ImageDescription' in tags:
status = tags['Image ImageDescription'].values
else:
status = 'New image {} brought to you by lambda-tweet'.format(image_name)
return status
| import exifread
import os
class TweetS3Images(object):
def __init__(self, twitter, s3_client):
self._twitter = twitter
self._s3_client = s3_client
self._file = None
def send_image(self, bucket, image_name, cleanup=False):
temp_file = '/tmp/{}'.format(image_name)
self._s3_client.download_file(bucket, image_name, temp_file)
self._file = open(temp_file, 'rb')
status = 'New image {} brought to you by lambda-tweet'.format(image_name)
tags = exifread.process_file(self._file)
self._twitter.update_with_media(filename=image_name, status=status, file=self._file)
if cleanup:
self.cleanup(temp_file)
def get_file(self):
return self._file
def cleanup(self, file):
os.remove(file)
Update class to get image description if available and use it as the update message.import exifread
import os
class TweetS3Images(object):
def __init__(self, twitter, s3_client):
self._twitter = twitter
self._s3_client = s3_client
self._file = None
def send_image(self, bucket, image_name, cleanup=False):
temp_file = '/tmp/{}'.format(image_name)
self._s3_client.download_file(bucket, image_name, temp_file)
self._file = open(temp_file, 'rb')
tags = exifread.process_file(self._file)
status = self.get_image_description(tags, image_name)
self._twitter.update_with_media(filename=image_name, status=status, file=self._file)
if cleanup:
self.cleanup(temp_file)
def get_file(self):
return self._file
@staticmethod
def cleanup(file_to_remove):
os.remove(file_to_remove)
@staticmethod
def get_image_description(tags, image_name):
if 'Image ImageDescription' in tags:
status = tags['Image ImageDescription'].values
else:
status = 'New image {} brought to you by lambda-tweet'.format(image_name)
return status
| <commit_before>import exifread
import os
class TweetS3Images(object):
def __init__(self, twitter, s3_client):
self._twitter = twitter
self._s3_client = s3_client
self._file = None
def send_image(self, bucket, image_name, cleanup=False):
temp_file = '/tmp/{}'.format(image_name)
self._s3_client.download_file(bucket, image_name, temp_file)
self._file = open(temp_file, 'rb')
status = 'New image {} brought to you by lambda-tweet'.format(image_name)
tags = exifread.process_file(self._file)
self._twitter.update_with_media(filename=image_name, status=status, file=self._file)
if cleanup:
self.cleanup(temp_file)
def get_file(self):
return self._file
def cleanup(self, file):
os.remove(file)
<commit_msg>Update class to get image description if available and use it as the update message.<commit_after>import exifread
import os
class TweetS3Images(object):
def __init__(self, twitter, s3_client):
self._twitter = twitter
self._s3_client = s3_client
self._file = None
def send_image(self, bucket, image_name, cleanup=False):
temp_file = '/tmp/{}'.format(image_name)
self._s3_client.download_file(bucket, image_name, temp_file)
self._file = open(temp_file, 'rb')
tags = exifread.process_file(self._file)
status = self.get_image_description(tags, image_name)
self._twitter.update_with_media(filename=image_name, status=status, file=self._file)
if cleanup:
self.cleanup(temp_file)
def get_file(self):
return self._file
@staticmethod
def cleanup(file_to_remove):
os.remove(file_to_remove)
@staticmethod
def get_image_description(tags, image_name):
if 'Image ImageDescription' in tags:
status = tags['Image ImageDescription'].values
else:
status = 'New image {} brought to you by lambda-tweet'.format(image_name)
return status
|
b096d91564366392c4003b26bafd1e6c3fff47d3 | trayapp.py | trayapp.py |
# Github Tray App
import rumps
import config
import contribs
username = config.get_username()
class GithubTrayApp(rumps.App):
@rumps.timer(60*5)
def timer(self, sender):
count = contribs.get_contribs(username)
self.title = str(count)
@rumps.clicked('Update')
def onoff(self, sender):
count = contribs.get_contribs(username)
self.title = str(count)
@rumps.clicked('Change Frequency')
def onoff(self, sender):
rumps.alert('jk! not ready yet!')
@rumps.clicked('Change Username')
def prefs(self, _):
rumps.alert('jk! not ready yet!')
if __name__ == "__main__":
count = contribs.get_contribs(username)
GithubTrayApp('Github', icon='github.png', title=str(count)).run()
|
# Github Tray App
import rumps
import config
import contribs
class GithubTrayApp(rumps.App):
    """rumps menu-bar app that shows a GitHub commit count for one user."""

    def __init__(self):
        super(GithubTrayApp, self).__init__('Github')
        self.count = rumps.MenuItem('commits')
        self.username = config.get_username()
        self.icon = 'github.png'
        self.menu = [self.count, 'Update Now', 'Change Frequency', 'Change Username']
        self.update()

    def update(self):
        """Fetch the current commit count and reflect it in title and menu."""
        total = contribs.get_contribs(self.username)
        self.count.title = '{} commits'.format(total)
        self.title = str(total)

    @rumps.timer(60*5)
    def timer(self, _):
        print('Running timer')
        self.update()

    @rumps.clicked('Update Now')
    def update_now(self, _):
        self.update()

    @rumps.clicked('Change Frequency')
    def change_frequency(_):
        rumps.alert('jk! not ready yet!')

    @rumps.clicked('Change Username')
    def change_username(_):
        rumps.alert('jk! not ready yet!')
if __name__ == '__main__':
GithubTrayApp().run()
| Restructure app and display commit count as a disabled menu item | Restructure app and display commit count as a disabled menu item
| Python | mit | chrisfosterelli/commitwatch |
# Github Tray App
import rumps
import config
import contribs
username = config.get_username()
class GithubTrayApp(rumps.App):
@rumps.timer(60*5)
def timer(self, sender):
count = contribs.get_contribs(username)
self.title = str(count)
@rumps.clicked('Update')
def onoff(self, sender):
count = contribs.get_contribs(username)
self.title = str(count)
@rumps.clicked('Change Frequency')
def onoff(self, sender):
rumps.alert('jk! not ready yet!')
@rumps.clicked('Change Username')
def prefs(self, _):
rumps.alert('jk! not ready yet!')
if __name__ == "__main__":
count = contribs.get_contribs(username)
GithubTrayApp('Github', icon='github.png', title=str(count)).run()
Restructure app and display commit count as a disabled menu item |
# Github Tray App
import rumps
import config
import contribs
class GithubTrayApp(rumps.App):
def __init__(self):
super(GithubTrayApp, self).__init__('Github')
self.count = rumps.MenuItem('commits')
self.username = config.get_username()
self.icon = 'github.png'
self.menu = [
self.count,
'Update Now',
'Change Frequency',
'Change Username'
]
self.update()
def update(self):
num = contribs.get_contribs(self.username)
self.count.title = str(num) + ' commits'
self.title = str(num)
@rumps.timer(60*5)
def timer(self, _):
print('Running timer')
self.update()
@rumps.clicked('Update Now')
def update_now(self, _):
self.update()
@rumps.clicked('Change Frequency')
def change_frequency(_):
rumps.alert('jk! not ready yet!')
@rumps.clicked('Change Username')
def change_username(_):
rumps.alert('jk! not ready yet!')
if __name__ == '__main__':
GithubTrayApp().run()
| <commit_before>
# Github Tray App
import rumps
import config
import contribs
username = config.get_username()
class GithubTrayApp(rumps.App):
@rumps.timer(60*5)
def timer(self, sender):
count = contribs.get_contribs(username)
self.title = str(count)
@rumps.clicked('Update')
def onoff(self, sender):
count = contribs.get_contribs(username)
self.title = str(count)
@rumps.clicked('Change Frequency')
def onoff(self, sender):
rumps.alert('jk! not ready yet!')
@rumps.clicked('Change Username')
def prefs(self, _):
rumps.alert('jk! not ready yet!')
if __name__ == "__main__":
count = contribs.get_contribs(username)
GithubTrayApp('Github', icon='github.png', title=str(count)).run()
<commit_msg>Restructure app and display commit count as a disabled menu item<commit_after> |
# Github Tray App
import rumps
import config
import contribs
class GithubTrayApp(rumps.App):
def __init__(self):
super(GithubTrayApp, self).__init__('Github')
self.count = rumps.MenuItem('commits')
self.username = config.get_username()
self.icon = 'github.png'
self.menu = [
self.count,
'Update Now',
'Change Frequency',
'Change Username'
]
self.update()
def update(self):
num = contribs.get_contribs(self.username)
self.count.title = str(num) + ' commits'
self.title = str(num)
@rumps.timer(60*5)
def timer(self, _):
print('Running timer')
self.update()
@rumps.clicked('Update Now')
def update_now(self, _):
self.update()
@rumps.clicked('Change Frequency')
def change_frequency(_):
rumps.alert('jk! not ready yet!')
@rumps.clicked('Change Username')
def change_username(_):
rumps.alert('jk! not ready yet!')
if __name__ == '__main__':
GithubTrayApp().run()
|
# Github Tray App
import rumps
import config
import contribs
username = config.get_username()
class GithubTrayApp(rumps.App):
@rumps.timer(60*5)
def timer(self, sender):
count = contribs.get_contribs(username)
self.title = str(count)
@rumps.clicked('Update')
def onoff(self, sender):
count = contribs.get_contribs(username)
self.title = str(count)
@rumps.clicked('Change Frequency')
def onoff(self, sender):
rumps.alert('jk! not ready yet!')
@rumps.clicked('Change Username')
def prefs(self, _):
rumps.alert('jk! not ready yet!')
if __name__ == "__main__":
count = contribs.get_contribs(username)
GithubTrayApp('Github', icon='github.png', title=str(count)).run()
Restructure app and display commit count as a disabled menu item
# Github Tray App
import rumps
import config
import contribs
class GithubTrayApp(rumps.App):
def __init__(self):
super(GithubTrayApp, self).__init__('Github')
self.count = rumps.MenuItem('commits')
self.username = config.get_username()
self.icon = 'github.png'
self.menu = [
self.count,
'Update Now',
'Change Frequency',
'Change Username'
]
self.update()
def update(self):
num = contribs.get_contribs(self.username)
self.count.title = str(num) + ' commits'
self.title = str(num)
@rumps.timer(60*5)
def timer(self, _):
print('Running timer')
self.update()
@rumps.clicked('Update Now')
def update_now(self, _):
self.update()
@rumps.clicked('Change Frequency')
def change_frequency(_):
rumps.alert('jk! not ready yet!')
@rumps.clicked('Change Username')
def change_username(_):
rumps.alert('jk! not ready yet!')
if __name__ == '__main__':
GithubTrayApp().run()
| <commit_before>
# Github Tray App
import rumps
import config
import contribs
username = config.get_username()
class GithubTrayApp(rumps.App):
@rumps.timer(60*5)
def timer(self, sender):
count = contribs.get_contribs(username)
self.title = str(count)
@rumps.clicked('Update')
def onoff(self, sender):
count = contribs.get_contribs(username)
self.title = str(count)
@rumps.clicked('Change Frequency')
def onoff(self, sender):
rumps.alert('jk! not ready yet!')
@rumps.clicked('Change Username')
def prefs(self, _):
rumps.alert('jk! not ready yet!')
if __name__ == "__main__":
count = contribs.get_contribs(username)
GithubTrayApp('Github', icon='github.png', title=str(count)).run()
<commit_msg>Restructure app and display commit count as a disabled menu item<commit_after>
# Github Tray App
import rumps
import config
import contribs
class GithubTrayApp(rumps.App):
def __init__(self):
super(GithubTrayApp, self).__init__('Github')
self.count = rumps.MenuItem('commits')
self.username = config.get_username()
self.icon = 'github.png'
self.menu = [
self.count,
'Update Now',
'Change Frequency',
'Change Username'
]
self.update()
def update(self):
num = contribs.get_contribs(self.username)
self.count.title = str(num) + ' commits'
self.title = str(num)
@rumps.timer(60*5)
def timer(self, _):
print('Running timer')
self.update()
@rumps.clicked('Update Now')
def update_now(self, _):
self.update()
@rumps.clicked('Change Frequency')
def change_frequency(_):
rumps.alert('jk! not ready yet!')
@rumps.clicked('Change Username')
def change_username(_):
rumps.alert('jk! not ready yet!')
if __name__ == '__main__':
GithubTrayApp().run()
|
b8d3e62bae3559b24a2a135c921ccc9879fab339 | src/py65/memory.py | src/py65/memory.py |
class ObservableMemory:
def __init__(self, subject=None):
if subject is None:
subject = 0x10000 * [0x00]
self._subject = subject
self._read_subscribers = {}
self._write_subscribers = {}
def __setitem__(self, address, value):
callbacks = self._write_subscribers.setdefault(address, [])
for callback in callbacks:
result = callback(address, value)
if result is not None:
value = result
self._subject[address] = value
def __getitem__(self, address):
callbacks = self._read_subscribers.setdefault(address, [])
final_result = None
for callback in callbacks:
result = callback(address)
if result is not None:
final_result = result
if final_result is None:
return self._subject[address]
else:
return final_result
def __getattr__(self, attribute):
return getattr(self._subject, attribute)
def subscribe_to_write(self, address_range, callback):
for address in address_range:
callbacks = self._write_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def subscribe_to_read(self, address_range, callback):
for address in address_range:
callbacks = self._read_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def write(self, start_address, bytes):
self._subject[start_address:start_address+len(bytes)] = bytes
|
class ObservableMemory:
def __init__(self, subject=None):
if subject is None:
subject = 0x10000 * [0x00]
self._subject = subject
self._read_subscribers = {}
self._write_subscribers = {}
def __setitem__(self, address, value):
callbacks = self._write_subscribers.get(address, [])
for callback in callbacks:
result = callback(address, value)
if result is not None:
value = result
self._subject[address] = value
def __getitem__(self, address):
callbacks = self._read_subscribers.get(address, [])
final_result = None
for callback in callbacks:
result = callback(address)
if result is not None:
final_result = result
if final_result is None:
return self._subject[address]
else:
return final_result
def __getattr__(self, attribute):
return getattr(self._subject, attribute)
def subscribe_to_write(self, address_range, callback):
for address in address_range:
callbacks = self._write_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def subscribe_to_read(self, address_range, callback):
for address in address_range:
callbacks = self._read_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def write(self, start_address, bytes):
self._subject[start_address:start_address+len(bytes)] = bytes
| Use get() instead of setdefault(). | Use get() instead of setdefault().
| Python | bsd-3-clause | mkeller0815/py65,mnaberez/py65 |
class ObservableMemory:
def __init__(self, subject=None):
if subject is None:
subject = 0x10000 * [0x00]
self._subject = subject
self._read_subscribers = {}
self._write_subscribers = {}
def __setitem__(self, address, value):
callbacks = self._write_subscribers.setdefault(address, [])
for callback in callbacks:
result = callback(address, value)
if result is not None:
value = result
self._subject[address] = value
def __getitem__(self, address):
callbacks = self._read_subscribers.setdefault(address, [])
final_result = None
for callback in callbacks:
result = callback(address)
if result is not None:
final_result = result
if final_result is None:
return self._subject[address]
else:
return final_result
def __getattr__(self, attribute):
return getattr(self._subject, attribute)
def subscribe_to_write(self, address_range, callback):
for address in address_range:
callbacks = self._write_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def subscribe_to_read(self, address_range, callback):
for address in address_range:
callbacks = self._read_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def write(self, start_address, bytes):
self._subject[start_address:start_address+len(bytes)] = bytes
Use get() instead of setdefault(). |
class ObservableMemory:
def __init__(self, subject=None):
if subject is None:
subject = 0x10000 * [0x00]
self._subject = subject
self._read_subscribers = {}
self._write_subscribers = {}
def __setitem__(self, address, value):
callbacks = self._write_subscribers.get(address, [])
for callback in callbacks:
result = callback(address, value)
if result is not None:
value = result
self._subject[address] = value
def __getitem__(self, address):
callbacks = self._read_subscribers.get(address, [])
final_result = None
for callback in callbacks:
result = callback(address)
if result is not None:
final_result = result
if final_result is None:
return self._subject[address]
else:
return final_result
def __getattr__(self, attribute):
return getattr(self._subject, attribute)
def subscribe_to_write(self, address_range, callback):
for address in address_range:
callbacks = self._write_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def subscribe_to_read(self, address_range, callback):
for address in address_range:
callbacks = self._read_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def write(self, start_address, bytes):
self._subject[start_address:start_address+len(bytes)] = bytes
| <commit_before>
class ObservableMemory:
def __init__(self, subject=None):
if subject is None:
subject = 0x10000 * [0x00]
self._subject = subject
self._read_subscribers = {}
self._write_subscribers = {}
def __setitem__(self, address, value):
callbacks = self._write_subscribers.setdefault(address, [])
for callback in callbacks:
result = callback(address, value)
if result is not None:
value = result
self._subject[address] = value
def __getitem__(self, address):
callbacks = self._read_subscribers.setdefault(address, [])
final_result = None
for callback in callbacks:
result = callback(address)
if result is not None:
final_result = result
if final_result is None:
return self._subject[address]
else:
return final_result
def __getattr__(self, attribute):
return getattr(self._subject, attribute)
def subscribe_to_write(self, address_range, callback):
for address in address_range:
callbacks = self._write_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def subscribe_to_read(self, address_range, callback):
for address in address_range:
callbacks = self._read_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def write(self, start_address, bytes):
self._subject[start_address:start_address+len(bytes)] = bytes
<commit_msg>Use get() instead of setdefault().<commit_after> |
class ObservableMemory:
def __init__(self, subject=None):
if subject is None:
subject = 0x10000 * [0x00]
self._subject = subject
self._read_subscribers = {}
self._write_subscribers = {}
def __setitem__(self, address, value):
callbacks = self._write_subscribers.get(address, [])
for callback in callbacks:
result = callback(address, value)
if result is not None:
value = result
self._subject[address] = value
def __getitem__(self, address):
callbacks = self._read_subscribers.get(address, [])
final_result = None
for callback in callbacks:
result = callback(address)
if result is not None:
final_result = result
if final_result is None:
return self._subject[address]
else:
return final_result
def __getattr__(self, attribute):
return getattr(self._subject, attribute)
def subscribe_to_write(self, address_range, callback):
for address in address_range:
callbacks = self._write_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def subscribe_to_read(self, address_range, callback):
for address in address_range:
callbacks = self._read_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def write(self, start_address, bytes):
self._subject[start_address:start_address+len(bytes)] = bytes
|
class ObservableMemory:
def __init__(self, subject=None):
if subject is None:
subject = 0x10000 * [0x00]
self._subject = subject
self._read_subscribers = {}
self._write_subscribers = {}
def __setitem__(self, address, value):
callbacks = self._write_subscribers.setdefault(address, [])
for callback in callbacks:
result = callback(address, value)
if result is not None:
value = result
self._subject[address] = value
def __getitem__(self, address):
callbacks = self._read_subscribers.setdefault(address, [])
final_result = None
for callback in callbacks:
result = callback(address)
if result is not None:
final_result = result
if final_result is None:
return self._subject[address]
else:
return final_result
def __getattr__(self, attribute):
return getattr(self._subject, attribute)
def subscribe_to_write(self, address_range, callback):
for address in address_range:
callbacks = self._write_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def subscribe_to_read(self, address_range, callback):
for address in address_range:
callbacks = self._read_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def write(self, start_address, bytes):
self._subject[start_address:start_address+len(bytes)] = bytes
Use get() instead of setdefault().
class ObservableMemory:
def __init__(self, subject=None):
if subject is None:
subject = 0x10000 * [0x00]
self._subject = subject
self._read_subscribers = {}
self._write_subscribers = {}
def __setitem__(self, address, value):
callbacks = self._write_subscribers.get(address, [])
for callback in callbacks:
result = callback(address, value)
if result is not None:
value = result
self._subject[address] = value
def __getitem__(self, address):
callbacks = self._read_subscribers.get(address, [])
final_result = None
for callback in callbacks:
result = callback(address)
if result is not None:
final_result = result
if final_result is None:
return self._subject[address]
else:
return final_result
def __getattr__(self, attribute):
return getattr(self._subject, attribute)
def subscribe_to_write(self, address_range, callback):
for address in address_range:
callbacks = self._write_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def subscribe_to_read(self, address_range, callback):
for address in address_range:
callbacks = self._read_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def write(self, start_address, bytes):
self._subject[start_address:start_address+len(bytes)] = bytes
| <commit_before>
class ObservableMemory:
def __init__(self, subject=None):
if subject is None:
subject = 0x10000 * [0x00]
self._subject = subject
self._read_subscribers = {}
self._write_subscribers = {}
def __setitem__(self, address, value):
callbacks = self._write_subscribers.setdefault(address, [])
for callback in callbacks:
result = callback(address, value)
if result is not None:
value = result
self._subject[address] = value
def __getitem__(self, address):
callbacks = self._read_subscribers.setdefault(address, [])
final_result = None
for callback in callbacks:
result = callback(address)
if result is not None:
final_result = result
if final_result is None:
return self._subject[address]
else:
return final_result
def __getattr__(self, attribute):
return getattr(self._subject, attribute)
def subscribe_to_write(self, address_range, callback):
for address in address_range:
callbacks = self._write_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def subscribe_to_read(self, address_range, callback):
for address in address_range:
callbacks = self._read_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def write(self, start_address, bytes):
self._subject[start_address:start_address+len(bytes)] = bytes
<commit_msg>Use get() instead of setdefault().<commit_after>
class ObservableMemory:
def __init__(self, subject=None):
if subject is None:
subject = 0x10000 * [0x00]
self._subject = subject
self._read_subscribers = {}
self._write_subscribers = {}
def __setitem__(self, address, value):
callbacks = self._write_subscribers.get(address, [])
for callback in callbacks:
result = callback(address, value)
if result is not None:
value = result
self._subject[address] = value
def __getitem__(self, address):
callbacks = self._read_subscribers.get(address, [])
final_result = None
for callback in callbacks:
result = callback(address)
if result is not None:
final_result = result
if final_result is None:
return self._subject[address]
else:
return final_result
def __getattr__(self, attribute):
return getattr(self._subject, attribute)
def subscribe_to_write(self, address_range, callback):
for address in address_range:
callbacks = self._write_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def subscribe_to_read(self, address_range, callback):
for address in address_range:
callbacks = self._read_subscribers.setdefault(address, [])
if callback not in callbacks:
callbacks.append(callback)
def write(self, start_address, bytes):
self._subject[start_address:start_address+len(bytes)] = bytes
|
f6e42f0f2d931533a9cdcab749feceeb5fa98982 | mopidy/backends/spotify/__init__.py | mopidy/backends/spotify/__init__.py | """A backend for playing music from Spotify
`Spotify <http://www.spotify.com/>`_ is a music streaming service. The backend
uses the official `libspotify
<http://developer.spotify.com/en/libspotify/overview/>`_ library and the
`pyspotify <http://github.com/mopidy/pyspotify/>`_ Python bindings for
libspotify. This backend handles URIs starting with ``spotify:``.
See :ref:`music-from-spotify` for further instructions on using this backend.
.. note::
This product uses SPOTIFY(R) CORE but is not endorsed, certified or
otherwise approved in any way by Spotify. Spotify is the registered
trade mark of the Spotify Group.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Spotify+backend
**Dependencies:**
- libspotify >= 11, < 12 (libspotify11 package from apt.mopidy.com)
- pyspotify >= 1.7, < 1.8 (python-spotify package from apt.mopidy.com)
**Settings:**
- :attr:`mopidy.settings.SPOTIFY_CACHE_PATH`
- :attr:`mopidy.settings.SPOTIFY_USERNAME`
- :attr:`mopidy.settings.SPOTIFY_PASSWORD`
"""
# flake8: noqa
from .actor import SpotifyBackend
| """A backend for playing music from Spotify
`Spotify <http://www.spotify.com/>`_ is a music streaming service. The backend
uses the official `libspotify
<http://developer.spotify.com/en/libspotify/overview/>`_ library and the
`pyspotify <http://github.com/mopidy/pyspotify/>`_ Python bindings for
libspotify. This backend handles URIs starting with ``spotify:``.
See :ref:`music-from-spotify` for further instructions on using this backend.
.. note::
This product uses SPOTIFY(R) CORE but is not endorsed, certified or
otherwise approved in any way by Spotify. Spotify is the registered
trade mark of the Spotify Group.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Spotify+backend
**Dependencies:**
- libspotify >= 12, < 13 (libspotify12 package from apt.mopidy.com)
- pyspotify >= 1.8, < 1.9 (python-spotify package from apt.mopidy.com)
**Settings:**
- :attr:`mopidy.settings.SPOTIFY_CACHE_PATH`
- :attr:`mopidy.settings.SPOTIFY_USERNAME`
- :attr:`mopidy.settings.SPOTIFY_PASSWORD`
"""
# flake8: noqa
from .actor import SpotifyBackend
| Update recommended libspotify and pyspotify version | Update recommended libspotify and pyspotify version
| Python | apache-2.0 | SuperStarPL/mopidy,bacontext/mopidy,priestd09/mopidy,mokieyue/mopidy,hkariti/mopidy,liamw9534/mopidy,adamcik/mopidy,mokieyue/mopidy,rawdlite/mopidy,dbrgn/mopidy,kingosticks/mopidy,mokieyue/mopidy,mopidy/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,adamcik/mopidy,rawdlite/mopidy,pacificIT/mopidy,kingosticks/mopidy,hkariti/mopidy,liamw9534/mopidy,swak/mopidy,glogiotatidis/mopidy,tkem/mopidy,dbrgn/mopidy,bencevans/mopidy,ali/mopidy,adamcik/mopidy,quartz55/mopidy,ZenithDK/mopidy,pacificIT/mopidy,hkariti/mopidy,ZenithDK/mopidy,swak/mopidy,tkem/mopidy,rawdlite/mopidy,pacificIT/mopidy,woutervanwijk/mopidy,glogiotatidis/mopidy,jmarsik/mopidy,jmarsik/mopidy,abarisain/mopidy,vrs01/mopidy,jcass77/mopidy,diandiankan/mopidy,rawdlite/mopidy,diandiankan/mopidy,glogiotatidis/mopidy,ali/mopidy,jmarsik/mopidy,diandiankan/mopidy,abarisain/mopidy,jcass77/mopidy,jodal/mopidy,jcass77/mopidy,bacontext/mopidy,hkariti/mopidy,tkem/mopidy,jodal/mopidy,tkem/mopidy,priestd09/mopidy,mopidy/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,bencevans/mopidy,priestd09/mopidy,pacificIT/mopidy,bacontext/mopidy,swak/mopidy,mopidy/mopidy,swak/mopidy,bacontext/mopidy,diandiankan/mopidy,jmarsik/mopidy,ZenithDK/mopidy,ZenithDK/mopidy,vrs01/mopidy,jodal/mopidy,vrs01/mopidy,dbrgn/mopidy,quartz55/mopidy,quartz55/mopidy,dbrgn/mopidy,SuperStarPL/mopidy,bencevans/mopidy,bencevans/mopidy,ali/mopidy,kingosticks/mopidy,vrs01/mopidy,quartz55/mopidy,ali/mopidy,woutervanwijk/mopidy | """A backend for playing music from Spotify
`Spotify <http://www.spotify.com/>`_ is a music streaming service. The backend
uses the official `libspotify
<http://developer.spotify.com/en/libspotify/overview/>`_ library and the
`pyspotify <http://github.com/mopidy/pyspotify/>`_ Python bindings for
libspotify. This backend handles URIs starting with ``spotify:``.
See :ref:`music-from-spotify` for further instructions on using this backend.
.. note::
This product uses SPOTIFY(R) CORE but is not endorsed, certified or
otherwise approved in any way by Spotify. Spotify is the registered
trade mark of the Spotify Group.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Spotify+backend
**Dependencies:**
- libspotify >= 11, < 12 (libspotify11 package from apt.mopidy.com)
- pyspotify >= 1.7, < 1.8 (python-spotify package from apt.mopidy.com)
**Settings:**
- :attr:`mopidy.settings.SPOTIFY_CACHE_PATH`
- :attr:`mopidy.settings.SPOTIFY_USERNAME`
- :attr:`mopidy.settings.SPOTIFY_PASSWORD`
"""
# flake8: noqa
from .actor import SpotifyBackend
Update recommended libspotify and pyspotify version | """A backend for playing music from Spotify
`Spotify <http://www.spotify.com/>`_ is a music streaming service. The backend
uses the official `libspotify
<http://developer.spotify.com/en/libspotify/overview/>`_ library and the
`pyspotify <http://github.com/mopidy/pyspotify/>`_ Python bindings for
libspotify. This backend handles URIs starting with ``spotify:``.
See :ref:`music-from-spotify` for further instructions on using this backend.
.. note::
This product uses SPOTIFY(R) CORE but is not endorsed, certified or
otherwise approved in any way by Spotify. Spotify is the registered
trade mark of the Spotify Group.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Spotify+backend
**Dependencies:**
- libspotify >= 12, < 13 (libspotify12 package from apt.mopidy.com)
- pyspotify >= 1.8, < 1.9 (python-spotify package from apt.mopidy.com)
**Settings:**
- :attr:`mopidy.settings.SPOTIFY_CACHE_PATH`
- :attr:`mopidy.settings.SPOTIFY_USERNAME`
- :attr:`mopidy.settings.SPOTIFY_PASSWORD`
"""
# flake8: noqa
from .actor import SpotifyBackend
| <commit_before>"""A backend for playing music from Spotify
`Spotify <http://www.spotify.com/>`_ is a music streaming service. The backend
uses the official `libspotify
<http://developer.spotify.com/en/libspotify/overview/>`_ library and the
`pyspotify <http://github.com/mopidy/pyspotify/>`_ Python bindings for
libspotify. This backend handles URIs starting with ``spotify:``.
See :ref:`music-from-spotify` for further instructions on using this backend.
.. note::
This product uses SPOTIFY(R) CORE but is not endorsed, certified or
otherwise approved in any way by Spotify. Spotify is the registered
trade mark of the Spotify Group.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Spotify+backend
**Dependencies:**
- libspotify >= 11, < 12 (libspotify11 package from apt.mopidy.com)
- pyspotify >= 1.7, < 1.8 (python-spotify package from apt.mopidy.com)
**Settings:**
- :attr:`mopidy.settings.SPOTIFY_CACHE_PATH`
- :attr:`mopidy.settings.SPOTIFY_USERNAME`
- :attr:`mopidy.settings.SPOTIFY_PASSWORD`
"""
# flake8: noqa
from .actor import SpotifyBackend
<commit_msg>Update recommended libspotify and pyspotify version<commit_after> | """A backend for playing music from Spotify
`Spotify <http://www.spotify.com/>`_ is a music streaming service. The backend
uses the official `libspotify
<http://developer.spotify.com/en/libspotify/overview/>`_ library and the
`pyspotify <http://github.com/mopidy/pyspotify/>`_ Python bindings for
libspotify. This backend handles URIs starting with ``spotify:``.
See :ref:`music-from-spotify` for further instructions on using this backend.
.. note::
This product uses SPOTIFY(R) CORE but is not endorsed, certified or
otherwise approved in any way by Spotify. Spotify is the registered
trade mark of the Spotify Group.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Spotify+backend
**Dependencies:**
- libspotify >= 12, < 13 (libspotify12 package from apt.mopidy.com)
- pyspotify >= 1.8, < 1.9 (python-spotify package from apt.mopidy.com)
**Settings:**
- :attr:`mopidy.settings.SPOTIFY_CACHE_PATH`
- :attr:`mopidy.settings.SPOTIFY_USERNAME`
- :attr:`mopidy.settings.SPOTIFY_PASSWORD`
"""
# flake8: noqa
from .actor import SpotifyBackend
| """A backend for playing music from Spotify
`Spotify <http://www.spotify.com/>`_ is a music streaming service. The backend
uses the official `libspotify
<http://developer.spotify.com/en/libspotify/overview/>`_ library and the
`pyspotify <http://github.com/mopidy/pyspotify/>`_ Python bindings for
libspotify. This backend handles URIs starting with ``spotify:``.
See :ref:`music-from-spotify` for further instructions on using this backend.
.. note::
This product uses SPOTIFY(R) CORE but is not endorsed, certified or
otherwise approved in any way by Spotify. Spotify is the registered
trade mark of the Spotify Group.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Spotify+backend
**Dependencies:**
- libspotify >= 11, < 12 (libspotify11 package from apt.mopidy.com)
- pyspotify >= 1.7, < 1.8 (python-spotify package from apt.mopidy.com)
**Settings:**
- :attr:`mopidy.settings.SPOTIFY_CACHE_PATH`
- :attr:`mopidy.settings.SPOTIFY_USERNAME`
- :attr:`mopidy.settings.SPOTIFY_PASSWORD`
"""
# flake8: noqa
from .actor import SpotifyBackend
Update recommended libspotify and pyspotify version"""A backend for playing music from Spotify
`Spotify <http://www.spotify.com/>`_ is a music streaming service. The backend
uses the official `libspotify
<http://developer.spotify.com/en/libspotify/overview/>`_ library and the
`pyspotify <http://github.com/mopidy/pyspotify/>`_ Python bindings for
libspotify. This backend handles URIs starting with ``spotify:``.
See :ref:`music-from-spotify` for further instructions on using this backend.
.. note::
This product uses SPOTIFY(R) CORE but is not endorsed, certified or
otherwise approved in any way by Spotify. Spotify is the registered
trade mark of the Spotify Group.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Spotify+backend
**Dependencies:**
- libspotify >= 12, < 13 (libspotify12 package from apt.mopidy.com)
- pyspotify >= 1.8, < 1.9 (python-spotify package from apt.mopidy.com)
**Settings:**
- :attr:`mopidy.settings.SPOTIFY_CACHE_PATH`
- :attr:`mopidy.settings.SPOTIFY_USERNAME`
- :attr:`mopidy.settings.SPOTIFY_PASSWORD`
"""
# flake8: noqa
from .actor import SpotifyBackend
| <commit_before>"""A backend for playing music from Spotify
`Spotify <http://www.spotify.com/>`_ is a music streaming service. The backend
uses the official `libspotify
<http://developer.spotify.com/en/libspotify/overview/>`_ library and the
`pyspotify <http://github.com/mopidy/pyspotify/>`_ Python bindings for
libspotify. This backend handles URIs starting with ``spotify:``.
See :ref:`music-from-spotify` for further instructions on using this backend.
.. note::
This product uses SPOTIFY(R) CORE but is not endorsed, certified or
otherwise approved in any way by Spotify. Spotify is the registered
trade mark of the Spotify Group.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Spotify+backend
**Dependencies:**
- libspotify >= 11, < 12 (libspotify11 package from apt.mopidy.com)
- pyspotify >= 1.7, < 1.8 (python-spotify package from apt.mopidy.com)
**Settings:**
- :attr:`mopidy.settings.SPOTIFY_CACHE_PATH`
- :attr:`mopidy.settings.SPOTIFY_USERNAME`
- :attr:`mopidy.settings.SPOTIFY_PASSWORD`
"""
# flake8: noqa
from .actor import SpotifyBackend
<commit_msg>Update recommended libspotify and pyspotify version<commit_after>"""A backend for playing music from Spotify
`Spotify <http://www.spotify.com/>`_ is a music streaming service. The backend
uses the official `libspotify
<http://developer.spotify.com/en/libspotify/overview/>`_ library and the
`pyspotify <http://github.com/mopidy/pyspotify/>`_ Python bindings for
libspotify. This backend handles URIs starting with ``spotify:``.
See :ref:`music-from-spotify` for further instructions on using this backend.
.. note::
This product uses SPOTIFY(R) CORE but is not endorsed, certified or
otherwise approved in any way by Spotify. Spotify is the registered
trade mark of the Spotify Group.
**Issues:**
https://github.com/mopidy/mopidy/issues?labels=Spotify+backend
**Dependencies:**
- libspotify >= 12, < 13 (libspotify12 package from apt.mopidy.com)
- pyspotify >= 1.8, < 1.9 (python-spotify package from apt.mopidy.com)
**Settings:**
- :attr:`mopidy.settings.SPOTIFY_CACHE_PATH`
- :attr:`mopidy.settings.SPOTIFY_USERNAME`
- :attr:`mopidy.settings.SPOTIFY_PASSWORD`
"""
# flake8: noqa
from .actor import SpotifyBackend
|
87c99c6839d7f74201393066651f3d69a5a2edce | nodeconductor/logging/middleware.py | nodeconductor/logging/middleware.py | from __future__ import unicode_literals
import threading
_locals = threading.local()
def get_event_context():
return getattr(_locals, 'context', None)
def set_event_context(context):
_locals.context = context
def reset_event_context():
if hasattr(_locals, 'context'):
del _locals.context
def get_ip_address(request):
"""
Correct IP address is expected as first element of HTTP_X_FORWARDED_FOR or REMOTE_ADDR
"""
if 'HTTP_X_FORWARDED_FOR' in request.META:
return request.META['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
else:
return request.META['REMOTE_ADDR']
class CaptureEventContextMiddleware(object):
def process_request(self, request):
context = {'ip_address': get_ip_address(request)}
user = getattr(request, 'user', None)
if user and not user.is_anonymous():
context.update(user._get_log_context('user'))
set_event_context(context)
def process_response(self, request, response):
reset_event_context()
return response
| from __future__ import unicode_literals
import threading
_locals = threading.local()
def get_event_context():
return getattr(_locals, 'context', None)
def set_event_context(context):
_locals.context = context
def reset_event_context():
if hasattr(_locals, 'context'):
del _locals.context
def set_current_user(user):
set_event_context(user._get_log_context('user'))
def get_ip_address(request):
"""
Correct IP address is expected as first element of HTTP_X_FORWARDED_FOR or REMOTE_ADDR
"""
if 'HTTP_X_FORWARDED_FOR' in request.META:
return request.META['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
else:
return request.META['REMOTE_ADDR']
class CaptureEventContextMiddleware(object):
def process_request(self, request):
context = {'ip_address': get_ip_address(request)}
user = getattr(request, 'user', None)
if user and not user.is_anonymous():
context.update(user._get_log_context('user'))
set_event_context(context)
def process_response(self, request, response):
reset_event_context()
return response
| Implement set_current_user for CapturingAuthentication (NC-529) | Implement set_current_user for CapturingAuthentication (NC-529)
| Python | mit | opennode/nodeconductor,opennode/nodeconductor,opennode/nodeconductor | from __future__ import unicode_literals
import threading
_locals = threading.local()
def get_event_context():
return getattr(_locals, 'context', None)
def set_event_context(context):
_locals.context = context
def reset_event_context():
if hasattr(_locals, 'context'):
del _locals.context
def get_ip_address(request):
"""
Correct IP address is expected as first element of HTTP_X_FORWARDED_FOR or REMOTE_ADDR
"""
if 'HTTP_X_FORWARDED_FOR' in request.META:
return request.META['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
else:
return request.META['REMOTE_ADDR']
class CaptureEventContextMiddleware(object):
def process_request(self, request):
context = {'ip_address': get_ip_address(request)}
user = getattr(request, 'user', None)
if user and not user.is_anonymous():
context.update(user._get_log_context('user'))
set_event_context(context)
def process_response(self, request, response):
reset_event_context()
return response
Implement set_current_user for CapturingAuthentication (NC-529) | from __future__ import unicode_literals
import threading
_locals = threading.local()
def get_event_context():
return getattr(_locals, 'context', None)
def set_event_context(context):
_locals.context = context
def reset_event_context():
if hasattr(_locals, 'context'):
del _locals.context
def set_current_user(user):
set_event_context(user._get_log_context('user'))
def get_ip_address(request):
"""
Correct IP address is expected as first element of HTTP_X_FORWARDED_FOR or REMOTE_ADDR
"""
if 'HTTP_X_FORWARDED_FOR' in request.META:
return request.META['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
else:
return request.META['REMOTE_ADDR']
class CaptureEventContextMiddleware(object):
def process_request(self, request):
context = {'ip_address': get_ip_address(request)}
user = getattr(request, 'user', None)
if user and not user.is_anonymous():
context.update(user._get_log_context('user'))
set_event_context(context)
def process_response(self, request, response):
reset_event_context()
return response
| <commit_before>from __future__ import unicode_literals
import threading
_locals = threading.local()
def get_event_context():
return getattr(_locals, 'context', None)
def set_event_context(context):
_locals.context = context
def reset_event_context():
if hasattr(_locals, 'context'):
del _locals.context
def get_ip_address(request):
"""
Correct IP address is expected as first element of HTTP_X_FORWARDED_FOR or REMOTE_ADDR
"""
if 'HTTP_X_FORWARDED_FOR' in request.META:
return request.META['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
else:
return request.META['REMOTE_ADDR']
class CaptureEventContextMiddleware(object):
def process_request(self, request):
context = {'ip_address': get_ip_address(request)}
user = getattr(request, 'user', None)
if user and not user.is_anonymous():
context.update(user._get_log_context('user'))
set_event_context(context)
def process_response(self, request, response):
reset_event_context()
return response
<commit_msg>Implement set_current_user for CapturingAuthentication (NC-529)<commit_after> | from __future__ import unicode_literals
import threading
_locals = threading.local()
def get_event_context():
return getattr(_locals, 'context', None)
def set_event_context(context):
_locals.context = context
def reset_event_context():
if hasattr(_locals, 'context'):
del _locals.context
def set_current_user(user):
set_event_context(user._get_log_context('user'))
def get_ip_address(request):
"""
Correct IP address is expected as first element of HTTP_X_FORWARDED_FOR or REMOTE_ADDR
"""
if 'HTTP_X_FORWARDED_FOR' in request.META:
return request.META['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
else:
return request.META['REMOTE_ADDR']
class CaptureEventContextMiddleware(object):
def process_request(self, request):
context = {'ip_address': get_ip_address(request)}
user = getattr(request, 'user', None)
if user and not user.is_anonymous():
context.update(user._get_log_context('user'))
set_event_context(context)
def process_response(self, request, response):
reset_event_context()
return response
| from __future__ import unicode_literals
import threading
_locals = threading.local()
def get_event_context():
return getattr(_locals, 'context', None)
def set_event_context(context):
_locals.context = context
def reset_event_context():
if hasattr(_locals, 'context'):
del _locals.context
def get_ip_address(request):
"""
Correct IP address is expected as first element of HTTP_X_FORWARDED_FOR or REMOTE_ADDR
"""
if 'HTTP_X_FORWARDED_FOR' in request.META:
return request.META['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
else:
return request.META['REMOTE_ADDR']
class CaptureEventContextMiddleware(object):
def process_request(self, request):
context = {'ip_address': get_ip_address(request)}
user = getattr(request, 'user', None)
if user and not user.is_anonymous():
context.update(user._get_log_context('user'))
set_event_context(context)
def process_response(self, request, response):
reset_event_context()
return response
Implement set_current_user for CapturingAuthentication (NC-529)from __future__ import unicode_literals
import threading
_locals = threading.local()
def get_event_context():
return getattr(_locals, 'context', None)
def set_event_context(context):
_locals.context = context
def reset_event_context():
if hasattr(_locals, 'context'):
del _locals.context
def set_current_user(user):
set_event_context(user._get_log_context('user'))
def get_ip_address(request):
"""
Correct IP address is expected as first element of HTTP_X_FORWARDED_FOR or REMOTE_ADDR
"""
if 'HTTP_X_FORWARDED_FOR' in request.META:
return request.META['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
else:
return request.META['REMOTE_ADDR']
class CaptureEventContextMiddleware(object):
def process_request(self, request):
context = {'ip_address': get_ip_address(request)}
user = getattr(request, 'user', None)
if user and not user.is_anonymous():
context.update(user._get_log_context('user'))
set_event_context(context)
def process_response(self, request, response):
reset_event_context()
return response
| <commit_before>from __future__ import unicode_literals
import threading
_locals = threading.local()
def get_event_context():
return getattr(_locals, 'context', None)
def set_event_context(context):
_locals.context = context
def reset_event_context():
if hasattr(_locals, 'context'):
del _locals.context
def get_ip_address(request):
"""
Correct IP address is expected as first element of HTTP_X_FORWARDED_FOR or REMOTE_ADDR
"""
if 'HTTP_X_FORWARDED_FOR' in request.META:
return request.META['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
else:
return request.META['REMOTE_ADDR']
class CaptureEventContextMiddleware(object):
def process_request(self, request):
context = {'ip_address': get_ip_address(request)}
user = getattr(request, 'user', None)
if user and not user.is_anonymous():
context.update(user._get_log_context('user'))
set_event_context(context)
def process_response(self, request, response):
reset_event_context()
return response
<commit_msg>Implement set_current_user for CapturingAuthentication (NC-529)<commit_after>from __future__ import unicode_literals
import threading
_locals = threading.local()
def get_event_context():
return getattr(_locals, 'context', None)
def set_event_context(context):
_locals.context = context
def reset_event_context():
if hasattr(_locals, 'context'):
del _locals.context
def set_current_user(user):
set_event_context(user._get_log_context('user'))
def get_ip_address(request):
"""
Correct IP address is expected as first element of HTTP_X_FORWARDED_FOR or REMOTE_ADDR
"""
if 'HTTP_X_FORWARDED_FOR' in request.META:
return request.META['HTTP_X_FORWARDED_FOR'].split(',')[0].strip()
else:
return request.META['REMOTE_ADDR']
class CaptureEventContextMiddleware(object):
def process_request(self, request):
context = {'ip_address': get_ip_address(request)}
user = getattr(request, 'user', None)
if user and not user.is_anonymous():
context.update(user._get_log_context('user'))
set_event_context(context)
def process_response(self, request, response):
reset_event_context()
return response
|
6183f41cf6fa2125d7d5de626a7b2a994253d4ca | src/levels/main.py | src/levels/main.py | # Copyright 2013 Daniel Stokes, Mitchell Stokes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from bge import logic
def init():
if ".." not in sys.path:
os.chdir(logic.expandPath("//"))
sys.path.append("..")
try:
from scripts.state import StartupState, DefaultState
from scripts.framework.state import StateSystem
if ('dostartup' in sys.argv):
logic.state_system = StateSystem(StartupState)
else:
print("Skipping startup")
logic.state_system = StateSystem(DefaultState)
except:
import traceback
traceback.print_exc()
logic.state_system = None
def run():
try:
logic.state_system
except AttributeError:
init()
if not logic.state_system:
return
logic.state_system.update()
| # Copyright 2013 Daniel Stokes, Mitchell Stokes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from bge import logic
def init():
if ".." not in sys.path:
os.chdir(logic.expandPath("//../"))
sys.path.append(".")
try:
from scripts.state import StartupState, DefaultState
from scripts.framework.state import StateSystem
if ('dostartup' in sys.argv):
logic.state_system = StateSystem(StartupState)
else:
print("Skipping startup")
logic.state_system = StateSystem(DefaultState)
except:
import traceback
traceback.print_exc()
logic.state_system = None
def run():
try:
logic.state_system
except AttributeError:
init()
if not logic.state_system:
return
logic.state_system.update()
| Use the src directory instead of the src/levels directory as the cwd. | Use the src directory instead of the src/levels directory as the cwd.
| Python | apache-2.0 | Moguri/ullur,Moguri/ullur | # Copyright 2013 Daniel Stokes, Mitchell Stokes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from bge import logic
def init():
if ".." not in sys.path:
os.chdir(logic.expandPath("//"))
sys.path.append("..")
try:
from scripts.state import StartupState, DefaultState
from scripts.framework.state import StateSystem
if ('dostartup' in sys.argv):
logic.state_system = StateSystem(StartupState)
else:
print("Skipping startup")
logic.state_system = StateSystem(DefaultState)
except:
import traceback
traceback.print_exc()
logic.state_system = None
def run():
try:
logic.state_system
except AttributeError:
init()
if not logic.state_system:
return
logic.state_system.update()
Use the src directory instead of the src/levels directory as the cwd. | # Copyright 2013 Daniel Stokes, Mitchell Stokes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from bge import logic
def init():
if ".." not in sys.path:
os.chdir(logic.expandPath("//../"))
sys.path.append(".")
try:
from scripts.state import StartupState, DefaultState
from scripts.framework.state import StateSystem
if ('dostartup' in sys.argv):
logic.state_system = StateSystem(StartupState)
else:
print("Skipping startup")
logic.state_system = StateSystem(DefaultState)
except:
import traceback
traceback.print_exc()
logic.state_system = None
def run():
try:
logic.state_system
except AttributeError:
init()
if not logic.state_system:
return
logic.state_system.update()
| <commit_before># Copyright 2013 Daniel Stokes, Mitchell Stokes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from bge import logic
def init():
if ".." not in sys.path:
os.chdir(logic.expandPath("//"))
sys.path.append("..")
try:
from scripts.state import StartupState, DefaultState
from scripts.framework.state import StateSystem
if ('dostartup' in sys.argv):
logic.state_system = StateSystem(StartupState)
else:
print("Skipping startup")
logic.state_system = StateSystem(DefaultState)
except:
import traceback
traceback.print_exc()
logic.state_system = None
def run():
try:
logic.state_system
except AttributeError:
init()
if not logic.state_system:
return
logic.state_system.update()
<commit_msg>Use the src directory instead of the src/levels directory as the cwd.<commit_after> | # Copyright 2013 Daniel Stokes, Mitchell Stokes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from bge import logic
def init():
if ".." not in sys.path:
os.chdir(logic.expandPath("//../"))
sys.path.append(".")
try:
from scripts.state import StartupState, DefaultState
from scripts.framework.state import StateSystem
if ('dostartup' in sys.argv):
logic.state_system = StateSystem(StartupState)
else:
print("Skipping startup")
logic.state_system = StateSystem(DefaultState)
except:
import traceback
traceback.print_exc()
logic.state_system = None
def run():
try:
logic.state_system
except AttributeError:
init()
if not logic.state_system:
return
logic.state_system.update()
| # Copyright 2013 Daniel Stokes, Mitchell Stokes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from bge import logic
def init():
if ".." not in sys.path:
os.chdir(logic.expandPath("//"))
sys.path.append("..")
try:
from scripts.state import StartupState, DefaultState
from scripts.framework.state import StateSystem
if ('dostartup' in sys.argv):
logic.state_system = StateSystem(StartupState)
else:
print("Skipping startup")
logic.state_system = StateSystem(DefaultState)
except:
import traceback
traceback.print_exc()
logic.state_system = None
def run():
try:
logic.state_system
except AttributeError:
init()
if not logic.state_system:
return
logic.state_system.update()
Use the src directory instead of the src/levels directory as the cwd.# Copyright 2013 Daniel Stokes, Mitchell Stokes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from bge import logic
def init():
if ".." not in sys.path:
os.chdir(logic.expandPath("//../"))
sys.path.append(".")
try:
from scripts.state import StartupState, DefaultState
from scripts.framework.state import StateSystem
if ('dostartup' in sys.argv):
logic.state_system = StateSystem(StartupState)
else:
print("Skipping startup")
logic.state_system = StateSystem(DefaultState)
except:
import traceback
traceback.print_exc()
logic.state_system = None
def run():
try:
logic.state_system
except AttributeError:
init()
if not logic.state_system:
return
logic.state_system.update()
| <commit_before># Copyright 2013 Daniel Stokes, Mitchell Stokes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from bge import logic
def init():
if ".." not in sys.path:
os.chdir(logic.expandPath("//"))
sys.path.append("..")
try:
from scripts.state import StartupState, DefaultState
from scripts.framework.state import StateSystem
if ('dostartup' in sys.argv):
logic.state_system = StateSystem(StartupState)
else:
print("Skipping startup")
logic.state_system = StateSystem(DefaultState)
except:
import traceback
traceback.print_exc()
logic.state_system = None
def run():
try:
logic.state_system
except AttributeError:
init()
if not logic.state_system:
return
logic.state_system.update()
<commit_msg>Use the src directory instead of the src/levels directory as the cwd.<commit_after># Copyright 2013 Daniel Stokes, Mitchell Stokes
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
import os
from bge import logic
def init():
if ".." not in sys.path:
os.chdir(logic.expandPath("//../"))
sys.path.append(".")
try:
from scripts.state import StartupState, DefaultState
from scripts.framework.state import StateSystem
if ('dostartup' in sys.argv):
logic.state_system = StateSystem(StartupState)
else:
print("Skipping startup")
logic.state_system = StateSystem(DefaultState)
except:
import traceback
traceback.print_exc()
logic.state_system = None
def run():
try:
logic.state_system
except AttributeError:
init()
if not logic.state_system:
return
logic.state_system.update()
|
3da6393345cca10d44c0823ef6c224a5ceaa4fcd | src/engine/SCons/Platform/darwin.py | src/engine/SCons/Platform/darwin.py | """engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
| """engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
| Fix __COPYRIGHT__ and __REVISION__ in new Darwin module. | Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.
| Python | mit | azatoth/scons,azatoth/scons,azatoth/scons,azatoth/scons,azatoth/scons | """engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module. | """engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
| <commit_before>"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
<commit_msg>Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.<commit_after> | """engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
| """engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
Fix __COPYRIGHT__ and __REVISION__ in new Darwin module."""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
| <commit_before>"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# Copyright (c) 2001, 2002, 2003, 2004 Steven Knight
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
<commit_msg>Fix __COPYRIGHT__ and __REVISION__ in new Darwin module.<commit_after>"""engine.SCons.Platform.darwin
Platform-specific initialization for Mac OS X systems.
There normally shouldn't be any need to import this module directly. It
will usually be imported through the generic SCons.Platform.Platform()
selection method.
"""
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
import posix
import os
def generate(env):
posix.generate(env)
env['SHLIBSUFFIX'] = '.dylib'
|
6480e801de5f21486d99444c25006b70329e580e | luigi/tasks/release/process_data.py | luigi/tasks/release/process_data.py | # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from tasks.ena import Ena
from tasks.ensembl.ensembl import Ensembl
from tasks.pdb import Pdb
from tasks.rfam import RfamSequences
from tasks.rfam import RfamFamilies
class ProcessData(luigi.WrapperTask): # pylint: disable=R0904
"""
This will generate the CSV's to import for all the databases we update each
release.
"""
def requires(self):
yield Ena()
yield Ensembl()
yield RfamSequences()
yield RfamFamilies()
yield Pdb()
| # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from tasks.ena import Ena
from tasks.ensembl.ensembl import Ensembl
from tasks.pdb import Pdb
from tasks.rfam import RfamSequences
from tasks.rfam import RfamFamilies
from tasks.rgd import Rgd
class ProcessData(luigi.WrapperTask): # pylint: disable=R0904
"""
This will generate the CSV's to import for all the databases we update each
release.
"""
def requires(self):
yield Ena()
yield Ensembl()
yield RfamSequences()
yield RfamFamilies()
yield Pdb()
yield Rgd()
| Add RGD as part of the regular update pipeline | Add RGD as part of the regular update pipeline
I'm not sure if this will always be part of the update, but it is for at
least for this release.
| Python | apache-2.0 | RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline,RNAcentral/rnacentral-import-pipeline | # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from tasks.ena import Ena
from tasks.ensembl.ensembl import Ensembl
from tasks.pdb import Pdb
from tasks.rfam import RfamSequences
from tasks.rfam import RfamFamilies
class ProcessData(luigi.WrapperTask): # pylint: disable=R0904
"""
This will generate the CSV's to import for all the databases we update each
release.
"""
def requires(self):
yield Ena()
yield Ensembl()
yield RfamSequences()
yield RfamFamilies()
yield Pdb()
Add RGD as part of the regular update pipeline
I'm not sure if this will always be part of the update, but it is for at
least for this release. | # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from tasks.ena import Ena
from tasks.ensembl.ensembl import Ensembl
from tasks.pdb import Pdb
from tasks.rfam import RfamSequences
from tasks.rfam import RfamFamilies
from tasks.rgd import Rgd
class ProcessData(luigi.WrapperTask): # pylint: disable=R0904
"""
This will generate the CSV's to import for all the databases we update each
release.
"""
def requires(self):
yield Ena()
yield Ensembl()
yield RfamSequences()
yield RfamFamilies()
yield Pdb()
yield Rgd()
| <commit_before># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from tasks.ena import Ena
from tasks.ensembl.ensembl import Ensembl
from tasks.pdb import Pdb
from tasks.rfam import RfamSequences
from tasks.rfam import RfamFamilies
class ProcessData(luigi.WrapperTask): # pylint: disable=R0904
"""
This will generate the CSV's to import for all the databases we update each
release.
"""
def requires(self):
yield Ena()
yield Ensembl()
yield RfamSequences()
yield RfamFamilies()
yield Pdb()
<commit_msg>Add RGD as part of the regular update pipeline
I'm not sure if this will always be part of the update, but it is for at
least for this release.<commit_after> | # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from tasks.ena import Ena
from tasks.ensembl.ensembl import Ensembl
from tasks.pdb import Pdb
from tasks.rfam import RfamSequences
from tasks.rfam import RfamFamilies
from tasks.rgd import Rgd
class ProcessData(luigi.WrapperTask): # pylint: disable=R0904
"""
This will generate the CSV's to import for all the databases we update each
release.
"""
def requires(self):
yield Ena()
yield Ensembl()
yield RfamSequences()
yield RfamFamilies()
yield Pdb()
yield Rgd()
| # -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from tasks.ena import Ena
from tasks.ensembl.ensembl import Ensembl
from tasks.pdb import Pdb
from tasks.rfam import RfamSequences
from tasks.rfam import RfamFamilies
class ProcessData(luigi.WrapperTask): # pylint: disable=R0904
"""
This will generate the CSV's to import for all the databases we update each
release.
"""
def requires(self):
yield Ena()
yield Ensembl()
yield RfamSequences()
yield RfamFamilies()
yield Pdb()
Add RGD as part of the regular update pipeline
I'm not sure if this will always be part of the update, but it is for at
least for this release.# -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from tasks.ena import Ena
from tasks.ensembl.ensembl import Ensembl
from tasks.pdb import Pdb
from tasks.rfam import RfamSequences
from tasks.rfam import RfamFamilies
from tasks.rgd import Rgd
class ProcessData(luigi.WrapperTask): # pylint: disable=R0904
"""
This will generate the CSV's to import for all the databases we update each
release.
"""
def requires(self):
yield Ena()
yield Ensembl()
yield RfamSequences()
yield RfamFamilies()
yield Pdb()
yield Rgd()
| <commit_before># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from tasks.ena import Ena
from tasks.ensembl.ensembl import Ensembl
from tasks.pdb import Pdb
from tasks.rfam import RfamSequences
from tasks.rfam import RfamFamilies
class ProcessData(luigi.WrapperTask): # pylint: disable=R0904
"""
This will generate the CSV's to import for all the databases we update each
release.
"""
def requires(self):
yield Ena()
yield Ensembl()
yield RfamSequences()
yield RfamFamilies()
yield Pdb()
<commit_msg>Add RGD as part of the regular update pipeline
I'm not sure if this will always be part of the update, but it is for at
least for this release.<commit_after># -*- coding: utf-8 -*-
"""
Copyright [2009-2017] EMBL-European Bioinformatics Institute
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import luigi
from tasks.ena import Ena
from tasks.ensembl.ensembl import Ensembl
from tasks.pdb import Pdb
from tasks.rfam import RfamSequences
from tasks.rfam import RfamFamilies
from tasks.rgd import Rgd
class ProcessData(luigi.WrapperTask): # pylint: disable=R0904
"""
This will generate the CSV's to import for all the databases we update each
release.
"""
def requires(self):
yield Ena()
yield Ensembl()
yield RfamSequences()
yield RfamFamilies()
yield Pdb()
yield Rgd()
|
57a1299e771b97c81249cd3075ff9e7e047356a6 | wikked/__init__.py | wikked/__init__.py | from flask import Flask, abort
# Create the main app.
app = Flask(__name__)
app.config.from_object('wikked.settings')
app.config.from_envvar('WIKKED_SETTINGS', silent=True)
if app.config['DEBUG']:
from werkzeug import SharedDataMiddleware
import os
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/': os.path.join(os.path.dirname(__file__), 'static')
})
# The main Wiki instance.
from wiki import Wiki
wiki = Wiki(logger=app.logger)
# Import views and user loader.
import wikked.views
# Login extension.
from flask.ext.login import LoginManager
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.user_loader(wiki.auth.getUser)
login_manager.unauthorized_handler(lambda: abort(401))
# Bcrypt extension.
from flaskext.bcrypt import Bcrypt
app.bcrypt = Bcrypt(app)
| from flask import Flask, abort
# Create the main app.
app = Flask(__name__)
app.config.from_object('wikked.settings')
app.config.from_envvar('WIKKED_SETTINGS', silent=True)
if app.config['DEBUG']:
from werkzeug import SharedDataMiddleware
import os
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/': os.path.join(os.path.dirname(__file__), 'static')
})
# The main Wiki instance.
from wiki import Wiki
wiki = Wiki(root=app.config.get('WIKI_ROOT'), logger=app.logger)
# Import views and user loader.
import wikked.views
# Login extension.
from flask.ext.login import LoginManager
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.user_loader(wiki.auth.getUser)
login_manager.unauthorized_handler(lambda: abort(401))
# Bcrypt extension.
from flaskext.bcrypt import Bcrypt
app.bcrypt = Bcrypt(app)
| Use the settings' wiki root, if any. | Use the settings' wiki root, if any.
| Python | apache-2.0 | ludovicchabant/Wikked,ludovicchabant/Wikked,ludovicchabant/Wikked | from flask import Flask, abort
# Create the main app.
app = Flask(__name__)
app.config.from_object('wikked.settings')
app.config.from_envvar('WIKKED_SETTINGS', silent=True)
if app.config['DEBUG']:
from werkzeug import SharedDataMiddleware
import os
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/': os.path.join(os.path.dirname(__file__), 'static')
})
# The main Wiki instance.
from wiki import Wiki
wiki = Wiki(logger=app.logger)
# Import views and user loader.
import wikked.views
# Login extension.
from flask.ext.login import LoginManager
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.user_loader(wiki.auth.getUser)
login_manager.unauthorized_handler(lambda: abort(401))
# Bcrypt extension.
from flaskext.bcrypt import Bcrypt
app.bcrypt = Bcrypt(app)
Use the settings' wiki root, if any. | from flask import Flask, abort
# Create the main app.
app = Flask(__name__)
app.config.from_object('wikked.settings')
app.config.from_envvar('WIKKED_SETTINGS', silent=True)
if app.config['DEBUG']:
from werkzeug import SharedDataMiddleware
import os
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/': os.path.join(os.path.dirname(__file__), 'static')
})
# The main Wiki instance.
from wiki import Wiki
wiki = Wiki(root=app.config.get('WIKI_ROOT'), logger=app.logger)
# Import views and user loader.
import wikked.views
# Login extension.
from flask.ext.login import LoginManager
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.user_loader(wiki.auth.getUser)
login_manager.unauthorized_handler(lambda: abort(401))
# Bcrypt extension.
from flaskext.bcrypt import Bcrypt
app.bcrypt = Bcrypt(app)
| <commit_before>from flask import Flask, abort
# Create the main app.
app = Flask(__name__)
app.config.from_object('wikked.settings')
app.config.from_envvar('WIKKED_SETTINGS', silent=True)
if app.config['DEBUG']:
from werkzeug import SharedDataMiddleware
import os
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/': os.path.join(os.path.dirname(__file__), 'static')
})
# The main Wiki instance.
from wiki import Wiki
wiki = Wiki(logger=app.logger)
# Import views and user loader.
import wikked.views
# Login extension.
from flask.ext.login import LoginManager
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.user_loader(wiki.auth.getUser)
login_manager.unauthorized_handler(lambda: abort(401))
# Bcrypt extension.
from flaskext.bcrypt import Bcrypt
app.bcrypt = Bcrypt(app)
<commit_msg>Use the settings' wiki root, if any.<commit_after> | from flask import Flask, abort
# Create the main app.
app = Flask(__name__)
app.config.from_object('wikked.settings')
app.config.from_envvar('WIKKED_SETTINGS', silent=True)
if app.config['DEBUG']:
from werkzeug import SharedDataMiddleware
import os
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/': os.path.join(os.path.dirname(__file__), 'static')
})
# The main Wiki instance.
from wiki import Wiki
wiki = Wiki(root=app.config.get('WIKI_ROOT'), logger=app.logger)
# Import views and user loader.
import wikked.views
# Login extension.
from flask.ext.login import LoginManager
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.user_loader(wiki.auth.getUser)
login_manager.unauthorized_handler(lambda: abort(401))
# Bcrypt extension.
from flaskext.bcrypt import Bcrypt
app.bcrypt = Bcrypt(app)
| from flask import Flask, abort
# Create the main app.
app = Flask(__name__)
app.config.from_object('wikked.settings')
app.config.from_envvar('WIKKED_SETTINGS', silent=True)
if app.config['DEBUG']:
from werkzeug import SharedDataMiddleware
import os
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/': os.path.join(os.path.dirname(__file__), 'static')
})
# The main Wiki instance.
from wiki import Wiki
wiki = Wiki(logger=app.logger)
# Import views and user loader.
import wikked.views
# Login extension.
from flask.ext.login import LoginManager
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.user_loader(wiki.auth.getUser)
login_manager.unauthorized_handler(lambda: abort(401))
# Bcrypt extension.
from flaskext.bcrypt import Bcrypt
app.bcrypt = Bcrypt(app)
Use the settings' wiki root, if any.from flask import Flask, abort
# Create the main app.
app = Flask(__name__)
app.config.from_object('wikked.settings')
app.config.from_envvar('WIKKED_SETTINGS', silent=True)
if app.config['DEBUG']:
from werkzeug import SharedDataMiddleware
import os
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/': os.path.join(os.path.dirname(__file__), 'static')
})
# The main Wiki instance.
from wiki import Wiki
wiki = Wiki(root=app.config.get('WIKI_ROOT'), logger=app.logger)
# Import views and user loader.
import wikked.views
# Login extension.
from flask.ext.login import LoginManager
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.user_loader(wiki.auth.getUser)
login_manager.unauthorized_handler(lambda: abort(401))
# Bcrypt extension.
from flaskext.bcrypt import Bcrypt
app.bcrypt = Bcrypt(app)
| <commit_before>from flask import Flask, abort
# Create the main app.
app = Flask(__name__)
app.config.from_object('wikked.settings')
app.config.from_envvar('WIKKED_SETTINGS', silent=True)
if app.config['DEBUG']:
from werkzeug import SharedDataMiddleware
import os
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/': os.path.join(os.path.dirname(__file__), 'static')
})
# The main Wiki instance.
from wiki import Wiki
wiki = Wiki(logger=app.logger)
# Import views and user loader.
import wikked.views
# Login extension.
from flask.ext.login import LoginManager
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.user_loader(wiki.auth.getUser)
login_manager.unauthorized_handler(lambda: abort(401))
# Bcrypt extension.
from flaskext.bcrypt import Bcrypt
app.bcrypt = Bcrypt(app)
<commit_msg>Use the settings' wiki root, if any.<commit_after>from flask import Flask, abort
# Create the main app.
app = Flask(__name__)
app.config.from_object('wikked.settings')
app.config.from_envvar('WIKKED_SETTINGS', silent=True)
if app.config['DEBUG']:
from werkzeug import SharedDataMiddleware
import os
app.wsgi_app = SharedDataMiddleware(app.wsgi_app, {
'/': os.path.join(os.path.dirname(__file__), 'static')
})
# The main Wiki instance.
from wiki import Wiki
wiki = Wiki(root=app.config.get('WIKI_ROOT'), logger=app.logger)
# Import views and user loader.
import wikked.views
# Login extension.
from flask.ext.login import LoginManager
login_manager = LoginManager()
login_manager.init_app(app)
login_manager.user_loader(wiki.auth.getUser)
login_manager.unauthorized_handler(lambda: abort(401))
# Bcrypt extension.
from flaskext.bcrypt import Bcrypt
app.bcrypt = Bcrypt(app)
|
08d8f2c30c810cda75961e5bf6025f1bf348fc02 | api.py | api.py | from os import environ
from eve import Eve
from settings import API_NAME
api = Eve(API_NAME)
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
| import json
from os import environ
from eve import Eve
from settings import API_NAME, URL_PREFIX
api = Eve(API_NAME)
def add_document(resource, document):
"Add a new document to the given resource."
return api.test_client().post('/' + URL_PREFIX + '/' + resource,
data=json.dumps(document),
content_type='application/json')
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
| Add utility method to add documents | Add utility method to add documents
| Python | apache-2.0 | gwob/Maarifa,gwob/Maarifa,gwob/Maarifa,gwob/Maarifa,gwob/Maarifa | from os import environ
from eve import Eve
from settings import API_NAME
api = Eve(API_NAME)
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
Add utility method to add documents | import json
from os import environ
from eve import Eve
from settings import API_NAME, URL_PREFIX
api = Eve(API_NAME)
def add_document(resource, document):
"Add a new document to the given resource."
return api.test_client().post('/' + URL_PREFIX + '/' + resource,
data=json.dumps(document),
content_type='application/json')
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
| <commit_before>from os import environ
from eve import Eve
from settings import API_NAME
api = Eve(API_NAME)
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
<commit_msg>Add utility method to add documents<commit_after> | import json
from os import environ
from eve import Eve
from settings import API_NAME, URL_PREFIX
api = Eve(API_NAME)
def add_document(resource, document):
"Add a new document to the given resource."
return api.test_client().post('/' + URL_PREFIX + '/' + resource,
data=json.dumps(document),
content_type='application/json')
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
| from os import environ
from eve import Eve
from settings import API_NAME
api = Eve(API_NAME)
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
Add utility method to add documentsimport json
from os import environ
from eve import Eve
from settings import API_NAME, URL_PREFIX
api = Eve(API_NAME)
def add_document(resource, document):
"Add a new document to the given resource."
return api.test_client().post('/' + URL_PREFIX + '/' + resource,
data=json.dumps(document),
content_type='application/json')
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
| <commit_before>from os import environ
from eve import Eve
from settings import API_NAME
api = Eve(API_NAME)
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
<commit_msg>Add utility method to add documents<commit_after>import json
from os import environ
from eve import Eve
from settings import API_NAME, URL_PREFIX
api = Eve(API_NAME)
def add_document(resource, document):
"Add a new document to the given resource."
return api.test_client().post('/' + URL_PREFIX + '/' + resource,
data=json.dumps(document),
content_type='application/json')
if __name__ == '__main__':
# Heroku support: bind to PORT if defined, otherwise default to 5000.
if 'PORT' in environ:
port = int(environ.get('PORT'))
host = '0.0.0.0'
else:
port = 5000
host = '127.0.0.1'
api.run(host=host, port=port)
|
a93607eea32fd5e56de47bf6c7e0e3f098d7bcd8 | runTwircBot.py | runTwircBot.py | #!/usr/bin/env python
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
| #!/usr/bin/env python3
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
| Change shebang to use python3 instead of python | Change shebang to use python3 instead of python
| Python | mit | johnmarcampbell/twircBot | #!/usr/bin/env python
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
Change shebang to use python3 instead of python | #!/usr/bin/env python3
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
| <commit_before>#!/usr/bin/env python
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
<commit_msg>Change shebang to use python3 instead of python<commit_after> | #!/usr/bin/env python3
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
| #!/usr/bin/env python
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
Change shebang to use python3 instead of python#!/usr/bin/env python3
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
| <commit_before>#!/usr/bin/env python
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
<commit_msg>Change shebang to use python3 instead of python<commit_after>#!/usr/bin/env python3
from src.TwircBot import TwircBot
import sys
try:
bot = TwircBot(sys.argv[1])
except IndexError:
bot = TwircBot()
bot.print_config()
bot.start()
|
dd9161c772e3c345fd21f742b09a62d43f7fa069 | scripts/c19.py | scripts/c19.py | from __future__ import print_function
import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: c19.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Select c19').getOrCreate()
raw = spark.read.option('mergeSchema','true').load(sys.argv[1])
df = raw.filter(col('date') < '1900')
opens = df.filter(col('open') == 'true')\
.select('series', 'date', lit(1).alias('inopen')).distinct()
df.join(opens, ['series', 'date'], 'left_outer')\
.filter((col('open') == 'true') | col('inopen').isNull())\
.drop('inopen')\
.dropDuplicates(['id'])\
.write.save(sys.argv[2])
spark.stop()
| from __future__ import print_function
import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col, datediff
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: c19.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Select c19').getOrCreate()
raw = spark.read.option('mergeSchema','true').load(sys.argv[1])
df = raw.filter(col('date') < '1900')
opens = df.filter(col('open') == 'true')\
.select('series', 'date', lit(1).alias('inopen')).distinct()
df.join(opens, ['series', 'date'], 'left_outer')\
.filter((col('open') == 'true') | col('inopen').isNull())\
.drop('inopen')\
.dropDuplicates(['id'])\
.withColumn('ednum', col('ed').cast('int')).na.fill(1, ['ednum']) \
.withColumn('hour', datediff(col('date'), lit('1970-01-01')) * 24 + col('ednum') - 1) \
.drop('ednum') \
.write.save(sys.argv[2])
spark.stop()
| Add numeric time column (hour) that respects editions. | Add numeric time column (hour) that respects editions.
| Python | apache-2.0 | ViralTexts/vt-passim,ViralTexts/vt-passim,ViralTexts/vt-passim | from __future__ import print_function
import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: c19.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Select c19').getOrCreate()
raw = spark.read.option('mergeSchema','true').load(sys.argv[1])
df = raw.filter(col('date') < '1900')
opens = df.filter(col('open') == 'true')\
.select('series', 'date', lit(1).alias('inopen')).distinct()
df.join(opens, ['series', 'date'], 'left_outer')\
.filter((col('open') == 'true') | col('inopen').isNull())\
.drop('inopen')\
.dropDuplicates(['id'])\
.write.save(sys.argv[2])
spark.stop()
Add numeric time column (hour) that respects editions. | from __future__ import print_function
import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col, datediff
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: c19.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Select c19').getOrCreate()
raw = spark.read.option('mergeSchema','true').load(sys.argv[1])
df = raw.filter(col('date') < '1900')
opens = df.filter(col('open') == 'true')\
.select('series', 'date', lit(1).alias('inopen')).distinct()
df.join(opens, ['series', 'date'], 'left_outer')\
.filter((col('open') == 'true') | col('inopen').isNull())\
.drop('inopen')\
.dropDuplicates(['id'])\
.withColumn('ednum', col('ed').cast('int')).na.fill(1, ['ednum']) \
.withColumn('hour', datediff(col('date'), lit('1970-01-01')) * 24 + col('ednum') - 1) \
.drop('ednum') \
.write.save(sys.argv[2])
spark.stop()
| <commit_before>from __future__ import print_function
import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: c19.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Select c19').getOrCreate()
raw = spark.read.option('mergeSchema','true').load(sys.argv[1])
df = raw.filter(col('date') < '1900')
opens = df.filter(col('open') == 'true')\
.select('series', 'date', lit(1).alias('inopen')).distinct()
df.join(opens, ['series', 'date'], 'left_outer')\
.filter((col('open') == 'true') | col('inopen').isNull())\
.drop('inopen')\
.dropDuplicates(['id'])\
.write.save(sys.argv[2])
spark.stop()
<commit_msg>Add numeric time column (hour) that respects editions.<commit_after> | from __future__ import print_function
import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col, datediff
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: c19.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Select c19').getOrCreate()
raw = spark.read.option('mergeSchema','true').load(sys.argv[1])
df = raw.filter(col('date') < '1900')
opens = df.filter(col('open') == 'true')\
.select('series', 'date', lit(1).alias('inopen')).distinct()
df.join(opens, ['series', 'date'], 'left_outer')\
.filter((col('open') == 'true') | col('inopen').isNull())\
.drop('inopen')\
.dropDuplicates(['id'])\
.withColumn('ednum', col('ed').cast('int')).na.fill(1, ['ednum']) \
.withColumn('hour', datediff(col('date'), lit('1970-01-01')) * 24 + col('ednum') - 1) \
.drop('ednum') \
.write.save(sys.argv[2])
spark.stop()
| from __future__ import print_function
import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: c19.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Select c19').getOrCreate()
raw = spark.read.option('mergeSchema','true').load(sys.argv[1])
df = raw.filter(col('date') < '1900')
opens = df.filter(col('open') == 'true')\
.select('series', 'date', lit(1).alias('inopen')).distinct()
df.join(opens, ['series', 'date'], 'left_outer')\
.filter((col('open') == 'true') | col('inopen').isNull())\
.drop('inopen')\
.dropDuplicates(['id'])\
.write.save(sys.argv[2])
spark.stop()
Add numeric time column (hour) that respects editions.from __future__ import print_function
import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col, datediff
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: c19.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Select c19').getOrCreate()
raw = spark.read.option('mergeSchema','true').load(sys.argv[1])
df = raw.filter(col('date') < '1900')
opens = df.filter(col('open') == 'true')\
.select('series', 'date', lit(1).alias('inopen')).distinct()
df.join(opens, ['series', 'date'], 'left_outer')\
.filter((col('open') == 'true') | col('inopen').isNull())\
.drop('inopen')\
.dropDuplicates(['id'])\
.withColumn('ednum', col('ed').cast('int')).na.fill(1, ['ednum']) \
.withColumn('hour', datediff(col('date'), lit('1970-01-01')) * 24 + col('ednum') - 1) \
.drop('ednum') \
.write.save(sys.argv[2])
spark.stop()
| <commit_before>from __future__ import print_function
import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: c19.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Select c19').getOrCreate()
raw = spark.read.option('mergeSchema','true').load(sys.argv[1])
df = raw.filter(col('date') < '1900')
opens = df.filter(col('open') == 'true')\
.select('series', 'date', lit(1).alias('inopen')).distinct()
df.join(opens, ['series', 'date'], 'left_outer')\
.filter((col('open') == 'true') | col('inopen').isNull())\
.drop('inopen')\
.dropDuplicates(['id'])\
.write.save(sys.argv[2])
spark.stop()
<commit_msg>Add numeric time column (hour) that respects editions.<commit_after>from __future__ import print_function
import sys
from pyspark.sql import SparkSession
from pyspark.sql.functions import lit, col, datediff
if __name__ == "__main__":
if len(sys.argv) != 3:
print("Usage: c19.py <input> <output>", file=sys.stderr)
exit(-1)
spark = SparkSession.builder.appName('Select c19').getOrCreate()
raw = spark.read.option('mergeSchema','true').load(sys.argv[1])
df = raw.filter(col('date') < '1900')
opens = df.filter(col('open') == 'true')\
.select('series', 'date', lit(1).alias('inopen')).distinct()
df.join(opens, ['series', 'date'], 'left_outer')\
.filter((col('open') == 'true') | col('inopen').isNull())\
.drop('inopen')\
.dropDuplicates(['id'])\
.withColumn('ednum', col('ed').cast('int')).na.fill(1, ['ednum']) \
.withColumn('hour', datediff(col('date'), lit('1970-01-01')) * 24 + col('ednum') - 1) \
.drop('ednum') \
.write.save(sys.argv[2])
spark.stop()
|
fa67de4900be765a5ea4194b1a786cd237934a33 | displacy_service_tests/test_server.py | displacy_service_tests/test_server.py | import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}''')
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent', body='''{"text": "This a test that should split into sentences! This is the second. Is this the third?", "model": "en"}''')
assert sentences == ['This a test that should split into sentences!', 'This is the second.', 'Is this the third?']
| import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(
path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}'''
)
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(
path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent',
body='''{"text": "This a test that should split into sentences!
This is the second. Is this the third?", "model": "en"}'''
)
assert sentences == ['This a test that should split into sentences!',
'This is the second.', 'Is this the third?']
| Make test file PEP8 compliant. | Make test file PEP8 compliant.
| Python | mit | jgontrum/spacy-api-docker,jgontrum/spacy-api-docker,jgontrum/spacy-api-docker,jgontrum/spacy-api-docker | import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}''')
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent', body='''{"text": "This a test that should split into sentences! This is the second. Is this the third?", "model": "en"}''')
assert sentences == ['This a test that should split into sentences!', 'This is the second.', 'Is this the third?']
Make test file PEP8 compliant. | import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(
path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}'''
)
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(
path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent',
body='''{"text": "This a test that should split into sentences!
This is the second. Is this the third?", "model": "en"}'''
)
assert sentences == ['This a test that should split into sentences!',
'This is the second.', 'Is this the third?']
| <commit_before>import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}''')
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent', body='''{"text": "This a test that should split into sentences! This is the second. Is this the third?", "model": "en"}''')
assert sentences == ['This a test that should split into sentences!', 'This is the second.', 'Is this the third?']
<commit_msg>Make test file PEP8 compliant.<commit_after> | import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(
path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}'''
)
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(
path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent',
body='''{"text": "This a test that should split into sentences!
This is the second. Is this the third?", "model": "en"}'''
)
assert sentences == ['This a test that should split into sentences!',
'This is the second.', 'Is this the third?']
| import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}''')
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent', body='''{"text": "This a test that should split into sentences! This is the second. Is this the third?", "model": "en"}''')
assert sentences == ['This a test that should split into sentences!', 'This is the second.', 'Is this the third?']
Make test file PEP8 compliant.import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(
path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}'''
)
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(
path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent',
body='''{"text": "This a test that should split into sentences!
This is the second. Is this the third?", "model": "en"}'''
)
assert sentences == ['This a test that should split into sentences!',
'This is the second.', 'Is this the third?']
| <commit_before>import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}''')
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent', body='''{"text": "This a test that should split into sentences! This is the second. Is this the third?", "model": "en"}''')
assert sentences == ['This a test that should split into sentences!', 'This is the second.', 'Is this the third?']
<commit_msg>Make test file PEP8 compliant.<commit_after>import falcon.testing
import json
from displacy_service.server import APP
class TestAPI(falcon.testing.TestCase):
def __init__(self):
self.api = APP
def test_deps():
test_api = TestAPI()
result = test_api.simulate_post(
path='/dep',
body='''{"text": "This is a test.", "model": "en",
"collapse_punctuation": false,
"collapse_phrases": false}'''
)
result = json.loads(result.text)
words = [w['text'] for w in result['words']]
assert words == ["This", "is", "a", "test", "."]
def test_ents():
test_api = TestAPI()
result = test_api.simulate_post(
path='/ent',
body='''{"text": "What a great company Google is.",
"model": "en"}''')
ents = json.loads(result.text)
assert ents == [
{"start": 21, "end": 27, "type": "ORG", "text": "Google"}]
def test_sents():
test_api = TestAPI()
sentences = test_api.simulate_post(
path='/sent',
body='''{"text": "This a test that should split into sentences!
This is the second. Is this the third?", "model": "en"}'''
)
assert sentences == ['This a test that should split into sentences!',
'This is the second.', 'Is this the third?']
|
5db0af8fcf83519a44ed59d14bb00bc08e2a5131 | django_seo_js/middleware/useragent.py | django_seo_js/middleware/useragent.py | import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not hasattr(request, 'ENABLED') or not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
| import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not settings.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
| Change from request.ENABLED to settings.ENABLED | Change from request.ENABLED to settings.ENABLED | Python | mit | skoczen/django-seo-js | import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not hasattr(request, 'ENABLED') or not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
Change from request.ENABLED to settings.ENABLED | import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not settings.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
| <commit_before>import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not hasattr(request, 'ENABLED') or not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
<commit_msg>Change from request.ENABLED to settings.ENABLED<commit_after> | import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not settings.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
| import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not hasattr(request, 'ENABLED') or not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
Change from request.ENABLED to settings.ENABLEDimport re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not settings.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
| <commit_before>import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not hasattr(request, 'ENABLED') or not request.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
<commit_msg>Change from request.ENABLED to settings.ENABLED<commit_after>import re
from django_seo_js import settings
from django_seo_js.backends import SelectedBackend
from django_seo_js.helpers import request_should_be_ignored
import logging
logger = logging.getLogger(__name__)
class UserAgentMiddleware(SelectedBackend):
def __init__(self, *args, **kwargs):
super(UserAgentMiddleware, self).__init__(*args, **kwargs)
regex_str = "|".join(settings.USER_AGENTS)
regex_str = ".*?(%s)" % regex_str
self.USER_AGENT_REGEX = re.compile(regex_str, re.IGNORECASE)
def process_request(self, request):
if not settings.ENABLED:
return
if request_should_be_ignored(request):
return
if "HTTP_USER_AGENT" not in request.META:
return
if not self.USER_AGENT_REGEX.match(request.META["HTTP_USER_AGENT"]):
return
url = request.build_absolute_uri()
try:
return self.backend.get_response_for_url(url)
except Exception as e:
logger.exception(e)
|
5ff2390e462219f7dfa148c6ab1bdcecd9248484 | app.py | app.py | from flask import Flask, render_template, request
import requests
from requests_oauthlib import OAuth1
app = Flask(__name__)
app.config.from_pyfile('app.config')
@app.route('/')
def index():
term = request.args.get('q')
icons = []
if term:
auth = OAuth1(
app.config['OAUTH_KEY'],
client_secret=app.config['OAUTH_SECRET']
)
url = "http://api.thenounproject.com/icons/{0}?limit_to_public_domain=1".format(term)
response = requests.get(url, auth=auth)
if response.ok:
icons = response.json().get('icons', [])
else:
term=''
return render_template('index.html', icons=icons, query=term)
if __name__ == '__main__':
app.run(debug=True)
| from flask import Flask, render_template, request
import requests
from requests_oauthlib import OAuth1
app = Flask(__name__)
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
app.config.from_pyfile('app.config')
@app.route('/')
def index():
term = request.args.get('q')
icons = []
if term:
auth = OAuth1(
app.config['OAUTH_KEY'],
client_secret=app.config['OAUTH_SECRET']
)
url = "http://api.thenounproject.com/icons/{0}?limit_to_public_domain=1".format(term)
response = requests.get(url, auth=auth)
if response.ok:
icons = response.json().get('icons', [])
else:
term=''
return render_template('index.html', icons=icons, query=term)
if __name__ == '__main__':
app.run(debug=True)
| Trim whitespace on rendered templates | Trim whitespace on rendered templates
| Python | unlicense | ecarreras/pdicons,ecarreras/pdicons,ecarreras/pdicons,ecarreras/pdicons | from flask import Flask, render_template, request
import requests
from requests_oauthlib import OAuth1
app = Flask(__name__)
app.config.from_pyfile('app.config')
@app.route('/')
def index():
term = request.args.get('q')
icons = []
if term:
auth = OAuth1(
app.config['OAUTH_KEY'],
client_secret=app.config['OAUTH_SECRET']
)
url = "http://api.thenounproject.com/icons/{0}?limit_to_public_domain=1".format(term)
response = requests.get(url, auth=auth)
if response.ok:
icons = response.json().get('icons', [])
else:
term=''
return render_template('index.html', icons=icons, query=term)
if __name__ == '__main__':
app.run(debug=True)
Trim whitespace on rendered templates | from flask import Flask, render_template, request
import requests
from requests_oauthlib import OAuth1
app = Flask(__name__)
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
app.config.from_pyfile('app.config')
@app.route('/')
def index():
term = request.args.get('q')
icons = []
if term:
auth = OAuth1(
app.config['OAUTH_KEY'],
client_secret=app.config['OAUTH_SECRET']
)
url = "http://api.thenounproject.com/icons/{0}?limit_to_public_domain=1".format(term)
response = requests.get(url, auth=auth)
if response.ok:
icons = response.json().get('icons', [])
else:
term=''
return render_template('index.html', icons=icons, query=term)
if __name__ == '__main__':
app.run(debug=True)
| <commit_before>from flask import Flask, render_template, request
import requests
from requests_oauthlib import OAuth1
app = Flask(__name__)
app.config.from_pyfile('app.config')
@app.route('/')
def index():
term = request.args.get('q')
icons = []
if term:
auth = OAuth1(
app.config['OAUTH_KEY'],
client_secret=app.config['OAUTH_SECRET']
)
url = "http://api.thenounproject.com/icons/{0}?limit_to_public_domain=1".format(term)
response = requests.get(url, auth=auth)
if response.ok:
icons = response.json().get('icons', [])
else:
term=''
return render_template('index.html', icons=icons, query=term)
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Trim whitespace on rendered templates<commit_after> | from flask import Flask, render_template, request
import requests
from requests_oauthlib import OAuth1
app = Flask(__name__)
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
app.config.from_pyfile('app.config')
@app.route('/')
def index():
term = request.args.get('q')
icons = []
if term:
auth = OAuth1(
app.config['OAUTH_KEY'],
client_secret=app.config['OAUTH_SECRET']
)
url = "http://api.thenounproject.com/icons/{0}?limit_to_public_domain=1".format(term)
response = requests.get(url, auth=auth)
if response.ok:
icons = response.json().get('icons', [])
else:
term=''
return render_template('index.html', icons=icons, query=term)
if __name__ == '__main__':
app.run(debug=True)
| from flask import Flask, render_template, request
import requests
from requests_oauthlib import OAuth1
app = Flask(__name__)
app.config.from_pyfile('app.config')
@app.route('/')
def index():
term = request.args.get('q')
icons = []
if term:
auth = OAuth1(
app.config['OAUTH_KEY'],
client_secret=app.config['OAUTH_SECRET']
)
url = "http://api.thenounproject.com/icons/{0}?limit_to_public_domain=1".format(term)
response = requests.get(url, auth=auth)
if response.ok:
icons = response.json().get('icons', [])
else:
term=''
return render_template('index.html', icons=icons, query=term)
if __name__ == '__main__':
app.run(debug=True)
Trim whitespace on rendered templatesfrom flask import Flask, render_template, request
import requests
from requests_oauthlib import OAuth1
app = Flask(__name__)
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
app.config.from_pyfile('app.config')
@app.route('/')
def index():
term = request.args.get('q')
icons = []
if term:
auth = OAuth1(
app.config['OAUTH_KEY'],
client_secret=app.config['OAUTH_SECRET']
)
url = "http://api.thenounproject.com/icons/{0}?limit_to_public_domain=1".format(term)
response = requests.get(url, auth=auth)
if response.ok:
icons = response.json().get('icons', [])
else:
term=''
return render_template('index.html', icons=icons, query=term)
if __name__ == '__main__':
app.run(debug=True)
| <commit_before>from flask import Flask, render_template, request
import requests
from requests_oauthlib import OAuth1
app = Flask(__name__)
app.config.from_pyfile('app.config')
@app.route('/')
def index():
term = request.args.get('q')
icons = []
if term:
auth = OAuth1(
app.config['OAUTH_KEY'],
client_secret=app.config['OAUTH_SECRET']
)
url = "http://api.thenounproject.com/icons/{0}?limit_to_public_domain=1".format(term)
response = requests.get(url, auth=auth)
if response.ok:
icons = response.json().get('icons', [])
else:
term=''
return render_template('index.html', icons=icons, query=term)
if __name__ == '__main__':
app.run(debug=True)
<commit_msg>Trim whitespace on rendered templates<commit_after>from flask import Flask, render_template, request
import requests
from requests_oauthlib import OAuth1
app = Flask(__name__)
app.jinja_env.trim_blocks = True
app.jinja_env.lstrip_blocks = True
app.config.from_pyfile('app.config')
@app.route('/')
def index():
term = request.args.get('q')
icons = []
if term:
auth = OAuth1(
app.config['OAUTH_KEY'],
client_secret=app.config['OAUTH_SECRET']
)
url = "http://api.thenounproject.com/icons/{0}?limit_to_public_domain=1".format(term)
response = requests.get(url, auth=auth)
if response.ok:
icons = response.json().get('icons', [])
else:
term=''
return render_template('index.html', icons=icons, query=term)
if __name__ == '__main__':
app.run(debug=True)
|
7729af745b9894cfb07d99f754465934a90da40d | gigs/settings_prod.py | gigs/settings_prod.py | from gigs.settings_base import *
DEBUG = False
TEMPLATE_DEBUG = False
ADMINS = ()
INSTALLED_APPS += (
'gunicorn',
)
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'default': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg.log',
'maxBytes': 1024*1024*50, # 50 MB
'backupCount': 5,
'formatter':'standard',
},
'request_handler': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg_request.log',
'maxBytes': 1024*1024*50, # 5 MB
'backupCount': 5,
'formatter':'standard',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
},
'django.request': {
'handlers': ['request_handler'],
'level': 'DEBUG',
'propagate': False
},
},
}
| from gigs.settings_base import *
DEBUG = False
TEMPLATE_DEBUG = False
ADMINS = ()
INSTALLED_APPS += (
'gunicorn',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'django',
'OPTIONS':{
'read_default_file':'/etc/tugg/my.cnf',
},
},
}
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'default': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg.log',
'maxBytes': 1024*1024*50, # 50 MB
'backupCount': 5,
'formatter':'standard',
},
'request_handler': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg_request.log',
'maxBytes': 1024*1024*50, # 5 MB
'backupCount': 5,
'formatter':'standard',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
},
'django.request': {
'handlers': ['request_handler'],
'level': 'DEBUG',
'propagate': False
},
},
}
| Add databse config to production settings file | Add databse config to production settings file
| Python | bsd-3-clause | shaunokeefe/gigs,shaunokeefe/gigs | from gigs.settings_base import *
DEBUG = False
TEMPLATE_DEBUG = False
ADMINS = ()
INSTALLED_APPS += (
'gunicorn',
)
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'default': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg.log',
'maxBytes': 1024*1024*50, # 50 MB
'backupCount': 5,
'formatter':'standard',
},
'request_handler': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg_request.log',
'maxBytes': 1024*1024*50, # 5 MB
'backupCount': 5,
'formatter':'standard',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
},
'django.request': {
'handlers': ['request_handler'],
'level': 'DEBUG',
'propagate': False
},
},
}
Add databse config to production settings file | from gigs.settings_base import *
DEBUG = False
TEMPLATE_DEBUG = False
ADMINS = ()
INSTALLED_APPS += (
'gunicorn',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'django',
'OPTIONS':{
'read_default_file':'/etc/tugg/my.cnf',
},
},
}
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'default': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg.log',
'maxBytes': 1024*1024*50, # 50 MB
'backupCount': 5,
'formatter':'standard',
},
'request_handler': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg_request.log',
'maxBytes': 1024*1024*50, # 5 MB
'backupCount': 5,
'formatter':'standard',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
},
'django.request': {
'handlers': ['request_handler'],
'level': 'DEBUG',
'propagate': False
},
},
}
| <commit_before>from gigs.settings_base import *
DEBUG = False
TEMPLATE_DEBUG = False
ADMINS = ()
INSTALLED_APPS += (
'gunicorn',
)
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'default': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg.log',
'maxBytes': 1024*1024*50, # 50 MB
'backupCount': 5,
'formatter':'standard',
},
'request_handler': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg_request.log',
'maxBytes': 1024*1024*50, # 5 MB
'backupCount': 5,
'formatter':'standard',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
},
'django.request': {
'handlers': ['request_handler'],
'level': 'DEBUG',
'propagate': False
},
},
}
<commit_msg>Add databse config to production settings file<commit_after> | from gigs.settings_base import *
DEBUG = False
TEMPLATE_DEBUG = False
ADMINS = ()
INSTALLED_APPS += (
'gunicorn',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'django',
'OPTIONS':{
'read_default_file':'/etc/tugg/my.cnf',
},
},
}
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'default': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg.log',
'maxBytes': 1024*1024*50, # 50 MB
'backupCount': 5,
'formatter':'standard',
},
'request_handler': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg_request.log',
'maxBytes': 1024*1024*50, # 5 MB
'backupCount': 5,
'formatter':'standard',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
},
'django.request': {
'handlers': ['request_handler'],
'level': 'DEBUG',
'propagate': False
},
},
}
| from gigs.settings_base import *
DEBUG = False
TEMPLATE_DEBUG = False
ADMINS = ()
INSTALLED_APPS += (
'gunicorn',
)
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'default': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg.log',
'maxBytes': 1024*1024*50, # 50 MB
'backupCount': 5,
'formatter':'standard',
},
'request_handler': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg_request.log',
'maxBytes': 1024*1024*50, # 5 MB
'backupCount': 5,
'formatter':'standard',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
},
'django.request': {
'handlers': ['request_handler'],
'level': 'DEBUG',
'propagate': False
},
},
}
Add databse config to production settings filefrom gigs.settings_base import *
DEBUG = False
TEMPLATE_DEBUG = False
ADMINS = ()
INSTALLED_APPS += (
'gunicorn',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'django',
'OPTIONS':{
'read_default_file':'/etc/tugg/my.cnf',
},
},
}
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'default': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg.log',
'maxBytes': 1024*1024*50, # 50 MB
'backupCount': 5,
'formatter':'standard',
},
'request_handler': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg_request.log',
'maxBytes': 1024*1024*50, # 5 MB
'backupCount': 5,
'formatter':'standard',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
},
'django.request': {
'handlers': ['request_handler'],
'level': 'DEBUG',
'propagate': False
},
},
}
| <commit_before>from gigs.settings_base import *
DEBUG = False
TEMPLATE_DEBUG = False
ADMINS = ()
INSTALLED_APPS += (
'gunicorn',
)
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'default': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg.log',
'maxBytes': 1024*1024*50, # 50 MB
'backupCount': 5,
'formatter':'standard',
},
'request_handler': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg_request.log',
'maxBytes': 1024*1024*50, # 5 MB
'backupCount': 5,
'formatter':'standard',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
},
'django.request': {
'handlers': ['request_handler'],
'level': 'DEBUG',
'propagate': False
},
},
}
<commit_msg>Add databse config to production settings file<commit_after>from gigs.settings_base import *
DEBUG = False
TEMPLATE_DEBUG = False
ADMINS = ()
INSTALLED_APPS += (
'gunicorn',
)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'django',
'OPTIONS':{
'read_default_file':'/etc/tugg/my.cnf',
},
},
}
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'formatters': {
'standard': {
'format': '%(asctime)s [%(levelname)s] %(name)s: %(message)s'
},
},
'handlers': {
'default': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg.log',
'maxBytes': 1024*1024*50, # 50 MB
'backupCount': 5,
'formatter':'standard',
},
'request_handler': {
'level':'DEBUG',
'class':'logging.handlers.RotatingFileHandler',
'filename': '/var/log/tugg/tugg_request.log',
'maxBytes': 1024*1024*50, # 5 MB
'backupCount': 5,
'formatter':'standard',
},
},
'loggers': {
'': {
'handlers': ['default'],
'level': 'DEBUG',
'propagate': True
},
'django.request': {
'handlers': ['request_handler'],
'level': 'DEBUG',
'propagate': False
},
},
}
|
9826c49225a3d8aac5ab5432e261babaa2585c1e | PRESUBMIT.py | PRESUBMIT.py | # Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""Presubmit script for dart_ci repository.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into git cl.
"""
import subprocess
def _NeedsFormat(path):
return subprocess.call(['dartfmt', '--set-exit-if-changed', '-n', path]) != 0
def _CheckDartFormat(input_api, output_api):
files = [
git_file.AbsoluteLocalPath()
for git_file in input_api.AffectedTextFiles()
]
unformatted_files = [
path for path in files
if path.endswith('.dart') and _NeedsFormat(path)
]
if unformatted_files:
escapedNewline = ' \\\n'
return [
output_api.PresubmitError(
'File output does not match dartfmt.\n'
'Fix these issues with:\n'
'dartfmt -w%s%s' %
(escapedNewline, escapedNewline.join(unformatted_files)))
]
return []
def CommonChecks(input_api, output_api):
return _CheckDartFormat(input_api, output_api)
CheckChangeOnCommit = CommonChecks
CheckChangeOnUpload = CommonChecks
| # Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""Presubmit script for dart_ci repository.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into git cl.
"""
import subprocess
def _NeedsFormat(path):
return subprocess.call(['dart', 'format', '--set-exit-if-changed',
'--output','none', path]) != 0
def _CheckDartFormat(input_api, output_api):
files = [
git_file.AbsoluteLocalPath()
for git_file in input_api.AffectedTextFiles()
]
unformatted_files = [
path for path in files
if path.endswith('.dart') and _NeedsFormat(path)
]
if unformatted_files:
escapedNewline = ' \\\n'
return [
output_api.PresubmitError(
'File output does not match dartfmt.\n'
'Fix these issues with:\n'
'dart format%s%s' %
(escapedNewline, escapedNewline.join(unformatted_files)))
]
return []
def CommonChecks(input_api, output_api):
return _CheckDartFormat(input_api, output_api)
CheckChangeOnCommit = CommonChecks
CheckChangeOnUpload = CommonChecks
| Update presubmit to use 'dart format' command | Update presubmit to use 'dart format' command
The presubmit checks will then work with the Flutter SDK in the path.
This is the case when working on the Flutter web app current_results_ui.
Change-Id: I8a8d5db4454b57bc7936197032460e877037b386
Reviewed-on: https://dart-review.googlesource.com/c/dart_ci/+/191921
Reviewed-by: Alexander Thomas <29642742b6693024c89de8232f2e2542cf7eedf7@google.com>
| Python | bsd-3-clause | dart-lang/dart_ci,dart-lang/dart_ci,dart-lang/dart_ci,dart-lang/dart_ci | # Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""Presubmit script for dart_ci repository.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into git cl.
"""
import subprocess
def _NeedsFormat(path):
return subprocess.call(['dartfmt', '--set-exit-if-changed', '-n', path]) != 0
def _CheckDartFormat(input_api, output_api):
files = [
git_file.AbsoluteLocalPath()
for git_file in input_api.AffectedTextFiles()
]
unformatted_files = [
path for path in files
if path.endswith('.dart') and _NeedsFormat(path)
]
if unformatted_files:
escapedNewline = ' \\\n'
return [
output_api.PresubmitError(
'File output does not match dartfmt.\n'
'Fix these issues with:\n'
'dartfmt -w%s%s' %
(escapedNewline, escapedNewline.join(unformatted_files)))
]
return []
def CommonChecks(input_api, output_api):
return _CheckDartFormat(input_api, output_api)
CheckChangeOnCommit = CommonChecks
CheckChangeOnUpload = CommonChecks
Update presubmit to use 'dart format' command
The presubmit checks will then work with the Flutter SDK in the path.
This is the case when working on the Flutter web app current_results_ui.
Change-Id: I8a8d5db4454b57bc7936197032460e877037b386
Reviewed-on: https://dart-review.googlesource.com/c/dart_ci/+/191921
Reviewed-by: Alexander Thomas <29642742b6693024c89de8232f2e2542cf7eedf7@google.com> | # Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""Presubmit script for dart_ci repository.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into git cl.
"""
import subprocess
def _NeedsFormat(path):
return subprocess.call(['dart', 'format', '--set-exit-if-changed',
'--output','none', path]) != 0
def _CheckDartFormat(input_api, output_api):
files = [
git_file.AbsoluteLocalPath()
for git_file in input_api.AffectedTextFiles()
]
unformatted_files = [
path for path in files
if path.endswith('.dart') and _NeedsFormat(path)
]
if unformatted_files:
escapedNewline = ' \\\n'
return [
output_api.PresubmitError(
'File output does not match dartfmt.\n'
'Fix these issues with:\n'
'dart format%s%s' %
(escapedNewline, escapedNewline.join(unformatted_files)))
]
return []
def CommonChecks(input_api, output_api):
return _CheckDartFormat(input_api, output_api)
CheckChangeOnCommit = CommonChecks
CheckChangeOnUpload = CommonChecks
| <commit_before># Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""Presubmit script for dart_ci repository.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into git cl.
"""
import subprocess
def _NeedsFormat(path):
return subprocess.call(['dartfmt', '--set-exit-if-changed', '-n', path]) != 0
def _CheckDartFormat(input_api, output_api):
files = [
git_file.AbsoluteLocalPath()
for git_file in input_api.AffectedTextFiles()
]
unformatted_files = [
path for path in files
if path.endswith('.dart') and _NeedsFormat(path)
]
if unformatted_files:
escapedNewline = ' \\\n'
return [
output_api.PresubmitError(
'File output does not match dartfmt.\n'
'Fix these issues with:\n'
'dartfmt -w%s%s' %
(escapedNewline, escapedNewline.join(unformatted_files)))
]
return []
def CommonChecks(input_api, output_api):
return _CheckDartFormat(input_api, output_api)
CheckChangeOnCommit = CommonChecks
CheckChangeOnUpload = CommonChecks
<commit_msg>Update presubmit to use 'dart format' command
The presubmit checks will then work with the Flutter SDK in the path.
This is the case when working on the Flutter web app current_results_ui.
Change-Id: I8a8d5db4454b57bc7936197032460e877037b386
Reviewed-on: https://dart-review.googlesource.com/c/dart_ci/+/191921
Reviewed-by: Alexander Thomas <29642742b6693024c89de8232f2e2542cf7eedf7@google.com><commit_after> | # Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""Presubmit script for dart_ci repository.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into git cl.
"""
import subprocess
def _NeedsFormat(path):
return subprocess.call(['dart', 'format', '--set-exit-if-changed',
'--output','none', path]) != 0
def _CheckDartFormat(input_api, output_api):
files = [
git_file.AbsoluteLocalPath()
for git_file in input_api.AffectedTextFiles()
]
unformatted_files = [
path for path in files
if path.endswith('.dart') and _NeedsFormat(path)
]
if unformatted_files:
escapedNewline = ' \\\n'
return [
output_api.PresubmitError(
'File output does not match dartfmt.\n'
'Fix these issues with:\n'
'dart format%s%s' %
(escapedNewline, escapedNewline.join(unformatted_files)))
]
return []
def CommonChecks(input_api, output_api):
return _CheckDartFormat(input_api, output_api)
CheckChangeOnCommit = CommonChecks
CheckChangeOnUpload = CommonChecks
| # Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""Presubmit script for dart_ci repository.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into git cl.
"""
import subprocess
def _NeedsFormat(path):
return subprocess.call(['dartfmt', '--set-exit-if-changed', '-n', path]) != 0
def _CheckDartFormat(input_api, output_api):
files = [
git_file.AbsoluteLocalPath()
for git_file in input_api.AffectedTextFiles()
]
unformatted_files = [
path for path in files
if path.endswith('.dart') and _NeedsFormat(path)
]
if unformatted_files:
escapedNewline = ' \\\n'
return [
output_api.PresubmitError(
'File output does not match dartfmt.\n'
'Fix these issues with:\n'
'dartfmt -w%s%s' %
(escapedNewline, escapedNewline.join(unformatted_files)))
]
return []
def CommonChecks(input_api, output_api):
return _CheckDartFormat(input_api, output_api)
CheckChangeOnCommit = CommonChecks
CheckChangeOnUpload = CommonChecks
Update presubmit to use 'dart format' command
The presubmit checks will then work with the Flutter SDK in the path.
This is the case when working on the Flutter web app current_results_ui.
Change-Id: I8a8d5db4454b57bc7936197032460e877037b386
Reviewed-on: https://dart-review.googlesource.com/c/dart_ci/+/191921
Reviewed-by: Alexander Thomas <29642742b6693024c89de8232f2e2542cf7eedf7@google.com># Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""Presubmit script for dart_ci repository.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into git cl.
"""
import subprocess
def _NeedsFormat(path):
return subprocess.call(['dart', 'format', '--set-exit-if-changed',
'--output','none', path]) != 0
def _CheckDartFormat(input_api, output_api):
files = [
git_file.AbsoluteLocalPath()
for git_file in input_api.AffectedTextFiles()
]
unformatted_files = [
path for path in files
if path.endswith('.dart') and _NeedsFormat(path)
]
if unformatted_files:
escapedNewline = ' \\\n'
return [
output_api.PresubmitError(
'File output does not match dartfmt.\n'
'Fix these issues with:\n'
'dart format%s%s' %
(escapedNewline, escapedNewline.join(unformatted_files)))
]
return []
def CommonChecks(input_api, output_api):
return _CheckDartFormat(input_api, output_api)
CheckChangeOnCommit = CommonChecks
CheckChangeOnUpload = CommonChecks
| <commit_before># Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""Presubmit script for dart_ci repository.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into git cl.
"""
import subprocess
def _NeedsFormat(path):
return subprocess.call(['dartfmt', '--set-exit-if-changed', '-n', path]) != 0
def _CheckDartFormat(input_api, output_api):
files = [
git_file.AbsoluteLocalPath()
for git_file in input_api.AffectedTextFiles()
]
unformatted_files = [
path for path in files
if path.endswith('.dart') and _NeedsFormat(path)
]
if unformatted_files:
escapedNewline = ' \\\n'
return [
output_api.PresubmitError(
'File output does not match dartfmt.\n'
'Fix these issues with:\n'
'dartfmt -w%s%s' %
(escapedNewline, escapedNewline.join(unformatted_files)))
]
return []
def CommonChecks(input_api, output_api):
return _CheckDartFormat(input_api, output_api)
CheckChangeOnCommit = CommonChecks
CheckChangeOnUpload = CommonChecks
<commit_msg>Update presubmit to use 'dart format' command
The presubmit checks will then work with the Flutter SDK in the path.
This is the case when working on the Flutter web app current_results_ui.
Change-Id: I8a8d5db4454b57bc7936197032460e877037b386
Reviewed-on: https://dart-review.googlesource.com/c/dart_ci/+/191921
Reviewed-by: Alexander Thomas <29642742b6693024c89de8232f2e2542cf7eedf7@google.com><commit_after># Copyright (c) 2020, the Dart project authors. Please see the AUTHORS file
# for details. All rights reserved. Use of this source code is governed by a
# BSD-style license that can be found in the LICENSE file.
"""Presubmit script for dart_ci repository.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into git cl.
"""
import subprocess
def _NeedsFormat(path):
return subprocess.call(['dart', 'format', '--set-exit-if-changed',
'--output','none', path]) != 0
def _CheckDartFormat(input_api, output_api):
files = [
git_file.AbsoluteLocalPath()
for git_file in input_api.AffectedTextFiles()
]
unformatted_files = [
path for path in files
if path.endswith('.dart') and _NeedsFormat(path)
]
if unformatted_files:
escapedNewline = ' \\\n'
return [
output_api.PresubmitError(
'File output does not match dartfmt.\n'
'Fix these issues with:\n'
'dart format%s%s' %
(escapedNewline, escapedNewline.join(unformatted_files)))
]
return []
def CommonChecks(input_api, output_api):
return _CheckDartFormat(input_api, output_api)
CheckChangeOnCommit = CommonChecks
CheckChangeOnUpload = CommonChecks
|
f64fabb83cf57e70f938f803ee0a50599f3ab83a | src/odin/fields/future.py | src/odin/fields/future.py | from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and (value in self.enum):
return value.value
| from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and isinstance(value, self.enum):
return value.value
| Update enum value to an is-instance check | Update enum value to an is-instance check
| Python | bsd-3-clause | python-odin/odin | from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and (value in self.enum):
return value.value
Update enum value to an is-instance check | from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and isinstance(value, self.enum):
return value.value
| <commit_before>from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and (value in self.enum):
return value.value
<commit_msg>Update enum value to an is-instance check<commit_after> | from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and isinstance(value, self.enum):
return value.value
| from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and (value in self.enum):
return value.value
Update enum value to an is-instance checkfrom __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and isinstance(value, self.enum):
return value.value
| <commit_before>from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and (value in self.enum):
return value.value
<commit_msg>Update enum value to an is-instance check<commit_after>from __future__ import absolute_import
from enum import Enum
from typing import TypeVar, Optional, Any, Type # noqa
from odin.exceptions import ValidationError
from . import Field
__all__ = ("EnumField",)
ET = TypeVar("ET", Enum, Enum)
class EnumField(Field):
"""
Field for handling Python enums.
"""
data_type_name = "Enum"
def __init__(self, enum, **options):
# type: (Type[ET], **Any) -> None
# Generate choices structure from choices
choices = options.pop("choices", None)
options["choices"] = tuple((e, e.name) for e in choices or enum)
super(EnumField, self).__init__(**options)
self.enum = enum
@property
def choices_doc_text(self):
"""
Choices converted for documentation purposes.
"""
return tuple((v.value, n) for v, n in self.choices)
def to_python(self, value):
# type: (Any) -> Optional[ET]
if value is None:
return
# Attempt to convert
try:
return self.enum(value)
except ValueError:
# If value is an empty string return None
# Do this check here to support enums that define an option using
# an empty string.
if value == "":
return
raise ValidationError(self.error_messages["invalid_choice"] % value)
def prepare(self, value):
# type: (Optional[ET]) -> Any
if (value is not None) and isinstance(value, self.enum):
return value.value
|
c3cf9adc2428c4058817daa1aeefca300363e21f | glanerbeard/server.py | glanerbeard/server.py | import requests
import json
import logging
from glanerbeard import show
log = logging.getLogger(__name__)
class Server:
def __init__(self, name, url, apikey):
self.name = name
self.url = url
self.apikey = apikey
def requestJson(self, path):
url = '{url}/api/{apikey}{path}'.format(url=self.url,apikey=self.apikey,path=path)
response = requests.get(url)
response_str = response.content.decode('utf-8')
result = json.loads(response_str)
return result
def getShows(self):
shows = show.fromJson(self.requestJson('/?cmd=shows'))
for s in shows:
s.addServer(self)
return shows
def __repr__(self):
return 'Server {name} at {url}'.format(name=self.name,url=self.url)
def fromConfig(serverdict, apikeydict):
result = []
for name,url in serverdict.items():
result.append(Server(name, url, apikeydict[name]))
return result
| import requests
import logging
from glanerbeard import show
log = logging.getLogger(__name__)
class Server:
def __init__(self, name, url, apikey):
self.name = name
self.url = url
self.apikey = apikey
def requestJson(self, path):
url = '{url}/api/{apikey}{path}'.format(url=self.url,apikey=self.apikey,path=path)
return requests.get(url, verify=False).json()
def getShows(self):
shows = show.fromJson(self.requestJson('/?cmd=shows'))
for s in shows:
s.addServer(self)
return shows
def __repr__(self):
return 'Server {name} at {url}'.format(name=self.name,url=self.url)
def fromConfig(serverdict, apikeydict):
result = []
for name,url in serverdict.items():
result.append(Server(name, url, apikeydict[name]))
return result
| Use requests built-in .json(), pass verify=False. | Use requests built-in .json(), pass verify=False.
| Python | apache-2.0 | daenney/glanerbeard | import requests
import json
import logging
from glanerbeard import show
log = logging.getLogger(__name__)
class Server:
def __init__(self, name, url, apikey):
self.name = name
self.url = url
self.apikey = apikey
def requestJson(self, path):
url = '{url}/api/{apikey}{path}'.format(url=self.url,apikey=self.apikey,path=path)
response = requests.get(url)
response_str = response.content.decode('utf-8')
result = json.loads(response_str)
return result
def getShows(self):
shows = show.fromJson(self.requestJson('/?cmd=shows'))
for s in shows:
s.addServer(self)
return shows
def __repr__(self):
return 'Server {name} at {url}'.format(name=self.name,url=self.url)
def fromConfig(serverdict, apikeydict):
result = []
for name,url in serverdict.items():
result.append(Server(name, url, apikeydict[name]))
return result
Use requests built-in .json(), pass verify=False. | import requests
import logging
from glanerbeard import show
log = logging.getLogger(__name__)
class Server:
def __init__(self, name, url, apikey):
self.name = name
self.url = url
self.apikey = apikey
def requestJson(self, path):
url = '{url}/api/{apikey}{path}'.format(url=self.url,apikey=self.apikey,path=path)
return requests.get(url, verify=False).json()
def getShows(self):
shows = show.fromJson(self.requestJson('/?cmd=shows'))
for s in shows:
s.addServer(self)
return shows
def __repr__(self):
return 'Server {name} at {url}'.format(name=self.name,url=self.url)
def fromConfig(serverdict, apikeydict):
result = []
for name,url in serverdict.items():
result.append(Server(name, url, apikeydict[name]))
return result
| <commit_before>import requests
import json
import logging
from glanerbeard import show
log = logging.getLogger(__name__)
class Server:
def __init__(self, name, url, apikey):
self.name = name
self.url = url
self.apikey = apikey
def requestJson(self, path):
url = '{url}/api/{apikey}{path}'.format(url=self.url,apikey=self.apikey,path=path)
response = requests.get(url)
response_str = response.content.decode('utf-8')
result = json.loads(response_str)
return result
def getShows(self):
shows = show.fromJson(self.requestJson('/?cmd=shows'))
for s in shows:
s.addServer(self)
return shows
def __repr__(self):
return 'Server {name} at {url}'.format(name=self.name,url=self.url)
def fromConfig(serverdict, apikeydict):
result = []
for name,url in serverdict.items():
result.append(Server(name, url, apikeydict[name]))
return result
<commit_msg>Use requests built-in .json(), pass verify=False.<commit_after> | import requests
import logging
from glanerbeard import show
log = logging.getLogger(__name__)
class Server:
def __init__(self, name, url, apikey):
self.name = name
self.url = url
self.apikey = apikey
def requestJson(self, path):
url = '{url}/api/{apikey}{path}'.format(url=self.url,apikey=self.apikey,path=path)
return requests.get(url, verify=False).json()
def getShows(self):
shows = show.fromJson(self.requestJson('/?cmd=shows'))
for s in shows:
s.addServer(self)
return shows
def __repr__(self):
return 'Server {name} at {url}'.format(name=self.name,url=self.url)
def fromConfig(serverdict, apikeydict):
result = []
for name,url in serverdict.items():
result.append(Server(name, url, apikeydict[name]))
return result
| import requests
import json
import logging
from glanerbeard import show
log = logging.getLogger(__name__)
class Server:
def __init__(self, name, url, apikey):
self.name = name
self.url = url
self.apikey = apikey
def requestJson(self, path):
url = '{url}/api/{apikey}{path}'.format(url=self.url,apikey=self.apikey,path=path)
response = requests.get(url)
response_str = response.content.decode('utf-8')
result = json.loads(response_str)
return result
def getShows(self):
shows = show.fromJson(self.requestJson('/?cmd=shows'))
for s in shows:
s.addServer(self)
return shows
def __repr__(self):
return 'Server {name} at {url}'.format(name=self.name,url=self.url)
def fromConfig(serverdict, apikeydict):
result = []
for name,url in serverdict.items():
result.append(Server(name, url, apikeydict[name]))
return result
Use requests built-in .json(), pass verify=False.import requests
import logging
from glanerbeard import show
log = logging.getLogger(__name__)
class Server:
def __init__(self, name, url, apikey):
self.name = name
self.url = url
self.apikey = apikey
def requestJson(self, path):
url = '{url}/api/{apikey}{path}'.format(url=self.url,apikey=self.apikey,path=path)
return requests.get(url, verify=False).json()
def getShows(self):
shows = show.fromJson(self.requestJson('/?cmd=shows'))
for s in shows:
s.addServer(self)
return shows
def __repr__(self):
return 'Server {name} at {url}'.format(name=self.name,url=self.url)
def fromConfig(serverdict, apikeydict):
result = []
for name,url in serverdict.items():
result.append(Server(name, url, apikeydict[name]))
return result
| <commit_before>import requests
import json
import logging
from glanerbeard import show
log = logging.getLogger(__name__)
class Server:
def __init__(self, name, url, apikey):
self.name = name
self.url = url
self.apikey = apikey
def requestJson(self, path):
url = '{url}/api/{apikey}{path}'.format(url=self.url,apikey=self.apikey,path=path)
response = requests.get(url)
response_str = response.content.decode('utf-8')
result = json.loads(response_str)
return result
def getShows(self):
shows = show.fromJson(self.requestJson('/?cmd=shows'))
for s in shows:
s.addServer(self)
return shows
def __repr__(self):
return 'Server {name} at {url}'.format(name=self.name,url=self.url)
def fromConfig(serverdict, apikeydict):
result = []
for name,url in serverdict.items():
result.append(Server(name, url, apikeydict[name]))
return result
<commit_msg>Use requests built-in .json(), pass verify=False.<commit_after>import requests
import logging
from glanerbeard import show
log = logging.getLogger(__name__)
class Server:
def __init__(self, name, url, apikey):
self.name = name
self.url = url
self.apikey = apikey
def requestJson(self, path):
url = '{url}/api/{apikey}{path}'.format(url=self.url,apikey=self.apikey,path=path)
return requests.get(url, verify=False).json()
def getShows(self):
shows = show.fromJson(self.requestJson('/?cmd=shows'))
for s in shows:
s.addServer(self)
return shows
def __repr__(self):
return 'Server {name} at {url}'.format(name=self.name,url=self.url)
def fromConfig(serverdict, apikeydict):
result = []
for name,url in serverdict.items():
result.append(Server(name, url, apikeydict[name]))
return result
|
321924fff843896fc67d3a4594d635546cf90bec | mycli/packages/expanded.py | mycli/packages/expanded.py | from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
| from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
if value is None: value = '<null>'
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
| Make the null value consistent between vertical and tabular output. | Make the null value consistent between vertical and tabular output.
| Python | bsd-3-clause | MnO2/rediscli,martijnengler/mycli,qbdsoft/mycli,j-bennet/mycli,D-e-e-m-o/mycli,mdsrosa/mycli,martijnengler/mycli,mattn/mycli,jinstrive/mycli,webwlsong/mycli,brewneaux/mycli,danieljwest/mycli,thanatoskira/mycli,ksmaheshkumar/mycli,brewneaux/mycli,MnO2/rediscli,evook/mycli,webwlsong/mycli,j-bennet/mycli,evook/mycli,suzukaze/mycli,chenpingzhao/mycli,shoma/mycli,ZuoGuocai/mycli,tkuipers/mycli,ksmaheshkumar/mycli,mattn/mycli,ZuoGuocai/mycli,douglasvegas/mycli,oguzy/mycli,nkhuyu/mycli,oguzy/mycli,douglasvegas/mycli,shoma/mycli,qbdsoft/mycli,tkuipers/mycli,danieljwest/mycli,suzukaze/mycli,mdsrosa/mycli,jinstrive/mycli,chenpingzhao/mycli,nkhuyu/mycli,D-e-e-m-o/mycli,thanatoskira/mycli | from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
Make the null value consistent between vertical and tabular output. | from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
if value is None: value = '<null>'
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
| <commit_before>from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
<commit_msg>Make the null value consistent between vertical and tabular output.<commit_after> | from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
if value is None: value = '<null>'
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
| from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
Make the null value consistent between vertical and tabular output.from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
if value is None: value = '<null>'
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
| <commit_before>from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
<commit_msg>Make the null value consistent between vertical and tabular output.<commit_after>from .tabulate import _text_type
def pad(field, total, char=u" "):
return field + (char * (total - len(field)))
def get_separator(num, header_len, data_len):
sep = u"***************************[ %d. row ]***************************\n" % (num + 1)
return sep
def expanded_table(rows, headers):
header_len = max([len(x) for x in headers])
max_row_len = 0
results = []
padded_headers = [pad(x, header_len) + u" |" for x in headers]
header_len += 2
for row in rows:
row_len = max([len(_text_type(x)) for x in row])
row_result = []
if row_len > max_row_len:
max_row_len = row_len
for header, value in zip(padded_headers, row):
if value is None: value = '<null>'
row_result.append(u"%s %s" % (header, value))
results.append('\n'.join(row_result))
output = []
for i, result in enumerate(results):
output.append(get_separator(i, header_len, max_row_len))
output.append(result)
output.append('\n')
return ''.join(output)
|
495a368098357c94ccec2f9f84c077fd5b27bde7 | tests/PushoverAPI/test_PushoverAPI.py | tests/PushoverAPI/test_PushoverAPI.py | from urllib.parse import urljoin, parse_qs
import responses
from tests.constants import *
from tests.fixtures import PushoverAPI
from tests.util import messages_callback
@responses.activate
def test_PushoverAPI_sends_message(PushoverAPI):
responses.add_callback(
responses.POST,
urljoin(PUSHOVER_API_URL, 'messages.json'),
callback=messages_callback,
content_type='application/json'
)
resp = PushoverAPI.send_message(TEST_USER, TEST_MESSAGE)
request_body = parse_qs(resp.request.body)
assert request_body['token'][0] == TEST_TOKEN
assert request_body['user'][0] == TEST_USER
assert request_body['message'][0] == TEST_MESSAGE
assert request_body['html'][0] == 'False'
| from urllib.parse import urljoin, parse_qs
import responses
from tests.constants import *
from tests.fixtures import PushoverAPI
from tests.util import messages_callback
@responses.activate
def test_PushoverAPI_sends_message(PushoverAPI):
responses.add_callback(
responses.POST,
urljoin(PUSHOVER_API_URL, 'messages.json'),
callback=messages_callback,
content_type='application/json'
)
resp = PushoverAPI.send_message(TEST_USER, TEST_MESSAGE)
request_body = parse_qs(resp.request.body)
assert request_body['token'][0] == TEST_TOKEN
assert request_body['user'][0] == TEST_USER
assert request_body['message'][0] == TEST_MESSAGE
assert request_body['html'][0] == 'False'
assert resp.json() == {
'status': 1,
'request': TEST_REQUEST_ID
}
| Add a test for a proper response, not just proper request | Add a test for a proper response, not just proper request
| Python | mit | scolby33/pushover_complete | from urllib.parse import urljoin, parse_qs
import responses
from tests.constants import *
from tests.fixtures import PushoverAPI
from tests.util import messages_callback
@responses.activate
def test_PushoverAPI_sends_message(PushoverAPI):
responses.add_callback(
responses.POST,
urljoin(PUSHOVER_API_URL, 'messages.json'),
callback=messages_callback,
content_type='application/json'
)
resp = PushoverAPI.send_message(TEST_USER, TEST_MESSAGE)
request_body = parse_qs(resp.request.body)
assert request_body['token'][0] == TEST_TOKEN
assert request_body['user'][0] == TEST_USER
assert request_body['message'][0] == TEST_MESSAGE
assert request_body['html'][0] == 'False'
Add a test for a proper response, not just proper request | from urllib.parse import urljoin, parse_qs
import responses
from tests.constants import *
from tests.fixtures import PushoverAPI
from tests.util import messages_callback
@responses.activate
def test_PushoverAPI_sends_message(PushoverAPI):
responses.add_callback(
responses.POST,
urljoin(PUSHOVER_API_URL, 'messages.json'),
callback=messages_callback,
content_type='application/json'
)
resp = PushoverAPI.send_message(TEST_USER, TEST_MESSAGE)
request_body = parse_qs(resp.request.body)
assert request_body['token'][0] == TEST_TOKEN
assert request_body['user'][0] == TEST_USER
assert request_body['message'][0] == TEST_MESSAGE
assert request_body['html'][0] == 'False'
assert resp.json() == {
'status': 1,
'request': TEST_REQUEST_ID
}
| <commit_before>from urllib.parse import urljoin, parse_qs
import responses
from tests.constants import *
from tests.fixtures import PushoverAPI
from tests.util import messages_callback
@responses.activate
def test_PushoverAPI_sends_message(PushoverAPI):
responses.add_callback(
responses.POST,
urljoin(PUSHOVER_API_URL, 'messages.json'),
callback=messages_callback,
content_type='application/json'
)
resp = PushoverAPI.send_message(TEST_USER, TEST_MESSAGE)
request_body = parse_qs(resp.request.body)
assert request_body['token'][0] == TEST_TOKEN
assert request_body['user'][0] == TEST_USER
assert request_body['message'][0] == TEST_MESSAGE
assert request_body['html'][0] == 'False'
<commit_msg>Add a test for a proper response, not just proper request<commit_after> | from urllib.parse import urljoin, parse_qs
import responses
from tests.constants import *
from tests.fixtures import PushoverAPI
from tests.util import messages_callback
@responses.activate
def test_PushoverAPI_sends_message(PushoverAPI):
responses.add_callback(
responses.POST,
urljoin(PUSHOVER_API_URL, 'messages.json'),
callback=messages_callback,
content_type='application/json'
)
resp = PushoverAPI.send_message(TEST_USER, TEST_MESSAGE)
request_body = parse_qs(resp.request.body)
assert request_body['token'][0] == TEST_TOKEN
assert request_body['user'][0] == TEST_USER
assert request_body['message'][0] == TEST_MESSAGE
assert request_body['html'][0] == 'False'
assert resp.json() == {
'status': 1,
'request': TEST_REQUEST_ID
}
| from urllib.parse import urljoin, parse_qs
import responses
from tests.constants import *
from tests.fixtures import PushoverAPI
from tests.util import messages_callback
@responses.activate
def test_PushoverAPI_sends_message(PushoverAPI):
responses.add_callback(
responses.POST,
urljoin(PUSHOVER_API_URL, 'messages.json'),
callback=messages_callback,
content_type='application/json'
)
resp = PushoverAPI.send_message(TEST_USER, TEST_MESSAGE)
request_body = parse_qs(resp.request.body)
assert request_body['token'][0] == TEST_TOKEN
assert request_body['user'][0] == TEST_USER
assert request_body['message'][0] == TEST_MESSAGE
assert request_body['html'][0] == 'False'
Add a test for a proper response, not just proper requestfrom urllib.parse import urljoin, parse_qs
import responses
from tests.constants import *
from tests.fixtures import PushoverAPI
from tests.util import messages_callback
@responses.activate
def test_PushoverAPI_sends_message(PushoverAPI):
responses.add_callback(
responses.POST,
urljoin(PUSHOVER_API_URL, 'messages.json'),
callback=messages_callback,
content_type='application/json'
)
resp = PushoverAPI.send_message(TEST_USER, TEST_MESSAGE)
request_body = parse_qs(resp.request.body)
assert request_body['token'][0] == TEST_TOKEN
assert request_body['user'][0] == TEST_USER
assert request_body['message'][0] == TEST_MESSAGE
assert request_body['html'][0] == 'False'
assert resp.json() == {
'status': 1,
'request': TEST_REQUEST_ID
}
| <commit_before>from urllib.parse import urljoin, parse_qs
import responses
from tests.constants import *
from tests.fixtures import PushoverAPI
from tests.util import messages_callback
@responses.activate
def test_PushoverAPI_sends_message(PushoverAPI):
responses.add_callback(
responses.POST,
urljoin(PUSHOVER_API_URL, 'messages.json'),
callback=messages_callback,
content_type='application/json'
)
resp = PushoverAPI.send_message(TEST_USER, TEST_MESSAGE)
request_body = parse_qs(resp.request.body)
assert request_body['token'][0] == TEST_TOKEN
assert request_body['user'][0] == TEST_USER
assert request_body['message'][0] == TEST_MESSAGE
assert request_body['html'][0] == 'False'
<commit_msg>Add a test for a proper response, not just proper request<commit_after>from urllib.parse import urljoin, parse_qs
import responses
from tests.constants import *
from tests.fixtures import PushoverAPI
from tests.util import messages_callback
@responses.activate
def test_PushoverAPI_sends_message(PushoverAPI):
responses.add_callback(
responses.POST,
urljoin(PUSHOVER_API_URL, 'messages.json'),
callback=messages_callback,
content_type='application/json'
)
resp = PushoverAPI.send_message(TEST_USER, TEST_MESSAGE)
request_body = parse_qs(resp.request.body)
assert request_body['token'][0] == TEST_TOKEN
assert request_body['user'][0] == TEST_USER
assert request_body['message'][0] == TEST_MESSAGE
assert request_body['html'][0] == 'False'
assert resp.json() == {
'status': 1,
'request': TEST_REQUEST_ID
}
|
dc86283bb517c56eec177804801f66227477a097 | networkx/generators/ego.py | networkx/generators/ego.py | """
Ego graph.
"""
# Copyright (C) 2010 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
# Credited module authors (newline-joined, per historical NetworkX convention).
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
                            'Aric Hagberg <hagberg@lanl.gov>'])
# Public API of this module.
__all__ = ['ego_graph']

import networkx as nx
def ego_graph(G,n,radius=1,center=True):
"""Returns induced subgraph of neighbors centered at node n.
Parameters
----------
G : graph
A NetworkX Graph or DiGraph
n : node
A single node
radius : integer
Include all neighbors of distance<=radius from n
center : bool, optional
If False, do not include center node in graph
"""
sp=nx.single_source_shortest_path_length(G,n,cutoff=radius)
H=G.subgraph(sp.keys())
if not center:
H.remove_node(n)
return H
| """
Ego graph.
"""
# Copyright (C) 2010 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
'Aric Hagberg <hagberg@lanl.gov>'])
__all__ = ['ego_graph']
import networkx as nx
def ego_graph(G,n,radius=1,center=True):
"""Returns induced subgraph of neighbors centered at node n.
Parameters
----------
G : graph
A NetworkX Graph or DiGraph
n : node
A single node
radius : integer
Include all neighbors of distance<=radius from n
center : bool, optional
If False, do not include center node in graph
Notes
-----
For directed graphs D this produces the "out" neighborhood
or successors. If you want the neighborhood of predecessors
first reverse the graph with D.reverse(). If you want both
first convert the graph to an undirected graph using G=nx.Graph(D).
"""
sp=nx.single_source_shortest_path_length(G,n,cutoff=radius)
H=G.subgraph(sp.keys())
if not center:
H.remove_node(n)
return H
| Add note about directed graphs | Add note about directed graphs
--HG--
extra : convert_revision : svn%3A3ed01bd8-26fb-0310-9e4c-ca1a4053419f/networkx/trunk%401776
| Python | bsd-3-clause | ghdk/networkx,farhaanbukhsh/networkx,yashu-seth/networkx,bzero/networkx,nathania/networkx,jakevdp/networkx,goulu/networkx,Sixshaman/networkx,ionanrozenfeld/networkx,chrisnatali/networkx,jcurbelo/networkx,tmilicic/networkx,chrisnatali/networkx,RMKD/networkx,ionanrozenfeld/networkx,sharifulgeo/networkx,sharifulgeo/networkx,ghdk/networkx,jtorrents/networkx,kai5263499/networkx,nathania/networkx,aureooms/networkx,sharifulgeo/networkx,RMKD/networkx,OrkoHunter/networkx,dhimmel/networkx,nathania/networkx,bzero/networkx,kai5263499/networkx,jfinkels/networkx,dmoliveira/networkx,kernc/networkx,dmoliveira/networkx,ltiao/networkx,kai5263499/networkx,cmtm/networkx,debsankha/networkx,dhimmel/networkx,beni55/networkx,RMKD/networkx,chrisnatali/networkx,harlowja/networkx,ionanrozenfeld/networkx,JamesClough/networkx,blublud/networkx,dhimmel/networkx,aureooms/networkx,blublud/networkx,farhaanbukhsh/networkx,ghdk/networkx,dmoliveira/networkx,harlowja/networkx,jni/networkx,wasade/networkx,blublud/networkx,SanketDG/networkx,debsankha/networkx,jni/networkx,jakevdp/networkx,debsankha/networkx,farhaanbukhsh/networkx,michaelpacer/networkx,bzero/networkx,andnovar/networkx,jtorrents/networkx,jni/networkx,aureooms/networkx,jakevdp/networkx,kernc/networkx,harlowja/networkx,NvanAdrichem/networkx,kernc/networkx | """
Ego graph.
"""
# Copyright (C) 2010 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
'Aric Hagberg <hagberg@lanl.gov>'])
__all__ = ['ego_graph']
import networkx as nx
def ego_graph(G,n,radius=1,center=True):
"""Returns induced subgraph of neighbors centered at node n.
Parameters
----------
G : graph
A NetworkX Graph or DiGraph
n : node
A single node
radius : integer
Include all neighbors of distance<=radius from n
center : bool, optional
If False, do not include center node in graph
"""
sp=nx.single_source_shortest_path_length(G,n,cutoff=radius)
H=G.subgraph(sp.keys())
if not center:
H.remove_node(n)
return H
Add note about directed graphs
--HG--
extra : convert_revision : svn%3A3ed01bd8-26fb-0310-9e4c-ca1a4053419f/networkx/trunk%401776 | """
Ego graph.
"""
# Copyright (C) 2010 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
'Aric Hagberg <hagberg@lanl.gov>'])
__all__ = ['ego_graph']
import networkx as nx
def ego_graph(G,n,radius=1,center=True):
"""Returns induced subgraph of neighbors centered at node n.
Parameters
----------
G : graph
A NetworkX Graph or DiGraph
n : node
A single node
radius : integer
Include all neighbors of distance<=radius from n
center : bool, optional
If False, do not include center node in graph
Notes
-----
For directed graphs D this produces the "out" neighborhood
or successors. If you want the neighborhood of predecessors
first reverse the graph with D.reverse(). If you want both
first convert the graph to an undirected graph using G=nx.Graph(D).
"""
sp=nx.single_source_shortest_path_length(G,n,cutoff=radius)
H=G.subgraph(sp.keys())
if not center:
H.remove_node(n)
return H
| <commit_before>"""
Ego graph.
"""
# Copyright (C) 2010 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
'Aric Hagberg <hagberg@lanl.gov>'])
__all__ = ['ego_graph']
import networkx as nx
def ego_graph(G,n,radius=1,center=True):
"""Returns induced subgraph of neighbors centered at node n.
Parameters
----------
G : graph
A NetworkX Graph or DiGraph
n : node
A single node
radius : integer
Include all neighbors of distance<=radius from n
center : bool, optional
If False, do not include center node in graph
"""
sp=nx.single_source_shortest_path_length(G,n,cutoff=radius)
H=G.subgraph(sp.keys())
if not center:
H.remove_node(n)
return H
<commit_msg>Add note about directed graphs
--HG--
extra : convert_revision : svn%3A3ed01bd8-26fb-0310-9e4c-ca1a4053419f/networkx/trunk%401776<commit_after> | """
Ego graph.
"""
# Copyright (C) 2010 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
'Aric Hagberg <hagberg@lanl.gov>'])
__all__ = ['ego_graph']
import networkx as nx
def ego_graph(G,n,radius=1,center=True):
"""Returns induced subgraph of neighbors centered at node n.
Parameters
----------
G : graph
A NetworkX Graph or DiGraph
n : node
A single node
radius : integer
Include all neighbors of distance<=radius from n
center : bool, optional
If False, do not include center node in graph
Notes
-----
For directed graphs D this produces the "out" neighborhood
or successors. If you want the neighborhood of predecessors
first reverse the graph with D.reverse(). If you want both
first convert the graph to an undirected graph using G=nx.Graph(D).
"""
sp=nx.single_source_shortest_path_length(G,n,cutoff=radius)
H=G.subgraph(sp.keys())
if not center:
H.remove_node(n)
return H
| """
Ego graph.
"""
# Copyright (C) 2010 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
'Aric Hagberg <hagberg@lanl.gov>'])
__all__ = ['ego_graph']
import networkx as nx
def ego_graph(G,n,radius=1,center=True):
"""Returns induced subgraph of neighbors centered at node n.
Parameters
----------
G : graph
A NetworkX Graph or DiGraph
n : node
A single node
radius : integer
Include all neighbors of distance<=radius from n
center : bool, optional
If False, do not include center node in graph
"""
sp=nx.single_source_shortest_path_length(G,n,cutoff=radius)
H=G.subgraph(sp.keys())
if not center:
H.remove_node(n)
return H
Add note about directed graphs
--HG--
extra : convert_revision : svn%3A3ed01bd8-26fb-0310-9e4c-ca1a4053419f/networkx/trunk%401776"""
Ego graph.
"""
# Copyright (C) 2010 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
'Aric Hagberg <hagberg@lanl.gov>'])
__all__ = ['ego_graph']
import networkx as nx
def ego_graph(G,n,radius=1,center=True):
"""Returns induced subgraph of neighbors centered at node n.
Parameters
----------
G : graph
A NetworkX Graph or DiGraph
n : node
A single node
radius : integer
Include all neighbors of distance<=radius from n
center : bool, optional
If False, do not include center node in graph
Notes
-----
For directed graphs D this produces the "out" neighborhood
or successors. If you want the neighborhood of predecessors
first reverse the graph with D.reverse(). If you want both
first convert the graph to an undirected graph using G=nx.Graph(D).
"""
sp=nx.single_source_shortest_path_length(G,n,cutoff=radius)
H=G.subgraph(sp.keys())
if not center:
H.remove_node(n)
return H
| <commit_before>"""
Ego graph.
"""
# Copyright (C) 2010 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
'Aric Hagberg <hagberg@lanl.gov>'])
__all__ = ['ego_graph']
import networkx as nx
def ego_graph(G,n,radius=1,center=True):
"""Returns induced subgraph of neighbors centered at node n.
Parameters
----------
G : graph
A NetworkX Graph or DiGraph
n : node
A single node
radius : integer
Include all neighbors of distance<=radius from n
center : bool, optional
If False, do not include center node in graph
"""
sp=nx.single_source_shortest_path_length(G,n,cutoff=radius)
H=G.subgraph(sp.keys())
if not center:
H.remove_node(n)
return H
<commit_msg>Add note about directed graphs
--HG--
extra : convert_revision : svn%3A3ed01bd8-26fb-0310-9e4c-ca1a4053419f/networkx/trunk%401776<commit_after>"""
Ego graph.
"""
# Copyright (C) 2010 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
__author__ = """\n""".join(['Drew Conway <drew.conway@nyu.edu>',
'Aric Hagberg <hagberg@lanl.gov>'])
__all__ = ['ego_graph']
import networkx as nx
def ego_graph(G,n,radius=1,center=True):
"""Returns induced subgraph of neighbors centered at node n.
Parameters
----------
G : graph
A NetworkX Graph or DiGraph
n : node
A single node
radius : integer
Include all neighbors of distance<=radius from n
center : bool, optional
If False, do not include center node in graph
Notes
-----
For directed graphs D this produces the "out" neighborhood
or successors. If you want the neighborhood of predecessors
first reverse the graph with D.reverse(). If you want both
first convert the graph to an undirected graph using G=nx.Graph(D).
"""
sp=nx.single_source_shortest_path_length(G,n,cutoff=radius)
H=G.subgraph(sp.keys())
if not center:
H.remove_node(n)
return H
|
21f7d85d5f22834e04a25ea23eabfd07b279bfe6 | openedx/features/badging/constants.py | openedx/features/badging/constants.py | CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
| CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
BADGE_ROOT_URL = '{root_url}/courses/{course_id}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
| Add constant for badge url | Add constant for badge url
| Python | agpl-3.0 | philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform,philanthropy-u/edx-platform | CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
Add constant for badge url | CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
BADGE_ROOT_URL = '{root_url}/courses/{course_id}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
| <commit_before>CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
<commit_msg>Add constant for badge url<commit_after> | CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
BADGE_ROOT_URL = '{root_url}/courses/{course_id}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
| CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
Add constant for badge urlCONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
BADGE_ROOT_URL = '{root_url}/courses/{course_id}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
| <commit_before>CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
<commit_msg>Add constant for badge url<commit_after>CONVERSATIONALIST = ('conversationalist', 'Conversationalist')
TEAM_PLAYER = ('team', 'Team player')
BADGES_KEY = 'badges'
BADGE_NOT_FOUND_ERROR = 'There exists no badge with id {badge_id}'
BADGE_TYPE_ERROR = 'Cannot assign badge {badge_id} of unknown type {badge_type}'
BADGE_ROOT_URL = '{root_url}/courses/{course_id}'
FILTER_BADGES_ERROR = 'Unable to get badges for team {team_id}'
INVALID_COMMUNITY_ERROR = 'Cannot assign badge {badge_id} for invalid community {community_id}'
INVALID_TEAM_ERROR = 'Cannot assign badge {badge_id} for invalid team {community_id}'
TEAM_ID_KEY = 'team_id'
TEAM_ROOM_ID_KEY = 'team__room_id'
UNKNOWN_COURSE_ERROR = 'Cannot assign badge {badge_id} for team {community_id} in unknown course'
|
3b5880c375ee92ce931c29a978ff64cd8849d028 | src/simple-http-server.py | src/simple-http-server.py | #!/usr/bin/env python3
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import os, http.server
def main(args):
os.chdir(args.directory)
addr = ('' ,args.port)
httpd = http.server.HTTPServer(addr, http.server.SimpleHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--port', default=8000,
help='Port on which to listen')
parser.add_argument('-d', '--directory', metavar='DIR', default=os.getcwd(),
help='Directory to serve')
args = parser.parse_args()
try: exit(main(args))
except KeyboardInterrupt as e: pass
| #!/usr/bin/env python3
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import os, http.server
def main(args):
os.chdir(args.directory)
addr = ('' ,args.port)
print('Serving', args.directory, 'on', addr)
httpd = http.server.HTTPServer(addr, http.server.SimpleHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--port', default=8000,
help='Port on which to listen')
parser.add_argument('-d', '--directory', metavar='DIR', default=os.getcwd(),
help='Directory to serve')
args = parser.parse_args()
try: exit(main(args))
except KeyboardInterrupt as e: pass
| Add message about where we're listening for connections | Add message about where we're listening for connections
| Python | unlicense | pastly/python-snippits | #!/usr/bin/env python3
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import os, http.server
def main(args):
os.chdir(args.directory)
addr = ('' ,args.port)
httpd = http.server.HTTPServer(addr, http.server.SimpleHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--port', default=8000,
help='Port on which to listen')
parser.add_argument('-d', '--directory', metavar='DIR', default=os.getcwd(),
help='Directory to serve')
args = parser.parse_args()
try: exit(main(args))
except KeyboardInterrupt as e: pass
Add message about where we're listening for connections | #!/usr/bin/env python3
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import os, http.server
def main(args):
os.chdir(args.directory)
addr = ('' ,args.port)
print('Serving', args.directory, 'on', addr)
httpd = http.server.HTTPServer(addr, http.server.SimpleHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--port', default=8000,
help='Port on which to listen')
parser.add_argument('-d', '--directory', metavar='DIR', default=os.getcwd(),
help='Directory to serve')
args = parser.parse_args()
try: exit(main(args))
except KeyboardInterrupt as e: pass
| <commit_before>#!/usr/bin/env python3
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import os, http.server
def main(args):
os.chdir(args.directory)
addr = ('' ,args.port)
httpd = http.server.HTTPServer(addr, http.server.SimpleHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--port', default=8000,
help='Port on which to listen')
parser.add_argument('-d', '--directory', metavar='DIR', default=os.getcwd(),
help='Directory to serve')
args = parser.parse_args()
try: exit(main(args))
except KeyboardInterrupt as e: pass
<commit_msg>Add message about where we're listening for connections<commit_after> | #!/usr/bin/env python3
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import os, http.server
def main(args):
os.chdir(args.directory)
addr = ('' ,args.port)
print('Serving', args.directory, 'on', addr)
httpd = http.server.HTTPServer(addr, http.server.SimpleHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--port', default=8000,
help='Port on which to listen')
parser.add_argument('-d', '--directory', metavar='DIR', default=os.getcwd(),
help='Directory to serve')
args = parser.parse_args()
try: exit(main(args))
except KeyboardInterrupt as e: pass
| #!/usr/bin/env python3
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import os, http.server
def main(args):
os.chdir(args.directory)
addr = ('' ,args.port)
httpd = http.server.HTTPServer(addr, http.server.SimpleHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--port', default=8000,
help='Port on which to listen')
parser.add_argument('-d', '--directory', metavar='DIR', default=os.getcwd(),
help='Directory to serve')
args = parser.parse_args()
try: exit(main(args))
except KeyboardInterrupt as e: pass
Add message about where we're listening for connections#!/usr/bin/env python3
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import os, http.server
def main(args):
os.chdir(args.directory)
addr = ('' ,args.port)
print('Serving', args.directory, 'on', addr)
httpd = http.server.HTTPServer(addr, http.server.SimpleHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--port', default=8000,
help='Port on which to listen')
parser.add_argument('-d', '--directory', metavar='DIR', default=os.getcwd(),
help='Directory to serve')
args = parser.parse_args()
try: exit(main(args))
except KeyboardInterrupt as e: pass
| <commit_before>#!/usr/bin/env python3
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import os, http.server
def main(args):
os.chdir(args.directory)
addr = ('' ,args.port)
httpd = http.server.HTTPServer(addr, http.server.SimpleHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--port', default=8000,
help='Port on which to listen')
parser.add_argument('-d', '--directory', metavar='DIR', default=os.getcwd(),
help='Directory to serve')
args = parser.parse_args()
try: exit(main(args))
except KeyboardInterrupt as e: pass
<commit_msg>Add message about where we're listening for connections<commit_after>#!/usr/bin/env python3
from argparse import ArgumentParser, ArgumentDefaultsHelpFormatter
import os, http.server
def main(args):
os.chdir(args.directory)
addr = ('' ,args.port)
print('Serving', args.directory, 'on', addr)
httpd = http.server.HTTPServer(addr, http.server.SimpleHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
parser = ArgumentParser(formatter_class=ArgumentDefaultsHelpFormatter)
parser.add_argument('-p', '--port', default=8000,
help='Port on which to listen')
parser.add_argument('-d', '--directory', metavar='DIR', default=os.getcwd(),
help='Directory to serve')
args = parser.parse_args()
try: exit(main(args))
except KeyboardInterrupt as e: pass
|
22382935be99e027da46303107926a15cd8f3017 | tests/twisted/vcard/test-set-alias.py | tests/twisted/vcard/test-set-alias.py |
"""
Test alias setting support.
"""
from servicetest import EventPattern
from gabbletest import exec_test, acknowledge_iq
import constants as cs
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
|
"""
Test alias setting support.
"""
from servicetest import EventPattern, assertEquals
from gabbletest import exec_test, acknowledge_iq
import constants as cs
import ns
def validate_pep_update(pep_update, expected_nickname):
publish = pep_update.query.elements(uri=ns.PUBSUB, name='publish').next()
assertEquals(ns.NICK, publish['node'])
item = publish.elements(uri=ns.PUBSUB, name='item').next()
nick = item.elements(uri=ns.NICK, name='nick').next()
assertEquals(expected_nickname, nick.children[0])
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
pep_update = q.expect('stream-iq', iq_type='set', query_ns=ns.PUBSUB, query_name='pubsub')
validate_pep_update(pep_update, 'lala')
acknowledge_iq(stream, pep_update.stanza)
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
| Test setting our own alias via PEP | Test setting our own alias via PEP
Astonishingly, this was untested...
| Python | lgpl-2.1 | Ziemin/telepathy-gabble,jku/telepathy-gabble,mlundblad/telepathy-gabble,Ziemin/telepathy-gabble,mlundblad/telepathy-gabble,jku/telepathy-gabble,Ziemin/telepathy-gabble,mlundblad/telepathy-gabble,jku/telepathy-gabble,Ziemin/telepathy-gabble |
"""
Test alias setting support.
"""
from servicetest import EventPattern
from gabbletest import exec_test, acknowledge_iq
import constants as cs
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
Test setting our own alias via PEP
Astonishingly, this was untested... |
"""
Test alias setting support.
"""
from servicetest import EventPattern, assertEquals
from gabbletest import exec_test, acknowledge_iq
import constants as cs
import ns
def validate_pep_update(pep_update, expected_nickname):
publish = pep_update.query.elements(uri=ns.PUBSUB, name='publish').next()
assertEquals(ns.NICK, publish['node'])
item = publish.elements(uri=ns.PUBSUB, name='item').next()
nick = item.elements(uri=ns.NICK, name='nick').next()
assertEquals(expected_nickname, nick.children[0])
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
pep_update = q.expect('stream-iq', iq_type='set', query_ns=ns.PUBSUB, query_name='pubsub')
validate_pep_update(pep_update, 'lala')
acknowledge_iq(stream, pep_update.stanza)
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
| <commit_before>
"""
Test alias setting support.
"""
from servicetest import EventPattern
from gabbletest import exec_test, acknowledge_iq
import constants as cs
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
<commit_msg>Test setting our own alias via PEP
Astonishingly, this was untested...<commit_after> |
"""
Test alias setting support.
"""
from servicetest import EventPattern, assertEquals
from gabbletest import exec_test, acknowledge_iq
import constants as cs
import ns
def validate_pep_update(pep_update, expected_nickname):
publish = pep_update.query.elements(uri=ns.PUBSUB, name='publish').next()
assertEquals(ns.NICK, publish['node'])
item = publish.elements(uri=ns.PUBSUB, name='item').next()
nick = item.elements(uri=ns.NICK, name='nick').next()
assertEquals(expected_nickname, nick.children[0])
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
pep_update = q.expect('stream-iq', iq_type='set', query_ns=ns.PUBSUB, query_name='pubsub')
validate_pep_update(pep_update, 'lala')
acknowledge_iq(stream, pep_update.stanza)
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
|
"""
Test alias setting support.
"""
from servicetest import EventPattern
from gabbletest import exec_test, acknowledge_iq
import constants as cs
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
Test setting our own alias via PEP
Astonishingly, this was untested...
"""
Test alias setting support.
"""
from servicetest import EventPattern, assertEquals
from gabbletest import exec_test, acknowledge_iq
import constants as cs
import ns
def validate_pep_update(pep_update, expected_nickname):
publish = pep_update.query.elements(uri=ns.PUBSUB, name='publish').next()
assertEquals(ns.NICK, publish['node'])
item = publish.elements(uri=ns.PUBSUB, name='item').next()
nick = item.elements(uri=ns.NICK, name='nick').next()
assertEquals(expected_nickname, nick.children[0])
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
pep_update = q.expect('stream-iq', iq_type='set', query_ns=ns.PUBSUB, query_name='pubsub')
validate_pep_update(pep_update, 'lala')
acknowledge_iq(stream, pep_update.stanza)
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
| <commit_before>
"""
Test alias setting support.
"""
from servicetest import EventPattern
from gabbletest import exec_test, acknowledge_iq
import constants as cs
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
<commit_msg>Test setting our own alias via PEP
Astonishingly, this was untested...<commit_after>
"""
Test alias setting support.
"""
from servicetest import EventPattern, assertEquals
from gabbletest import exec_test, acknowledge_iq
import constants as cs
import ns
def validate_pep_update(pep_update, expected_nickname):
publish = pep_update.query.elements(uri=ns.PUBSUB, name='publish').next()
assertEquals(ns.NICK, publish['node'])
item = publish.elements(uri=ns.PUBSUB, name='item').next()
nick = item.elements(uri=ns.NICK, name='nick').next()
assertEquals(expected_nickname, nick.children[0])
def test(q, bus, conn, stream):
iq_event = q.expect('stream-iq', to=None, query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
conn.Aliasing.SetAliases({1: 'lala'})
pep_update = q.expect('stream-iq', iq_type='set', query_ns=ns.PUBSUB, query_name='pubsub')
validate_pep_update(pep_update, 'lala')
acknowledge_iq(stream, pep_update.stanza)
iq_event = q.expect('stream-iq', iq_type='set', query_ns='vcard-temp',
query_name='vCard')
acknowledge_iq(stream, iq_event.stanza)
event = q.expect('dbus-signal', signal='AliasesChanged',
args=[[(1, u'lala')]])
if __name__ == '__main__':
exec_test(test)
|
fe9f48415017bcd873140da82a5f2e463d13b307 | tests/scoring_engine/models/test_setting.py | tests/scoring_engine/models/test_setting.py | from scoring_engine.models.setting import Setting
from tests.scoring_engine.unit_test import UnitTest
class TestSetting(UnitTest):
def test_init_setting(self):
setting = Setting(name='test_setting', value='test value example')
assert setting.id is None
assert setting.name == 'test_setting'
assert setting.value == 'test value example'
self.session.add(setting)
self.session.commit()
assert setting.id is not None
def test_get_setting(self):
setting_old = Setting(name='test_setting', value='test value example')
self.session.add(setting_old)
setting_new = Setting(name='test_setting', value='updated example')
self.session.add(setting_new)
self.session.commit()
assert Setting.get_setting('test_setting').value == 'updated example'
def test_boolean_value(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
assert setting.value is True
| from scoring_engine.models.setting import Setting
from tests.scoring_engine.unit_test import UnitTest
class TestSetting(UnitTest):
def test_init_setting(self):
setting = Setting(name='test_setting', value='test value example')
assert setting.id is None
assert setting.name == 'test_setting'
assert setting.value == 'test value example'
assert setting._value_type == 'String'
self.session.add(setting)
self.session.commit()
assert setting.id is not None
def test_get_setting(self):
setting_old = Setting(name='test_setting', value='test value example')
self.session.add(setting_old)
setting_new = Setting(name='test_setting', value='updated example')
self.session.add(setting_new)
self.session.commit()
assert Setting.get_setting('test_setting').value == 'updated example'
def test_boolean_value(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
assert setting.value is True
def test_boolean_value_advanced(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
setting.value = 'somevalue'
assert setting.value == 'somevalue'
self.session.add(setting)
self.session.commit()
| Add test for setting as boolean value | Add test for setting as boolean value
| Python | mit | pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine,pwnbus/scoring_engine | from scoring_engine.models.setting import Setting
from tests.scoring_engine.unit_test import UnitTest
class TestSetting(UnitTest):
def test_init_setting(self):
setting = Setting(name='test_setting', value='test value example')
assert setting.id is None
assert setting.name == 'test_setting'
assert setting.value == 'test value example'
self.session.add(setting)
self.session.commit()
assert setting.id is not None
def test_get_setting(self):
setting_old = Setting(name='test_setting', value='test value example')
self.session.add(setting_old)
setting_new = Setting(name='test_setting', value='updated example')
self.session.add(setting_new)
self.session.commit()
assert Setting.get_setting('test_setting').value == 'updated example'
def test_boolean_value(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
assert setting.value is True
Add test for setting as boolean value | from scoring_engine.models.setting import Setting
from tests.scoring_engine.unit_test import UnitTest
class TestSetting(UnitTest):
def test_init_setting(self):
setting = Setting(name='test_setting', value='test value example')
assert setting.id is None
assert setting.name == 'test_setting'
assert setting.value == 'test value example'
assert setting._value_type == 'String'
self.session.add(setting)
self.session.commit()
assert setting.id is not None
def test_get_setting(self):
setting_old = Setting(name='test_setting', value='test value example')
self.session.add(setting_old)
setting_new = Setting(name='test_setting', value='updated example')
self.session.add(setting_new)
self.session.commit()
assert Setting.get_setting('test_setting').value == 'updated example'
def test_boolean_value(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
assert setting.value is True
def test_boolean_value_advanced(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
setting.value = 'somevalue'
assert setting.value == 'somevalue'
self.session.add(setting)
self.session.commit()
| <commit_before>from scoring_engine.models.setting import Setting
from tests.scoring_engine.unit_test import UnitTest
class TestSetting(UnitTest):
def test_init_setting(self):
setting = Setting(name='test_setting', value='test value example')
assert setting.id is None
assert setting.name == 'test_setting'
assert setting.value == 'test value example'
self.session.add(setting)
self.session.commit()
assert setting.id is not None
def test_get_setting(self):
setting_old = Setting(name='test_setting', value='test value example')
self.session.add(setting_old)
setting_new = Setting(name='test_setting', value='updated example')
self.session.add(setting_new)
self.session.commit()
assert Setting.get_setting('test_setting').value == 'updated example'
def test_boolean_value(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
assert setting.value is True
<commit_msg>Add test for setting as boolean value<commit_after> | from scoring_engine.models.setting import Setting
from tests.scoring_engine.unit_test import UnitTest
class TestSetting(UnitTest):
def test_init_setting(self):
setting = Setting(name='test_setting', value='test value example')
assert setting.id is None
assert setting.name == 'test_setting'
assert setting.value == 'test value example'
assert setting._value_type == 'String'
self.session.add(setting)
self.session.commit()
assert setting.id is not None
def test_get_setting(self):
setting_old = Setting(name='test_setting', value='test value example')
self.session.add(setting_old)
setting_new = Setting(name='test_setting', value='updated example')
self.session.add(setting_new)
self.session.commit()
assert Setting.get_setting('test_setting').value == 'updated example'
def test_boolean_value(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
assert setting.value is True
def test_boolean_value_advanced(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
setting.value = 'somevalue'
assert setting.value == 'somevalue'
self.session.add(setting)
self.session.commit()
| from scoring_engine.models.setting import Setting
from tests.scoring_engine.unit_test import UnitTest
class TestSetting(UnitTest):
def test_init_setting(self):
setting = Setting(name='test_setting', value='test value example')
assert setting.id is None
assert setting.name == 'test_setting'
assert setting.value == 'test value example'
self.session.add(setting)
self.session.commit()
assert setting.id is not None
def test_get_setting(self):
setting_old = Setting(name='test_setting', value='test value example')
self.session.add(setting_old)
setting_new = Setting(name='test_setting', value='updated example')
self.session.add(setting_new)
self.session.commit()
assert Setting.get_setting('test_setting').value == 'updated example'
def test_boolean_value(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
assert setting.value is True
Add test for setting as boolean valuefrom scoring_engine.models.setting import Setting
from tests.scoring_engine.unit_test import UnitTest
class TestSetting(UnitTest):
def test_init_setting(self):
setting = Setting(name='test_setting', value='test value example')
assert setting.id is None
assert setting.name == 'test_setting'
assert setting.value == 'test value example'
assert setting._value_type == 'String'
self.session.add(setting)
self.session.commit()
assert setting.id is not None
def test_get_setting(self):
setting_old = Setting(name='test_setting', value='test value example')
self.session.add(setting_old)
setting_new = Setting(name='test_setting', value='updated example')
self.session.add(setting_new)
self.session.commit()
assert Setting.get_setting('test_setting').value == 'updated example'
def test_boolean_value(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
assert setting.value is True
def test_boolean_value_advanced(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
setting.value = 'somevalue'
assert setting.value == 'somevalue'
self.session.add(setting)
self.session.commit()
| <commit_before>from scoring_engine.models.setting import Setting
from tests.scoring_engine.unit_test import UnitTest
class TestSetting(UnitTest):
def test_init_setting(self):
setting = Setting(name='test_setting', value='test value example')
assert setting.id is None
assert setting.name == 'test_setting'
assert setting.value == 'test value example'
self.session.add(setting)
self.session.commit()
assert setting.id is not None
def test_get_setting(self):
setting_old = Setting(name='test_setting', value='test value example')
self.session.add(setting_old)
setting_new = Setting(name='test_setting', value='updated example')
self.session.add(setting_new)
self.session.commit()
assert Setting.get_setting('test_setting').value == 'updated example'
def test_boolean_value(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
assert setting.value is True
<commit_msg>Add test for setting as boolean value<commit_after>from scoring_engine.models.setting import Setting
from tests.scoring_engine.unit_test import UnitTest
class TestSetting(UnitTest):
def test_init_setting(self):
setting = Setting(name='test_setting', value='test value example')
assert setting.id is None
assert setting.name == 'test_setting'
assert setting.value == 'test value example'
assert setting._value_type == 'String'
self.session.add(setting)
self.session.commit()
assert setting.id is not None
def test_get_setting(self):
setting_old = Setting(name='test_setting', value='test value example')
self.session.add(setting_old)
setting_new = Setting(name='test_setting', value='updated example')
self.session.add(setting_new)
self.session.commit()
assert Setting.get_setting('test_setting').value == 'updated example'
def test_boolean_value(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
assert setting.value is True
def test_boolean_value_advanced(self):
setting = Setting(name='test_setting', value=True)
assert setting.name == 'test_setting'
assert setting.value is True
self.session.add(setting)
self.session.commit()
setting.value = 'somevalue'
assert setting.value == 'somevalue'
self.session.add(setting)
self.session.commit()
|
bd5336d52a1cd2e086dbf1665b90823505d31840 | tests/testapp/tests/test_timer_collector.py | tests/testapp/tests/test_timer_collector.py | from datetime import timedelta
from django_performance_testing.timing import TimeCollector
from freezegun import freeze_time
import pytest
from testapp.test_helpers import capture_result_collected
@pytest.mark.parametrize('seconds', [10, 5, 0.04])
def test_captures_and_measures_elapsed_time(seconds):
with capture_result_collected() as captured:
with freeze_time('2016-09-22 15:57:01') as frozen_time:
with TimeCollector():
frozen_time.tick(timedelta(seconds=seconds))
assert len(captured.calls) == 1
assert pytest.approx(seconds) == captured.calls[0]['results']
| from datetime import timedelta
from django_performance_testing.timing import TimeCollector
from freezegun import freeze_time
import pytest
from testapp.test_helpers import capture_result_collected
@pytest.mark.parametrize('seconds', [10, 5, 0.04])
def test_captures_and_measures_elapsed_time(seconds):
with capture_result_collected() as captured:
with freeze_time('2016-09-22 15:57:01') as frozen_time:
with TimeCollector():
frozen_time.tick(timedelta(seconds=seconds))
assert len(captured.calls) == 1
assert pytest.approx(seconds) == captured.calls[0]['results'][0].value
| Fix for failing test test_captures_and_measures_elapsed_time: TypeError: unsupported operand type(s) for -: 'float' and 'list' | Fix for failing test test_captures_and_measures_elapsed_time: TypeError: unsupported operand type(s) for -: 'float' and 'list'
| Python | bsd-3-clause | PaesslerAG/django-performance-testing | from datetime import timedelta
from django_performance_testing.timing import TimeCollector
from freezegun import freeze_time
import pytest
from testapp.test_helpers import capture_result_collected
@pytest.mark.parametrize('seconds', [10, 5, 0.04])
def test_captures_and_measures_elapsed_time(seconds):
with capture_result_collected() as captured:
with freeze_time('2016-09-22 15:57:01') as frozen_time:
with TimeCollector():
frozen_time.tick(timedelta(seconds=seconds))
assert len(captured.calls) == 1
assert pytest.approx(seconds) == captured.calls[0]['results']
Fix for failing test test_captures_and_measures_elapsed_time: TypeError: unsupported operand type(s) for -: 'float' and 'list' | from datetime import timedelta
from django_performance_testing.timing import TimeCollector
from freezegun import freeze_time
import pytest
from testapp.test_helpers import capture_result_collected
@pytest.mark.parametrize('seconds', [10, 5, 0.04])
def test_captures_and_measures_elapsed_time(seconds):
with capture_result_collected() as captured:
with freeze_time('2016-09-22 15:57:01') as frozen_time:
with TimeCollector():
frozen_time.tick(timedelta(seconds=seconds))
assert len(captured.calls) == 1
assert pytest.approx(seconds) == captured.calls[0]['results'][0].value
| <commit_before>from datetime import timedelta
from django_performance_testing.timing import TimeCollector
from freezegun import freeze_time
import pytest
from testapp.test_helpers import capture_result_collected
@pytest.mark.parametrize('seconds', [10, 5, 0.04])
def test_captures_and_measures_elapsed_time(seconds):
with capture_result_collected() as captured:
with freeze_time('2016-09-22 15:57:01') as frozen_time:
with TimeCollector():
frozen_time.tick(timedelta(seconds=seconds))
assert len(captured.calls) == 1
assert pytest.approx(seconds) == captured.calls[0]['results']
<commit_msg>Fix for failing test test_captures_and_measures_elapsed_time: TypeError: unsupported operand type(s) for -: 'float' and 'list'<commit_after> | from datetime import timedelta
from django_performance_testing.timing import TimeCollector
from freezegun import freeze_time
import pytest
from testapp.test_helpers import capture_result_collected
@pytest.mark.parametrize('seconds', [10, 5, 0.04])
def test_captures_and_measures_elapsed_time(seconds):
with capture_result_collected() as captured:
with freeze_time('2016-09-22 15:57:01') as frozen_time:
with TimeCollector():
frozen_time.tick(timedelta(seconds=seconds))
assert len(captured.calls) == 1
assert pytest.approx(seconds) == captured.calls[0]['results'][0].value
| from datetime import timedelta
from django_performance_testing.timing import TimeCollector
from freezegun import freeze_time
import pytest
from testapp.test_helpers import capture_result_collected
@pytest.mark.parametrize('seconds', [10, 5, 0.04])
def test_captures_and_measures_elapsed_time(seconds):
with capture_result_collected() as captured:
with freeze_time('2016-09-22 15:57:01') as frozen_time:
with TimeCollector():
frozen_time.tick(timedelta(seconds=seconds))
assert len(captured.calls) == 1
assert pytest.approx(seconds) == captured.calls[0]['results']
Fix for failing test test_captures_and_measures_elapsed_time: TypeError: unsupported operand type(s) for -: 'float' and 'list'from datetime import timedelta
from django_performance_testing.timing import TimeCollector
from freezegun import freeze_time
import pytest
from testapp.test_helpers import capture_result_collected
@pytest.mark.parametrize('seconds', [10, 5, 0.04])
def test_captures_and_measures_elapsed_time(seconds):
with capture_result_collected() as captured:
with freeze_time('2016-09-22 15:57:01') as frozen_time:
with TimeCollector():
frozen_time.tick(timedelta(seconds=seconds))
assert len(captured.calls) == 1
assert pytest.approx(seconds) == captured.calls[0]['results'][0].value
| <commit_before>from datetime import timedelta
from django_performance_testing.timing import TimeCollector
from freezegun import freeze_time
import pytest
from testapp.test_helpers import capture_result_collected
@pytest.mark.parametrize('seconds', [10, 5, 0.04])
def test_captures_and_measures_elapsed_time(seconds):
with capture_result_collected() as captured:
with freeze_time('2016-09-22 15:57:01') as frozen_time:
with TimeCollector():
frozen_time.tick(timedelta(seconds=seconds))
assert len(captured.calls) == 1
assert pytest.approx(seconds) == captured.calls[0]['results']
<commit_msg>Fix for failing test test_captures_and_measures_elapsed_time: TypeError: unsupported operand type(s) for -: 'float' and 'list'<commit_after>from datetime import timedelta
from django_performance_testing.timing import TimeCollector
from freezegun import freeze_time
import pytest
from testapp.test_helpers import capture_result_collected
@pytest.mark.parametrize('seconds', [10, 5, 0.04])
def test_captures_and_measures_elapsed_time(seconds):
with capture_result_collected() as captured:
with freeze_time('2016-09-22 15:57:01') as frozen_time:
with TimeCollector():
frozen_time.tick(timedelta(seconds=seconds))
assert len(captured.calls) == 1
assert pytest.approx(seconds) == captured.calls[0]['results'][0].value
|
4456f5604c1d824f5012bcee550d274c905d74c8 | bigcrunch/shutdown.py | bigcrunch/shutdown.py | import asyncio
import botocore.exceptions
from bigcrunch import webapp
@asyncio.coroutine
def shutdown():
client = yield from webapp.redshift_client()
cluster_control = webapp.ClusterControl(client)
try:
cluster = yield from cluster_control.get()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != 'ClusterNotFound':
raise e
else:
print('Redshift already shutdown')
return
engine = yield from webapp.create_engine(cluster)
with (yield from engine) as conn:
db = webapp.Database(conn=conn)
sessions = yield from db.running_test_sessions()
if sessions == 0:
print('shutting down Redshift')
yield from cluster_control.destroy()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(shutdown())
loop.close()
| import asyncio
import botocore.exceptions
from bigcrunch import webapp
@asyncio.coroutine
def shutdown():
client = webapp.redshift_client()
cluster_control = webapp.ClusterControl(client)
try:
cluster = yield from cluster_control.get()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != 'ClusterNotFound':
raise e
else:
print('Redshift already shutdown')
return
engine = yield from webapp.create_engine(cluster)
with (yield from engine) as conn:
db = webapp.Database(conn=conn)
sessions = yield from db.running_test_sessions()
if sessions == 0:
print('shutting down Redshift')
yield from cluster_control.destroy()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(shutdown())
loop.close()
| Remove yield from on redshift_client | Remove yield from on redshift_client
| Python | agpl-3.0 | sqlalchemy-redshift/bigcrunch | import asyncio
import botocore.exceptions
from bigcrunch import webapp
@asyncio.coroutine
def shutdown():
client = yield from webapp.redshift_client()
cluster_control = webapp.ClusterControl(client)
try:
cluster = yield from cluster_control.get()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != 'ClusterNotFound':
raise e
else:
print('Redshift already shutdown')
return
engine = yield from webapp.create_engine(cluster)
with (yield from engine) as conn:
db = webapp.Database(conn=conn)
sessions = yield from db.running_test_sessions()
if sessions == 0:
print('shutting down Redshift')
yield from cluster_control.destroy()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(shutdown())
loop.close()
Remove yield from on redshift_client | import asyncio
import botocore.exceptions
from bigcrunch import webapp
@asyncio.coroutine
def shutdown():
client = webapp.redshift_client()
cluster_control = webapp.ClusterControl(client)
try:
cluster = yield from cluster_control.get()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != 'ClusterNotFound':
raise e
else:
print('Redshift already shutdown')
return
engine = yield from webapp.create_engine(cluster)
with (yield from engine) as conn:
db = webapp.Database(conn=conn)
sessions = yield from db.running_test_sessions()
if sessions == 0:
print('shutting down Redshift')
yield from cluster_control.destroy()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(shutdown())
loop.close()
| <commit_before>import asyncio
import botocore.exceptions
from bigcrunch import webapp
@asyncio.coroutine
def shutdown():
client = yield from webapp.redshift_client()
cluster_control = webapp.ClusterControl(client)
try:
cluster = yield from cluster_control.get()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != 'ClusterNotFound':
raise e
else:
print('Redshift already shutdown')
return
engine = yield from webapp.create_engine(cluster)
with (yield from engine) as conn:
db = webapp.Database(conn=conn)
sessions = yield from db.running_test_sessions()
if sessions == 0:
print('shutting down Redshift')
yield from cluster_control.destroy()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(shutdown())
loop.close()
<commit_msg>Remove yield from on redshift_client<commit_after> | import asyncio
import botocore.exceptions
from bigcrunch import webapp
@asyncio.coroutine
def shutdown():
client = webapp.redshift_client()
cluster_control = webapp.ClusterControl(client)
try:
cluster = yield from cluster_control.get()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != 'ClusterNotFound':
raise e
else:
print('Redshift already shutdown')
return
engine = yield from webapp.create_engine(cluster)
with (yield from engine) as conn:
db = webapp.Database(conn=conn)
sessions = yield from db.running_test_sessions()
if sessions == 0:
print('shutting down Redshift')
yield from cluster_control.destroy()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(shutdown())
loop.close()
| import asyncio
import botocore.exceptions
from bigcrunch import webapp
@asyncio.coroutine
def shutdown():
client = yield from webapp.redshift_client()
cluster_control = webapp.ClusterControl(client)
try:
cluster = yield from cluster_control.get()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != 'ClusterNotFound':
raise e
else:
print('Redshift already shutdown')
return
engine = yield from webapp.create_engine(cluster)
with (yield from engine) as conn:
db = webapp.Database(conn=conn)
sessions = yield from db.running_test_sessions()
if sessions == 0:
print('shutting down Redshift')
yield from cluster_control.destroy()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(shutdown())
loop.close()
Remove yield from on redshift_clientimport asyncio
import botocore.exceptions
from bigcrunch import webapp
@asyncio.coroutine
def shutdown():
client = webapp.redshift_client()
cluster_control = webapp.ClusterControl(client)
try:
cluster = yield from cluster_control.get()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != 'ClusterNotFound':
raise e
else:
print('Redshift already shutdown')
return
engine = yield from webapp.create_engine(cluster)
with (yield from engine) as conn:
db = webapp.Database(conn=conn)
sessions = yield from db.running_test_sessions()
if sessions == 0:
print('shutting down Redshift')
yield from cluster_control.destroy()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(shutdown())
loop.close()
| <commit_before>import asyncio
import botocore.exceptions
from bigcrunch import webapp
@asyncio.coroutine
def shutdown():
client = yield from webapp.redshift_client()
cluster_control = webapp.ClusterControl(client)
try:
cluster = yield from cluster_control.get()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != 'ClusterNotFound':
raise e
else:
print('Redshift already shutdown')
return
engine = yield from webapp.create_engine(cluster)
with (yield from engine) as conn:
db = webapp.Database(conn=conn)
sessions = yield from db.running_test_sessions()
if sessions == 0:
print('shutting down Redshift')
yield from cluster_control.destroy()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(shutdown())
loop.close()
<commit_msg>Remove yield from on redshift_client<commit_after>import asyncio
import botocore.exceptions
from bigcrunch import webapp
@asyncio.coroutine
def shutdown():
client = webapp.redshift_client()
cluster_control = webapp.ClusterControl(client)
try:
cluster = yield from cluster_control.get()
except botocore.exceptions.ClientError as e:
if e.response['Error']['Code'] != 'ClusterNotFound':
raise e
else:
print('Redshift already shutdown')
return
engine = yield from webapp.create_engine(cluster)
with (yield from engine) as conn:
db = webapp.Database(conn=conn)
sessions = yield from db.running_test_sessions()
if sessions == 0:
print('shutting down Redshift')
yield from cluster_control.destroy()
if __name__ == '__main__':
loop = asyncio.get_event_loop()
loop.run_until_complete(shutdown())
loop.close()
|
1c2ade08262259bb952424d7605d5fc68e038d73 | scripts/generate_labos_aggrega.py | scripts/generate_labos_aggrega.py | #!/usr/bin/env python
from csv import reader
with open("data/public/labos.departements.csv") as f:
data = list(reader(f))
output = {}
keys = data.pop(0)
keys.remove("LABO")
keys.remove("DEPARTEMENT")
for row in data:
if not row[0]:
continue
if row[0] not in output:
output[row[0]] = dict({k: 0 for k in keys})
for i, k in enumerate(keys):
output[row[0]][k] += float(row[2+i]) if row[2+i] else 0
print "LABO,"+",".join(keys)
for labo in sorted(output.keys(), key=lambda x: -output[x]["MONTANT AVANTAGES"]):
print labo+","+",".join([str(output[labo][k]) for k in keys])
| #!/usr/bin/env python
from csv import reader
with open("data/public/labos.departements.csv") as f:
data = list(reader(f))
output = {}
keys = data.pop(0)
keys.remove("LABO")
keys.remove("DEPARTEMENT")
for row in data:
if not row[0]:
continue
if row[0] not in output:
output[row[0]] = dict({k: 0 for k in keys})
for i, k in enumerate(keys):
output[row[0]][k] += float(row[2+i]) if row[2+i] else 0
print("LABO,"+",".join(keys))
for labo in sorted(output.keys(), key=lambda x: -output[x]["MONTANT AVANTAGES"]):
print(labo+","+",".join([str(output[labo][k]) for k in keys]))
| Make aggregation script compatible with python 3 | Make aggregation script compatible with python 3
| Python | agpl-3.0 | regardscitoyens/sunshine-data,regardscitoyens/sunshine-data,regardscitoyens/sunshine-data,regardscitoyens/sunshine-data,regardscitoyens/sunshine-data,regardscitoyens/sunshine-data | #!/usr/bin/env python
from csv import reader
with open("data/public/labos.departements.csv") as f:
data = list(reader(f))
output = {}
keys = data.pop(0)
keys.remove("LABO")
keys.remove("DEPARTEMENT")
for row in data:
if not row[0]:
continue
if row[0] not in output:
output[row[0]] = dict({k: 0 for k in keys})
for i, k in enumerate(keys):
output[row[0]][k] += float(row[2+i]) if row[2+i] else 0
print "LABO,"+",".join(keys)
for labo in sorted(output.keys(), key=lambda x: -output[x]["MONTANT AVANTAGES"]):
print labo+","+",".join([str(output[labo][k]) for k in keys])
Make aggregation script compatible with python 3 | #!/usr/bin/env python
from csv import reader
with open("data/public/labos.departements.csv") as f:
data = list(reader(f))
output = {}
keys = data.pop(0)
keys.remove("LABO")
keys.remove("DEPARTEMENT")
for row in data:
if not row[0]:
continue
if row[0] not in output:
output[row[0]] = dict({k: 0 for k in keys})
for i, k in enumerate(keys):
output[row[0]][k] += float(row[2+i]) if row[2+i] else 0
print("LABO,"+",".join(keys))
for labo in sorted(output.keys(), key=lambda x: -output[x]["MONTANT AVANTAGES"]):
print(labo+","+",".join([str(output[labo][k]) for k in keys]))
| <commit_before>#!/usr/bin/env python
from csv import reader
with open("data/public/labos.departements.csv") as f:
data = list(reader(f))
output = {}
keys = data.pop(0)
keys.remove("LABO")
keys.remove("DEPARTEMENT")
for row in data:
if not row[0]:
continue
if row[0] not in output:
output[row[0]] = dict({k: 0 for k in keys})
for i, k in enumerate(keys):
output[row[0]][k] += float(row[2+i]) if row[2+i] else 0
print "LABO,"+",".join(keys)
for labo in sorted(output.keys(), key=lambda x: -output[x]["MONTANT AVANTAGES"]):
print labo+","+",".join([str(output[labo][k]) for k in keys])
<commit_msg>Make aggregation script compatible with python 3<commit_after> | #!/usr/bin/env python
from csv import reader
with open("data/public/labos.departements.csv") as f:
data = list(reader(f))
output = {}
keys = data.pop(0)
keys.remove("LABO")
keys.remove("DEPARTEMENT")
for row in data:
if not row[0]:
continue
if row[0] not in output:
output[row[0]] = dict({k: 0 for k in keys})
for i, k in enumerate(keys):
output[row[0]][k] += float(row[2+i]) if row[2+i] else 0
print("LABO,"+",".join(keys))
for labo in sorted(output.keys(), key=lambda x: -output[x]["MONTANT AVANTAGES"]):
print(labo+","+",".join([str(output[labo][k]) for k in keys]))
| #!/usr/bin/env python
from csv import reader
with open("data/public/labos.departements.csv") as f:
data = list(reader(f))
output = {}
keys = data.pop(0)
keys.remove("LABO")
keys.remove("DEPARTEMENT")
for row in data:
if not row[0]:
continue
if row[0] not in output:
output[row[0]] = dict({k: 0 for k in keys})
for i, k in enumerate(keys):
output[row[0]][k] += float(row[2+i]) if row[2+i] else 0
print "LABO,"+",".join(keys)
for labo in sorted(output.keys(), key=lambda x: -output[x]["MONTANT AVANTAGES"]):
print labo+","+",".join([str(output[labo][k]) for k in keys])
Make aggregation script compatible with python 3#!/usr/bin/env python
from csv import reader
with open("data/public/labos.departements.csv") as f:
data = list(reader(f))
output = {}
keys = data.pop(0)
keys.remove("LABO")
keys.remove("DEPARTEMENT")
for row in data:
if not row[0]:
continue
if row[0] not in output:
output[row[0]] = dict({k: 0 for k in keys})
for i, k in enumerate(keys):
output[row[0]][k] += float(row[2+i]) if row[2+i] else 0
print("LABO,"+",".join(keys))
for labo in sorted(output.keys(), key=lambda x: -output[x]["MONTANT AVANTAGES"]):
print(labo+","+",".join([str(output[labo][k]) for k in keys]))
| <commit_before>#!/usr/bin/env python
from csv import reader
with open("data/public/labos.departements.csv") as f:
data = list(reader(f))
output = {}
keys = data.pop(0)
keys.remove("LABO")
keys.remove("DEPARTEMENT")
for row in data:
if not row[0]:
continue
if row[0] not in output:
output[row[0]] = dict({k: 0 for k in keys})
for i, k in enumerate(keys):
output[row[0]][k] += float(row[2+i]) if row[2+i] else 0
print "LABO,"+",".join(keys)
for labo in sorted(output.keys(), key=lambda x: -output[x]["MONTANT AVANTAGES"]):
print labo+","+",".join([str(output[labo][k]) for k in keys])
<commit_msg>Make aggregation script compatible with python 3<commit_after>#!/usr/bin/env python
from csv import reader
with open("data/public/labos.departements.csv") as f:
data = list(reader(f))
output = {}
keys = data.pop(0)
keys.remove("LABO")
keys.remove("DEPARTEMENT")
for row in data:
if not row[0]:
continue
if row[0] not in output:
output[row[0]] = dict({k: 0 for k in keys})
for i, k in enumerate(keys):
output[row[0]][k] += float(row[2+i]) if row[2+i] else 0
print("LABO,"+",".join(keys))
for labo in sorted(output.keys(), key=lambda x: -output[x]["MONTANT AVANTAGES"]):
print(labo+","+",".join([str(output[labo][k]) for k in keys]))
|
f25a32dd0180af91277ace186fc878c8baffed65 | heisen/core/__init__.py | heisen/core/__init__.py | from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import Connection
def get_rpc_connection():
servers = {
'self': [
('127.0.0.1', settings.RPC_PORT, 'aliehsanmilad', 'Key1_s!3cr3t')
],
}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return Connection(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
| from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import ConnectionPool
def get_rpc_connection():
if settings.CREDENTIALS:
username, passowrd = settings.CREDENTIALS[0]
else:
username = passowrd = None
servers = {'self': [('localhost', settings.RPC_PORT, username, passowrd)]}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return ConnectionPool(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
| Use connection pool for jsonrpc | Use connection pool for jsonrpc
| Python | mit | HeisenCore/heisen | from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import Connection
def get_rpc_connection():
servers = {
'self': [
('127.0.0.1', settings.RPC_PORT, 'aliehsanmilad', 'Key1_s!3cr3t')
],
}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return Connection(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
Use connection pool for jsonrpc | from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import ConnectionPool
def get_rpc_connection():
if settings.CREDENTIALS:
username, passowrd = settings.CREDENTIALS[0]
else:
username = passowrd = None
servers = {'self': [('localhost', settings.RPC_PORT, username, passowrd)]}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return ConnectionPool(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
| <commit_before>from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import Connection
def get_rpc_connection():
servers = {
'self': [
('127.0.0.1', settings.RPC_PORT, 'aliehsanmilad', 'Key1_s!3cr3t')
],
}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return Connection(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
<commit_msg>Use connection pool for jsonrpc<commit_after> | from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import ConnectionPool
def get_rpc_connection():
if settings.CREDENTIALS:
username, passowrd = settings.CREDENTIALS[0]
else:
username = passowrd = None
servers = {'self': [('localhost', settings.RPC_PORT, username, passowrd)]}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return ConnectionPool(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
| from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import Connection
def get_rpc_connection():
servers = {
'self': [
('127.0.0.1', settings.RPC_PORT, 'aliehsanmilad', 'Key1_s!3cr3t')
],
}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return Connection(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
Use connection pool for jsonrpcfrom heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import ConnectionPool
def get_rpc_connection():
if settings.CREDENTIALS:
username, passowrd = settings.CREDENTIALS[0]
else:
username = passowrd = None
servers = {'self': [('localhost', settings.RPC_PORT, username, passowrd)]}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return ConnectionPool(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
| <commit_before>from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import Connection
def get_rpc_connection():
servers = {
'self': [
('127.0.0.1', settings.RPC_PORT, 'aliehsanmilad', 'Key1_s!3cr3t')
],
}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return Connection(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
<commit_msg>Use connection pool for jsonrpc<commit_after>from heisen.config import settings
from heisen.core.log import logger
from jsonrpclib.request import ConnectionPool
def get_rpc_connection():
if settings.CREDENTIALS:
username, passowrd = settings.CREDENTIALS[0]
else:
username = passowrd = None
servers = {'self': [('localhost', settings.RPC_PORT, username, passowrd)]}
servers.update(getattr(settings, 'RPC_SERVERS', {}))
return ConnectionPool(servers, 'heisen', settings.APP_NAME)
rpc_call = get_rpc_connection()
|
57164efc9827f7975cdfa171a5a88e6fcc4059e5 | neutron/conf/policies/service_type.py | neutron/conf/policies/service_type.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from neutron.conf.policies import base
rules = [
policy.DocumentedRuleDefault(
'get_service_provider',
base.RULE_ANY,
'Get service providers',
[
{
'method': 'GET',
'path': '/service-providers',
},
]
),
]
def list_rules():
return rules
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import versionutils
from oslo_policy import policy
from neutron.conf.policies import base
DEPRECATION_REASON = (
"The Service Providers API now supports system scope and default roles.")
rules = [
policy.DocumentedRuleDefault(
name='get_service_provider',
check_str=base.SYSTEM_OR_PROJECT_READER,
description='Get service providers',
operations=[
{
'method': 'GET',
'path': '/service-providers',
},
],
scope_types=['system', 'project'],
deprecated_rule=policy.DeprecatedRule(
name='get_service_provider',
check_str=base.RULE_ANY),
deprecated_reason=DEPRECATION_REASON,
deprecated_since=versionutils.deprecated.WALLABY
),
]
def list_rules():
return rules
| Implement secure RBAC for service providers API | Implement secure RBAC for service providers API
This commit updates the policies for service providers API
to understand scope checking and account for a read-only role.
This is part of a broader series of changes across OpenStack to
provide a consistent RBAC experience and improve security.
Partially-Implements blueprint: secure-rbac-roles
Change-Id: I76af24a5153f1bc38b630da42de07f0f7b40cc41
| Python | apache-2.0 | mahak/neutron,mahak/neutron,openstack/neutron,openstack/neutron,openstack/neutron,mahak/neutron | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from neutron.conf.policies import base
rules = [
policy.DocumentedRuleDefault(
'get_service_provider',
base.RULE_ANY,
'Get service providers',
[
{
'method': 'GET',
'path': '/service-providers',
},
]
),
]
def list_rules():
return rules
Implement secure RBAC for service providers API
This commit updates the policies for service providers API
to understand scope checking and account for a read-only role.
This is part of a broader series of changes across OpenStack to
provide a consistent RBAC experience and improve security.
Partially-Implements blueprint: secure-rbac-roles
Change-Id: I76af24a5153f1bc38b630da42de07f0f7b40cc41 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import versionutils
from oslo_policy import policy
from neutron.conf.policies import base
DEPRECATION_REASON = (
"The Service Providers API now supports system scope and default roles.")
rules = [
policy.DocumentedRuleDefault(
name='get_service_provider',
check_str=base.SYSTEM_OR_PROJECT_READER,
description='Get service providers',
operations=[
{
'method': 'GET',
'path': '/service-providers',
},
],
scope_types=['system', 'project'],
deprecated_rule=policy.DeprecatedRule(
name='get_service_provider',
check_str=base.RULE_ANY),
deprecated_reason=DEPRECATION_REASON,
deprecated_since=versionutils.deprecated.WALLABY
),
]
def list_rules():
return rules
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from neutron.conf.policies import base
rules = [
policy.DocumentedRuleDefault(
'get_service_provider',
base.RULE_ANY,
'Get service providers',
[
{
'method': 'GET',
'path': '/service-providers',
},
]
),
]
def list_rules():
return rules
<commit_msg>Implement secure RBAC for service providers API
This commit updates the policies for service providers API
to understand scope checking and account for a read-only role.
This is part of a broader series of changes across OpenStack to
provide a consistent RBAC experience and improve security.
Partially-Implements blueprint: secure-rbac-roles
Change-Id: I76af24a5153f1bc38b630da42de07f0f7b40cc41<commit_after> | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import versionutils
from oslo_policy import policy
from neutron.conf.policies import base
DEPRECATION_REASON = (
"The Service Providers API now supports system scope and default roles.")
rules = [
policy.DocumentedRuleDefault(
name='get_service_provider',
check_str=base.SYSTEM_OR_PROJECT_READER,
description='Get service providers',
operations=[
{
'method': 'GET',
'path': '/service-providers',
},
],
scope_types=['system', 'project'],
deprecated_rule=policy.DeprecatedRule(
name='get_service_provider',
check_str=base.RULE_ANY),
deprecated_reason=DEPRECATION_REASON,
deprecated_since=versionutils.deprecated.WALLABY
),
]
def list_rules():
return rules
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from neutron.conf.policies import base
rules = [
policy.DocumentedRuleDefault(
'get_service_provider',
base.RULE_ANY,
'Get service providers',
[
{
'method': 'GET',
'path': '/service-providers',
},
]
),
]
def list_rules():
return rules
Implement secure RBAC for service providers API
This commit updates the policies for service providers API
to understand scope checking and account for a read-only role.
This is part of a broader series of changes across OpenStack to
provide a consistent RBAC experience and improve security.
Partially-Implements blueprint: secure-rbac-roles
Change-Id: I76af24a5153f1bc38b630da42de07f0f7b40cc41# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import versionutils
from oslo_policy import policy
from neutron.conf.policies import base
DEPRECATION_REASON = (
"The Service Providers API now supports system scope and default roles.")
rules = [
policy.DocumentedRuleDefault(
name='get_service_provider',
check_str=base.SYSTEM_OR_PROJECT_READER,
description='Get service providers',
operations=[
{
'method': 'GET',
'path': '/service-providers',
},
],
scope_types=['system', 'project'],
deprecated_rule=policy.DeprecatedRule(
name='get_service_provider',
check_str=base.RULE_ANY),
deprecated_reason=DEPRECATION_REASON,
deprecated_since=versionutils.deprecated.WALLABY
),
]
def list_rules():
return rules
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from neutron.conf.policies import base
rules = [
policy.DocumentedRuleDefault(
'get_service_provider',
base.RULE_ANY,
'Get service providers',
[
{
'method': 'GET',
'path': '/service-providers',
},
]
),
]
def list_rules():
return rules
<commit_msg>Implement secure RBAC for service providers API
This commit updates the policies for service providers API
to understand scope checking and account for a read-only role.
This is part of a broader series of changes across OpenStack to
provide a consistent RBAC experience and improve security.
Partially-Implements blueprint: secure-rbac-roles
Change-Id: I76af24a5153f1bc38b630da42de07f0f7b40cc41<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import versionutils
from oslo_policy import policy
from neutron.conf.policies import base
DEPRECATION_REASON = (
"The Service Providers API now supports system scope and default roles.")
rules = [
policy.DocumentedRuleDefault(
name='get_service_provider',
check_str=base.SYSTEM_OR_PROJECT_READER,
description='Get service providers',
operations=[
{
'method': 'GET',
'path': '/service-providers',
},
],
scope_types=['system', 'project'],
deprecated_rule=policy.DeprecatedRule(
name='get_service_provider',
check_str=base.RULE_ANY),
deprecated_reason=DEPRECATION_REASON,
deprecated_since=versionutils.deprecated.WALLABY
),
]
def list_rules():
return rules
|
c916cc05c57cdda5a9b07901af8af032a97c3841 | karma/api/__init__.py | karma/api/__init__.py | #!/usr/bin/env python
from flask import Blueprint
from flask.ext.restful import Api
api = Blueprint("api", __name__, template_folder="templates")
rest = Api(api)
from karma.api.charity import Charity, Charities
from karma.api.post import Post, Posts
# Adding resources
rest.add_resource(Charity, '/charities/<int:charity_name>')
rest.add_resource(Charities, '/charities')
rest.add_resource(Post, '/posts/<int:post_id>')
rest.add_resource(Posts, '/posts')
| #!/usr/bin/env python
from flask import Blueprint
from flask.ext.restful import Api
api = Blueprint("api", __name__, template_folder="templates")
rest = Api(api)
from karma.api.charity import Charity, Charities
from karma.api.post import Post, Posts
# Adding resources
rest.add_resource(Charity, '/charities/<int:charity_name>')
rest.add_resource(Charities, '/charities/')
rest.add_resource(Post, '/posts/<int:post_id>')
rest.add_resource(Posts, '/posts/')
| Add changes to url thingies | Add changes to url thingies
| Python | mit | hamhut1066/ghu-hack,hamhut1066/ghu-hack | #!/usr/bin/env python
from flask import Blueprint
from flask.ext.restful import Api
api = Blueprint("api", __name__, template_folder="templates")
rest = Api(api)
from karma.api.charity import Charity, Charities
from karma.api.post import Post, Posts
# Adding resources
rest.add_resource(Charity, '/charities/<int:charity_name>')
rest.add_resource(Charities, '/charities')
rest.add_resource(Post, '/posts/<int:post_id>')
rest.add_resource(Posts, '/posts')
Add changes to url thingies | #!/usr/bin/env python
from flask import Blueprint
from flask.ext.restful import Api
api = Blueprint("api", __name__, template_folder="templates")
rest = Api(api)
from karma.api.charity import Charity, Charities
from karma.api.post import Post, Posts
# Adding resources
rest.add_resource(Charity, '/charities/<int:charity_name>')
rest.add_resource(Charities, '/charities/')
rest.add_resource(Post, '/posts/<int:post_id>')
rest.add_resource(Posts, '/posts/')
| <commit_before>#!/usr/bin/env python
from flask import Blueprint
from flask.ext.restful import Api
api = Blueprint("api", __name__, template_folder="templates")
rest = Api(api)
from karma.api.charity import Charity, Charities
from karma.api.post import Post, Posts
# Adding resources
rest.add_resource(Charity, '/charities/<int:charity_name>')
rest.add_resource(Charities, '/charities')
rest.add_resource(Post, '/posts/<int:post_id>')
rest.add_resource(Posts, '/posts')
<commit_msg>Add changes to url thingies<commit_after> | #!/usr/bin/env python
from flask import Blueprint
from flask.ext.restful import Api
api = Blueprint("api", __name__, template_folder="templates")
rest = Api(api)
from karma.api.charity import Charity, Charities
from karma.api.post import Post, Posts
# Adding resources
rest.add_resource(Charity, '/charities/<int:charity_name>')
rest.add_resource(Charities, '/charities/')
rest.add_resource(Post, '/posts/<int:post_id>')
rest.add_resource(Posts, '/posts/')
| #!/usr/bin/env python
from flask import Blueprint
from flask.ext.restful import Api
api = Blueprint("api", __name__, template_folder="templates")
rest = Api(api)
from karma.api.charity import Charity, Charities
from karma.api.post import Post, Posts
# Adding resources
rest.add_resource(Charity, '/charities/<int:charity_name>')
rest.add_resource(Charities, '/charities')
rest.add_resource(Post, '/posts/<int:post_id>')
rest.add_resource(Posts, '/posts')
Add changes to url thingies#!/usr/bin/env python
from flask import Blueprint
from flask.ext.restful import Api
api = Blueprint("api", __name__, template_folder="templates")
rest = Api(api)
from karma.api.charity import Charity, Charities
from karma.api.post import Post, Posts
# Adding resources
rest.add_resource(Charity, '/charities/<int:charity_name>')
rest.add_resource(Charities, '/charities/')
rest.add_resource(Post, '/posts/<int:post_id>')
rest.add_resource(Posts, '/posts/')
| <commit_before>#!/usr/bin/env python
from flask import Blueprint
from flask.ext.restful import Api
api = Blueprint("api", __name__, template_folder="templates")
rest = Api(api)
from karma.api.charity import Charity, Charities
from karma.api.post import Post, Posts
# Adding resources
rest.add_resource(Charity, '/charities/<int:charity_name>')
rest.add_resource(Charities, '/charities')
rest.add_resource(Post, '/posts/<int:post_id>')
rest.add_resource(Posts, '/posts')
<commit_msg>Add changes to url thingies<commit_after>#!/usr/bin/env python
from flask import Blueprint
from flask.ext.restful import Api
api = Blueprint("api", __name__, template_folder="templates")
rest = Api(api)
from karma.api.charity import Charity, Charities
from karma.api.post import Post, Posts
# Adding resources
rest.add_resource(Charity, '/charities/<int:charity_name>')
rest.add_resource(Charities, '/charities/')
rest.add_resource(Post, '/posts/<int:post_id>')
rest.add_resource(Posts, '/posts/')
|
026d887dd85bdcc5db97d65706509a18252de8ba | l10n_ar_aeroo_sale/__openerp__.py | l10n_ar_aeroo_sale/__openerp__.py | # -*- coding: utf-8 -*-
{
'name': 'Argentinian Like Sale Order Aeroo Report',
'version': '1.0',
'category': 'Localization/Argentina',
'sequence': 14,
'summary': '',
'description': """
Argentinian Like Sale Order / Quotation Aeroo Report
====================================================
""",
'author': 'Ingenieria ADHOC',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'report_extended_sale',
'l10n_ar_aeroo_base',
'portal_sale',
],
'data': [
'report_configuration_defaults_data.xml',
'sale_order_report.xml',
'sale_order_template.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | # -*- coding: utf-8 -*-
{
'name': 'Argentinian Like Sale Order Aeroo Report',
'version': '1.0',
'category': 'Localization/Argentina',
'sequence': 14,
'summary': '',
'description': """
Argentinian Like Sale Order / Quotation Aeroo Report
====================================================
""",
'author': 'Ingenieria ADHOC',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'report_extended_sale',
'l10n_ar_aeroo_base',
'l10n_ar_aeroo_invoice', #esta dependencia es porque actualizamos algo que crea portal_sale con un valor de las invoice
'portal_sale',
],
'data': [
'report_configuration_defaults_data.xml',
'sale_order_report.xml',
'sale_order_template.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | FIX dependency on aeroo rpoert | FIX dependency on aeroo rpoert
| Python | agpl-3.0 | ingadhoc/argentina-reporting | # -*- coding: utf-8 -*-
{
'name': 'Argentinian Like Sale Order Aeroo Report',
'version': '1.0',
'category': 'Localization/Argentina',
'sequence': 14,
'summary': '',
'description': """
Argentinian Like Sale Order / Quotation Aeroo Report
====================================================
""",
'author': 'Ingenieria ADHOC',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'report_extended_sale',
'l10n_ar_aeroo_base',
'portal_sale',
],
'data': [
'report_configuration_defaults_data.xml',
'sale_order_report.xml',
'sale_order_template.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:FIX dependency on aeroo rpoert | # -*- coding: utf-8 -*-
{
'name': 'Argentinian Like Sale Order Aeroo Report',
'version': '1.0',
'category': 'Localization/Argentina',
'sequence': 14,
'summary': '',
'description': """
Argentinian Like Sale Order / Quotation Aeroo Report
====================================================
""",
'author': 'Ingenieria ADHOC',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'report_extended_sale',
'l10n_ar_aeroo_base',
'l10n_ar_aeroo_invoice', #esta dependencia es porque actualizamos algo que crea portal_sale con un valor de las invoice
'portal_sale',
],
'data': [
'report_configuration_defaults_data.xml',
'sale_order_report.xml',
'sale_order_template.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | <commit_before># -*- coding: utf-8 -*-
{
'name': 'Argentinian Like Sale Order Aeroo Report',
'version': '1.0',
'category': 'Localization/Argentina',
'sequence': 14,
'summary': '',
'description': """
Argentinian Like Sale Order / Quotation Aeroo Report
====================================================
""",
'author': 'Ingenieria ADHOC',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'report_extended_sale',
'l10n_ar_aeroo_base',
'portal_sale',
],
'data': [
'report_configuration_defaults_data.xml',
'sale_order_report.xml',
'sale_order_template.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<commit_msg>FIX dependency on aeroo rpoert<commit_after> | # -*- coding: utf-8 -*-
{
'name': 'Argentinian Like Sale Order Aeroo Report',
'version': '1.0',
'category': 'Localization/Argentina',
'sequence': 14,
'summary': '',
'description': """
Argentinian Like Sale Order / Quotation Aeroo Report
====================================================
""",
'author': 'Ingenieria ADHOC',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'report_extended_sale',
'l10n_ar_aeroo_base',
'l10n_ar_aeroo_invoice', #esta dependencia es porque actualizamos algo que crea portal_sale con un valor de las invoice
'portal_sale',
],
'data': [
'report_configuration_defaults_data.xml',
'sale_order_report.xml',
'sale_order_template.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | # -*- coding: utf-8 -*-
{
'name': 'Argentinian Like Sale Order Aeroo Report',
'version': '1.0',
'category': 'Localization/Argentina',
'sequence': 14,
'summary': '',
'description': """
Argentinian Like Sale Order / Quotation Aeroo Report
====================================================
""",
'author': 'Ingenieria ADHOC',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'report_extended_sale',
'l10n_ar_aeroo_base',
'portal_sale',
],
'data': [
'report_configuration_defaults_data.xml',
'sale_order_report.xml',
'sale_order_template.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:FIX dependency on aeroo rpoert# -*- coding: utf-8 -*-
{
'name': 'Argentinian Like Sale Order Aeroo Report',
'version': '1.0',
'category': 'Localization/Argentina',
'sequence': 14,
'summary': '',
'description': """
Argentinian Like Sale Order / Quotation Aeroo Report
====================================================
""",
'author': 'Ingenieria ADHOC',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'report_extended_sale',
'l10n_ar_aeroo_base',
'l10n_ar_aeroo_invoice', #esta dependencia es porque actualizamos algo que crea portal_sale con un valor de las invoice
'portal_sale',
],
'data': [
'report_configuration_defaults_data.xml',
'sale_order_report.xml',
'sale_order_template.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: | <commit_before># -*- coding: utf-8 -*-
{
'name': 'Argentinian Like Sale Order Aeroo Report',
'version': '1.0',
'category': 'Localization/Argentina',
'sequence': 14,
'summary': '',
'description': """
Argentinian Like Sale Order / Quotation Aeroo Report
====================================================
""",
'author': 'Ingenieria ADHOC',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'report_extended_sale',
'l10n_ar_aeroo_base',
'portal_sale',
],
'data': [
'report_configuration_defaults_data.xml',
'sale_order_report.xml',
'sale_order_template.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:<commit_msg>FIX dependency on aeroo rpoert<commit_after># -*- coding: utf-8 -*-
{
'name': 'Argentinian Like Sale Order Aeroo Report',
'version': '1.0',
'category': 'Localization/Argentina',
'sequence': 14,
'summary': '',
'description': """
Argentinian Like Sale Order / Quotation Aeroo Report
====================================================
""",
'author': 'Ingenieria ADHOC',
'website': 'www.ingadhoc.com',
'images': [
],
'depends': [
'report_extended_sale',
'l10n_ar_aeroo_base',
'l10n_ar_aeroo_invoice', #esta dependencia es porque actualizamos algo que crea portal_sale con un valor de las invoice
'portal_sale',
],
'data': [
'report_configuration_defaults_data.xml',
'sale_order_report.xml',
'sale_order_template.xml',
],
'demo': [
],
'test': [
],
'installable': True,
'auto_install': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4: |
6f745ae05a22031a36cb5cedc6b627cbf7ba6512 | import_goodline_iptv.py | import_goodline_iptv.py | #!/usr/bin/env python3
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir', required=True, help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
| #!/usr/bin/env python3
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir',
required=True,
env_var='OUTDIR',
help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
| Add environment variable for ability to set the output directory | Add environment variable for ability to set the output directory
| Python | mit | nsadovskiy/goodline_tv | #!/usr/bin/env python3
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir', required=True, help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
Add environment variable for ability to set the output directory | #!/usr/bin/env python3
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir',
required=True,
env_var='OUTDIR',
help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
| <commit_before>#!/usr/bin/env python3
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir', required=True, help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
<commit_msg>Add environment variable for ability to set the output directory<commit_after> | #!/usr/bin/env python3
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir',
required=True,
env_var='OUTDIR',
help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
| #!/usr/bin/env python3
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir', required=True, help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
Add environment variable for ability to set the output directory#!/usr/bin/env python3
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir',
required=True,
env_var='OUTDIR',
help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
| <commit_before>#!/usr/bin/env python3
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir', required=True, help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
<commit_msg>Add environment variable for ability to set the output directory<commit_after>#!/usr/bin/env python3
import configargparse
from goodline_iptv.importer import do_import
if __name__ == '__main__':
parser = configargparse.ArgParser()
parser.add_argument('-o', '--out-dir',
required=True,
env_var='OUTDIR',
help='Output directory')
parser.add_argument('-e', '--encoding',
default='cp1251',
env_var='ENCODING',
help='Source JTV teleguide encoding')
parser.add_argument('-t', '--timezone',
default='+0700',
env_var='TIMEZONE',
help='Source JTV teleguide timezone')
parser.add_argument('-u', '--udpxy',
env_var='UDPXY_ADDR',
help='Address of the udproxy service, if available')
parser.add_argument('-v', '--verbosity',
default=0,
env_var='VERBOSITY',
type=int, choices=range(0, 4),
help='Verbosity level')
args = parser.parse_args()
do_import(args.verbosity, args.out_dir, args.encoding, args.timezone, args.udpxy)
|
5c8fd314dd89d8964cc5cb75c2b57ed32be275c5 | plsync/users.py | plsync/users.py |
# NOTE: User roles are not managed here. Visit PlanetLab to change user roles.
user_list = [('Stephen', 'Stuart', 'sstuart@google.com'),
('Will', 'Hawkins', 'hawkinsw@opentechinstitute.org'),
('Jordan', 'McCarthy', 'mccarthy@opentechinstitute.org'),
('Chris', 'Ritzo', 'critzo@opentechinstitute.org'),
('Josh', 'Bailey', 'joshb@google.com'),
('Steph', 'Alarcon', 'salarcon@measurementlab.net'),
('Nathan', 'Kinkade', 'kinkade@opentechinstitute.org'),
('Matt', 'Mathis', 'mattmathis@google.com')]
|
# NOTE: User roles are not managed here. Visit PlanetLab to change user roles.
user_list = [('Stephen', 'Stuart', 'sstuart@google.com'),
('Will', 'Hawkins', 'hawkinsw@opentechinstitute.org'),
('Jordan', 'McCarthy', 'mccarthy@opentechinstitute.org'),
('Chris', 'Ritzo', 'critzo@opentechinstitute.org'),
('Josh', 'Bailey', 'joshb@google.com'),
('Steph', 'Alarcon', 'salarcon@measurementlab.net'),
('Nathan', 'Kinkade', 'kinkade@opentechinstitute.org'),
('Matt', 'Mathis', 'mattmathis@google.com')
('Peter', 'Boothe', 'pboothe@google.com')]
| Add pboothe temporarily for testing | Add pboothe temporarily for testing
| Python | apache-2.0 | jheretic/operator,stephen-soltesz/operator,nkinkade/operator,critzo/operator,critzo/operator,jheretic/operator,nkinkade/operator,m-lab/operator,salarcon215/operator,m-lab/operator,salarcon215/operator,stephen-soltesz/operator |
# NOTE: User roles are not managed here. Visit PlanetLab to change user roles.
user_list = [('Stephen', 'Stuart', 'sstuart@google.com'),
('Will', 'Hawkins', 'hawkinsw@opentechinstitute.org'),
('Jordan', 'McCarthy', 'mccarthy@opentechinstitute.org'),
('Chris', 'Ritzo', 'critzo@opentechinstitute.org'),
('Josh', 'Bailey', 'joshb@google.com'),
('Steph', 'Alarcon', 'salarcon@measurementlab.net'),
('Nathan', 'Kinkade', 'kinkade@opentechinstitute.org'),
('Matt', 'Mathis', 'mattmathis@google.com')]
Add pboothe temporarily for testing |
# NOTE: User roles are not managed here. Visit PlanetLab to change user roles.
user_list = [('Stephen', 'Stuart', 'sstuart@google.com'),
('Will', 'Hawkins', 'hawkinsw@opentechinstitute.org'),
('Jordan', 'McCarthy', 'mccarthy@opentechinstitute.org'),
('Chris', 'Ritzo', 'critzo@opentechinstitute.org'),
('Josh', 'Bailey', 'joshb@google.com'),
('Steph', 'Alarcon', 'salarcon@measurementlab.net'),
('Nathan', 'Kinkade', 'kinkade@opentechinstitute.org'),
('Matt', 'Mathis', 'mattmathis@google.com')
('Peter', 'Boothe', 'pboothe@google.com')]
| <commit_before>
# NOTE: User roles are not managed here. Visit PlanetLab to change user roles.
user_list = [('Stephen', 'Stuart', 'sstuart@google.com'),
('Will', 'Hawkins', 'hawkinsw@opentechinstitute.org'),
('Jordan', 'McCarthy', 'mccarthy@opentechinstitute.org'),
('Chris', 'Ritzo', 'critzo@opentechinstitute.org'),
('Josh', 'Bailey', 'joshb@google.com'),
('Steph', 'Alarcon', 'salarcon@measurementlab.net'),
('Nathan', 'Kinkade', 'kinkade@opentechinstitute.org'),
('Matt', 'Mathis', 'mattmathis@google.com')]
<commit_msg>Add pboothe temporarily for testing<commit_after> |
# NOTE: User roles are not managed here. Visit PlanetLab to change user roles.
user_list = [('Stephen', 'Stuart', 'sstuart@google.com'),
('Will', 'Hawkins', 'hawkinsw@opentechinstitute.org'),
('Jordan', 'McCarthy', 'mccarthy@opentechinstitute.org'),
('Chris', 'Ritzo', 'critzo@opentechinstitute.org'),
('Josh', 'Bailey', 'joshb@google.com'),
('Steph', 'Alarcon', 'salarcon@measurementlab.net'),
('Nathan', 'Kinkade', 'kinkade@opentechinstitute.org'),
('Matt', 'Mathis', 'mattmathis@google.com')
('Peter', 'Boothe', 'pboothe@google.com')]
|
# NOTE: User roles are not managed here. Visit PlanetLab to change user roles.
user_list = [('Stephen', 'Stuart', 'sstuart@google.com'),
('Will', 'Hawkins', 'hawkinsw@opentechinstitute.org'),
('Jordan', 'McCarthy', 'mccarthy@opentechinstitute.org'),
('Chris', 'Ritzo', 'critzo@opentechinstitute.org'),
('Josh', 'Bailey', 'joshb@google.com'),
('Steph', 'Alarcon', 'salarcon@measurementlab.net'),
('Nathan', 'Kinkade', 'kinkade@opentechinstitute.org'),
('Matt', 'Mathis', 'mattmathis@google.com')]
Add pboothe temporarily for testing
# NOTE: User roles are not managed here. Visit PlanetLab to change user roles.
user_list = [('Stephen', 'Stuart', 'sstuart@google.com'),
('Will', 'Hawkins', 'hawkinsw@opentechinstitute.org'),
('Jordan', 'McCarthy', 'mccarthy@opentechinstitute.org'),
('Chris', 'Ritzo', 'critzo@opentechinstitute.org'),
('Josh', 'Bailey', 'joshb@google.com'),
('Steph', 'Alarcon', 'salarcon@measurementlab.net'),
('Nathan', 'Kinkade', 'kinkade@opentechinstitute.org'),
('Matt', 'Mathis', 'mattmathis@google.com')
('Peter', 'Boothe', 'pboothe@google.com')]
| <commit_before>
# NOTE: User roles are not managed here. Visit PlanetLab to change user roles.
user_list = [('Stephen', 'Stuart', 'sstuart@google.com'),
('Will', 'Hawkins', 'hawkinsw@opentechinstitute.org'),
('Jordan', 'McCarthy', 'mccarthy@opentechinstitute.org'),
('Chris', 'Ritzo', 'critzo@opentechinstitute.org'),
('Josh', 'Bailey', 'joshb@google.com'),
('Steph', 'Alarcon', 'salarcon@measurementlab.net'),
('Nathan', 'Kinkade', 'kinkade@opentechinstitute.org'),
('Matt', 'Mathis', 'mattmathis@google.com')]
<commit_msg>Add pboothe temporarily for testing<commit_after>
# NOTE: User roles are not managed here. Visit PlanetLab to change user roles.
user_list = [('Stephen', 'Stuart', 'sstuart@google.com'),
('Will', 'Hawkins', 'hawkinsw@opentechinstitute.org'),
('Jordan', 'McCarthy', 'mccarthy@opentechinstitute.org'),
('Chris', 'Ritzo', 'critzo@opentechinstitute.org'),
('Josh', 'Bailey', 'joshb@google.com'),
('Steph', 'Alarcon', 'salarcon@measurementlab.net'),
('Nathan', 'Kinkade', 'kinkade@opentechinstitute.org'),
('Matt', 'Mathis', 'mattmathis@google.com')
('Peter', 'Boothe', 'pboothe@google.com')]
|
4f9b01dcf194c5ce05640852cf8f436fa4e3bf80 | manager/urls.py | manager/urls.py | from django.conf.urls import url
from manager import views
_PACKAGE_NAME_REGEX = r'(?P<package_name>[a-zA-Z0-9_+-]+)'
_PACKAGE_NAME_REGEX_WITH_EMPTY = r'(?P<package_name>[a-zA-Z0-9_+-]*)'
_BUILD_NUMBER_REGEX = r'(?P<build_number>\d+)'
_REGEX = {'name': _PACKAGE_NAME_REGEX, 'number': _BUILD_NUMBER_REGEX}
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^register/$', views.package_register, name='package_register'),
url(r'^register/{name}$'.format(name=_PACKAGE_NAME_REGEX_WITH_EMPTY), views.package_register_detail,
name='package_register_detail'),
url(r'^{name}/$'.format(**_REGEX), views.package_detail, name='package_detail'),
url(r'^{name}/{number}/$'.format(**_REGEX), views.build_detail, name='build_detail'),
url(r'^{name}/{number}/download/$'.format(**_REGEX), views.build_download, name='build_download'),
url(r'^{name}/{number}/log/$'.format(**_REGEX), views.build_log, name='build_log'),
url(r'^repository/(?P<file_name>.*)$', views.repository, name='repository'),
]
| from django.conf.urls import url
from manager import views
_PACKAGE_NAME_REGEX = r'(?P<package_name>[a-zA-Z0-9_+-.]+)'
_PACKAGE_NAME_REGEX_WITH_EMPTY = r'(?P<package_name>[a-zA-Z0-9_+-.]*)'
_BUILD_NUMBER_REGEX = r'(?P<build_number>\d+)'
_REGEX = {'name': _PACKAGE_NAME_REGEX, 'number': _BUILD_NUMBER_REGEX}
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^register/$', views.package_register, name='package_register'),
url(r'^register/{name}$'.format(name=_PACKAGE_NAME_REGEX_WITH_EMPTY), views.package_register_detail,
name='package_register_detail'),
url(r'^{name}/$'.format(**_REGEX), views.package_detail, name='package_detail'),
url(r'^{name}/{number}/$'.format(**_REGEX), views.build_detail, name='build_detail'),
url(r'^{name}/{number}/download/$'.format(**_REGEX), views.build_download, name='build_download'),
url(r'^{name}/{number}/log/$'.format(**_REGEX), views.build_log, name='build_log'),
url(r'^repository/(?P<file_name>.*)$', views.repository, name='repository'),
]
| Allow '.' in package name | Allow '.' in package name
| Python | mit | colajam93/aurpackager,colajam93/aurpackager,colajam93/aurpackager,colajam93/aurpackager | from django.conf.urls import url
from manager import views
_PACKAGE_NAME_REGEX = r'(?P<package_name>[a-zA-Z0-9_+-]+)'
_PACKAGE_NAME_REGEX_WITH_EMPTY = r'(?P<package_name>[a-zA-Z0-9_+-]*)'
_BUILD_NUMBER_REGEX = r'(?P<build_number>\d+)'
_REGEX = {'name': _PACKAGE_NAME_REGEX, 'number': _BUILD_NUMBER_REGEX}
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^register/$', views.package_register, name='package_register'),
url(r'^register/{name}$'.format(name=_PACKAGE_NAME_REGEX_WITH_EMPTY), views.package_register_detail,
name='package_register_detail'),
url(r'^{name}/$'.format(**_REGEX), views.package_detail, name='package_detail'),
url(r'^{name}/{number}/$'.format(**_REGEX), views.build_detail, name='build_detail'),
url(r'^{name}/{number}/download/$'.format(**_REGEX), views.build_download, name='build_download'),
url(r'^{name}/{number}/log/$'.format(**_REGEX), views.build_log, name='build_log'),
url(r'^repository/(?P<file_name>.*)$', views.repository, name='repository'),
]
Allow '.' in package name | from django.conf.urls import url
from manager import views
_PACKAGE_NAME_REGEX = r'(?P<package_name>[a-zA-Z0-9_+-.]+)'
_PACKAGE_NAME_REGEX_WITH_EMPTY = r'(?P<package_name>[a-zA-Z0-9_+-.]*)'
_BUILD_NUMBER_REGEX = r'(?P<build_number>\d+)'
_REGEX = {'name': _PACKAGE_NAME_REGEX, 'number': _BUILD_NUMBER_REGEX}
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^register/$', views.package_register, name='package_register'),
url(r'^register/{name}$'.format(name=_PACKAGE_NAME_REGEX_WITH_EMPTY), views.package_register_detail,
name='package_register_detail'),
url(r'^{name}/$'.format(**_REGEX), views.package_detail, name='package_detail'),
url(r'^{name}/{number}/$'.format(**_REGEX), views.build_detail, name='build_detail'),
url(r'^{name}/{number}/download/$'.format(**_REGEX), views.build_download, name='build_download'),
url(r'^{name}/{number}/log/$'.format(**_REGEX), views.build_log, name='build_log'),
url(r'^repository/(?P<file_name>.*)$', views.repository, name='repository'),
]
| <commit_before>from django.conf.urls import url
from manager import views
_PACKAGE_NAME_REGEX = r'(?P<package_name>[a-zA-Z0-9_+-]+)'
_PACKAGE_NAME_REGEX_WITH_EMPTY = r'(?P<package_name>[a-zA-Z0-9_+-]*)'
_BUILD_NUMBER_REGEX = r'(?P<build_number>\d+)'
_REGEX = {'name': _PACKAGE_NAME_REGEX, 'number': _BUILD_NUMBER_REGEX}
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^register/$', views.package_register, name='package_register'),
url(r'^register/{name}$'.format(name=_PACKAGE_NAME_REGEX_WITH_EMPTY), views.package_register_detail,
name='package_register_detail'),
url(r'^{name}/$'.format(**_REGEX), views.package_detail, name='package_detail'),
url(r'^{name}/{number}/$'.format(**_REGEX), views.build_detail, name='build_detail'),
url(r'^{name}/{number}/download/$'.format(**_REGEX), views.build_download, name='build_download'),
url(r'^{name}/{number}/log/$'.format(**_REGEX), views.build_log, name='build_log'),
url(r'^repository/(?P<file_name>.*)$', views.repository, name='repository'),
]
<commit_msg>Allow '.' in package name<commit_after> | from django.conf.urls import url
from manager import views
_PACKAGE_NAME_REGEX = r'(?P<package_name>[a-zA-Z0-9_+-.]+)'
_PACKAGE_NAME_REGEX_WITH_EMPTY = r'(?P<package_name>[a-zA-Z0-9_+-.]*)'
_BUILD_NUMBER_REGEX = r'(?P<build_number>\d+)'
_REGEX = {'name': _PACKAGE_NAME_REGEX, 'number': _BUILD_NUMBER_REGEX}
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^register/$', views.package_register, name='package_register'),
url(r'^register/{name}$'.format(name=_PACKAGE_NAME_REGEX_WITH_EMPTY), views.package_register_detail,
name='package_register_detail'),
url(r'^{name}/$'.format(**_REGEX), views.package_detail, name='package_detail'),
url(r'^{name}/{number}/$'.format(**_REGEX), views.build_detail, name='build_detail'),
url(r'^{name}/{number}/download/$'.format(**_REGEX), views.build_download, name='build_download'),
url(r'^{name}/{number}/log/$'.format(**_REGEX), views.build_log, name='build_log'),
url(r'^repository/(?P<file_name>.*)$', views.repository, name='repository'),
]
| from django.conf.urls import url
from manager import views
_PACKAGE_NAME_REGEX = r'(?P<package_name>[a-zA-Z0-9_+-]+)'
_PACKAGE_NAME_REGEX_WITH_EMPTY = r'(?P<package_name>[a-zA-Z0-9_+-]*)'
_BUILD_NUMBER_REGEX = r'(?P<build_number>\d+)'
_REGEX = {'name': _PACKAGE_NAME_REGEX, 'number': _BUILD_NUMBER_REGEX}
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^register/$', views.package_register, name='package_register'),
url(r'^register/{name}$'.format(name=_PACKAGE_NAME_REGEX_WITH_EMPTY), views.package_register_detail,
name='package_register_detail'),
url(r'^{name}/$'.format(**_REGEX), views.package_detail, name='package_detail'),
url(r'^{name}/{number}/$'.format(**_REGEX), views.build_detail, name='build_detail'),
url(r'^{name}/{number}/download/$'.format(**_REGEX), views.build_download, name='build_download'),
url(r'^{name}/{number}/log/$'.format(**_REGEX), views.build_log, name='build_log'),
url(r'^repository/(?P<file_name>.*)$', views.repository, name='repository'),
]
Allow '.' in package namefrom django.conf.urls import url
from manager import views
_PACKAGE_NAME_REGEX = r'(?P<package_name>[a-zA-Z0-9_+-.]+)'
_PACKAGE_NAME_REGEX_WITH_EMPTY = r'(?P<package_name>[a-zA-Z0-9_+-.]*)'
_BUILD_NUMBER_REGEX = r'(?P<build_number>\d+)'
_REGEX = {'name': _PACKAGE_NAME_REGEX, 'number': _BUILD_NUMBER_REGEX}
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^register/$', views.package_register, name='package_register'),
url(r'^register/{name}$'.format(name=_PACKAGE_NAME_REGEX_WITH_EMPTY), views.package_register_detail,
name='package_register_detail'),
url(r'^{name}/$'.format(**_REGEX), views.package_detail, name='package_detail'),
url(r'^{name}/{number}/$'.format(**_REGEX), views.build_detail, name='build_detail'),
url(r'^{name}/{number}/download/$'.format(**_REGEX), views.build_download, name='build_download'),
url(r'^{name}/{number}/log/$'.format(**_REGEX), views.build_log, name='build_log'),
url(r'^repository/(?P<file_name>.*)$', views.repository, name='repository'),
]
| <commit_before>from django.conf.urls import url
from manager import views
_PACKAGE_NAME_REGEX = r'(?P<package_name>[a-zA-Z0-9_+-]+)'
_PACKAGE_NAME_REGEX_WITH_EMPTY = r'(?P<package_name>[a-zA-Z0-9_+-]*)'
_BUILD_NUMBER_REGEX = r'(?P<build_number>\d+)'
_REGEX = {'name': _PACKAGE_NAME_REGEX, 'number': _BUILD_NUMBER_REGEX}
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^register/$', views.package_register, name='package_register'),
url(r'^register/{name}$'.format(name=_PACKAGE_NAME_REGEX_WITH_EMPTY), views.package_register_detail,
name='package_register_detail'),
url(r'^{name}/$'.format(**_REGEX), views.package_detail, name='package_detail'),
url(r'^{name}/{number}/$'.format(**_REGEX), views.build_detail, name='build_detail'),
url(r'^{name}/{number}/download/$'.format(**_REGEX), views.build_download, name='build_download'),
url(r'^{name}/{number}/log/$'.format(**_REGEX), views.build_log, name='build_log'),
url(r'^repository/(?P<file_name>.*)$', views.repository, name='repository'),
]
<commit_msg>Allow '.' in package name<commit_after>from django.conf.urls import url
from manager import views
_PACKAGE_NAME_REGEX = r'(?P<package_name>[a-zA-Z0-9_+-.]+)'
_PACKAGE_NAME_REGEX_WITH_EMPTY = r'(?P<package_name>[a-zA-Z0-9_+-.]*)'
_BUILD_NUMBER_REGEX = r'(?P<build_number>\d+)'
_REGEX = {'name': _PACKAGE_NAME_REGEX, 'number': _BUILD_NUMBER_REGEX}
urlpatterns = [
url(r'^$', views.package_list, name='package_list'),
url(r'^register/$', views.package_register, name='package_register'),
url(r'^register/{name}$'.format(name=_PACKAGE_NAME_REGEX_WITH_EMPTY), views.package_register_detail,
name='package_register_detail'),
url(r'^{name}/$'.format(**_REGEX), views.package_detail, name='package_detail'),
url(r'^{name}/{number}/$'.format(**_REGEX), views.build_detail, name='build_detail'),
url(r'^{name}/{number}/download/$'.format(**_REGEX), views.build_download, name='build_download'),
url(r'^{name}/{number}/log/$'.format(**_REGEX), views.build_log, name='build_log'),
url(r'^repository/(?P<file_name>.*)$', views.repository, name='repository'),
]
|
88b81ee89800592bccf0a714ae79be74507c8f29 | test_engine.py | test_engine.py | import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
print coord
assert engine._coord_to_a1.get(coord, False) is False
| import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
| Add test_a1_to_coord() to assert that only valid board coordinates are in teh _a1_to_coord dictionary | Add test_a1_to_coord() to assert that only valid board coordinates are in teh _a1_to_coord dictionary
| Python | mit | EyuelAbebe/gamer,EyuelAbebe/gamer | import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
print coord
assert engine._coord_to_a1.get(coord, False) is False
Add test_a1_to_coord() to assert that only valid board coordinates are in teh _a1_to_coord dictionary | import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
| <commit_before>import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
print coord
assert engine._coord_to_a1.get(coord, False) is False
<commit_msg>Add test_a1_to_coord() to assert that only valid board coordinates are in teh _a1_to_coord dictionary<commit_after> | import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
| import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
print coord
assert engine._coord_to_a1.get(coord, False) is False
Add test_a1_to_coord() to assert that only valid board coordinates are in teh _a1_to_coord dictionaryimport engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
| <commit_before>import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
print coord
assert engine._coord_to_a1.get(coord, False) is False
<commit_msg>Add test_a1_to_coord() to assert that only valid board coordinates are in teh _a1_to_coord dictionary<commit_after>import engine
VALID_COORDS = [(x, y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_COORDS = [
(0, 0), (-1, -1),
(96, 49), (96, 48),
(105, 49), (104, 48),
(96, 56), (97, 57),
(105, 56), (104, 57)
]
VALID_A1 = [chr(x) + chr(y) for x in xrange(97, 105) for y in xrange(49, 57)]
INVALID_A1 = ['a0', 'a9', 'h0', 'h9', 'z1', 'z8']
def test_coord_to_a1():
for coord in VALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is not False
for coord in INVALID_COORDS:
assert engine._coord_to_a1.get(coord, False) is False
def test_a1_to_coord():
for a1 in VALID_A1:
assert engine._a1_to_coord.get(a1, False) is not False
for a1 in INVALID_A1:
assert engine._a1_to_coord.get(a1, False) is False
|
4c3dd0c9d27af0f186f81c4fed0003a9190b4d9e | jal_stats/stats/serializers.py | jal_stats/stats/serializers.py | # from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Activity, Stat
class StatSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'activity', 'reps', 'date')
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
| # from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Activity, Stat
class StatAddSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'reps', 'date')
class StatSerializer(StatAddSerializer):
class Meta:
model = Stat
fields = tuple(list(StatAddSerializer.Meta.fields) + ['activity'])
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
| Add new serializer for StatAdd that doesn't have activity | Add new serializer for StatAdd that doesn't have activity
| Python | mit | jal-stats/django | # from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Activity, Stat
class StatSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'activity', 'reps', 'date')
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
Add new serializer for StatAdd that doesn't have activity | # from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Activity, Stat
class StatAddSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'reps', 'date')
class StatSerializer(StatAddSerializer):
class Meta:
model = Stat
fields = tuple(list(StatAddSerializer.Meta.fields) + ['activity'])
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
| <commit_before># from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Activity, Stat
class StatSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'activity', 'reps', 'date')
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
<commit_msg>Add new serializer for StatAdd that doesn't have activity<commit_after> | # from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Activity, Stat
class StatAddSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'reps', 'date')
class StatSerializer(StatAddSerializer):
class Meta:
model = Stat
fields = tuple(list(StatAddSerializer.Meta.fields) + ['activity'])
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
| # from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Activity, Stat
class StatSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'activity', 'reps', 'date')
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
Add new serializer for StatAdd that doesn't have activity# from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Activity, Stat
class StatAddSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'reps', 'date')
class StatSerializer(StatAddSerializer):
class Meta:
model = Stat
fields = tuple(list(StatAddSerializer.Meta.fields) + ['activity'])
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
| <commit_before># from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Activity, Stat
class StatSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'activity', 'reps', 'date')
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
<commit_msg>Add new serializer for StatAdd that doesn't have activity<commit_after># from django.contrib.auth.models import User
from rest_framework import serializers
from .models import Activity, Stat
class StatAddSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Stat
fields = ('id', 'reps', 'date')
class StatSerializer(StatAddSerializer):
class Meta:
model = Stat
fields = tuple(list(StatAddSerializer.Meta.fields) + ['activity'])
def create(self, validated_data):
validated_data['activity'] = self.context['activity']
stat = Stat.objects.create(**validated_data)
return stat
class ActivitySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = Activity
fields = ('id', 'full_description', 'units', 'url')
class ActivityListSerializer(ActivitySerializer):
stats = StatSerializer(many=True, read_only=True)
class Meta:
model = Activity
fields = tuple(list(ActivitySerializer.Meta.fields) + ['stats'])
# class UserSerializer(serializers.HyperlinkedModelSerializer):
#
# class Meta:
# model = User
# fields = ('id', 'username', 'email', 'activities')
|
fd6702fbb43eb4e6c5129ac6026908946f03c1a7 | paws/handler.py | paws/handler.py | from .request import Request
from .response import response
class Handler(object):
'''
Simple dispatcher class.
'''
def __call__(self, event, context):
self.request = request = Request(event, context)
func = getattr(self, self.event['httpMethod'], self.invalid)
return func(request, *self.event['pathParameters'])
def invalid(self, *args):
return response(status=405)
| from .request import Request
from .response import response
class Handler(object):
'''
Simple dispatcher class.
'''
def __init__(self, event, context):
self.request = Request(event, context)
def __call__(self, event, context):
func = getattr(self, self.event['httpMethod'], self.invalid)
return func(self.request, *self.event['pathParameters'])
def invalid(self, *args):
return response(status=405)
| Move request construction to init | Move request construction to init
| Python | bsd-3-clause | funkybob/paws | from .request import Request
from .response import response
class Handler(object):
'''
Simple dispatcher class.
'''
def __call__(self, event, context):
self.request = request = Request(event, context)
func = getattr(self, self.event['httpMethod'], self.invalid)
return func(request, *self.event['pathParameters'])
def invalid(self, *args):
return response(status=405)
Move request construction to init | from .request import Request
from .response import response
class Handler(object):
'''
Simple dispatcher class.
'''
def __init__(self, event, context):
self.request = Request(event, context)
def __call__(self, event, context):
func = getattr(self, self.event['httpMethod'], self.invalid)
return func(self.request, *self.event['pathParameters'])
def invalid(self, *args):
return response(status=405)
| <commit_before>from .request import Request
from .response import response
class Handler(object):
'''
Simple dispatcher class.
'''
def __call__(self, event, context):
self.request = request = Request(event, context)
func = getattr(self, self.event['httpMethod'], self.invalid)
return func(request, *self.event['pathParameters'])
def invalid(self, *args):
return response(status=405)
<commit_msg>Move request construction to init<commit_after> | from .request import Request
from .response import response
class Handler(object):
'''
Simple dispatcher class.
'''
def __init__(self, event, context):
self.request = Request(event, context)
def __call__(self, event, context):
func = getattr(self, self.event['httpMethod'], self.invalid)
return func(self.request, *self.event['pathParameters'])
def invalid(self, *args):
return response(status=405)
| from .request import Request
from .response import response
class Handler(object):
'''
Simple dispatcher class.
'''
def __call__(self, event, context):
self.request = request = Request(event, context)
func = getattr(self, self.event['httpMethod'], self.invalid)
return func(request, *self.event['pathParameters'])
def invalid(self, *args):
return response(status=405)
Move request construction to initfrom .request import Request
from .response import response
class Handler(object):
'''
Simple dispatcher class.
'''
def __init__(self, event, context):
self.request = Request(event, context)
def __call__(self, event, context):
func = getattr(self, self.event['httpMethod'], self.invalid)
return func(self.request, *self.event['pathParameters'])
def invalid(self, *args):
return response(status=405)
| <commit_before>from .request import Request
from .response import response
class Handler(object):
'''
Simple dispatcher class.
'''
def __call__(self, event, context):
self.request = request = Request(event, context)
func = getattr(self, self.event['httpMethod'], self.invalid)
return func(request, *self.event['pathParameters'])
def invalid(self, *args):
return response(status=405)
<commit_msg>Move request construction to init<commit_after>from .request import Request
from .response import response
class Handler(object):
'''
Simple dispatcher class.
'''
def __init__(self, event, context):
self.request = Request(event, context)
def __call__(self, event, context):
func = getattr(self, self.event['httpMethod'], self.invalid)
return func(self.request, *self.event['pathParameters'])
def invalid(self, *args):
return response(status=405)
|
513709e123c63e1eb30065b6f35c18faceac820b | user.py | user.py | import re
class User:
Groups = {' ':0,'+':1,'☆':1,'%':2,'@':3,'*':3.1,'&':4,'#':5,'~':6}
@staticmethod
def compareRanks(rank1, rank2):
try:
return User.Groups[rank1] >= User.Groups[rank2]
except:
if not rank1 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank1))
if not rank2 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank2))
return False
def __init__(self, name, rank, owner = False):
self.name = name
self.id = re.sub(r'[^a-zA-z0-9]', '', name).lower()
self.rank = rank
self.owner = owner
def hasRank(self, rank):
return self.owner or User.compareRanks(self.rank, rank)
def isOwner(self):
return self.owner | import re
class User:
Groups = {' ':0,'+':1,'☆':1,'%':2,'@':3,'*':3.1,'&':4,'#':5,'~':6}
@staticmethod
def compareRanks(rank1, rank2):
try:
return User.Groups[rank1] >= User.Groups[rank2]
except:
if not rank1 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank1))
if not rank2 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank2))
return False
def __init__(self, name, rank = ' ', owner = False):
self.name = name
self.id = re.sub(r'[^a-zA-z0-9]', '', name).lower()
self.rank = rank
self.owner = owner
def hasRank(self, rank):
return self.owner or User.compareRanks(self.rank, rank)
def isOwner(self):
return self.owner | Add default rank to User for simplified tests | Add default rank to User for simplified tests
| Python | mit | QuiteQuiet/PokemonShowdownBot | import re
class User:
Groups = {' ':0,'+':1,'☆':1,'%':2,'@':3,'*':3.1,'&':4,'#':5,'~':6}
@staticmethod
def compareRanks(rank1, rank2):
try:
return User.Groups[rank1] >= User.Groups[rank2]
except:
if not rank1 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank1))
if not rank2 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank2))
return False
def __init__(self, name, rank, owner = False):
self.name = name
self.id = re.sub(r'[^a-zA-z0-9]', '', name).lower()
self.rank = rank
self.owner = owner
def hasRank(self, rank):
return self.owner or User.compareRanks(self.rank, rank)
def isOwner(self):
return self.ownerAdd default rank to User for simplified tests | import re
class User:
Groups = {' ':0,'+':1,'☆':1,'%':2,'@':3,'*':3.1,'&':4,'#':5,'~':6}
@staticmethod
def compareRanks(rank1, rank2):
try:
return User.Groups[rank1] >= User.Groups[rank2]
except:
if not rank1 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank1))
if not rank2 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank2))
return False
def __init__(self, name, rank = ' ', owner = False):
self.name = name
self.id = re.sub(r'[^a-zA-z0-9]', '', name).lower()
self.rank = rank
self.owner = owner
def hasRank(self, rank):
return self.owner or User.compareRanks(self.rank, rank)
def isOwner(self):
return self.owner | <commit_before>import re
class User:
Groups = {' ':0,'+':1,'☆':1,'%':2,'@':3,'*':3.1,'&':4,'#':5,'~':6}
@staticmethod
def compareRanks(rank1, rank2):
try:
return User.Groups[rank1] >= User.Groups[rank2]
except:
if not rank1 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank1))
if not rank2 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank2))
return False
def __init__(self, name, rank, owner = False):
self.name = name
self.id = re.sub(r'[^a-zA-z0-9]', '', name).lower()
self.rank = rank
self.owner = owner
def hasRank(self, rank):
return self.owner or User.compareRanks(self.rank, rank)
def isOwner(self):
return self.owner<commit_msg>Add default rank to User for simplified tests<commit_after> | import re
class User:
Groups = {' ':0,'+':1,'☆':1,'%':2,'@':3,'*':3.1,'&':4,'#':5,'~':6}
@staticmethod
def compareRanks(rank1, rank2):
try:
return User.Groups[rank1] >= User.Groups[rank2]
except:
if not rank1 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank1))
if not rank2 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank2))
return False
def __init__(self, name, rank = ' ', owner = False):
self.name = name
self.id = re.sub(r'[^a-zA-z0-9]', '', name).lower()
self.rank = rank
self.owner = owner
def hasRank(self, rank):
return self.owner or User.compareRanks(self.rank, rank)
def isOwner(self):
return self.owner | import re
class User:
Groups = {' ':0,'+':1,'☆':1,'%':2,'@':3,'*':3.1,'&':4,'#':5,'~':6}
@staticmethod
def compareRanks(rank1, rank2):
try:
return User.Groups[rank1] >= User.Groups[rank2]
except:
if not rank1 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank1))
if not rank2 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank2))
return False
def __init__(self, name, rank, owner = False):
self.name = name
self.id = re.sub(r'[^a-zA-z0-9]', '', name).lower()
self.rank = rank
self.owner = owner
def hasRank(self, rank):
return self.owner or User.compareRanks(self.rank, rank)
def isOwner(self):
return self.ownerAdd default rank to User for simplified testsimport re
class User:
Groups = {' ':0,'+':1,'☆':1,'%':2,'@':3,'*':3.1,'&':4,'#':5,'~':6}
@staticmethod
def compareRanks(rank1, rank2):
try:
return User.Groups[rank1] >= User.Groups[rank2]
except:
if not rank1 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank1))
if not rank2 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank2))
return False
def __init__(self, name, rank = ' ', owner = False):
self.name = name
self.id = re.sub(r'[^a-zA-z0-9]', '', name).lower()
self.rank = rank
self.owner = owner
def hasRank(self, rank):
return self.owner or User.compareRanks(self.rank, rank)
def isOwner(self):
return self.owner | <commit_before>import re
class User:
Groups = {' ':0,'+':1,'☆':1,'%':2,'@':3,'*':3.1,'&':4,'#':5,'~':6}
@staticmethod
def compareRanks(rank1, rank2):
try:
return User.Groups[rank1] >= User.Groups[rank2]
except:
if not rank1 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank1))
if not rank2 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank2))
return False
def __init__(self, name, rank, owner = False):
self.name = name
self.id = re.sub(r'[^a-zA-z0-9]', '', name).lower()
self.rank = rank
self.owner = owner
def hasRank(self, rank):
return self.owner or User.compareRanks(self.rank, rank)
def isOwner(self):
return self.owner<commit_msg>Add default rank to User for simplified tests<commit_after>import re
class User:
Groups = {' ':0,'+':1,'☆':1,'%':2,'@':3,'*':3.1,'&':4,'#':5,'~':6}
@staticmethod
def compareRanks(rank1, rank2):
try:
return User.Groups[rank1] >= User.Groups[rank2]
except:
if not rank1 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank1))
if not rank2 in User.Groups:
print('{rank} is not a supported usergroup'.format(rank = rank2))
return False
def __init__(self, name, rank = ' ', owner = False):
self.name = name
self.id = re.sub(r'[^a-zA-z0-9]', '', name).lower()
self.rank = rank
self.owner = owner
def hasRank(self, rank):
return self.owner or User.compareRanks(self.rank, rank)
def isOwner(self):
return self.owner |
bdcf90a0fdf782b1c6cfd261e0dbb208e013eb1b | python/day12.py | python/day12.py | #!/usr/local/bin/python3
import json
import pathlib
input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt')
def sum_data(d):
total = 0
if isinstance(d, dict):
d = d.values()
for item in d:
if isinstance(item, int):
total += item
elif isinstance(item, (list, dict)):
total += sum_data(item)
else:
continue # Some other type we’re not interested in
return total
def sum_json(raw_json):
parsed = json.loads(raw_json)
return sum_data(parsed)
def test_simple():
assert sum_json('[1,2,3]') == 6
assert sum_json('{"a":2,"b":4}') == 6
def test_nested():
assert sum_json('[[[3]]]') == 3
assert sum_json('{"a":{"b":4},"c":-1}') == 3
def test_mixed():
assert sum_json('{"a":[-1,1]}') == 0
assert sum_json('[-1,{"a":1}]') == 0
def test_empty():
assert sum_json('[]') == 0
assert sum_json('{}') == 0
if __name__ == '__main__':
with open(input_file) as json_file:
json_data = json_file.read()
print(sum_json(json_data))
| #!/usr/local/bin/python3
import json
import pathlib
input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt')
def sum_data(d):
total = 0
if isinstance(d, dict):
d = d.values()
if 'red' in d: return 0
for item in d:
if isinstance(item, int):
total += item
elif isinstance(item, (list, dict)):
total += sum_data(item)
else:
continue # Some other type we’re not interested in
return total
def sum_json(raw_json):
parsed = json.loads(raw_json)
return sum_data(parsed)
def test_simple():
assert sum_json('[1,2,3]') == 6
assert sum_json('{"a":2,"b":4}') == 6
def test_nested():
assert sum_json('[[[3]]]') == 3
assert sum_json('{"a":{"b":4},"c":-1}') == 3
def test_mixed():
assert sum_json('{"a":[-1,1]}') == 0
assert sum_json('[-1,{"a":1}]') == 0
def test_empty():
assert sum_json('[]') == 0
assert sum_json('{}') == 0
if __name__ == '__main__':
with open(input_file) as json_file:
json_data = json_file.read()
print(sum_json(json_data))
| Add day 12 part two solution in python | Add day 12 part two solution in python
| Python | mit | robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions,robjwells/adventofcode-solutions | #!/usr/local/bin/python3
import json
import pathlib
input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt')
def sum_data(d):
total = 0
if isinstance(d, dict):
d = d.values()
for item in d:
if isinstance(item, int):
total += item
elif isinstance(item, (list, dict)):
total += sum_data(item)
else:
continue # Some other type we’re not interested in
return total
def sum_json(raw_json):
parsed = json.loads(raw_json)
return sum_data(parsed)
def test_simple():
assert sum_json('[1,2,3]') == 6
assert sum_json('{"a":2,"b":4}') == 6
def test_nested():
assert sum_json('[[[3]]]') == 3
assert sum_json('{"a":{"b":4},"c":-1}') == 3
def test_mixed():
assert sum_json('{"a":[-1,1]}') == 0
assert sum_json('[-1,{"a":1}]') == 0
def test_empty():
assert sum_json('[]') == 0
assert sum_json('{}') == 0
if __name__ == '__main__':
with open(input_file) as json_file:
json_data = json_file.read()
print(sum_json(json_data))
Add day 12 part two solution in python | #!/usr/local/bin/python3
import json
import pathlib
input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt')
def sum_data(d):
total = 0
if isinstance(d, dict):
d = d.values()
if 'red' in d: return 0
for item in d:
if isinstance(item, int):
total += item
elif isinstance(item, (list, dict)):
total += sum_data(item)
else:
continue # Some other type we’re not interested in
return total
def sum_json(raw_json):
parsed = json.loads(raw_json)
return sum_data(parsed)
def test_simple():
assert sum_json('[1,2,3]') == 6
assert sum_json('{"a":2,"b":4}') == 6
def test_nested():
assert sum_json('[[[3]]]') == 3
assert sum_json('{"a":{"b":4},"c":-1}') == 3
def test_mixed():
assert sum_json('{"a":[-1,1]}') == 0
assert sum_json('[-1,{"a":1}]') == 0
def test_empty():
assert sum_json('[]') == 0
assert sum_json('{}') == 0
if __name__ == '__main__':
with open(input_file) as json_file:
json_data = json_file.read()
print(sum_json(json_data))
| <commit_before>#!/usr/local/bin/python3
import json
import pathlib
input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt')
def sum_data(d):
total = 0
if isinstance(d, dict):
d = d.values()
for item in d:
if isinstance(item, int):
total += item
elif isinstance(item, (list, dict)):
total += sum_data(item)
else:
continue # Some other type we’re not interested in
return total
def sum_json(raw_json):
parsed = json.loads(raw_json)
return sum_data(parsed)
def test_simple():
assert sum_json('[1,2,3]') == 6
assert sum_json('{"a":2,"b":4}') == 6
def test_nested():
assert sum_json('[[[3]]]') == 3
assert sum_json('{"a":{"b":4},"c":-1}') == 3
def test_mixed():
assert sum_json('{"a":[-1,1]}') == 0
assert sum_json('[-1,{"a":1}]') == 0
def test_empty():
assert sum_json('[]') == 0
assert sum_json('{}') == 0
if __name__ == '__main__':
with open(input_file) as json_file:
json_data = json_file.read()
print(sum_json(json_data))
<commit_msg>Add day 12 part two solution in python<commit_after> | #!/usr/local/bin/python3
import json
import pathlib
input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt')
def sum_data(d):
total = 0
if isinstance(d, dict):
d = d.values()
if 'red' in d: return 0
for item in d:
if isinstance(item, int):
total += item
elif isinstance(item, (list, dict)):
total += sum_data(item)
else:
continue # Some other type we’re not interested in
return total
def sum_json(raw_json):
parsed = json.loads(raw_json)
return sum_data(parsed)
def test_simple():
assert sum_json('[1,2,3]') == 6
assert sum_json('{"a":2,"b":4}') == 6
def test_nested():
assert sum_json('[[[3]]]') == 3
assert sum_json('{"a":{"b":4},"c":-1}') == 3
def test_mixed():
assert sum_json('{"a":[-1,1]}') == 0
assert sum_json('[-1,{"a":1}]') == 0
def test_empty():
assert sum_json('[]') == 0
assert sum_json('{}') == 0
if __name__ == '__main__':
with open(input_file) as json_file:
json_data = json_file.read()
print(sum_json(json_data))
| #!/usr/local/bin/python3
import json
import pathlib
input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt')
def sum_data(d):
total = 0
if isinstance(d, dict):
d = d.values()
for item in d:
if isinstance(item, int):
total += item
elif isinstance(item, (list, dict)):
total += sum_data(item)
else:
continue # Some other type we’re not interested in
return total
def sum_json(raw_json):
parsed = json.loads(raw_json)
return sum_data(parsed)
def test_simple():
assert sum_json('[1,2,3]') == 6
assert sum_json('{"a":2,"b":4}') == 6
def test_nested():
assert sum_json('[[[3]]]') == 3
assert sum_json('{"a":{"b":4},"c":-1}') == 3
def test_mixed():
assert sum_json('{"a":[-1,1]}') == 0
assert sum_json('[-1,{"a":1}]') == 0
def test_empty():
assert sum_json('[]') == 0
assert sum_json('{}') == 0
if __name__ == '__main__':
with open(input_file) as json_file:
json_data = json_file.read()
print(sum_json(json_data))
Add day 12 part two solution in python#!/usr/local/bin/python3
import json
import pathlib
input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt')
def sum_data(d):
total = 0
if isinstance(d, dict):
d = d.values()
if 'red' in d: return 0
for item in d:
if isinstance(item, int):
total += item
elif isinstance(item, (list, dict)):
total += sum_data(item)
else:
continue # Some other type we’re not interested in
return total
def sum_json(raw_json):
parsed = json.loads(raw_json)
return sum_data(parsed)
def test_simple():
assert sum_json('[1,2,3]') == 6
assert sum_json('{"a":2,"b":4}') == 6
def test_nested():
assert sum_json('[[[3]]]') == 3
assert sum_json('{"a":{"b":4},"c":-1}') == 3
def test_mixed():
assert sum_json('{"a":[-1,1]}') == 0
assert sum_json('[-1,{"a":1}]') == 0
def test_empty():
assert sum_json('[]') == 0
assert sum_json('{}') == 0
if __name__ == '__main__':
with open(input_file) as json_file:
json_data = json_file.read()
print(sum_json(json_data))
| <commit_before>#!/usr/local/bin/python3
import json
import pathlib
input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt')
def sum_data(d):
total = 0
if isinstance(d, dict):
d = d.values()
for item in d:
if isinstance(item, int):
total += item
elif isinstance(item, (list, dict)):
total += sum_data(item)
else:
continue # Some other type we’re not interested in
return total
def sum_json(raw_json):
parsed = json.loads(raw_json)
return sum_data(parsed)
def test_simple():
assert sum_json('[1,2,3]') == 6
assert sum_json('{"a":2,"b":4}') == 6
def test_nested():
assert sum_json('[[[3]]]') == 3
assert sum_json('{"a":{"b":4},"c":-1}') == 3
def test_mixed():
assert sum_json('{"a":[-1,1]}') == 0
assert sum_json('[-1,{"a":1}]') == 0
def test_empty():
assert sum_json('[]') == 0
assert sum_json('{}') == 0
if __name__ == '__main__':
with open(input_file) as json_file:
json_data = json_file.read()
print(sum_json(json_data))
<commit_msg>Add day 12 part two solution in python<commit_after>#!/usr/local/bin/python3
import json
import pathlib
input_file = pathlib.Path(__file__).parent.parent.joinpath('day12_input.txt')
def sum_data(d):
total = 0
if isinstance(d, dict):
d = d.values()
if 'red' in d: return 0
for item in d:
if isinstance(item, int):
total += item
elif isinstance(item, (list, dict)):
total += sum_data(item)
else:
continue # Some other type we’re not interested in
return total
def sum_json(raw_json):
parsed = json.loads(raw_json)
return sum_data(parsed)
def test_simple():
assert sum_json('[1,2,3]') == 6
assert sum_json('{"a":2,"b":4}') == 6
def test_nested():
assert sum_json('[[[3]]]') == 3
assert sum_json('{"a":{"b":4},"c":-1}') == 3
def test_mixed():
assert sum_json('{"a":[-1,1]}') == 0
assert sum_json('[-1,{"a":1}]') == 0
def test_empty():
assert sum_json('[]') == 0
assert sum_json('{}') == 0
if __name__ == '__main__':
with open(input_file) as json_file:
json_data = json_file.read()
print(sum_json(json_data))
|
025b8915306670ac45f200ac4cbf32115eab53c7 | python/setup.py | python/setup.py | from setuptools import setup
# This is the cabb_scheduler Python library.
# Jamie Stevens 2017
# ATCA Senior Systems Scientist
# Jamie.Stevens@csiro.au
setup(name='cabb_scheduler',
version='1.1',
description='CABB Scheduling Python Library',
url='https://github.com/ste616/cabb-schedule-api',
author='Jamie Stevens',
author_email='Jamie.Stevens@csiro.au',
license='MIT',
packages=[ 'cabb_scheduler' ],
install_requires=[
'numpy',
'requests'
],
zip_safe=False)
# Changelog:
# 2017-03-14, v1.1: Added the parse routine to the schedule library, to read/write in
# strings instead of just files, and return number of scans read in.
| from setuptools import setup
# This is the cabb_scheduler Python library.
# Jamie Stevens 2017
# ATCA Senior Systems Scientist
# Jamie.Stevens@csiro.au
setup(name='cabb_scheduler',
version='1.2',
description='CABB Scheduling Python Library',
url='https://github.com/ste616/cabb-schedule-api',
author='Jamie Stevens',
author_email='Jamie.Stevens@csiro.au',
license='MIT',
packages=[ 'cabb_scheduler' ],
install_requires=[
'numpy',
'requests'
],
zip_safe=False)
# Changelog:
# 2017-03-14, v1.1: Added the parse routine to the schedule library, to read/write in
# strings instead of just files, and return number of scans read in.
# 2017-03-15, v1.2: Add the ability to query MoniCA for the array name and the
# frequencies currently in use.
| Change the library version number. | Change the library version number.
| Python | mit | ste616/cabb-schedule-api | from setuptools import setup
# This is the cabb_scheduler Python library.
# Jamie Stevens 2017
# ATCA Senior Systems Scientist
# Jamie.Stevens@csiro.au
setup(name='cabb_scheduler',
version='1.1',
description='CABB Scheduling Python Library',
url='https://github.com/ste616/cabb-schedule-api',
author='Jamie Stevens',
author_email='Jamie.Stevens@csiro.au',
license='MIT',
packages=[ 'cabb_scheduler' ],
install_requires=[
'numpy',
'requests'
],
zip_safe=False)
# Changelog:
# 2017-03-14, v1.1: Added the parse routine to the schedule library, to read/write in
# strings instead of just files, and return number of scans read in.
Change the library version number. | from setuptools import setup
# This is the cabb_scheduler Python library.
# Jamie Stevens 2017
# ATCA Senior Systems Scientist
# Jamie.Stevens@csiro.au
setup(name='cabb_scheduler',
version='1.2',
description='CABB Scheduling Python Library',
url='https://github.com/ste616/cabb-schedule-api',
author='Jamie Stevens',
author_email='Jamie.Stevens@csiro.au',
license='MIT',
packages=[ 'cabb_scheduler' ],
install_requires=[
'numpy',
'requests'
],
zip_safe=False)
# Changelog:
# 2017-03-14, v1.1: Added the parse routine to the schedule library, to read/write in
# strings instead of just files, and return number of scans read in.
# 2017-03-15, v1.2: Add the ability to query MoniCA for the array name and the
# frequencies currently in use.
| <commit_before>from setuptools import setup
# This is the cabb_scheduler Python library.
# Jamie Stevens 2017
# ATCA Senior Systems Scientist
# Jamie.Stevens@csiro.au
setup(name='cabb_scheduler',
version='1.1',
description='CABB Scheduling Python Library',
url='https://github.com/ste616/cabb-schedule-api',
author='Jamie Stevens',
author_email='Jamie.Stevens@csiro.au',
license='MIT',
packages=[ 'cabb_scheduler' ],
install_requires=[
'numpy',
'requests'
],
zip_safe=False)
# Changelog:
# 2017-03-14, v1.1: Added the parse routine to the schedule library, to read/write in
# strings instead of just files, and return number of scans read in.
<commit_msg>Change the library version number.<commit_after> | from setuptools import setup
# This is the cabb_scheduler Python library.
# Jamie Stevens 2017
# ATCA Senior Systems Scientist
# Jamie.Stevens@csiro.au
setup(name='cabb_scheduler',
version='1.2',
description='CABB Scheduling Python Library',
url='https://github.com/ste616/cabb-schedule-api',
author='Jamie Stevens',
author_email='Jamie.Stevens@csiro.au',
license='MIT',
packages=[ 'cabb_scheduler' ],
install_requires=[
'numpy',
'requests'
],
zip_safe=False)
# Changelog:
# 2017-03-14, v1.1: Added the parse routine to the schedule library, to read/write in
# strings instead of just files, and return number of scans read in.
# 2017-03-15, v1.2: Add the ability to query MoniCA for the array name and the
# frequencies currently in use.
| from setuptools import setup
# This is the cabb_scheduler Python library.
# Jamie Stevens 2017
# ATCA Senior Systems Scientist
# Jamie.Stevens@csiro.au
setup(name='cabb_scheduler',
version='1.1',
description='CABB Scheduling Python Library',
url='https://github.com/ste616/cabb-schedule-api',
author='Jamie Stevens',
author_email='Jamie.Stevens@csiro.au',
license='MIT',
packages=[ 'cabb_scheduler' ],
install_requires=[
'numpy',
'requests'
],
zip_safe=False)
# Changelog:
# 2017-03-14, v1.1: Added the parse routine to the schedule library, to read/write in
# strings instead of just files, and return number of scans read in.
Change the library version number.from setuptools import setup
# This is the cabb_scheduler Python library.
# Jamie Stevens 2017
# ATCA Senior Systems Scientist
# Jamie.Stevens@csiro.au
setup(name='cabb_scheduler',
version='1.2',
description='CABB Scheduling Python Library',
url='https://github.com/ste616/cabb-schedule-api',
author='Jamie Stevens',
author_email='Jamie.Stevens@csiro.au',
license='MIT',
packages=[ 'cabb_scheduler' ],
install_requires=[
'numpy',
'requests'
],
zip_safe=False)
# Changelog:
# 2017-03-14, v1.1: Added the parse routine to the schedule library, to read/write in
# strings instead of just files, and return number of scans read in.
# 2017-03-15, v1.2: Add the ability to query MoniCA for the array name and the
# frequencies currently in use.
| <commit_before>from setuptools import setup
# This is the cabb_scheduler Python library.
# Jamie Stevens 2017
# ATCA Senior Systems Scientist
# Jamie.Stevens@csiro.au
setup(name='cabb_scheduler',
version='1.1',
description='CABB Scheduling Python Library',
url='https://github.com/ste616/cabb-schedule-api',
author='Jamie Stevens',
author_email='Jamie.Stevens@csiro.au',
license='MIT',
packages=[ 'cabb_scheduler' ],
install_requires=[
'numpy',
'requests'
],
zip_safe=False)
# Changelog:
# 2017-03-14, v1.1: Added the parse routine to the schedule library, to read/write in
# strings instead of just files, and return number of scans read in.
<commit_msg>Change the library version number.<commit_after>from setuptools import setup
# This is the cabb_scheduler Python library.
# Jamie Stevens 2017
# ATCA Senior Systems Scientist
# Jamie.Stevens@csiro.au
setup(name='cabb_scheduler',
version='1.2',
description='CABB Scheduling Python Library',
url='https://github.com/ste616/cabb-schedule-api',
author='Jamie Stevens',
author_email='Jamie.Stevens@csiro.au',
license='MIT',
packages=[ 'cabb_scheduler' ],
install_requires=[
'numpy',
'requests'
],
zip_safe=False)
# Changelog:
# 2017-03-14, v1.1: Added the parse routine to the schedule library, to read/write in
# strings instead of just files, and return number of scans read in.
# 2017-03-15, v1.2: Add the ability to query MoniCA for the array name and the
# frequencies currently in use.
|
3436b94a4c69b843c65f6ddf6756c18ca540c090 | linked-list/is-list-palindrome.py | linked-list/is-list-palindrome.py | # Given a singly linked list of integers, determine whether or not it's a palindrome
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
def is_list_palindrome(l):
if not l.value or not l.next.value:
return True
def create_nodes(l):
root = Node(-1)
current_node = root
for value in l:
current_node.next = Node(value)
current_node = current_node.next
return root.next | # Given a singly linked list of integers, determine whether or not it's a palindrome
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
def is_list_palindrome(l):
if not l.value or not l.next.value:
return True
fake_head = Node(None)
fake_head.next = l
fast_node = fake_head
slow_node = fake_head
while fast_node.next and fast_node.next.next:
fast_node = fast_node.next.next
slow_node = slow_node.next
def create_nodes(l):
root = Node(-1)
current_node = root
for value in l:
current_node.next = Node(value)
current_node = current_node.next
return root.next
is_list_palindrome(create_nodes([1, 2, 3, 4]))
| Create fast node that is twice the speed of slow node to get to center of list | Create fast node that is twice the speed of slow node to get to center of list
| Python | mit | derekmpham/interview-prep,derekmpham/interview-prep | # Given a singly linked list of integers, determine whether or not it's a palindrome
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
def is_list_palindrome(l):
if not l.value or not l.next.value:
return True
def create_nodes(l):
root = Node(-1)
current_node = root
for value in l:
current_node.next = Node(value)
current_node = current_node.next
return root.nextCreate fast node that is twice the speed of slow node to get to center of list | # Given a singly linked list of integers, determine whether or not it's a palindrome
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
def is_list_palindrome(l):
if not l.value or not l.next.value:
return True
fake_head = Node(None)
fake_head.next = l
fast_node = fake_head
slow_node = fake_head
while fast_node.next and fast_node.next.next:
fast_node = fast_node.next.next
slow_node = slow_node.next
def create_nodes(l):
root = Node(-1)
current_node = root
for value in l:
current_node.next = Node(value)
current_node = current_node.next
return root.next
is_list_palindrome(create_nodes([1, 2, 3, 4]))
| <commit_before># Given a singly linked list of integers, determine whether or not it's a palindrome
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
def is_list_palindrome(l):
if not l.value or not l.next.value:
return True
def create_nodes(l):
root = Node(-1)
current_node = root
for value in l:
current_node.next = Node(value)
current_node = current_node.next
return root.next<commit_msg>Create fast node that is twice the speed of slow node to get to center of list<commit_after> | # Given a singly linked list of integers, determine whether or not it's a palindrome
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
def is_list_palindrome(l):
if not l.value or not l.next.value:
return True
fake_head = Node(None)
fake_head.next = l
fast_node = fake_head
slow_node = fake_head
while fast_node.next and fast_node.next.next:
fast_node = fast_node.next.next
slow_node = slow_node.next
def create_nodes(l):
root = Node(-1)
current_node = root
for value in l:
current_node.next = Node(value)
current_node = current_node.next
return root.next
is_list_palindrome(create_nodes([1, 2, 3, 4]))
| # Given a singly linked list of integers, determine whether or not it's a palindrome
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
def is_list_palindrome(l):
if not l.value or not l.next.value:
return True
def create_nodes(l):
root = Node(-1)
current_node = root
for value in l:
current_node.next = Node(value)
current_node = current_node.next
return root.nextCreate fast node that is twice the speed of slow node to get to center of list# Given a singly linked list of integers, determine whether or not it's a palindrome
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
def is_list_palindrome(l):
if not l.value or not l.next.value:
return True
fake_head = Node(None)
fake_head.next = l
fast_node = fake_head
slow_node = fake_head
while fast_node.next and fast_node.next.next:
fast_node = fast_node.next.next
slow_node = slow_node.next
def create_nodes(l):
root = Node(-1)
current_node = root
for value in l:
current_node.next = Node(value)
current_node = current_node.next
return root.next
is_list_palindrome(create_nodes([1, 2, 3, 4]))
| <commit_before># Given a singly linked list of integers, determine whether or not it's a palindrome
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
def is_list_palindrome(l):
if not l.value or not l.next.value:
return True
def create_nodes(l):
root = Node(-1)
current_node = root
for value in l:
current_node.next = Node(value)
current_node = current_node.next
return root.next<commit_msg>Create fast node that is twice the speed of slow node to get to center of list<commit_after># Given a singly linked list of integers, determine whether or not it's a palindrome
class Node(object): # define constructor
def __init__(self, value):
self.value = value
self.next = None
def is_list_palindrome(l):
if not l.value or not l.next.value:
return True
fake_head = Node(None)
fake_head.next = l
fast_node = fake_head
slow_node = fake_head
while fast_node.next and fast_node.next.next:
fast_node = fast_node.next.next
slow_node = slow_node.next
def create_nodes(l):
root = Node(-1)
current_node = root
for value in l:
current_node.next = Node(value)
current_node = current_node.next
return root.next
is_list_palindrome(create_nodes([1, 2, 3, 4]))
|
a1a9ab2b2c0ca7749984f5ad6e6430980e0d0ecc | tests/basics/list_sort.py | tests/basics/list_sort.py | l = [1, 3, 2, 5]
print(l)
l.sort()
print(l)
l.sort(key=lambda x: -x)
print(l)
l.sort(key=lambda x: -x, reverse=True)
print(l)
l.sort(reverse=True)
print(l)
l.sort(reverse=False)
print(l)
| l = [1, 3, 2, 5]
print(l)
print(sorted(l))
l.sort()
print(l)
print(l == sorted(l))
print(sorted(l, key=lambda x: -x))
l.sort(key=lambda x: -x)
print(l)
print(l == sorted(l, key=lambda x: -x))
print(sorted(l, key=lambda x: -x, reverse=True))
l.sort(key=lambda x: -x, reverse=True)
print(l)
print(l == sorted(l, key=lambda x: -x, reverse=True))
print(sorted(l, reverse=True))
l.sort(reverse=True)
print(l)
print(l == sorted(l, reverse=True))
print(sorted(l, reverse=False))
l.sort(reverse=False)
print(l)
print(l == sorted(l, reverse=False))
| Add tests for sorted() function and check that sorted(list) produces same output as list.sort() | Add tests for sorted() function
and check that sorted(list) produces same output as list.sort()
| Python | mit | MrSurly/micropython-esp32,KISSMonX/micropython,mpalomer/micropython,mianos/micropython,ernesto-g/micropython,dxxb/micropython,tuc-osg/micropython,lbattraw/micropython,turbinenreiter/micropython,swegener/micropython,swegener/micropython,praemdonck/micropython,SungEun-Steve-Kim/test-mp,pfalcon/micropython,pozetroninc/micropython,mgyenik/micropython,vriera/micropython,noahwilliamsson/micropython,aethaniel/micropython,orionrobots/micropython,rubencabrera/micropython,vitiral/micropython,torwag/micropython,omtinez/micropython,drrk/micropython,lbattraw/micropython,cnoviello/micropython,galenhz/micropython,TDAbboud/micropython,jimkmc/micropython,ganshun666/micropython,mhoffma/micropython,adafruit/circuitpython,drrk/micropython,oopy/micropython,rubencabrera/micropython,tralamazza/micropython,MrSurly/micropython-esp32,alex-robbins/micropython,cwyark/micropython,slzatz/micropython,turbinenreiter/micropython,pozetroninc/micropython,HenrikSolver/micropython,ganshun666/micropython,pramasoul/micropython,aitjcize/micropython,noahwilliamsson/micropython,matthewelse/micropython,hosaka/micropython,martinribelotta/micropython,PappaPeppar/micropython,redbear/micropython,slzatz/micropython,AriZuu/micropython,alex-robbins/micropython,MrSurly/micropython-esp32,xuxiaoxin/micropython,misterdanb/micropython,vitiral/micropython,lbattraw/micropython,emfcamp/micropython,mianos/micropython,skybird6672/micropython,turbinenreiter/micropython,hiway/micropython,ryannathans/micropython,trezor/micropython,heisewangluo/micropython,xhat/micropython,ChuckM/micropython,tuc-osg/micropython,danicampora/micropython,methoxid/micropystat,toolmacher/micropython,adafruit/circuitpython,pramasoul/micropython,tdautc19841202/micropython,martinribelotta/micropython,Peetz0r/micropython-esp32,warner83/micropython,adafruit/micropython,tobbad/micropython,TDAbboud/micropython,adamkh/micropython,Peetz0r/micropython-esp32,pfalcon/micropython,ruffy91/micropython,alex-march/micropython,adafruit/micropython,ori
onrobots/micropython,mgyenik/micropython,martinribelotta/micropython,tdautc19841202/micropython,mianos/micropython,henriknelson/micropython,orionrobots/micropython,tdautc19841202/micropython,ryannathans/micropython,neilh10/micropython,lowRISC/micropython,tobbad/micropython,lbattraw/micropython,ernesto-g/micropython,ChuckM/micropython,adamkh/micropython,dxxb/micropython,tdautc19841202/micropython,cloudformdesign/micropython,ernesto-g/micropython,alex-march/micropython,HenrikSolver/micropython,redbear/micropython,mpalomer/micropython,xuxiaoxin/micropython,SHA2017-badge/micropython-esp32,adamkh/micropython,jlillest/micropython,suda/micropython,trezor/micropython,TDAbboud/micropython,Vogtinator/micropython,jimkmc/micropython,chrisdearman/micropython,lbattraw/micropython,aethaniel/micropython,redbear/micropython,pozetroninc/micropython,jmarcelino/pycom-micropython,stonegithubs/micropython,xuxiaoxin/micropython,Vogtinator/micropython,utopiaprince/micropython,turbinenreiter/micropython,blazewicz/micropython,orionrobots/micropython,dxxb/micropython,skybird6672/micropython,skybird6672/micropython,ceramos/micropython,supergis/micropython,xyb/micropython,paul-xxx/micropython,tdautc19841202/micropython,adafruit/micropython,selste/micropython,neilh10/micropython,danicampora/micropython,AriZuu/micropython,utopiaprince/micropython,misterdanb/micropython,oopy/micropython,galenhz/micropython,supergis/micropython,aethaniel/micropython,noahwilliamsson/micropython,mpalomer/micropython,pozetroninc/micropython,dmazzella/micropython,neilh10/micropython,ruffy91/micropython,ernesto-g/micropython,cnoviello/micropython,vriera/micropython,PappaPeppar/micropython,matthewelse/micropython,emfcamp/micropython,neilh10/micropython,mgyenik/micropython,firstval/micropython,jimkmc/micropython,utopiaprince/micropython,hiway/micropython,pramasoul/micropython,skybird6672/micropython,stonegithubs/micropython,tralamazza/micropython,ryannathans/micropython,blmorris/micropython,adamkh/micropython,tralamazza/m
icropython,dinau/micropython,jmarcelino/pycom-micropython,ceramos/micropython,torwag/micropython,dmazzella/micropython,ericsnowcurrently/micropython,infinnovation/micropython,ahotam/micropython,adamkh/micropython,supergis/micropython,noahwilliamsson/micropython,tobbad/micropython,orionrobots/micropython,SungEun-Steve-Kim/test-mp,paul-xxx/micropython,paul-xxx/micropython,ruffy91/micropython,emfcamp/micropython,dmazzella/micropython,kostyll/micropython,mpalomer/micropython,oopy/micropython,selste/micropython,hiway/micropython,trezor/micropython,pfalcon/micropython,deshipu/micropython,SHA2017-badge/micropython-esp32,alex-robbins/micropython,dinau/micropython,omtinez/micropython,tuc-osg/micropython,warner83/micropython,swegener/micropython,chrisdearman/micropython,mgyenik/micropython,rubencabrera/micropython,martinribelotta/micropython,toolmacher/micropython,mpalomer/micropython,hosaka/micropython,Timmenem/micropython,oopy/micropython,trezor/micropython,pramasoul/micropython,matthewelse/micropython,tuc-osg/micropython,dhylands/micropython,henriknelson/micropython,tuc-osg/micropython,alex-robbins/micropython,omtinez/micropython,KISSMonX/micropython,cnoviello/micropython,paul-xxx/micropython,drrk/micropython,toolmacher/micropython,hosaka/micropython,vitiral/micropython,vriera/micropython,ruffy91/micropython,dxxb/micropython,tralamazza/micropython,micropython/micropython-esp32,MrSurly/micropython-esp32,hiway/micropython,Timmenem/micropython,ganshun666/micropython,feilongfl/micropython,stonegithubs/micropython,xyb/micropython,adafruit/circuitpython,dinau/micropython,turbinenreiter/micropython,noahchense/micropython,infinnovation/micropython,EcmaXp/micropython,supergis/micropython,kostyll/micropython,jimkmc/micropython,supergis/micropython,aitjcize/micropython,vitiral/micropython,ericsnowcurrently/micropython,galenhz/micropython,hosaka/micropython,jimkmc/micropython,cwyark/micropython,suda/micropython,praemdonck/micropython,lowRISC/micropython,xyb/micropython,micropython/mic
ropython-esp32,omtinez/micropython,heisewangluo/micropython,HenrikSolver/micropython,aethaniel/micropython,ahotam/micropython,puuu/micropython,suda/micropython,ernesto-g/micropython,misterdanb/micropython,cnoviello/micropython,cloudformdesign/micropython,noahchense/micropython,HenrikSolver/micropython,firstval/micropython,chrisdearman/micropython,blazewicz/micropython,cnoviello/micropython,suda/micropython,AriZuu/micropython,SungEun-Steve-Kim/test-mp,AriZuu/micropython,puuu/micropython,deshipu/micropython,xhat/micropython,HenrikSolver/micropython,kerneltask/micropython,pramasoul/micropython,galenhz/micropython,MrSurly/micropython,stonegithubs/micropython,jmarcelino/pycom-micropython,SHA2017-badge/micropython-esp32,ChuckM/micropython,Vogtinator/micropython,warner83/micropython,KISSMonX/micropython,mianos/micropython,xyb/micropython,firstval/micropython,martinribelotta/micropython,cwyark/micropython,mhoffma/micropython,MrSurly/micropython,jlillest/micropython,vitiral/micropython,deshipu/micropython,pozetroninc/micropython,heisewangluo/micropython,firstval/micropython,pfalcon/micropython,lowRISC/micropython,TDAbboud/micropython,KISSMonX/micropython,jlillest/micropython,EcmaXp/micropython,adafruit/circuitpython,kerneltask/micropython,puuu/micropython,omtinez/micropython,praemdonck/micropython,Vogtinator/micropython,puuu/micropython,warner83/micropython,aethaniel/micropython,oopy/micropython,mhoffma/micropython,methoxid/micropystat,puuu/micropython,redbear/micropython,rubencabrera/micropython,hiway/micropython,dhylands/micropython,bvernoux/micropython,bvernoux/micropython,emfcamp/micropython,feilongfl/micropython,EcmaXp/micropython,xhat/micropython,PappaPeppar/micropython,SHA2017-badge/micropython-esp32,noahchense/micropython,noahchense/micropython,swegener/micropython,mhoffma/micropython,cwyark/micropython,ryannathans/micropython,aitjcize/micropython,jmarcelino/pycom-micropython,methoxid/micropystat,xuxiaoxin/micropython,ceramos/micropython,kostyll/micropython,neilh10/m
icropython,ericsnowcurrently/micropython,infinnovation/micropython,adafruit/circuitpython,dhylands/micropython,mianos/micropython,EcmaXp/micropython,xyb/micropython,deshipu/micropython,trezor/micropython,TDAbboud/micropython,praemdonck/micropython,mhoffma/micropython,ChuckM/micropython,SHA2017-badge/micropython-esp32,aitjcize/micropython,bvernoux/micropython,kerneltask/micropython,KISSMonX/micropython,selste/micropython,utopiaprince/micropython,bvernoux/micropython,alex-march/micropython,alex-robbins/micropython,kostyll/micropython,blmorris/micropython,ganshun666/micropython,paul-xxx/micropython,kerneltask/micropython,hosaka/micropython,kerneltask/micropython,ryannathans/micropython,ceramos/micropython,vriera/micropython,adafruit/circuitpython,slzatz/micropython,xhat/micropython,swegener/micropython,toolmacher/micropython,torwag/micropython,xhat/micropython,danicampora/micropython,suda/micropython,lowRISC/micropython,heisewangluo/micropython,rubencabrera/micropython,torwag/micropython,SungEun-Steve-Kim/test-mp,cwyark/micropython,lowRISC/micropython,dinau/micropython,micropython/micropython-esp32,dhylands/micropython,PappaPeppar/micropython,danicampora/micropython,noahwilliamsson/micropython,Peetz0r/micropython-esp32,infinnovation/micropython,ganshun666/micropython,jlillest/micropython,ceramos/micropython,henriknelson/micropython,firstval/micropython,cloudformdesign/micropython,MrSurly/micropython-esp32,drrk/micropython,Timmenem/micropython,blazewicz/micropython,selste/micropython,matthewelse/micropython,Peetz0r/micropython-esp32,torwag/micropython,galenhz/micropython,alex-march/micropython,mgyenik/micropython,skybird6672/micropython,noahchense/micropython,misterdanb/micropython,feilongfl/micropython,heisewangluo/micropython,misterdanb/micropython,cloudformdesign/micropython,ahotam/micropython,ahotam/micropython,Timmenem/micropython,vriera/micropython,cloudformdesign/micropython,emfcamp/micropython,matthewelse/micropython,blmorris/micropython,slzatz/micropython,dxxb/
micropython,feilongfl/micropython,feilongfl/micropython,MrSurly/micropython,blazewicz/micropython,toolmacher/micropython,Timmenem/micropython,infinnovation/micropython,stonegithubs/micropython,dhylands/micropython,kostyll/micropython,Peetz0r/micropython-esp32,blmorris/micropython,ericsnowcurrently/micropython,praemdonck/micropython,blazewicz/micropython,jmarcelino/pycom-micropython,ahotam/micropython,ruffy91/micropython,drrk/micropython,chrisdearman/micropython,Vogtinator/micropython,alex-march/micropython,methoxid/micropystat,pfalcon/micropython,jlillest/micropython,micropython/micropython-esp32,MrSurly/micropython,adafruit/micropython,warner83/micropython,EcmaXp/micropython,MrSurly/micropython,slzatz/micropython,AriZuu/micropython,blmorris/micropython,bvernoux/micropython,danicampora/micropython,henriknelson/micropython,SungEun-Steve-Kim/test-mp,ericsnowcurrently/micropython,matthewelse/micropython,ChuckM/micropython,PappaPeppar/micropython,adafruit/micropython,dinau/micropython,selste/micropython,henriknelson/micropython,tobbad/micropython,xuxiaoxin/micropython,dmazzella/micropython,utopiaprince/micropython,chrisdearman/micropython,redbear/micropython,deshipu/micropython,methoxid/micropystat,micropython/micropython-esp32,tobbad/micropython | l = [1, 3, 2, 5]
print(l)
l.sort()
print(l)
l.sort(key=lambda x: -x)
print(l)
l.sort(key=lambda x: -x, reverse=True)
print(l)
l.sort(reverse=True)
print(l)
l.sort(reverse=False)
print(l)
Add tests for sorted() function
and check that sorted(list) produces same output as list.sort() | l = [1, 3, 2, 5]
print(l)
print(sorted(l))
l.sort()
print(l)
print(l == sorted(l))
print(sorted(l, key=lambda x: -x))
l.sort(key=lambda x: -x)
print(l)
print(l == sorted(l, key=lambda x: -x))
print(sorted(l, key=lambda x: -x, reverse=True))
l.sort(key=lambda x: -x, reverse=True)
print(l)
print(l == sorted(l, key=lambda x: -x, reverse=True))
print(sorted(l, reverse=True))
l.sort(reverse=True)
print(l)
print(l == sorted(l, reverse=True))
print(sorted(l, reverse=False))
l.sort(reverse=False)
print(l)
print(l == sorted(l, reverse=False))
| <commit_before>l = [1, 3, 2, 5]
print(l)
l.sort()
print(l)
l.sort(key=lambda x: -x)
print(l)
l.sort(key=lambda x: -x, reverse=True)
print(l)
l.sort(reverse=True)
print(l)
l.sort(reverse=False)
print(l)
<commit_msg>Add tests for sorted() function
and check that sorted(list) produces same output as list.sort()<commit_after> | l = [1, 3, 2, 5]
print(l)
print(sorted(l))
l.sort()
print(l)
print(l == sorted(l))
print(sorted(l, key=lambda x: -x))
l.sort(key=lambda x: -x)
print(l)
print(l == sorted(l, key=lambda x: -x))
print(sorted(l, key=lambda x: -x, reverse=True))
l.sort(key=lambda x: -x, reverse=True)
print(l)
print(l == sorted(l, key=lambda x: -x, reverse=True))
print(sorted(l, reverse=True))
l.sort(reverse=True)
print(l)
print(l == sorted(l, reverse=True))
print(sorted(l, reverse=False))
l.sort(reverse=False)
print(l)
print(l == sorted(l, reverse=False))
| l = [1, 3, 2, 5]
print(l)
l.sort()
print(l)
l.sort(key=lambda x: -x)
print(l)
l.sort(key=lambda x: -x, reverse=True)
print(l)
l.sort(reverse=True)
print(l)
l.sort(reverse=False)
print(l)
Add tests for sorted() function
and check that sorted(list) produces same output as list.sort()l = [1, 3, 2, 5]
print(l)
print(sorted(l))
l.sort()
print(l)
print(l == sorted(l))
print(sorted(l, key=lambda x: -x))
l.sort(key=lambda x: -x)
print(l)
print(l == sorted(l, key=lambda x: -x))
print(sorted(l, key=lambda x: -x, reverse=True))
l.sort(key=lambda x: -x, reverse=True)
print(l)
print(l == sorted(l, key=lambda x: -x, reverse=True))
print(sorted(l, reverse=True))
l.sort(reverse=True)
print(l)
print(l == sorted(l, reverse=True))
print(sorted(l, reverse=False))
l.sort(reverse=False)
print(l)
print(l == sorted(l, reverse=False))
| <commit_before>l = [1, 3, 2, 5]
print(l)
l.sort()
print(l)
l.sort(key=lambda x: -x)
print(l)
l.sort(key=lambda x: -x, reverse=True)
print(l)
l.sort(reverse=True)
print(l)
l.sort(reverse=False)
print(l)
<commit_msg>Add tests for sorted() function
and check that sorted(list) produces same output as list.sort()<commit_after>l = [1, 3, 2, 5]
print(l)
print(sorted(l))
l.sort()
print(l)
print(l == sorted(l))
print(sorted(l, key=lambda x: -x))
l.sort(key=lambda x: -x)
print(l)
print(l == sorted(l, key=lambda x: -x))
print(sorted(l, key=lambda x: -x, reverse=True))
l.sort(key=lambda x: -x, reverse=True)
print(l)
print(l == sorted(l, key=lambda x: -x, reverse=True))
print(sorted(l, reverse=True))
l.sort(reverse=True)
print(l)
print(l == sorted(l, reverse=True))
print(sorted(l, reverse=False))
l.sort(reverse=False)
print(l)
print(l == sorted(l, reverse=False))
|
2d688f97b9869fdfed9237b91fdce287278e3c6c | wsgi.py | wsgi.py | import os
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
def application(environ, start_response):
index = get_index(environ)
transport.send(environ['wsgi.input'].read(), index)
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
| import os
from queue import Queue
from threading import Thread
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
blocking_queue = Queue()
def send():
while True:
data, index = blocking_queue.get()
transport.send(data, index)
blocking_queue.task_done()
sender = Thread(target=send)
sender.daemon = True
sender.start()
def application(environ, start_response):
index = get_index(environ)
length = int(environ.get('CONTENT_LENGTH', '0'))
data = environ['wsgi.input'].read(length)
blocking_queue.put((data, index))
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
| Send data to elasticsearch asynchronously. | Send data to elasticsearch asynchronously.
| Python | mit | socialwifi/elasticsearch-raven,pozytywnie/elasticsearch-raven,serathius/elasticsearch-raven | import os
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
def application(environ, start_response):
index = get_index(environ)
transport.send(environ['wsgi.input'].read(), index)
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
Send data to elasticsearch asynchronously. | import os
from queue import Queue
from threading import Thread
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
blocking_queue = Queue()
def send():
while True:
data, index = blocking_queue.get()
transport.send(data, index)
blocking_queue.task_done()
sender = Thread(target=send)
sender.daemon = True
sender.start()
def application(environ, start_response):
index = get_index(environ)
length = int(environ.get('CONTENT_LENGTH', '0'))
data = environ['wsgi.input'].read(length)
blocking_queue.put((data, index))
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
| <commit_before>import os
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
def application(environ, start_response):
index = get_index(environ)
transport.send(environ['wsgi.input'].read(), index)
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
<commit_msg>Send data to elasticsearch asynchronously.<commit_after> | import os
from queue import Queue
from threading import Thread
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
blocking_queue = Queue()
def send():
while True:
data, index = blocking_queue.get()
transport.send(data, index)
blocking_queue.task_done()
sender = Thread(target=send)
sender.daemon = True
sender.start()
def application(environ, start_response):
index = get_index(environ)
length = int(environ.get('CONTENT_LENGTH', '0'))
data = environ['wsgi.input'].read(length)
blocking_queue.put((data, index))
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
| import os
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
def application(environ, start_response):
index = get_index(environ)
transport.send(environ['wsgi.input'].read(), index)
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
Send data to elasticsearch asynchronously.import os
from queue import Queue
from threading import Thread
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
blocking_queue = Queue()
def send():
while True:
data, index = blocking_queue.get()
transport.send(data, index)
blocking_queue.task_done()
sender = Thread(target=send)
sender.daemon = True
sender.start()
def application(environ, start_response):
index = get_index(environ)
length = int(environ.get('CONTENT_LENGTH', '0'))
data = environ['wsgi.input'].read(length)
blocking_queue.put((data, index))
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
| <commit_before>import os
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
def application(environ, start_response):
index = get_index(environ)
transport.send(environ['wsgi.input'].read(), index)
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
<commit_msg>Send data to elasticsearch asynchronously.<commit_after>import os
from queue import Queue
from threading import Thread
from elasticsearch_raven.transport import ElasticsearchTransport
from elasticsearch_raven.utils import get_index
host = os.environ.get('ELASTICSEARCH_HOST', 'localhost:9200')
transport = ElasticsearchTransport(host)
blocking_queue = Queue()
def send():
while True:
data, index = blocking_queue.get()
transport.send(data, index)
blocking_queue.task_done()
sender = Thread(target=send)
sender.daemon = True
sender.start()
def application(environ, start_response):
index = get_index(environ)
length = int(environ.get('CONTENT_LENGTH', '0'))
data = environ['wsgi.input'].read(length)
blocking_queue.put((data, index))
status = '200 OK'
response_headers = [('Content-Type', 'text/plain')]
start_response(status, response_headers)
return [''.encode('utf-8')]
|
d703d7cb8d75a5c660beabccdd0082794a8471d1 | edisgo/tools/networkx_helper.py | edisgo/tools/networkx_helper.py | from networkx import OrderedGraph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = OrderedGraph()
buses = buses_df.index
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
| from networkx import Graph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = Graph()
buses = []
for bus_name, bus in buses_df.iterrows():
pos = (bus.x, bus.y)
buses.append((bus_name, {'pos': pos}))
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
| Include the position into the graph | Include the position into the graph
| Python | agpl-3.0 | openego/eDisGo,openego/eDisGo | from networkx import OrderedGraph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = OrderedGraph()
buses = buses_df.index
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
Include the position into the graph | from networkx import Graph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = Graph()
buses = []
for bus_name, bus in buses_df.iterrows():
pos = (bus.x, bus.y)
buses.append((bus_name, {'pos': pos}))
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
| <commit_before>from networkx import OrderedGraph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = OrderedGraph()
buses = buses_df.index
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
<commit_msg>Include the position into the graph<commit_after> | from networkx import Graph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = Graph()
buses = []
for bus_name, bus in buses_df.iterrows():
pos = (bus.x, bus.y)
buses.append((bus_name, {'pos': pos}))
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
| from networkx import OrderedGraph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = OrderedGraph()
buses = buses_df.index
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
Include the position into the graphfrom networkx import Graph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = Graph()
buses = []
for bus_name, bus in buses_df.iterrows():
pos = (bus.x, bus.y)
buses.append((bus_name, {'pos': pos}))
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
| <commit_before>from networkx import OrderedGraph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = OrderedGraph()
buses = buses_df.index
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
<commit_msg>Include the position into the graph<commit_after>from networkx import Graph
def translate_df_to_graph(buses_df, lines_df, transformers_df=None):
graph = Graph()
buses = []
for bus_name, bus in buses_df.iterrows():
pos = (bus.x, bus.y)
buses.append((bus_name, {'pos': pos}))
# add nodes
graph.add_nodes_from(buses)
# add branches
branches = []
for line_name, line in lines_df.iterrows():
branches.append(
(
line.bus0,
line.bus1,
{"branch_name": line_name, "length": line.length},
)
)
if transformers_df is not None:
for trafo_name, trafo in transformers_df.iterrows():
branches.append(
(
trafo.bus0,
trafo.bus1,
{"branch_name": trafo_name, "length": 0},
)
)
graph.add_edges_from(branches)
return graph
|
78da548cbf7646ea1d67859cbd0946d6931b2b0d | ad-hoc-scripts/lift.py | ad-hoc-scripts/lift.py | #! /usr/bin/env python3
import sys
import json
for arg in sys.argv[1:]:
with open(arg) as f:
equajson = json.load(f)
field_list = [
"always-an-integer",
"always-positive",
"always-dimensionless",
"bound-variable",
"fixed-constant",
"special-function"
]
try:
for term in equajson["unicode-pretty-print"]["terms"]:
if any(x in field_list for x in term.keys()):
term["classification"] = {}
del_list = []
for field in term.keys():
if field in field_list:
term["classification"][field] = term[field]
del_list.append(field)
for field in del_list:
del term[field]
except KeyError:
pass
with open(sys.argv[1], 'w') as f:
json.dump(equajson, f, indent=4, separators=(',', ': '), ensure_ascii=False, sort_keys=True)
| #! /usr/bin/env python3
import sys
import json
for arg in sys.argv[1:]:
with open(arg) as f:
equajson = json.load(f)
field_list = [
"always-an-integer",
"always-positive",
"always-dimensionless",
"bound-variable",
"fixed-constant",
"special-function"
]
try:
for term in equajson["unicode-pretty-print"]["terms"]:
if any(x in field_list for x in term.keys()):
term["classification"] = {}
del_list = []
for field in term.keys():
if field in field_list:
term["classification"][field] = term[field]
del_list.append(field)
for field in del_list:
del term[field]
except KeyError:
pass
with open(arg, 'w') as f:
json.dump(equajson, f, indent=4, separators=(',', ': '), ensure_ascii=False, sort_keys=True)
| Fix serious file overwriting problem. | Fix serious file overwriting problem.
| Python | mit | nbeaver/equajson | #! /usr/bin/env python3
import sys
import json
for arg in sys.argv[1:]:
with open(arg) as f:
equajson = json.load(f)
field_list = [
"always-an-integer",
"always-positive",
"always-dimensionless",
"bound-variable",
"fixed-constant",
"special-function"
]
try:
for term in equajson["unicode-pretty-print"]["terms"]:
if any(x in field_list for x in term.keys()):
term["classification"] = {}
del_list = []
for field in term.keys():
if field in field_list:
term["classification"][field] = term[field]
del_list.append(field)
for field in del_list:
del term[field]
except KeyError:
pass
with open(sys.argv[1], 'w') as f:
json.dump(equajson, f, indent=4, separators=(',', ': '), ensure_ascii=False, sort_keys=True)
Fix serious file overwriting problem. | #! /usr/bin/env python3
import sys
import json
for arg in sys.argv[1:]:
with open(arg) as f:
equajson = json.load(f)
field_list = [
"always-an-integer",
"always-positive",
"always-dimensionless",
"bound-variable",
"fixed-constant",
"special-function"
]
try:
for term in equajson["unicode-pretty-print"]["terms"]:
if any(x in field_list for x in term.keys()):
term["classification"] = {}
del_list = []
for field in term.keys():
if field in field_list:
term["classification"][field] = term[field]
del_list.append(field)
for field in del_list:
del term[field]
except KeyError:
pass
with open(arg, 'w') as f:
json.dump(equajson, f, indent=4, separators=(',', ': '), ensure_ascii=False, sort_keys=True)
| <commit_before>#! /usr/bin/env python3
import sys
import json
for arg in sys.argv[1:]:
with open(arg) as f:
equajson = json.load(f)
field_list = [
"always-an-integer",
"always-positive",
"always-dimensionless",
"bound-variable",
"fixed-constant",
"special-function"
]
try:
for term in equajson["unicode-pretty-print"]["terms"]:
if any(x in field_list for x in term.keys()):
term["classification"] = {}
del_list = []
for field in term.keys():
if field in field_list:
term["classification"][field] = term[field]
del_list.append(field)
for field in del_list:
del term[field]
except KeyError:
pass
with open(sys.argv[1], 'w') as f:
json.dump(equajson, f, indent=4, separators=(',', ': '), ensure_ascii=False, sort_keys=True)
<commit_msg>Fix serious file overwriting problem.<commit_after> | #! /usr/bin/env python3
import sys
import json
for arg in sys.argv[1:]:
with open(arg) as f:
equajson = json.load(f)
field_list = [
"always-an-integer",
"always-positive",
"always-dimensionless",
"bound-variable",
"fixed-constant",
"special-function"
]
try:
for term in equajson["unicode-pretty-print"]["terms"]:
if any(x in field_list for x in term.keys()):
term["classification"] = {}
del_list = []
for field in term.keys():
if field in field_list:
term["classification"][field] = term[field]
del_list.append(field)
for field in del_list:
del term[field]
except KeyError:
pass
with open(arg, 'w') as f:
json.dump(equajson, f, indent=4, separators=(',', ': '), ensure_ascii=False, sort_keys=True)
| #! /usr/bin/env python3
import sys
import json
for arg in sys.argv[1:]:
with open(arg) as f:
equajson = json.load(f)
field_list = [
"always-an-integer",
"always-positive",
"always-dimensionless",
"bound-variable",
"fixed-constant",
"special-function"
]
try:
for term in equajson["unicode-pretty-print"]["terms"]:
if any(x in field_list for x in term.keys()):
term["classification"] = {}
del_list = []
for field in term.keys():
if field in field_list:
term["classification"][field] = term[field]
del_list.append(field)
for field in del_list:
del term[field]
except KeyError:
pass
with open(sys.argv[1], 'w') as f:
json.dump(equajson, f, indent=4, separators=(',', ': '), ensure_ascii=False, sort_keys=True)
Fix serious file overwriting problem.#! /usr/bin/env python3
import sys
import json
for arg in sys.argv[1:]:
with open(arg) as f:
equajson = json.load(f)
field_list = [
"always-an-integer",
"always-positive",
"always-dimensionless",
"bound-variable",
"fixed-constant",
"special-function"
]
try:
for term in equajson["unicode-pretty-print"]["terms"]:
if any(x in field_list for x in term.keys()):
term["classification"] = {}
del_list = []
for field in term.keys():
if field in field_list:
term["classification"][field] = term[field]
del_list.append(field)
for field in del_list:
del term[field]
except KeyError:
pass
with open(arg, 'w') as f:
json.dump(equajson, f, indent=4, separators=(',', ': '), ensure_ascii=False, sort_keys=True)
| <commit_before>#! /usr/bin/env python3
import sys
import json
for arg in sys.argv[1:]:
with open(arg) as f:
equajson = json.load(f)
field_list = [
"always-an-integer",
"always-positive",
"always-dimensionless",
"bound-variable",
"fixed-constant",
"special-function"
]
try:
for term in equajson["unicode-pretty-print"]["terms"]:
if any(x in field_list for x in term.keys()):
term["classification"] = {}
del_list = []
for field in term.keys():
if field in field_list:
term["classification"][field] = term[field]
del_list.append(field)
for field in del_list:
del term[field]
except KeyError:
pass
with open(sys.argv[1], 'w') as f:
json.dump(equajson, f, indent=4, separators=(',', ': '), ensure_ascii=False, sort_keys=True)
<commit_msg>Fix serious file overwriting problem.<commit_after>#! /usr/bin/env python3
import sys
import json
for arg in sys.argv[1:]:
with open(arg) as f:
equajson = json.load(f)
field_list = [
"always-an-integer",
"always-positive",
"always-dimensionless",
"bound-variable",
"fixed-constant",
"special-function"
]
try:
for term in equajson["unicode-pretty-print"]["terms"]:
if any(x in field_list for x in term.keys()):
term["classification"] = {}
del_list = []
for field in term.keys():
if field in field_list:
term["classification"][field] = term[field]
del_list.append(field)
for field in del_list:
del term[field]
except KeyError:
pass
with open(arg, 'w') as f:
json.dump(equajson, f, indent=4, separators=(',', ': '), ensure_ascii=False, sort_keys=True)
|
80271752183c3f71fba9139bb77466427bd48a0a | alfred_listener/__main__.py | alfred_listener/__main__.py | #!/usr/bin/env python
import os
from argh import arg, ArghParser
from functools import wraps
CONFIG = os.environ.get('ALFRED_LISTENER_CONFIG')
def with_app(func, args):
if CONFIG is None:
raise RuntimeError('ALFRED_LISTENER_CONFIG env variable is not set.')
@wraps(func)
def wrapper(*args, **kwargs):
from alfred_listener import create_app
app = create_app(config)
return func(app, *args, **kwargs)
return wrapper
@arg('--host', default='127.0.0.1', help='the host')
@arg('--port', default=5000, help='the port')
@with_app
def runserver(app, args):
app.run(args.host, args.port)
@with_app
def shell(app, args):
from alfred_listener.helpers import get_shell
with app.test_request_context():
sh = get_shell()
sh(app=app)
def main():
parser = ArghParser()
parser.add_commands([runserver, shell])
parser.dispatch()
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import os
from argh import arg, ArghParser
from functools import wraps
def with_app(func):
@arg('--config', help='Path to config file')
@wraps(func)
def wrapper(*args, **kwargs):
config = args[0].config
from alfred_listener import create_app
app = create_app(config)
return func(app, *args, **kwargs)
return wrapper
@arg('--host', default='127.0.0.1', help='the host')
@arg('--port', default=5000, help='the port')
@with_app
def runserver(app, args):
app.run(args.host, args.port)
@with_app
def shell(app, args):
from alfred_listener.helpers import get_shell
with app.test_request_context():
sh = get_shell()
sh(app=app)
def main():
parser = ArghParser()
parser.add_commands([runserver, shell])
parser.dispatch()
if __name__ == '__main__':
main()
| Change the way to determine path to config file | Change the way to determine path to config file
| Python | isc | alfredhq/alfred-listener | #!/usr/bin/env python
import os
from argh import arg, ArghParser
from functools import wraps
CONFIG = os.environ.get('ALFRED_LISTENER_CONFIG')
def with_app(func, args):
if CONFIG is None:
raise RuntimeError('ALFRED_LISTENER_CONFIG env variable is not set.')
@wraps(func)
def wrapper(*args, **kwargs):
from alfred_listener import create_app
app = create_app(config)
return func(app, *args, **kwargs)
return wrapper
@arg('--host', default='127.0.0.1', help='the host')
@arg('--port', default=5000, help='the port')
@with_app
def runserver(app, args):
app.run(args.host, args.port)
@with_app
def shell(app, args):
from alfred_listener.helpers import get_shell
with app.test_request_context():
sh = get_shell()
sh(app=app)
def main():
parser = ArghParser()
parser.add_commands([runserver, shell])
parser.dispatch()
if __name__ == '__main__':
main()
Change the way to determine path to config file | #!/usr/bin/env python
import os
from argh import arg, ArghParser
from functools import wraps
def with_app(func):
@arg('--config', help='Path to config file')
@wraps(func)
def wrapper(*args, **kwargs):
config = args[0].config
from alfred_listener import create_app
app = create_app(config)
return func(app, *args, **kwargs)
return wrapper
@arg('--host', default='127.0.0.1', help='the host')
@arg('--port', default=5000, help='the port')
@with_app
def runserver(app, args):
app.run(args.host, args.port)
@with_app
def shell(app, args):
from alfred_listener.helpers import get_shell
with app.test_request_context():
sh = get_shell()
sh(app=app)
def main():
parser = ArghParser()
parser.add_commands([runserver, shell])
parser.dispatch()
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
import os
from argh import arg, ArghParser
from functools import wraps
CONFIG = os.environ.get('ALFRED_LISTENER_CONFIG')
def with_app(func, args):
if CONFIG is None:
raise RuntimeError('ALFRED_LISTENER_CONFIG env variable is not set.')
@wraps(func)
def wrapper(*args, **kwargs):
from alfred_listener import create_app
app = create_app(config)
return func(app, *args, **kwargs)
return wrapper
@arg('--host', default='127.0.0.1', help='the host')
@arg('--port', default=5000, help='the port')
@with_app
def runserver(app, args):
app.run(args.host, args.port)
@with_app
def shell(app, args):
from alfred_listener.helpers import get_shell
with app.test_request_context():
sh = get_shell()
sh(app=app)
def main():
parser = ArghParser()
parser.add_commands([runserver, shell])
parser.dispatch()
if __name__ == '__main__':
main()
<commit_msg>Change the way to determine path to config file<commit_after> | #!/usr/bin/env python
import os
from argh import arg, ArghParser
from functools import wraps
def with_app(func):
@arg('--config', help='Path to config file')
@wraps(func)
def wrapper(*args, **kwargs):
config = args[0].config
from alfred_listener import create_app
app = create_app(config)
return func(app, *args, **kwargs)
return wrapper
@arg('--host', default='127.0.0.1', help='the host')
@arg('--port', default=5000, help='the port')
@with_app
def runserver(app, args):
app.run(args.host, args.port)
@with_app
def shell(app, args):
from alfred_listener.helpers import get_shell
with app.test_request_context():
sh = get_shell()
sh(app=app)
def main():
parser = ArghParser()
parser.add_commands([runserver, shell])
parser.dispatch()
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import os
from argh import arg, ArghParser
from functools import wraps
CONFIG = os.environ.get('ALFRED_LISTENER_CONFIG')
def with_app(func, args):
if CONFIG is None:
raise RuntimeError('ALFRED_LISTENER_CONFIG env variable is not set.')
@wraps(func)
def wrapper(*args, **kwargs):
from alfred_listener import create_app
app = create_app(config)
return func(app, *args, **kwargs)
return wrapper
@arg('--host', default='127.0.0.1', help='the host')
@arg('--port', default=5000, help='the port')
@with_app
def runserver(app, args):
app.run(args.host, args.port)
@with_app
def shell(app, args):
from alfred_listener.helpers import get_shell
with app.test_request_context():
sh = get_shell()
sh(app=app)
def main():
parser = ArghParser()
parser.add_commands([runserver, shell])
parser.dispatch()
if __name__ == '__main__':
main()
Change the way to determine path to config file#!/usr/bin/env python
import os
from argh import arg, ArghParser
from functools import wraps
def with_app(func):
@arg('--config', help='Path to config file')
@wraps(func)
def wrapper(*args, **kwargs):
config = args[0].config
from alfred_listener import create_app
app = create_app(config)
return func(app, *args, **kwargs)
return wrapper
@arg('--host', default='127.0.0.1', help='the host')
@arg('--port', default=5000, help='the port')
@with_app
def runserver(app, args):
app.run(args.host, args.port)
@with_app
def shell(app, args):
from alfred_listener.helpers import get_shell
with app.test_request_context():
sh = get_shell()
sh(app=app)
def main():
parser = ArghParser()
parser.add_commands([runserver, shell])
parser.dispatch()
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
import os
from argh import arg, ArghParser
from functools import wraps
CONFIG = os.environ.get('ALFRED_LISTENER_CONFIG')
def with_app(func, args):
if CONFIG is None:
raise RuntimeError('ALFRED_LISTENER_CONFIG env variable is not set.')
@wraps(func)
def wrapper(*args, **kwargs):
from alfred_listener import create_app
app = create_app(config)
return func(app, *args, **kwargs)
return wrapper
@arg('--host', default='127.0.0.1', help='the host')
@arg('--port', default=5000, help='the port')
@with_app
def runserver(app, args):
app.run(args.host, args.port)
@with_app
def shell(app, args):
from alfred_listener.helpers import get_shell
with app.test_request_context():
sh = get_shell()
sh(app=app)
def main():
parser = ArghParser()
parser.add_commands([runserver, shell])
parser.dispatch()
if __name__ == '__main__':
main()
<commit_msg>Change the way to determine path to config file<commit_after>#!/usr/bin/env python
import os
from argh import arg, ArghParser
from functools import wraps
def with_app(func):
@arg('--config', help='Path to config file')
@wraps(func)
def wrapper(*args, **kwargs):
config = args[0].config
from alfred_listener import create_app
app = create_app(config)
return func(app, *args, **kwargs)
return wrapper
@arg('--host', default='127.0.0.1', help='the host')
@arg('--port', default=5000, help='the port')
@with_app
def runserver(app, args):
app.run(args.host, args.port)
@with_app
def shell(app, args):
from alfred_listener.helpers import get_shell
with app.test_request_context():
sh = get_shell()
sh(app=app)
def main():
parser = ArghParser()
parser.add_commands([runserver, shell])
parser.dispatch()
if __name__ == '__main__':
main()
|
30b64d934d12510e2021aa3bf2e80f5837b88f35 | mnist_sequence_cli.py | mnist_sequence_cli.py | from __future__ import print_function
from mnist_sequence_api import MNIST_Sequence_API
import sys
def main():
arguments = list(sys.argv[1:])
if len(arguments) == 4:
sequence, min_spacing = list(map(int, list(arguments[0])), int(arguments[1]))
max_spacing, image_width = int(arguments[2]), int(arguments[3])
api_object = MNIST_Sequence_API()
img_data = api_object.generate_mnist_sequence(sequence, (min_spacing, max_spacing), image_width)
api_object.save_image(img_data, sequence)
else:
print("Incorrect number of arguments.")
print("Usage: python mnist_sequence_cli.py <sequence(no spaces) " +
"min_spacing max_spacing image_width>")
if __name__ == "__main__":
main()
| from __future__ import print_function
from mnist_sequence_api import MNIST_Sequence_API
import sys
def main():
arguments = list(sys.argv[1:])
if len(arguments) == 4:
sequence, min_spacing = list(map(int, list(arguments[0]))), int(arguments[1])
max_spacing, image_width = int(arguments[2]), int(arguments[3])
api_object = MNIST_Sequence_API()
img_data = api_object.generate_mnist_sequence(sequence, (min_spacing, max_spacing), image_width)
api_object.save_image(img_data, sequence)
else:
print("Incorrect number of arguments.")
print("Usage: python mnist_sequence_cli.py <sequence(no spaces) " +
"min_spacing max_spacing image_width>")
if __name__ == "__main__":
main()
| Fix argument inputs to list() | Fix argument inputs to list()
| Python | mit | ankitaggarwal011/MNIST-Sequence | from __future__ import print_function
from mnist_sequence_api import MNIST_Sequence_API
import sys
def main():
arguments = list(sys.argv[1:])
if len(arguments) == 4:
sequence, min_spacing = list(map(int, list(arguments[0])), int(arguments[1]))
max_spacing, image_width = int(arguments[2]), int(arguments[3])
api_object = MNIST_Sequence_API()
img_data = api_object.generate_mnist_sequence(sequence, (min_spacing, max_spacing), image_width)
api_object.save_image(img_data, sequence)
else:
print("Incorrect number of arguments.")
print("Usage: python mnist_sequence_cli.py <sequence(no spaces) " +
"min_spacing max_spacing image_width>")
if __name__ == "__main__":
main()
Fix argument inputs to list() | from __future__ import print_function
from mnist_sequence_api import MNIST_Sequence_API
import sys
def main():
arguments = list(sys.argv[1:])
if len(arguments) == 4:
sequence, min_spacing = list(map(int, list(arguments[0]))), int(arguments[1])
max_spacing, image_width = int(arguments[2]), int(arguments[3])
api_object = MNIST_Sequence_API()
img_data = api_object.generate_mnist_sequence(sequence, (min_spacing, max_spacing), image_width)
api_object.save_image(img_data, sequence)
else:
print("Incorrect number of arguments.")
print("Usage: python mnist_sequence_cli.py <sequence(no spaces) " +
"min_spacing max_spacing image_width>")
if __name__ == "__main__":
main()
| <commit_before>from __future__ import print_function
from mnist_sequence_api import MNIST_Sequence_API
import sys
def main():
arguments = list(sys.argv[1:])
if len(arguments) == 4:
sequence, min_spacing = list(map(int, list(arguments[0])), int(arguments[1]))
max_spacing, image_width = int(arguments[2]), int(arguments[3])
api_object = MNIST_Sequence_API()
img_data = api_object.generate_mnist_sequence(sequence, (min_spacing, max_spacing), image_width)
api_object.save_image(img_data, sequence)
else:
print("Incorrect number of arguments.")
print("Usage: python mnist_sequence_cli.py <sequence(no spaces) " +
"min_spacing max_spacing image_width>")
if __name__ == "__main__":
main()
<commit_msg>Fix argument inputs to list()<commit_after> | from __future__ import print_function
from mnist_sequence_api import MNIST_Sequence_API
import sys
def main():
arguments = list(sys.argv[1:])
if len(arguments) == 4:
sequence, min_spacing = list(map(int, list(arguments[0]))), int(arguments[1])
max_spacing, image_width = int(arguments[2]), int(arguments[3])
api_object = MNIST_Sequence_API()
img_data = api_object.generate_mnist_sequence(sequence, (min_spacing, max_spacing), image_width)
api_object.save_image(img_data, sequence)
else:
print("Incorrect number of arguments.")
print("Usage: python mnist_sequence_cli.py <sequence(no spaces) " +
"min_spacing max_spacing image_width>")
if __name__ == "__main__":
main()
| from __future__ import print_function
from mnist_sequence_api import MNIST_Sequence_API
import sys
def main():
arguments = list(sys.argv[1:])
if len(arguments) == 4:
sequence, min_spacing = list(map(int, list(arguments[0])), int(arguments[1]))
max_spacing, image_width = int(arguments[2]), int(arguments[3])
api_object = MNIST_Sequence_API()
img_data = api_object.generate_mnist_sequence(sequence, (min_spacing, max_spacing), image_width)
api_object.save_image(img_data, sequence)
else:
print("Incorrect number of arguments.")
print("Usage: python mnist_sequence_cli.py <sequence(no spaces) " +
"min_spacing max_spacing image_width>")
if __name__ == "__main__":
main()
Fix argument inputs to list()from __future__ import print_function
from mnist_sequence_api import MNIST_Sequence_API
import sys
def main():
arguments = list(sys.argv[1:])
if len(arguments) == 4:
sequence, min_spacing = list(map(int, list(arguments[0]))), int(arguments[1])
max_spacing, image_width = int(arguments[2]), int(arguments[3])
api_object = MNIST_Sequence_API()
img_data = api_object.generate_mnist_sequence(sequence, (min_spacing, max_spacing), image_width)
api_object.save_image(img_data, sequence)
else:
print("Incorrect number of arguments.")
print("Usage: python mnist_sequence_cli.py <sequence(no spaces) " +
"min_spacing max_spacing image_width>")
if __name__ == "__main__":
main()
| <commit_before>from __future__ import print_function
from mnist_sequence_api import MNIST_Sequence_API
import sys
def main():
arguments = list(sys.argv[1:])
if len(arguments) == 4:
sequence, min_spacing = list(map(int, list(arguments[0])), int(arguments[1]))
max_spacing, image_width = int(arguments[2]), int(arguments[3])
api_object = MNIST_Sequence_API()
img_data = api_object.generate_mnist_sequence(sequence, (min_spacing, max_spacing), image_width)
api_object.save_image(img_data, sequence)
else:
print("Incorrect number of arguments.")
print("Usage: python mnist_sequence_cli.py <sequence(no spaces) " +
"min_spacing max_spacing image_width>")
if __name__ == "__main__":
main()
<commit_msg>Fix argument inputs to list()<commit_after>from __future__ import print_function
from mnist_sequence_api import MNIST_Sequence_API
import sys
def main():
arguments = list(sys.argv[1:])
if len(arguments) == 4:
sequence, min_spacing = list(map(int, list(arguments[0]))), int(arguments[1])
max_spacing, image_width = int(arguments[2]), int(arguments[3])
api_object = MNIST_Sequence_API()
img_data = api_object.generate_mnist_sequence(sequence, (min_spacing, max_spacing), image_width)
api_object.save_image(img_data, sequence)
else:
print("Incorrect number of arguments.")
print("Usage: python mnist_sequence_cli.py <sequence(no spaces) " +
"min_spacing max_spacing image_width>")
if __name__ == "__main__":
main()
|
8fdd73b7b8b907da264eac26ea52eef2bb8f6ce7 | ml/img/load_images.py | ml/img/load_images.py | # ==============================================================================
# LOAD_IMAGE_AS_ARRAY
# ==============================================================================
import scipy.misc
def load_image_as_array(f, rescale=None):
""" Given a filepath to an image file, it loads an image as a numpy array.
Optionally resize the images to [width, height]"""
img = scipy.misc.imread(f)
if rescale:
width, height = rescale
img = scipy.misc.imresize(img, (height,width))
return img
| # ==============================================================================
# LOAD_IMAGE_AS_ARRAY
# ==============================================================================
# USING SCIPY (with i think PIL on the backend)
import scipy.misc
def load_image_as_array(f, rescale=None):
""" Given a filepath to an image file, it loads an image as a numpy array.
Optionally resize the images to [width, height]"""
img = scipy.misc.imread(f)
if rescale:
width, height = rescale
img = scipy.misc.imresize(img, (height,width))
return img
# USING OPEN CV
def load_image_as_array(f, rescale=None):
# TODO: Check the order of the dimensions for resizing in open cv
img = cv2.imread(f, cv2.IMREAD_COLOR)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) # Convert to RGB
if rescale:
img = cv2.resize(img, rescale)
return img
| Add opencv version of load_image_as_array() | FEAT: Add opencv version of load_image_as_array()
| Python | apache-2.0 | ronrest/convenience_py,ronrest/convenience_py | # ==============================================================================
# LOAD_IMAGE_AS_ARRAY
# ==============================================================================
import scipy.misc
def load_image_as_array(f, rescale=None):
""" Given a filepath to an image file, it loads an image as a numpy array.
Optionally resize the images to [width, height]"""
img = scipy.misc.imread(f)
if rescale:
width, height = rescale
img = scipy.misc.imresize(img, (height,width))
return img
FEAT: Add opencv version of load_image_as_array() | # ==============================================================================
# LOAD_IMAGE_AS_ARRAY
# ==============================================================================
# USING SCIPY (with i think PIL on the backend)
import scipy.misc
def load_image_as_array(f, rescale=None):
""" Given a filepath to an image file, it loads an image as a numpy array.
Optionally resize the images to [width, height]"""
img = scipy.misc.imread(f)
if rescale:
width, height = rescale
img = scipy.misc.imresize(img, (height,width))
return img
# USING OPEN CV
def load_image_as_array(f, rescale=None):
# TODO: Check the order of the dimensions for resizing in open cv
img = cv2.imread(f, cv2.IMREAD_COLOR)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) # Convert to RGB
if rescale:
img = cv2.resize(img, rescale)
return img
| <commit_before># ==============================================================================
# LOAD_IMAGE_AS_ARRAY
# ==============================================================================
import scipy.misc
def load_image_as_array(f, rescale=None):
""" Given a filepath to an image file, it loads an image as a numpy array.
Optionally resize the images to [width, height]"""
img = scipy.misc.imread(f)
if rescale:
width, height = rescale
img = scipy.misc.imresize(img, (height,width))
return img
<commit_msg>FEAT: Add opencv version of load_image_as_array()<commit_after> | # ==============================================================================
# LOAD_IMAGE_AS_ARRAY
# ==============================================================================
# USING SCIPY (with i think PIL on the backend)
import scipy.misc
def load_image_as_array(f, rescale=None):
""" Given a filepath to an image file, it loads an image as a numpy array.
Optionally resize the images to [width, height]"""
img = scipy.misc.imread(f)
if rescale:
width, height = rescale
img = scipy.misc.imresize(img, (height,width))
return img
# USING OPEN CV
def load_image_as_array(f, rescale=None):
# TODO: Check the order of the dimensions for resizing in open cv
img = cv2.imread(f, cv2.IMREAD_COLOR)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) # Convert to RGB
if rescale:
img = cv2.resize(img, rescale)
return img
| # ==============================================================================
# LOAD_IMAGE_AS_ARRAY
# ==============================================================================
import scipy.misc
def load_image_as_array(f, rescale=None):
""" Given a filepath to an image file, it loads an image as a numpy array.
Optionally resize the images to [width, height]"""
img = scipy.misc.imread(f)
if rescale:
width, height = rescale
img = scipy.misc.imresize(img, (height,width))
return img
FEAT: Add opencv version of load_image_as_array()# ==============================================================================
# LOAD_IMAGE_AS_ARRAY
# ==============================================================================
# USING SCIPY (with i think PIL on the backend)
import scipy.misc
def load_image_as_array(f, rescale=None):
""" Given a filepath to an image file, it loads an image as a numpy array.
Optionally resize the images to [width, height]"""
img = scipy.misc.imread(f)
if rescale:
width, height = rescale
img = scipy.misc.imresize(img, (height,width))
return img
# USING OPEN CV
def load_image_as_array(f, rescale=None):
# TODO: Check the order of the dimensions for resizing in open cv
img = cv2.imread(f, cv2.IMREAD_COLOR)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) # Convert to RGB
if rescale:
img = cv2.resize(img, rescale)
return img
| <commit_before># ==============================================================================
# LOAD_IMAGE_AS_ARRAY
# ==============================================================================
import scipy.misc
def load_image_as_array(f, rescale=None):
""" Given a filepath to an image file, it loads an image as a numpy array.
Optionally resize the images to [width, height]"""
img = scipy.misc.imread(f)
if rescale:
width, height = rescale
img = scipy.misc.imresize(img, (height,width))
return img
<commit_msg>FEAT: Add opencv version of load_image_as_array()<commit_after># ==============================================================================
# LOAD_IMAGE_AS_ARRAY
# ==============================================================================
# USING SCIPY (with i think PIL on the backend)
import scipy.misc
def load_image_as_array(f, rescale=None):
""" Given a filepath to an image file, it loads an image as a numpy array.
Optionally resize the images to [width, height]"""
img = scipy.misc.imread(f)
if rescale:
width, height = rescale
img = scipy.misc.imresize(img, (height,width))
return img
# USING OPEN CV
def load_image_as_array(f, rescale=None):
# TODO: Check the order of the dimensions for resizing in open cv
img = cv2.imread(f, cv2.IMREAD_COLOR)
img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB) # Convert to RGB
if rescale:
img = cv2.resize(img, rescale)
return img
|
9516caa0c62289c89b605b9b8a34622a0bb54e2b | tests/70_program_libpfasst_swe_sphere_timestepper_convergence_l1/postprocessing_pickle.py | tests/70_program_libpfasst_swe_sphere_timestepper_convergence_l1/postprocessing_pickle.py | #! /usr/bin/env python3
import sys
import math
import glob
from mule_local.postprocessing.pickle_SphereDataPhysicalDiff import *
from mule.exec_program import *
pickle_SphereDataPhysicalDiff()
| #! /usr/bin/env python3
import sys
import math
import glob
from mule_local.postprocessing.pickle_SphereDataSpectralDiff import *
from mule.exec_program import *
pickle_SphereDataSpectralDiff()
| Use SphereDataSpectral instead of SphereDataPhysical | Use SphereDataSpectral instead of SphereDataPhysical
The script for physical data requires CSV files as default output files,
whereas the spectral script uses .sweet binary files as default.
Since libpfasst_swe_sphere's CSV output is not in the same format as
swe_sphere's CSV output, the CSV parsing does not work --> this is
the easiest way to use the (working) binary output files.
| Python | mit | schreiberx/sweet,schreiberx/sweet,schreiberx/sweet,schreiberx/sweet | #! /usr/bin/env python3
import sys
import math
import glob
from mule_local.postprocessing.pickle_SphereDataPhysicalDiff import *
from mule.exec_program import *
pickle_SphereDataPhysicalDiff()
Use SphereDataSpectral instead of SphereDataPhysical
The script for physical data requires CSV files as default output files,
whereas the spectral script uses .sweet binary files as default.
Since libpfasst_swe_sphere's CSV output is not in the same format as
swe_sphere's CSV output, the CSV parsing does not work --> this is
the easiest way to use the (working) binary output files. | #! /usr/bin/env python3
import sys
import math
import glob
from mule_local.postprocessing.pickle_SphereDataSpectralDiff import *
from mule.exec_program import *
pickle_SphereDataSpectralDiff()
| <commit_before>#! /usr/bin/env python3
import sys
import math
import glob
from mule_local.postprocessing.pickle_SphereDataPhysicalDiff import *
from mule.exec_program import *
pickle_SphereDataPhysicalDiff()
<commit_msg>Use SphereDataSpectral instead of SphereDataPhysical
The script for physical data requires CSV files as default output files,
whereas the spectral script uses .sweet binary files as default.
Since libpfasst_swe_sphere's CSV output is not in the same format as
swe_sphere's CSV output, the CSV parsing does not work --> this is
the easiest way to use the (working) binary output files.<commit_after> | #! /usr/bin/env python3
import sys
import math
import glob
from mule_local.postprocessing.pickle_SphereDataSpectralDiff import *
from mule.exec_program import *
pickle_SphereDataSpectralDiff()
| #! /usr/bin/env python3
import sys
import math
import glob
from mule_local.postprocessing.pickle_SphereDataPhysicalDiff import *
from mule.exec_program import *
pickle_SphereDataPhysicalDiff()
Use SphereDataSpectral instead of SphereDataPhysical
The script for physical data requires CSV files as default output files,
whereas the spectral script uses .sweet binary files as default.
Since libpfasst_swe_sphere's CSV output is not in the same format as
swe_sphere's CSV output, the CSV parsing does not work --> this is
the easiest way to use the (working) binary output files.#! /usr/bin/env python3
import sys
import math
import glob
from mule_local.postprocessing.pickle_SphereDataSpectralDiff import *
from mule.exec_program import *
pickle_SphereDataSpectralDiff()
| <commit_before>#! /usr/bin/env python3
import sys
import math
import glob
from mule_local.postprocessing.pickle_SphereDataPhysicalDiff import *
from mule.exec_program import *
pickle_SphereDataPhysicalDiff()
<commit_msg>Use SphereDataSpectral instead of SphereDataPhysical
The script for physical data requires CSV files as default output files,
whereas the spectral script uses .sweet binary files as default.
Since libpfasst_swe_sphere's CSV output is not in the same format as
swe_sphere's CSV output, the CSV parsing does not work --> this is
the easiest way to use the (working) binary output files.<commit_after>#! /usr/bin/env python3
import sys
import math
import glob
from mule_local.postprocessing.pickle_SphereDataSpectralDiff import *
from mule.exec_program import *
pickle_SphereDataSpectralDiff()
|
33f4036825c6ff4d9df0038471727648e0df100d | feder/virus_scan/engine/base.py | feder/virus_scan/engine/base.py | from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
| import urllib.parse
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
urllib.parse.quote(self.signer.sign(self.name)),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
| Fix urlencode in webhook url | Fix urlencode in webhook url | Python | mit | watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder,watchdogpolska/feder | from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
Fix urlencode in webhook url | import urllib.parse
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
urllib.parse.quote(self.signer.sign(self.name)),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
| <commit_before>from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
<commit_msg>Fix urlencode in webhook url<commit_after> | import urllib.parse
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
urllib.parse.quote(self.signer.sign(self.name)),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
| from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
Fix urlencode in webhook urlimport urllib.parse
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
urllib.parse.quote(self.signer.sign(self.name)),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
| <commit_before>from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
self.signer.sign(self.name),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
<commit_msg>Fix urlencode in webhook url<commit_after>import urllib.parse
from django.contrib.sites.shortcuts import get_current_site
from django.urls import reverse
from django.core.signing import TimestampSigner
class BaseEngine:
def __init__(self):
self.signer = TimestampSigner()
def get_webhook_url(self):
return "{}://{}{}?token={}".format(
"https",
get_current_site(None).domain,
reverse("virus_scan:webhook"),
urllib.parse.quote(self.signer.sign(self.name)),
)
def send_scan(self, this_file, filename):
raise NotImplementedError(
"Provide 'send' in {name}".format(name=self.__class__.__name__)
)
def receive_scan(self, engine_id):
raise NotImplementedError(
"Provide 'receive_scan' in {name}".format(name=self.__class__.__name__)
)
|
d618f430c143874011b70afe0a4fa62c06f5e28c | md5bot.py | md5bot.py | """
md5bot.py -- Twitter bot that tweets the current time as an md5 value
"""
import time
import hashlib
import tweepy
CONSUMER_KEY = 'xxxxxxxxxxxx'
CONSUMER_SECRET = 'xxxxxxxxxxxx'
ACCESS_KEY = 'xxxxxxxxxxxx'
ACCESS_SECRET = 'xxxxxxxxxxxx'
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)
def checkTime():
ts = time.time()
m = hashlib.md5()
m.update(str(ts))
return m.hexdigest()
def postStatus():
currentTime = checkTime()
api.update_status(currentTime)
int = 0
while int == 0:
postStatus()
time.sleep(3600) #Sleep for one hour
| #!/usr/bin/env python
__author__ = "Patrick Guelcher"
__copyright__ = "(C) 2016 Patrick Guelcher"
__license__ = "MIT"
__version__ = "1.0"
"""
A bot for Twitter that checks the time and then posts it as an md5 hash value.
"""
import time
import hashlib
import tweepy
# Configuration (Twitter API Settings)
CONSUMER_KEY = 'npfl47weJ6vSn3MRXUq342dMB'
CONSUMER_SECRET = 'dKv6zrr7ExIWAtVE3gWG4xZFs3LziZaeHvmycTHkttGC3etP4d'
ACCESS_TOKEN = '2489159084-t6A6zXVZSJFdZYP8jb78Mat8Lg3TfnIdffBgUTs'
ACCESS_SECRET = '0C83TOgZ4WE00zWuDxVT2TS6E5sVo0Bp0P1IpRn2ipNhD'
sleep_time = 3600 # Time is in seconds (Default 3600)
# Do not edit beyond this comment
def main():
index = 0
while index == 0:
post_status()
time.sleep(sleep_time) # Sleep for one hour
def check_time():
time_stamp = str(time.time())
encode_time = time_stamp.encode('utf-8')
md = hashlib.md5()
md.update(encode_time)
return md.hexdigest()
def post_status():
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET)
api = tweepy.API(auth)
current_time = check_time()
api.update_status(current_time)
print(current_time)
if __name__ == '__main__':
main()
| Clean code and conform to python conventions | Clean code and conform to python conventions
Some things are still a bit weird, mostly due to my limited knowledge of
Python.
Also fixed code to conform to Python naming conventions for
variables/functions.
| Python | mit | aerovolts/python-scripts | """
md5bot.py -- Twitter bot that tweets the current time as an md5 value
"""
import time
import hashlib
import tweepy
CONSUMER_KEY = 'xxxxxxxxxxxx'
CONSUMER_SECRET = 'xxxxxxxxxxxx'
ACCESS_KEY = 'xxxxxxxxxxxx'
ACCESS_SECRET = 'xxxxxxxxxxxx'
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)
def checkTime():
ts = time.time()
m = hashlib.md5()
m.update(str(ts))
return m.hexdigest()
def postStatus():
currentTime = checkTime()
api.update_status(currentTime)
int = 0
while int == 0:
postStatus()
time.sleep(3600) #Sleep for one hour
Clean code and conform to python conventions
Some things are still a bit weird, mostly due to my limited knowledge of
Python.
Also fixed code to conform to Python naming conventions for
variables/functions. | #!/usr/bin/env python
__author__ = "Patrick Guelcher"
__copyright__ = "(C) 2016 Patrick Guelcher"
__license__ = "MIT"
__version__ = "1.0"
"""
A bot for Twitter that checks the time and then posts it as an md5 hash value.
"""
import time
import hashlib
import tweepy
# Configuration (Twitter API Settings)
CONSUMER_KEY = 'npfl47weJ6vSn3MRXUq342dMB'
CONSUMER_SECRET = 'dKv6zrr7ExIWAtVE3gWG4xZFs3LziZaeHvmycTHkttGC3etP4d'
ACCESS_TOKEN = '2489159084-t6A6zXVZSJFdZYP8jb78Mat8Lg3TfnIdffBgUTs'
ACCESS_SECRET = '0C83TOgZ4WE00zWuDxVT2TS6E5sVo0Bp0P1IpRn2ipNhD'
sleep_time = 3600 # Time is in seconds (Default 3600)
# Do not edit beyond this comment
def main():
index = 0
while index == 0:
post_status()
time.sleep(sleep_time) # Sleep for one hour
def check_time():
time_stamp = str(time.time())
encode_time = time_stamp.encode('utf-8')
md = hashlib.md5()
md.update(encode_time)
return md.hexdigest()
def post_status():
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET)
api = tweepy.API(auth)
current_time = check_time()
api.update_status(current_time)
print(current_time)
if __name__ == '__main__':
main()
| <commit_before>"""
md5bot.py -- Twitter bot that tweets the current time as an md5 value
"""
import time
import hashlib
import tweepy
CONSUMER_KEY = 'xxxxxxxxxxxx'
CONSUMER_SECRET = 'xxxxxxxxxxxx'
ACCESS_KEY = 'xxxxxxxxxxxx'
ACCESS_SECRET = 'xxxxxxxxxxxx'
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)
def checkTime():
ts = time.time()
m = hashlib.md5()
m.update(str(ts))
return m.hexdigest()
def postStatus():
currentTime = checkTime()
api.update_status(currentTime)
int = 0
while int == 0:
postStatus()
time.sleep(3600) #Sleep for one hour
<commit_msg>Clean code and conform to python conventions
Some things are still a bit weird, mostly due to my limited knowledge of
Python.
Also fixed code to conform to Python naming conventions for
variables/functions.<commit_after> | #!/usr/bin/env python
__author__ = "Patrick Guelcher"
__copyright__ = "(C) 2016 Patrick Guelcher"
__license__ = "MIT"
__version__ = "1.0"
"""
A bot for Twitter that checks the time and then posts it as an md5 hash value.
"""
import time
import hashlib
import tweepy
# Configuration (Twitter API Settings)
CONSUMER_KEY = 'npfl47weJ6vSn3MRXUq342dMB'
CONSUMER_SECRET = 'dKv6zrr7ExIWAtVE3gWG4xZFs3LziZaeHvmycTHkttGC3etP4d'
ACCESS_TOKEN = '2489159084-t6A6zXVZSJFdZYP8jb78Mat8Lg3TfnIdffBgUTs'
ACCESS_SECRET = '0C83TOgZ4WE00zWuDxVT2TS6E5sVo0Bp0P1IpRn2ipNhD'
sleep_time = 3600 # Time is in seconds (Default 3600)
# Do not edit beyond this comment
def main():
index = 0
while index == 0:
post_status()
time.sleep(sleep_time) # Sleep for one hour
def check_time():
time_stamp = str(time.time())
encode_time = time_stamp.encode('utf-8')
md = hashlib.md5()
md.update(encode_time)
return md.hexdigest()
def post_status():
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET)
api = tweepy.API(auth)
current_time = check_time()
api.update_status(current_time)
print(current_time)
if __name__ == '__main__':
main()
| """
md5bot.py -- Twitter bot that tweets the current time as an md5 value
"""
import time
import hashlib
import tweepy
CONSUMER_KEY = 'xxxxxxxxxxxx'
CONSUMER_SECRET = 'xxxxxxxxxxxx'
ACCESS_KEY = 'xxxxxxxxxxxx'
ACCESS_SECRET = 'xxxxxxxxxxxx'
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)
def checkTime():
ts = time.time()
m = hashlib.md5()
m.update(str(ts))
return m.hexdigest()
def postStatus():
currentTime = checkTime()
api.update_status(currentTime)
int = 0
while int == 0:
postStatus()
time.sleep(3600) #Sleep for one hour
Clean code and conform to python conventions
Some things are still a bit weird, mostly due to my limited knowledge of
Python.
Also fixed code to conform to Python naming conventions for
variables/functions.#!/usr/bin/env python
__author__ = "Patrick Guelcher"
__copyright__ = "(C) 2016 Patrick Guelcher"
__license__ = "MIT"
__version__ = "1.0"
"""
A bot for Twitter that checks the time and then posts it as an md5 hash value.
"""
import time
import hashlib
import tweepy
# Configuration (Twitter API Settings)
CONSUMER_KEY = 'npfl47weJ6vSn3MRXUq342dMB'
CONSUMER_SECRET = 'dKv6zrr7ExIWAtVE3gWG4xZFs3LziZaeHvmycTHkttGC3etP4d'
ACCESS_TOKEN = '2489159084-t6A6zXVZSJFdZYP8jb78Mat8Lg3TfnIdffBgUTs'
ACCESS_SECRET = '0C83TOgZ4WE00zWuDxVT2TS6E5sVo0Bp0P1IpRn2ipNhD'
sleep_time = 3600 # Time is in seconds (Default 3600)
# Do not edit beyond this comment
def main():
index = 0
while index == 0:
post_status()
time.sleep(sleep_time) # Sleep for one hour
def check_time():
time_stamp = str(time.time())
encode_time = time_stamp.encode('utf-8')
md = hashlib.md5()
md.update(encode_time)
return md.hexdigest()
def post_status():
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET)
api = tweepy.API(auth)
current_time = check_time()
api.update_status(current_time)
print(current_time)
if __name__ == '__main__':
main()
| <commit_before>"""
md5bot.py -- Twitter bot that tweets the current time as an md5 value
"""
import time
import hashlib
import tweepy
CONSUMER_KEY = 'xxxxxxxxxxxx'
CONSUMER_SECRET = 'xxxxxxxxxxxx'
ACCESS_KEY = 'xxxxxxxxxxxx'
ACCESS_SECRET = 'xxxxxxxxxxxx'
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_KEY, ACCESS_SECRET)
api = tweepy.API(auth)
def checkTime():
ts = time.time()
m = hashlib.md5()
m.update(str(ts))
return m.hexdigest()
def postStatus():
currentTime = checkTime()
api.update_status(currentTime)
int = 0
while int == 0:
postStatus()
time.sleep(3600) #Sleep for one hour
<commit_msg>Clean code and conform to python conventions
Some things are still a bit weird, mostly due to my limited knowledge of
Python.
Also fixed code to conform to Python naming conventions for
variables/functions.<commit_after>#!/usr/bin/env python
__author__ = "Patrick Guelcher"
__copyright__ = "(C) 2016 Patrick Guelcher"
__license__ = "MIT"
__version__ = "1.0"
"""
A bot for Twitter that checks the time and then posts it as an md5 hash value.
"""
import time
import hashlib
import tweepy
# Configuration (Twitter API Settings)
CONSUMER_KEY = 'npfl47weJ6vSn3MRXUq342dMB'
CONSUMER_SECRET = 'dKv6zrr7ExIWAtVE3gWG4xZFs3LziZaeHvmycTHkttGC3etP4d'
ACCESS_TOKEN = '2489159084-t6A6zXVZSJFdZYP8jb78Mat8Lg3TfnIdffBgUTs'
ACCESS_SECRET = '0C83TOgZ4WE00zWuDxVT2TS6E5sVo0Bp0P1IpRn2ipNhD'
sleep_time = 3600 # Time is in seconds (Default 3600)
# Do not edit beyond this comment
def main():
index = 0
while index == 0:
post_status()
time.sleep(sleep_time) # Sleep for one hour
def check_time():
time_stamp = str(time.time())
encode_time = time_stamp.encode('utf-8')
md = hashlib.md5()
md.update(encode_time)
return md.hexdigest()
def post_status():
auth = tweepy.OAuthHandler(CONSUMER_KEY, CONSUMER_SECRET)
auth.set_access_token(ACCESS_TOKEN, ACCESS_SECRET)
api = tweepy.API(auth)
current_time = check_time()
api.update_status(current_time)
print(current_time)
if __name__ == '__main__':
main()
|
1a39eea8225ebdf7f654df9ba5b87479e9dbc867 | minify.py | minify.py | """
Copyright 2016 Dee Reddy
"""
import sys
import re
args = sys.argv[1:]
def minify(filepath, comments=False):
""" Minifies/uglifies file
:param
file_:
comments: Boolean. If False, deletes comments during output.
:return:
Minified string.
"""
pattern = re.compile(r"""
\s | # matches all whitespace characters OR
( #
/\* # /*
[
\w\s
(?=\*)
:@!"'~\.\^\$\+\?\{\}\[\]\\\|\(\)
]* # AND
\*/ # */
) #
| //.*\n # OR any character from // until end-line (inclusive)
""", re.VERBOSE)
with open(filepath, "r") as file_:
temp = []
for line in file_:
temp.append(line)
output = ''.join(temp)
return pattern.sub('', output)
if __name__ == "__main__":
print(minify('./test/stylesheet.css'))
| """
Copyright 2016 Dee Reddy
"""
import sys
import re
args = sys.argv[1:]
def minify(input_path, output_path, comments=False):
    """Minify/uglify a CSS/JS file and write the result to *output_path*.

    Strips all whitespace, ``/* ... */`` block comments and ``// ...`` line
    comments.

    Args:
        input_path: input file path
        output_path: write-out file path (created or overwritten)
        comments: Unused placeholder kept for interface compatibility;
            comments are currently always removed.

    Example:
        `$ python minify.py ./src/styles.css ./src/output.css`
    """
    # BUG FIX: the previous pattern kept its per-line annotations *inside* a
    # character class.  re.VERBOSE ignores whitespace and '#' only OUTSIDE
    # character classes, so the annotation text silently became literal class
    # members (and '(?=\*)' was literal characters, not a lookahead).  A
    # non-greedy match under re.DOTALL is both simpler and correct.
    pattern = re.compile(r"""
        \s                  # any whitespace character
        | /\*.*?\*/         # OR a (non-greedy) /* block comment */
        | //[^\n]*\n?       # OR a // line comment up to end of line
    """, re.VERBOSE | re.DOTALL)

    # read file and apply regex:
    with open(input_path, "r") as file_in:
        output = pattern.sub('', file_in.read())

    # write to file:
    # (`w+` mode: writing/reading; overwrites existing files; creates file if it doesn't exist)
    with open(output_path, "w+") as file_out:
        file_out.write(output)


#############################
#           Main            #
#############################
if __name__ == "__main__":
    # specify input and output paths in args:
    minify(args[0], args[1])
| Update regex. Add file write-out. | feat: Update regex. Add file write-out.
Regex is bugged, however. Need to fix regex pattern.
| Python | apache-2.0 | Deesus/Punt | """
Copyright 2016 Dee Reddy
"""
import sys
import re
args = sys.argv[1:]
def minify(filepath, comments=False):
""" Minifies/uglifies file
:param
file_:
comments: Boolean. If False, deletes comments during output.
:return:
Minified string.
"""
pattern = re.compile(r"""
\s | # matches all whitespace characters OR
( #
/\* # /*
[
\w\s
(?=\*)
:@!"'~\.\^\$\+\?\{\}\[\]\\\|\(\)
]* # AND
\*/ # */
) #
| //.*\n # OR any character from // until end-line (inclusive)
""", re.VERBOSE)
with open(filepath, "r") as file_:
temp = []
for line in file_:
temp.append(line)
output = ''.join(temp)
return pattern.sub('', output)
if __name__ == "__main__":
print(minify('./test/stylesheet.css'))
feat: Update regex. Add file write-out.
Regex is bugged, however. Need to fix regex pattern. | """
Copyright 2016 Dee Reddy
"""
import sys
import re
args = sys.argv[1:]
def minify(input_path, output_path, comments=False):
""" Minifies/uglifies file
args:
input_path: input file path
output_path: write-out file path
comments: Boolean. If False, deletes comments during output.
returns:
Minified string.
example:
`$ python minify.py ./src/styles.css ./src/output.css`
"""
pattern = re.compile(r"""
\s | # matches all whitespace characters OR
( #
/\* # /* AND
[ # 0 or more of any character
\w\s #
(?=\*) # (positive lookahead: doesn't make * part of the match)
:@!"'~,#%&-=;<>` #
\.\^\$\+\{\[\]\\\| #
]* #
\*/ # AND */
) #
| //.*\n # OR any character from // until end-line (inclusive)
""", re.VERBOSE)
# read file and apply regex:
with open(input_path, "r") as file_in:
temp = []
for line in file_in:
temp.append(line)
output = ''.join(temp)
output = pattern.sub('', output)
# write to file:
# (`w+` mode: writing/reading; overwrites existing files; creates file if doesn't exit)
with open(output_path, "w+") as file_out:
file_out.write(output)
#############################
# Main #
#############################
if __name__ == "__main__":
# specify input and output paths in args:
minify(args[0], args[1])
| <commit_before>"""
Copyright 2016 Dee Reddy
"""
import sys
import re
args = sys.argv[1:]
def minify(filepath, comments=False):
""" Minifies/uglifies file
:param
file_:
comments: Boolean. If False, deletes comments during output.
:return:
Minified string.
"""
pattern = re.compile(r"""
\s | # matches all whitespace characters OR
( #
/\* # /*
[
\w\s
(?=\*)
:@!"'~\.\^\$\+\?\{\}\[\]\\\|\(\)
]* # AND
\*/ # */
) #
| //.*\n # OR any character from // until end-line (inclusive)
""", re.VERBOSE)
with open(filepath, "r") as file_:
temp = []
for line in file_:
temp.append(line)
output = ''.join(temp)
return pattern.sub('', output)
if __name__ == "__main__":
print(minify('./test/stylesheet.css'))
<commit_msg>feat: Update regex. Add file write-out.
Regex is bugged, however. Need to fix regex pattern.<commit_after> | """
Copyright 2016 Dee Reddy
"""
import sys
import re
args = sys.argv[1:]
def minify(input_path, output_path, comments=False):
""" Minifies/uglifies file
args:
input_path: input file path
output_path: write-out file path
comments: Boolean. If False, deletes comments during output.
returns:
Minified string.
example:
`$ python minify.py ./src/styles.css ./src/output.css`
"""
pattern = re.compile(r"""
\s | # matches all whitespace characters OR
( #
/\* # /* AND
[ # 0 or more of any character
\w\s #
(?=\*) # (positive lookahead: doesn't make * part of the match)
:@!"'~,#%&-=;<>` #
\.\^\$\+\{\[\]\\\| #
]* #
\*/ # AND */
) #
| //.*\n # OR any character from // until end-line (inclusive)
""", re.VERBOSE)
# read file and apply regex:
with open(input_path, "r") as file_in:
temp = []
for line in file_in:
temp.append(line)
output = ''.join(temp)
output = pattern.sub('', output)
# write to file:
# (`w+` mode: writing/reading; overwrites existing files; creates file if doesn't exit)
with open(output_path, "w+") as file_out:
file_out.write(output)
#############################
# Main #
#############################
if __name__ == "__main__":
# specify input and output paths in args:
minify(args[0], args[1])
| """
Copyright 2016 Dee Reddy
"""
import sys
import re
args = sys.argv[1:]
def minify(filepath, comments=False):
""" Minifies/uglifies file
:param
file_:
comments: Boolean. If False, deletes comments during output.
:return:
Minified string.
"""
pattern = re.compile(r"""
\s | # matches all whitespace characters OR
( #
/\* # /*
[
\w\s
(?=\*)
:@!"'~\.\^\$\+\?\{\}\[\]\\\|\(\)
]* # AND
\*/ # */
) #
| //.*\n # OR any character from // until end-line (inclusive)
""", re.VERBOSE)
with open(filepath, "r") as file_:
temp = []
for line in file_:
temp.append(line)
output = ''.join(temp)
return pattern.sub('', output)
if __name__ == "__main__":
print(minify('./test/stylesheet.css'))
feat: Update regex. Add file write-out.
Regex is bugged, however. Need to fix regex pattern."""
Copyright 2016 Dee Reddy
"""
import sys
import re
args = sys.argv[1:]
def minify(input_path, output_path, comments=False):
""" Minifies/uglifies file
args:
input_path: input file path
output_path: write-out file path
comments: Boolean. If False, deletes comments during output.
returns:
Minified string.
example:
`$ python minify.py ./src/styles.css ./src/output.css`
"""
pattern = re.compile(r"""
\s | # matches all whitespace characters OR
( #
/\* # /* AND
[ # 0 or more of any character
\w\s #
(?=\*) # (positive lookahead: doesn't make * part of the match)
:@!"'~,#%&-=;<>` #
\.\^\$\+\{\[\]\\\| #
]* #
\*/ # AND */
) #
| //.*\n # OR any character from // until end-line (inclusive)
""", re.VERBOSE)
# read file and apply regex:
with open(input_path, "r") as file_in:
temp = []
for line in file_in:
temp.append(line)
output = ''.join(temp)
output = pattern.sub('', output)
# write to file:
# (`w+` mode: writing/reading; overwrites existing files; creates file if doesn't exit)
with open(output_path, "w+") as file_out:
file_out.write(output)
#############################
# Main #
#############################
if __name__ == "__main__":
# specify input and output paths in args:
minify(args[0], args[1])
| <commit_before>"""
Copyright 2016 Dee Reddy
"""
import sys
import re
args = sys.argv[1:]
def minify(filepath, comments=False):
""" Minifies/uglifies file
:param
file_:
comments: Boolean. If False, deletes comments during output.
:return:
Minified string.
"""
pattern = re.compile(r"""
\s | # matches all whitespace characters OR
( #
/\* # /*
[
\w\s
(?=\*)
:@!"'~\.\^\$\+\?\{\}\[\]\\\|\(\)
]* # AND
\*/ # */
) #
| //.*\n # OR any character from // until end-line (inclusive)
""", re.VERBOSE)
with open(filepath, "r") as file_:
temp = []
for line in file_:
temp.append(line)
output = ''.join(temp)
return pattern.sub('', output)
if __name__ == "__main__":
print(minify('./test/stylesheet.css'))
<commit_msg>feat: Update regex. Add file write-out.
Regex is bugged, however. Need to fix regex pattern.<commit_after>"""
Copyright 2016 Dee Reddy
"""
import sys
import re
args = sys.argv[1:]
def minify(input_path, output_path, comments=False):
""" Minifies/uglifies file
args:
input_path: input file path
output_path: write-out file path
comments: Boolean. If False, deletes comments during output.
returns:
Minified string.
example:
`$ python minify.py ./src/styles.css ./src/output.css`
"""
pattern = re.compile(r"""
\s | # matches all whitespace characters OR
( #
/\* # /* AND
[ # 0 or more of any character
\w\s #
(?=\*) # (positive lookahead: doesn't make * part of the match)
:@!"'~,#%&-=;<>` #
\.\^\$\+\{\[\]\\\| #
]* #
\*/ # AND */
) #
| //.*\n # OR any character from // until end-line (inclusive)
""", re.VERBOSE)
# read file and apply regex:
with open(input_path, "r") as file_in:
temp = []
for line in file_in:
temp.append(line)
output = ''.join(temp)
output = pattern.sub('', output)
# write to file:
# (`w+` mode: writing/reading; overwrites existing files; creates file if doesn't exit)
with open(output_path, "w+") as file_out:
file_out.write(output)
#############################
# Main #
#############################
if __name__ == "__main__":
# specify input and output paths in args:
minify(args[0], args[1])
|
f7a57135ec328063eb4d2d5c4a035994a85ad290 | modules/piperename.py | modules/piperename.py | # piperename.py
#
from pipe2py import util
def pipe_rename(context, _INPUT, conf, **kwargs):
    """This operator renames or copies fields in the input source.

    Keyword arguments:
    context -- pipeline context
    _INPUT -- source generator
    kwargs -- other inputs, e.g. to feed terminals for rule values
    conf:
        RULE -- rules - each rule comprising (op, field, newval)

    Yields (_OUTPUT):
    source items after copying/renaming
    """
    # Resolve every configured rule once, up front, as an
    # (op, source_field, target_field) tuple.
    rules = [
        (rule['op']['value'],
         rule['field']['value'],
         util.get_value(rule['newval'], kwargs))  # todo use subkey?
        for rule in conf['RULE']
    ]
    for item in _INPUT:
        for op, src_field, dst_field in rules:
            item[dst_field] = item[src_field]
            if op == 'rename':
                # 'rename' moves the value instead of copying it.
                del item[src_field]
        yield item
| # piperename.py
#
from pipe2py import util
def pipe_rename(context, _INPUT, conf, **kwargs):
    """This operator renames or copies fields in the input source.

    Keyword arguments:
    context -- pipeline context
    _INPUT -- source generator
    kwargs -- other inputs, e.g. to feed terminals for rule values
    conf:
        RULE -- rules - each rule comprising (op, field, newval)

    Yields (_OUTPUT):
    source items after copying/renaming
    """
    # Resolve each configured rule once up front into an
    # (op, source_field, target_field) tuple.
    rules = []
    for rule in conf['RULE']:
        newval = util.get_value(rule['newval'], kwargs) #todo use subkey?
        rules.append((rule['op']['value'], rule['field']['value'], newval))
    for item in _INPUT:
        for rule in rules:
            #Map names with dot notation onto nested dictionaries, e.g. 'a.content' -> ['a']['content']
            #todo: optimise by pre-calculating splits
            # and if this logic is stable, wrap in util functions and use everywhere items are accessed
            # Each reduce() walks every path segment except the last to reach
            # the innermost dict; the final segment then indexes/assigns on it.
            # NOTE(review): `reduce` is a builtin on Python 2 only; Python 3
            # needs `from functools import reduce` -- confirm target runtime.
            # NOTE(review): a missing intermediate key makes .get() yield None
            # and the next step would fail -- presumably paths always exist.
            reduce(lambda i,k:i.get(k), [item] + rule[2].split('.')[:-1])[rule[2].split('.')[-1]] = reduce(lambda i,k:i.get(k), [item] + rule[1].split('.'))
            if rule[0] == 'rename':
                # 'rename' removes the source field after the copy above.
                del reduce(lambda i,k:i.get(k), [item] + rule[1].split('.')[:-1])[rule[1].split('.')[-1]]
        yield item
| Allow dot notation to map to nested dictionaries | Allow dot notation to map to nested dictionaries
| Python | mit | nerevu/riko,nerevu/riko | # piperename.py
#
from pipe2py import util
def pipe_rename(context, _INPUT, conf, **kwargs):
"""This operator renames or copies fields in the input source.
Keyword arguments:
context -- pipeline context
_INPUT -- source generator
kwargs -- other inputs, e.g. to feed terminals for rule values
conf:
RULE -- rules - each rule comprising (op, field, newval)
Yields (_OUTPUT):
source items after copying/renaming
"""
rules = []
for rule in conf['RULE']:
newval = util.get_value(rule['newval'], kwargs) #todo use subkey?
rules.append((rule['op']['value'], rule['field']['value'], newval))
for item in _INPUT:
for rule in rules:
item[rule[2]] = item[rule[1]]
if rule[0] == 'rename':
del item[rule[1]]
yield item
Allow dot notation to map to nested dictionaries | # piperename.py
#
from pipe2py import util
def pipe_rename(context, _INPUT, conf, **kwargs):
"""This operator renames or copies fields in the input source.
Keyword arguments:
context -- pipeline context
_INPUT -- source generator
kwargs -- other inputs, e.g. to feed terminals for rule values
conf:
RULE -- rules - each rule comprising (op, field, newval)
Yields (_OUTPUT):
source items after copying/renaming
"""
rules = []
for rule in conf['RULE']:
newval = util.get_value(rule['newval'], kwargs) #todo use subkey?
rules.append((rule['op']['value'], rule['field']['value'], newval))
for item in _INPUT:
for rule in rules:
#Map names with dot notation onto nested dictionaries, e.g. 'a.content' -> ['a']['content']
#todo: optimise by pre-calculating splits
# and if this logic is stable, wrap in util functions and use everywhere items are accessed
reduce(lambda i,k:i.get(k), [item] + rule[2].split('.')[:-1])[rule[2].split('.')[-1]] = reduce(lambda i,k:i.get(k), [item] + rule[1].split('.'))
if rule[0] == 'rename':
del reduce(lambda i,k:i.get(k), [item] + rule[1].split('.')[:-1])[rule[1].split('.')[-1]]
yield item
| <commit_before># piperename.py
#
from pipe2py import util
def pipe_rename(context, _INPUT, conf, **kwargs):
"""This operator renames or copies fields in the input source.
Keyword arguments:
context -- pipeline context
_INPUT -- source generator
kwargs -- other inputs, e.g. to feed terminals for rule values
conf:
RULE -- rules - each rule comprising (op, field, newval)
Yields (_OUTPUT):
source items after copying/renaming
"""
rules = []
for rule in conf['RULE']:
newval = util.get_value(rule['newval'], kwargs) #todo use subkey?
rules.append((rule['op']['value'], rule['field']['value'], newval))
for item in _INPUT:
for rule in rules:
item[rule[2]] = item[rule[1]]
if rule[0] == 'rename':
del item[rule[1]]
yield item
<commit_msg>Allow dot notation to map to nested dictionaries<commit_after> | # piperename.py
#
from pipe2py import util
def pipe_rename(context, _INPUT, conf, **kwargs):
"""This operator renames or copies fields in the input source.
Keyword arguments:
context -- pipeline context
_INPUT -- source generator
kwargs -- other inputs, e.g. to feed terminals for rule values
conf:
RULE -- rules - each rule comprising (op, field, newval)
Yields (_OUTPUT):
source items after copying/renaming
"""
rules = []
for rule in conf['RULE']:
newval = util.get_value(rule['newval'], kwargs) #todo use subkey?
rules.append((rule['op']['value'], rule['field']['value'], newval))
for item in _INPUT:
for rule in rules:
#Map names with dot notation onto nested dictionaries, e.g. 'a.content' -> ['a']['content']
#todo: optimise by pre-calculating splits
# and if this logic is stable, wrap in util functions and use everywhere items are accessed
reduce(lambda i,k:i.get(k), [item] + rule[2].split('.')[:-1])[rule[2].split('.')[-1]] = reduce(lambda i,k:i.get(k), [item] + rule[1].split('.'))
if rule[0] == 'rename':
del reduce(lambda i,k:i.get(k), [item] + rule[1].split('.')[:-1])[rule[1].split('.')[-1]]
yield item
| # piperename.py
#
from pipe2py import util
def pipe_rename(context, _INPUT, conf, **kwargs):
"""This operator renames or copies fields in the input source.
Keyword arguments:
context -- pipeline context
_INPUT -- source generator
kwargs -- other inputs, e.g. to feed terminals for rule values
conf:
RULE -- rules - each rule comprising (op, field, newval)
Yields (_OUTPUT):
source items after copying/renaming
"""
rules = []
for rule in conf['RULE']:
newval = util.get_value(rule['newval'], kwargs) #todo use subkey?
rules.append((rule['op']['value'], rule['field']['value'], newval))
for item in _INPUT:
for rule in rules:
item[rule[2]] = item[rule[1]]
if rule[0] == 'rename':
del item[rule[1]]
yield item
Allow dot notation to map to nested dictionaries# piperename.py
#
from pipe2py import util
def pipe_rename(context, _INPUT, conf, **kwargs):
"""This operator renames or copies fields in the input source.
Keyword arguments:
context -- pipeline context
_INPUT -- source generator
kwargs -- other inputs, e.g. to feed terminals for rule values
conf:
RULE -- rules - each rule comprising (op, field, newval)
Yields (_OUTPUT):
source items after copying/renaming
"""
rules = []
for rule in conf['RULE']:
newval = util.get_value(rule['newval'], kwargs) #todo use subkey?
rules.append((rule['op']['value'], rule['field']['value'], newval))
for item in _INPUT:
for rule in rules:
#Map names with dot notation onto nested dictionaries, e.g. 'a.content' -> ['a']['content']
#todo: optimise by pre-calculating splits
# and if this logic is stable, wrap in util functions and use everywhere items are accessed
reduce(lambda i,k:i.get(k), [item] + rule[2].split('.')[:-1])[rule[2].split('.')[-1]] = reduce(lambda i,k:i.get(k), [item] + rule[1].split('.'))
if rule[0] == 'rename':
del reduce(lambda i,k:i.get(k), [item] + rule[1].split('.')[:-1])[rule[1].split('.')[-1]]
yield item
| <commit_before># piperename.py
#
from pipe2py import util
def pipe_rename(context, _INPUT, conf, **kwargs):
"""This operator renames or copies fields in the input source.
Keyword arguments:
context -- pipeline context
_INPUT -- source generator
kwargs -- other inputs, e.g. to feed terminals for rule values
conf:
RULE -- rules - each rule comprising (op, field, newval)
Yields (_OUTPUT):
source items after copying/renaming
"""
rules = []
for rule in conf['RULE']:
newval = util.get_value(rule['newval'], kwargs) #todo use subkey?
rules.append((rule['op']['value'], rule['field']['value'], newval))
for item in _INPUT:
for rule in rules:
item[rule[2]] = item[rule[1]]
if rule[0] == 'rename':
del item[rule[1]]
yield item
<commit_msg>Allow dot notation to map to nested dictionaries<commit_after># piperename.py
#
from pipe2py import util
def pipe_rename(context, _INPUT, conf, **kwargs):
"""This operator renames or copies fields in the input source.
Keyword arguments:
context -- pipeline context
_INPUT -- source generator
kwargs -- other inputs, e.g. to feed terminals for rule values
conf:
RULE -- rules - each rule comprising (op, field, newval)
Yields (_OUTPUT):
source items after copying/renaming
"""
rules = []
for rule in conf['RULE']:
newval = util.get_value(rule['newval'], kwargs) #todo use subkey?
rules.append((rule['op']['value'], rule['field']['value'], newval))
for item in _INPUT:
for rule in rules:
#Map names with dot notation onto nested dictionaries, e.g. 'a.content' -> ['a']['content']
#todo: optimise by pre-calculating splits
# and if this logic is stable, wrap in util functions and use everywhere items are accessed
reduce(lambda i,k:i.get(k), [item] + rule[2].split('.')[:-1])[rule[2].split('.')[-1]] = reduce(lambda i,k:i.get(k), [item] + rule[1].split('.'))
if rule[0] == 'rename':
del reduce(lambda i,k:i.get(k), [item] + rule[1].split('.')[:-1])[rule[1].split('.')[-1]]
yield item
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.