commit
stringlengths 40
40
| old_file
stringlengths 4
118
| new_file
stringlengths 4
118
| old_contents
stringlengths 0
2.94k
| new_contents
stringlengths 1
4.43k
| subject
stringlengths 15
444
| message
stringlengths 16
3.45k
| lang
stringclasses 1
value | license
stringclasses 13
values | repos
stringlengths 5
43.2k
| prompt
stringlengths 17
4.58k
| response
stringlengths 1
4.43k
| prompt_tagged
stringlengths 58
4.62k
| response_tagged
stringlengths 1
4.43k
| text
stringlengths 132
7.29k
| text_tagged
stringlengths 173
7.33k
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
b9701cfb65c4c641231bef385dde74b8d940f901
|
gimlet/backends/sql.py
|
gimlet/backends/sql.py
|
from sqlalchemy import MetaData, Table, Column, types, create_engine, select
from .base import BaseBackend
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels', **engine_kwargs):
meta = MetaData(bind=create_engine(url, **engine_kwargs))
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False,
unique=True),
Column('data', types.LargeBinary, nullable=False))
self.table.create(checkfirst=True)
def __setitem__(self, key, value):
raw = self.serialize(value)
# Check if this key exists with a SELECT FOR UPDATE, to protect
# against a race with other concurrent writers of this key.
r = self.table.select('1', for_update=True).\
where(self.table.c.key == key).execute().fetchone()
if r:
# If it exists, use an UPDATE.
self.table.update().values(data=raw).\
where(self.table.c.key == key).execute()
else:
# Otherwise INSERT.
self.table.insert().values(key=key, data=raw).execute()
def __getitem__(self, key):
r = select([self.table.c.data], self.table.c.key == key).\
execute().fetchone()
if r:
raw = r[0]
return self.deserialize(raw)
else:
raise KeyError('key %r not found' % key)
|
from sqlalchemy import MetaData, Table, Column, types, create_engine, select
from .base import BaseBackend
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels', **engine_kwargs):
meta = MetaData(bind=create_engine(url, **engine_kwargs))
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False,
unique=True),
Column('data', types.LargeBinary, nullable=False))
self.table.create(checkfirst=True)
def __setitem__(self, key, value):
table = self.table
key_col = table.c.key
raw = self.serialize(value)
# Check if this key exists with a SELECT FOR UPDATE, to protect
# against a race with other concurrent writers of this key.
r = table.count(key_col == key, for_update=True).scalar()
if r:
# If it exists, use an UPDATE.
table.update().values(data=raw).where(key_col == key).execute()
else:
# Otherwise INSERT.
table.insert().values(key=key, data=raw).execute()
def __getitem__(self, key):
r = select([self.table.c.data], self.table.c.key == key).\
execute().fetchone()
if r:
raw = r[0]
return self.deserialize(raw)
else:
raise KeyError('key %r not found' % key)
|
Use count/scalar to test if key is present in SQL back end
|
Use count/scalar to test if key is present in SQL back end
This is simpler than using select/execute/fetchone. Also, scalar automatically
closes the result set whereas fetchone does not. This may fix some performance
issues.
|
Python
|
mit
|
storborg/gimlet
|
from sqlalchemy import MetaData, Table, Column, types, create_engine, select
from .base import BaseBackend
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels', **engine_kwargs):
meta = MetaData(bind=create_engine(url, **engine_kwargs))
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False,
unique=True),
Column('data', types.LargeBinary, nullable=False))
self.table.create(checkfirst=True)
def __setitem__(self, key, value):
raw = self.serialize(value)
# Check if this key exists with a SELECT FOR UPDATE, to protect
# against a race with other concurrent writers of this key.
r = self.table.select('1', for_update=True).\
where(self.table.c.key == key).execute().fetchone()
if r:
# If it exists, use an UPDATE.
self.table.update().values(data=raw).\
where(self.table.c.key == key).execute()
else:
# Otherwise INSERT.
self.table.insert().values(key=key, data=raw).execute()
def __getitem__(self, key):
r = select([self.table.c.data], self.table.c.key == key).\
execute().fetchone()
if r:
raw = r[0]
return self.deserialize(raw)
else:
raise KeyError('key %r not found' % key)
Use count/scalar to test if key is present in SQL back end
This is simpler than using select/execute/fetchone. Also, scalar automatically
closes the result set whereas fetchone does not. This may fix some performance
issues.
|
from sqlalchemy import MetaData, Table, Column, types, create_engine, select
from .base import BaseBackend
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels', **engine_kwargs):
meta = MetaData(bind=create_engine(url, **engine_kwargs))
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False,
unique=True),
Column('data', types.LargeBinary, nullable=False))
self.table.create(checkfirst=True)
def __setitem__(self, key, value):
table = self.table
key_col = table.c.key
raw = self.serialize(value)
# Check if this key exists with a SELECT FOR UPDATE, to protect
# against a race with other concurrent writers of this key.
r = table.count(key_col == key, for_update=True).scalar()
if r:
# If it exists, use an UPDATE.
table.update().values(data=raw).where(key_col == key).execute()
else:
# Otherwise INSERT.
table.insert().values(key=key, data=raw).execute()
def __getitem__(self, key):
r = select([self.table.c.data], self.table.c.key == key).\
execute().fetchone()
if r:
raw = r[0]
return self.deserialize(raw)
else:
raise KeyError('key %r not found' % key)
|
<commit_before>from sqlalchemy import MetaData, Table, Column, types, create_engine, select
from .base import BaseBackend
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels', **engine_kwargs):
meta = MetaData(bind=create_engine(url, **engine_kwargs))
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False,
unique=True),
Column('data', types.LargeBinary, nullable=False))
self.table.create(checkfirst=True)
def __setitem__(self, key, value):
raw = self.serialize(value)
# Check if this key exists with a SELECT FOR UPDATE, to protect
# against a race with other concurrent writers of this key.
r = self.table.select('1', for_update=True).\
where(self.table.c.key == key).execute().fetchone()
if r:
# If it exists, use an UPDATE.
self.table.update().values(data=raw).\
where(self.table.c.key == key).execute()
else:
# Otherwise INSERT.
self.table.insert().values(key=key, data=raw).execute()
def __getitem__(self, key):
r = select([self.table.c.data], self.table.c.key == key).\
execute().fetchone()
if r:
raw = r[0]
return self.deserialize(raw)
else:
raise KeyError('key %r not found' % key)
<commit_msg>Use count/scalar to test if key is present in SQL back end
This is simpler than using select/execute/fetchone. Also, scalar automatically
closes the result set whereas fetchone does not. This may fix some performance
issues.<commit_after>
|
from sqlalchemy import MetaData, Table, Column, types, create_engine, select
from .base import BaseBackend
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels', **engine_kwargs):
meta = MetaData(bind=create_engine(url, **engine_kwargs))
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False,
unique=True),
Column('data', types.LargeBinary, nullable=False))
self.table.create(checkfirst=True)
def __setitem__(self, key, value):
table = self.table
key_col = table.c.key
raw = self.serialize(value)
# Check if this key exists with a SELECT FOR UPDATE, to protect
# against a race with other concurrent writers of this key.
r = table.count(key_col == key, for_update=True).scalar()
if r:
# If it exists, use an UPDATE.
table.update().values(data=raw).where(key_col == key).execute()
else:
# Otherwise INSERT.
table.insert().values(key=key, data=raw).execute()
def __getitem__(self, key):
r = select([self.table.c.data], self.table.c.key == key).\
execute().fetchone()
if r:
raw = r[0]
return self.deserialize(raw)
else:
raise KeyError('key %r not found' % key)
|
from sqlalchemy import MetaData, Table, Column, types, create_engine, select
from .base import BaseBackend
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels', **engine_kwargs):
meta = MetaData(bind=create_engine(url, **engine_kwargs))
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False,
unique=True),
Column('data', types.LargeBinary, nullable=False))
self.table.create(checkfirst=True)
def __setitem__(self, key, value):
raw = self.serialize(value)
# Check if this key exists with a SELECT FOR UPDATE, to protect
# against a race with other concurrent writers of this key.
r = self.table.select('1', for_update=True).\
where(self.table.c.key == key).execute().fetchone()
if r:
# If it exists, use an UPDATE.
self.table.update().values(data=raw).\
where(self.table.c.key == key).execute()
else:
# Otherwise INSERT.
self.table.insert().values(key=key, data=raw).execute()
def __getitem__(self, key):
r = select([self.table.c.data], self.table.c.key == key).\
execute().fetchone()
if r:
raw = r[0]
return self.deserialize(raw)
else:
raise KeyError('key %r not found' % key)
Use count/scalar to test if key is present in SQL back end
This is simpler than using select/execute/fetchone. Also, scalar automatically
closes the result set whereas fetchone does not. This may fix some performance
issues.from sqlalchemy import MetaData, Table, Column, types, create_engine, select
from .base import BaseBackend
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels', **engine_kwargs):
meta = MetaData(bind=create_engine(url, **engine_kwargs))
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False,
unique=True),
Column('data', types.LargeBinary, nullable=False))
self.table.create(checkfirst=True)
def __setitem__(self, key, value):
table = self.table
key_col = table.c.key
raw = self.serialize(value)
# Check if this key exists with a SELECT FOR UPDATE, to protect
# against a race with other concurrent writers of this key.
r = table.count(key_col == key, for_update=True).scalar()
if r:
# If it exists, use an UPDATE.
table.update().values(data=raw).where(key_col == key).execute()
else:
# Otherwise INSERT.
table.insert().values(key=key, data=raw).execute()
def __getitem__(self, key):
r = select([self.table.c.data], self.table.c.key == key).\
execute().fetchone()
if r:
raw = r[0]
return self.deserialize(raw)
else:
raise KeyError('key %r not found' % key)
|
<commit_before>from sqlalchemy import MetaData, Table, Column, types, create_engine, select
from .base import BaseBackend
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels', **engine_kwargs):
meta = MetaData(bind=create_engine(url, **engine_kwargs))
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False,
unique=True),
Column('data', types.LargeBinary, nullable=False))
self.table.create(checkfirst=True)
def __setitem__(self, key, value):
raw = self.serialize(value)
# Check if this key exists with a SELECT FOR UPDATE, to protect
# against a race with other concurrent writers of this key.
r = self.table.select('1', for_update=True).\
where(self.table.c.key == key).execute().fetchone()
if r:
# If it exists, use an UPDATE.
self.table.update().values(data=raw).\
where(self.table.c.key == key).execute()
else:
# Otherwise INSERT.
self.table.insert().values(key=key, data=raw).execute()
def __getitem__(self, key):
r = select([self.table.c.data], self.table.c.key == key).\
execute().fetchone()
if r:
raw = r[0]
return self.deserialize(raw)
else:
raise KeyError('key %r not found' % key)
<commit_msg>Use count/scalar to test if key is present in SQL back end
This is simpler than using select/execute/fetchone. Also, scalar automatically
closes the result set whereas fetchone does not. This may fix some performance
issues.<commit_after>from sqlalchemy import MetaData, Table, Column, types, create_engine, select
from .base import BaseBackend
class SQLBackend(BaseBackend):
def __init__(self, url, table_name='gimlet_channels', **engine_kwargs):
meta = MetaData(bind=create_engine(url, **engine_kwargs))
self.table = Table(table_name, meta,
Column('id', types.Integer, primary_key=True),
Column('key', types.CHAR(32), nullable=False,
unique=True),
Column('data', types.LargeBinary, nullable=False))
self.table.create(checkfirst=True)
def __setitem__(self, key, value):
table = self.table
key_col = table.c.key
raw = self.serialize(value)
# Check if this key exists with a SELECT FOR UPDATE, to protect
# against a race with other concurrent writers of this key.
r = table.count(key_col == key, for_update=True).scalar()
if r:
# If it exists, use an UPDATE.
table.update().values(data=raw).where(key_col == key).execute()
else:
# Otherwise INSERT.
table.insert().values(key=key, data=raw).execute()
def __getitem__(self, key):
r = select([self.table.c.data], self.table.c.key == key).\
execute().fetchone()
if r:
raw = r[0]
return self.deserialize(raw)
else:
raise KeyError('key %r not found' % key)
|
5cb6e90714ffe91377e01743451ed4aefe4a1e24
|
greencard/greencard.py
|
greencard/greencard.py
|
"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
import os
from glob import glob
import importlib
for testpath in glob(os.path.join(testdir, "*.py")):
importlib.import_module(testpath)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
|
"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
from os.path import splitext, basename, join, exists
if not exists(testdir):
return None
import sys
from glob import glob
import importlib
sys.path.append(testdir)
for testpath in glob(join(testdir, "*.py")):
name, _ = splitext(basename(testpath))
importlib.import_module(name)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
|
Fix test descovery to correctly add test dir to path and import modules rather then files
|
Fix test descovery to correctly add test dir to path and import modules rather then files
|
Python
|
mit
|
Nekroze/greencard,Nekroze/greencard
|
"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
import os
from glob import glob
import importlib
for testpath in glob(os.path.join(testdir, "*.py")):
importlib.import_module(testpath)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
Fix test descovery to correctly add test dir to path and import modules rather then files
|
"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
from os.path import splitext, basename, join, exists
if not exists(testdir):
return None
import sys
from glob import glob
import importlib
sys.path.append(testdir)
for testpath in glob(join(testdir, "*.py")):
name, _ = splitext(basename(testpath))
importlib.import_module(name)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
|
<commit_before>"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
import os
from glob import glob
import importlib
for testpath in glob(os.path.join(testdir, "*.py")):
importlib.import_module(testpath)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
<commit_msg>Fix test descovery to correctly add test dir to path and import modules rather then files<commit_after>
|
"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
from os.path import splitext, basename, join, exists
if not exists(testdir):
return None
import sys
from glob import glob
import importlib
sys.path.append(testdir)
for testpath in glob(join(testdir, "*.py")):
name, _ = splitext(basename(testpath))
importlib.import_module(name)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
|
"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
import os
from glob import glob
import importlib
for testpath in glob(os.path.join(testdir, "*.py")):
importlib.import_module(testpath)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
Fix test descovery to correctly add test dir to path and import modules rather then files"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
from os.path import splitext, basename, join, exists
if not exists(testdir):
return None
import sys
from glob import glob
import importlib
sys.path.append(testdir)
for testpath in glob(join(testdir, "*.py")):
name, _ = splitext(basename(testpath))
importlib.import_module(name)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
|
<commit_before>"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
import os
from glob import glob
import importlib
for testpath in glob(os.path.join(testdir, "*.py")):
importlib.import_module(testpath)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
<commit_msg>Fix test descovery to correctly add test dir to path and import modules rather then files<commit_after>"""Greencard implementation."""
from functools import wraps
TESTS = []
def greencard(func):
"""
A decorator for providing a unittesting function/method with every card in
a librarian card library database when it is called.
"""
@wraps(func)
def wrapped(*args, **kwargs):
"""Transparent wrapper."""
return func(*args, **kwargs)
TESTS.append(wrapped)
return wrapped
def descovery(testdir):
"""Descover and load greencard tests."""
from os.path import splitext, basename, join, exists
if not exists(testdir):
return None
import sys
from glob import glob
import importlib
sys.path.append(testdir)
for testpath in glob(join(testdir, "*.py")):
name, _ = splitext(basename(testpath))
importlib.import_module(name)
def main(clargs=None):
"""Command line entry point."""
from argparse import ArgumentParser
from librarian.library import Library
import sys
parser = ArgumentParser(
description="A test runner for each card in a librarian library.")
parser.add_argument("library", help="Library database")
parser.add_argument("-t", "--tests", default="./tests/",
help="Test directory")
args = parser.parse_args(clargs)
descovery(args.tests)
library = Library(args.library)
failures = 0
for card in library.retrieve_all():
for test in TESTS:
try:
test(card)
except AssertionError:
print("{0} failed {1}".format(card, test.__name__))
failures += 1
sys.exit(failures)
|
8e3445e0ddedd5611be1f35166a9f37ae018e232
|
client/initialize.py
|
client/initialize.py
|
#!/usr/bin/env python
import os
from tempfile import NamedTemporaryFile
from textwrap import dedent
import shutil
from qlmdm import top_dir
from qlmdm.prompts import get_bool
os.chdir(top_dir)
cron_file = '/etc/cron.d/qlmdm'
cron_exists = os.path.exists(cron_file)
if cron_exists:
prompt = 'Do you want to replace the crontab?'
else:
prompt = 'Do you want to install the crontab?'
do_crontab = get_bool(prompt, not cron_exists)
if do_crontab:
with NamedTemporaryFile() as temp_cron_file:
temp_cron_file.write(dedent('''\
* * * * * root {}/bin/client-cron
'''.format(top_dir)))
temp_cron_file.flush()
shutil.copy(temp_cron_file.name, cron_file)
os.chmod(cron_file, 0644)
print('Installed {}'.format(cron_file))
|
#!/usr/bin/env python
import os
from tempfile import NamedTemporaryFile
from textwrap import dedent
import shutil
from qlmdm import top_dir
from qlmdm.prompts import get_bool
os.chdir(top_dir)
cron_file = '/etc/cron.d/qlmdm'
cron_exists = os.path.exists(cron_file)
if cron_exists:
prompt = 'Do you want to replace the crontab?'
else:
prompt = 'Do you want to install the crontab?'
do_crontab = get_bool(prompt, not cron_exists)
if do_crontab:
with NamedTemporaryFile() as temp_cron_file:
temp_cron_file.write(dedent('''\
* * * * * root {}/bin/client-cron
'''.format(top_dir)))
temp_cron_file.flush()
os.chmod(cron_file, 0644)
shutil.copy(temp_cron_file.name, cron_file)
print('Installed {}'.format(cron_file))
|
Set client crontab file permissions before copying it into place
|
Set client crontab file permissions before copying it into place
Set the permissions on the client crontab file before copying it
rather than after, so the time during which the file is inconsistent
is reduced.
|
Python
|
apache-2.0
|
quantopian/PenguinDome,quantopian/PenguinDome
|
#!/usr/bin/env python
import os
from tempfile import NamedTemporaryFile
from textwrap import dedent
import shutil
from qlmdm import top_dir
from qlmdm.prompts import get_bool
os.chdir(top_dir)
cron_file = '/etc/cron.d/qlmdm'
cron_exists = os.path.exists(cron_file)
if cron_exists:
prompt = 'Do you want to replace the crontab?'
else:
prompt = 'Do you want to install the crontab?'
do_crontab = get_bool(prompt, not cron_exists)
if do_crontab:
with NamedTemporaryFile() as temp_cron_file:
temp_cron_file.write(dedent('''\
* * * * * root {}/bin/client-cron
'''.format(top_dir)))
temp_cron_file.flush()
shutil.copy(temp_cron_file.name, cron_file)
os.chmod(cron_file, 0644)
print('Installed {}'.format(cron_file))
Set client crontab file permissions before copying it into place
Set the permissions on the client crontab file before copying it
rather than after, so the time during which the file is inconsistent
is reduced.
|
#!/usr/bin/env python
import os
from tempfile import NamedTemporaryFile
from textwrap import dedent
import shutil
from qlmdm import top_dir
from qlmdm.prompts import get_bool
os.chdir(top_dir)
cron_file = '/etc/cron.d/qlmdm'
cron_exists = os.path.exists(cron_file)
if cron_exists:
prompt = 'Do you want to replace the crontab?'
else:
prompt = 'Do you want to install the crontab?'
do_crontab = get_bool(prompt, not cron_exists)
if do_crontab:
with NamedTemporaryFile() as temp_cron_file:
temp_cron_file.write(dedent('''\
* * * * * root {}/bin/client-cron
'''.format(top_dir)))
temp_cron_file.flush()
os.chmod(cron_file, 0644)
shutil.copy(temp_cron_file.name, cron_file)
print('Installed {}'.format(cron_file))
|
<commit_before>#!/usr/bin/env python
import os
from tempfile import NamedTemporaryFile
from textwrap import dedent
import shutil
from qlmdm import top_dir
from qlmdm.prompts import get_bool
os.chdir(top_dir)
cron_file = '/etc/cron.d/qlmdm'
cron_exists = os.path.exists(cron_file)
if cron_exists:
prompt = 'Do you want to replace the crontab?'
else:
prompt = 'Do you want to install the crontab?'
do_crontab = get_bool(prompt, not cron_exists)
if do_crontab:
with NamedTemporaryFile() as temp_cron_file:
temp_cron_file.write(dedent('''\
* * * * * root {}/bin/client-cron
'''.format(top_dir)))
temp_cron_file.flush()
shutil.copy(temp_cron_file.name, cron_file)
os.chmod(cron_file, 0644)
print('Installed {}'.format(cron_file))
<commit_msg>Set client crontab file permissions before copying it into place
Set the permissions on the client crontab file before copying it
rather than after, so the time during which the file is inconsistent
is reduced.<commit_after>
|
#!/usr/bin/env python
import os
from tempfile import NamedTemporaryFile
from textwrap import dedent
import shutil
from qlmdm import top_dir
from qlmdm.prompts import get_bool
os.chdir(top_dir)
cron_file = '/etc/cron.d/qlmdm'
cron_exists = os.path.exists(cron_file)
if cron_exists:
prompt = 'Do you want to replace the crontab?'
else:
prompt = 'Do you want to install the crontab?'
do_crontab = get_bool(prompt, not cron_exists)
if do_crontab:
with NamedTemporaryFile() as temp_cron_file:
temp_cron_file.write(dedent('''\
* * * * * root {}/bin/client-cron
'''.format(top_dir)))
temp_cron_file.flush()
os.chmod(cron_file, 0644)
shutil.copy(temp_cron_file.name, cron_file)
print('Installed {}'.format(cron_file))
|
#!/usr/bin/env python
import os
from tempfile import NamedTemporaryFile
from textwrap import dedent
import shutil
from qlmdm import top_dir
from qlmdm.prompts import get_bool
os.chdir(top_dir)
cron_file = '/etc/cron.d/qlmdm'
cron_exists = os.path.exists(cron_file)
if cron_exists:
prompt = 'Do you want to replace the crontab?'
else:
prompt = 'Do you want to install the crontab?'
do_crontab = get_bool(prompt, not cron_exists)
if do_crontab:
with NamedTemporaryFile() as temp_cron_file:
temp_cron_file.write(dedent('''\
* * * * * root {}/bin/client-cron
'''.format(top_dir)))
temp_cron_file.flush()
shutil.copy(temp_cron_file.name, cron_file)
os.chmod(cron_file, 0644)
print('Installed {}'.format(cron_file))
Set client crontab file permissions before copying it into place
Set the permissions on the client crontab file before copying it
rather than after, so the time during which the file is inconsistent
is reduced.#!/usr/bin/env python
import os
from tempfile import NamedTemporaryFile
from textwrap import dedent
import shutil
from qlmdm import top_dir
from qlmdm.prompts import get_bool
os.chdir(top_dir)
cron_file = '/etc/cron.d/qlmdm'
cron_exists = os.path.exists(cron_file)
if cron_exists:
prompt = 'Do you want to replace the crontab?'
else:
prompt = 'Do you want to install the crontab?'
do_crontab = get_bool(prompt, not cron_exists)
if do_crontab:
with NamedTemporaryFile() as temp_cron_file:
temp_cron_file.write(dedent('''\
* * * * * root {}/bin/client-cron
'''.format(top_dir)))
temp_cron_file.flush()
os.chmod(cron_file, 0644)
shutil.copy(temp_cron_file.name, cron_file)
print('Installed {}'.format(cron_file))
|
<commit_before>#!/usr/bin/env python
import os
from tempfile import NamedTemporaryFile
from textwrap import dedent
import shutil
from qlmdm import top_dir
from qlmdm.prompts import get_bool
os.chdir(top_dir)
cron_file = '/etc/cron.d/qlmdm'
cron_exists = os.path.exists(cron_file)
if cron_exists:
prompt = 'Do you want to replace the crontab?'
else:
prompt = 'Do you want to install the crontab?'
do_crontab = get_bool(prompt, not cron_exists)
if do_crontab:
with NamedTemporaryFile() as temp_cron_file:
temp_cron_file.write(dedent('''\
* * * * * root {}/bin/client-cron
'''.format(top_dir)))
temp_cron_file.flush()
shutil.copy(temp_cron_file.name, cron_file)
os.chmod(cron_file, 0644)
print('Installed {}'.format(cron_file))
<commit_msg>Set client crontab file permissions before copying it into place
Set the permissions on the client crontab file before copying it
rather than after, so the time during which the file is inconsistent
is reduced.<commit_after>#!/usr/bin/env python
import os
from tempfile import NamedTemporaryFile
from textwrap import dedent
import shutil
from qlmdm import top_dir
from qlmdm.prompts import get_bool
os.chdir(top_dir)
cron_file = '/etc/cron.d/qlmdm'
cron_exists = os.path.exists(cron_file)
if cron_exists:
prompt = 'Do you want to replace the crontab?'
else:
prompt = 'Do you want to install the crontab?'
do_crontab = get_bool(prompt, not cron_exists)
if do_crontab:
with NamedTemporaryFile() as temp_cron_file:
temp_cron_file.write(dedent('''\
* * * * * root {}/bin/client-cron
'''.format(top_dir)))
temp_cron_file.flush()
os.chmod(cron_file, 0644)
shutil.copy(temp_cron_file.name, cron_file)
print('Installed {}'.format(cron_file))
|
2085cf0c103df44c500bae9bccdc2ce16cd8710f
|
amivapi/settings.py
|
amivapi/settings.py
|
"""Default settings for all environments.
These settings will be extended by additional config files in ROOT/config.
Run `python manage.py create_config` to create such a config file.
"""
from os.path import abspath, dirname, join
# Custom
ROOT_DIR = abspath(join(dirname(__file__), ".."))
# Flask
DEBUG = False
TESTING = False
# Flask-SQLALchemy
# Eve
ID_FIELD = "id"
AUTH_FIELD = "_author"
DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
BANDWIDTH_SAVER = False
RESOURCE_METHODS = ['GET', 'POST']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
PUBLIC_METHODS = ['GET'] # This is the only way to make / public
XML = False
# Eve, file storage options
RETURN_MEDIA_AS_BASE64_STRING = False
EXTENDED_MEDIA_INFO = ['filename', 'size', 'content_url']
STORAGE_DIR = r'D:\Programmieren\amivapi\src\filedump'
STORAGE_URL = r'/storage'
|
"""Default settings for all environments.
These settings will be extended by additional config files in ROOT/config.
Run `python manage.py create_config` to create such a config file.
"""
from os.path import abspath, dirname, join
# Custom
ROOT_DIR = abspath(join(dirname(__file__), ".."))
# Flask
DEBUG = False
TESTING = False
# Flask-SQLALchemy
# Eve
ID_FIELD = "id"
AUTH_FIELD = "_author"
DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
BANDWIDTH_SAVER = False
RESOURCE_METHODS = ['GET', 'POST']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
PUBLIC_METHODS = ['GET'] # This is the only way to make / public
XML = False
# Eve, file storage options
RETURN_MEDIA_AS_BASE64_STRING = False
EXTENDED_MEDIA_INFO = ['filename', 'size', 'content_url']
STORAGE_DIR = r'D:\Programmieren\amivapi\src\filedump'
STORAGE_URL = r'/storage'
|
Change DATE_FORMAT to be equivalent to datetime.isoformat()
|
Change DATE_FORMAT to be equivalent to datetime.isoformat()
|
Python
|
agpl-3.0
|
amiv-eth/amivapi,amiv-eth/amivapi,amiv-eth/amivapi
|
"""Default settings for all environments.
These settings will be extended by additional config files in ROOT/config.
Run `python manage.py create_config` to create such a config file.
"""
from os.path import abspath, dirname, join
# Custom
ROOT_DIR = abspath(join(dirname(__file__), ".."))
# Flask
DEBUG = False
TESTING = False
# Flask-SQLALchemy
# Eve
ID_FIELD = "id"
AUTH_FIELD = "_author"
DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
BANDWIDTH_SAVER = False
RESOURCE_METHODS = ['GET', 'POST']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
PUBLIC_METHODS = ['GET'] # This is the only way to make / public
XML = False
# Eve, file storage options
RETURN_MEDIA_AS_BASE64_STRING = False
EXTENDED_MEDIA_INFO = ['filename', 'size', 'content_url']
STORAGE_DIR = r'D:\Programmieren\amivapi\src\filedump'
STORAGE_URL = r'/storage'
Change DATE_FORMAT to be equivalent to datetime.isoformat()
|
"""Default settings for all environments.
These settings will be extended by additional config files in ROOT/config.
Run `python manage.py create_config` to create such a config file.
"""
from os.path import abspath, dirname, join
# Custom
ROOT_DIR = abspath(join(dirname(__file__), ".."))
# Flask
DEBUG = False
TESTING = False
# Flask-SQLALchemy
# Eve
ID_FIELD = "id"
AUTH_FIELD = "_author"
DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
BANDWIDTH_SAVER = False
RESOURCE_METHODS = ['GET', 'POST']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
PUBLIC_METHODS = ['GET'] # This is the only way to make / public
XML = False
# Eve, file storage options
RETURN_MEDIA_AS_BASE64_STRING = False
EXTENDED_MEDIA_INFO = ['filename', 'size', 'content_url']
STORAGE_DIR = r'D:\Programmieren\amivapi\src\filedump'
STORAGE_URL = r'/storage'
|
<commit_before>"""Default settings for all environments.
These settings will be extended by additional config files in ROOT/config.
Run `python manage.py create_config` to create such a config file.
"""
from os.path import abspath, dirname, join
# Custom
ROOT_DIR = abspath(join(dirname(__file__), ".."))
# Flask
DEBUG = False
TESTING = False
# Flask-SQLALchemy
# Eve
ID_FIELD = "id"
AUTH_FIELD = "_author"
DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
BANDWIDTH_SAVER = False
RESOURCE_METHODS = ['GET', 'POST']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
PUBLIC_METHODS = ['GET'] # This is the only way to make / public
XML = False
# Eve, file storage options
RETURN_MEDIA_AS_BASE64_STRING = False
EXTENDED_MEDIA_INFO = ['filename', 'size', 'content_url']
STORAGE_DIR = r'D:\Programmieren\amivapi\src\filedump'
STORAGE_URL = r'/storage'
<commit_msg>Change DATE_FORMAT to be equivalent to datetime.isoformat()<commit_after>
|
"""Default settings for all environments.
These settings will be extended by additional config files in ROOT/config.
Run `python manage.py create_config` to create such a config file.
"""
from os.path import abspath, dirname, join
# Custom
ROOT_DIR = abspath(join(dirname(__file__), ".."))
# Flask
DEBUG = False
TESTING = False
# Flask-SQLALchemy
# Eve
ID_FIELD = "id"
AUTH_FIELD = "_author"
DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
BANDWIDTH_SAVER = False
RESOURCE_METHODS = ['GET', 'POST']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
PUBLIC_METHODS = ['GET'] # This is the only way to make / public
XML = False
# Eve, file storage options
RETURN_MEDIA_AS_BASE64_STRING = False
EXTENDED_MEDIA_INFO = ['filename', 'size', 'content_url']
STORAGE_DIR = r'D:\Programmieren\amivapi\src\filedump'
STORAGE_URL = r'/storage'
|
"""Default settings for all environments.
These settings will be extended by additional config files in ROOT/config.
Run `python manage.py create_config` to create such a config file.
"""
from os.path import abspath, dirname, join
# Custom
ROOT_DIR = abspath(join(dirname(__file__), ".."))
# Flask
DEBUG = False
TESTING = False
# Flask-SQLALchemy
# Eve
ID_FIELD = "id"
AUTH_FIELD = "_author"
DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
BANDWIDTH_SAVER = False
RESOURCE_METHODS = ['GET', 'POST']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
PUBLIC_METHODS = ['GET'] # This is the only way to make / public
XML = False
# Eve, file storage options
RETURN_MEDIA_AS_BASE64_STRING = False
EXTENDED_MEDIA_INFO = ['filename', 'size', 'content_url']
STORAGE_DIR = r'D:\Programmieren\amivapi\src\filedump'
STORAGE_URL = r'/storage'
Change DATE_FORMAT to be equivalent to datetime.isoformat()"""Default settings for all environments.
These settings will be extended by additional config files in ROOT/config.
Run `python manage.py create_config` to create such a config file.
"""
from os.path import abspath, dirname, join
# Custom
ROOT_DIR = abspath(join(dirname(__file__), ".."))
# Flask
DEBUG = False
TESTING = False
# Flask-SQLALchemy
# Eve
ID_FIELD = "id"
AUTH_FIELD = "_author"
DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
BANDWIDTH_SAVER = False
RESOURCE_METHODS = ['GET', 'POST']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
PUBLIC_METHODS = ['GET'] # This is the only way to make / public
XML = False
# Eve, file storage options
RETURN_MEDIA_AS_BASE64_STRING = False
EXTENDED_MEDIA_INFO = ['filename', 'size', 'content_url']
STORAGE_DIR = r'D:\Programmieren\amivapi\src\filedump'
STORAGE_URL = r'/storage'
|
<commit_before>"""Default settings for all environments.
These settings will be extended by additional config files in ROOT/config.
Run `python manage.py create_config` to create such a config file.
"""
from os.path import abspath, dirname, join
# Custom
ROOT_DIR = abspath(join(dirname(__file__), ".."))
# Flask
DEBUG = False
TESTING = False
# Flask-SQLALchemy
# Eve
ID_FIELD = "id"
AUTH_FIELD = "_author"
DATE_FORMAT = "%Y-%m-%dT%H:%M:%SZ"
BANDWIDTH_SAVER = False
RESOURCE_METHODS = ['GET', 'POST']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
PUBLIC_METHODS = ['GET'] # This is the only way to make / public
XML = False
# Eve, file storage options
RETURN_MEDIA_AS_BASE64_STRING = False
EXTENDED_MEDIA_INFO = ['filename', 'size', 'content_url']
STORAGE_DIR = r'D:\Programmieren\amivapi\src\filedump'
STORAGE_URL = r'/storage'
<commit_msg>Change DATE_FORMAT to be equivalent to datetime.isoformat()<commit_after>"""Default settings for all environments.
These settings will be extended by additional config files in ROOT/config.
Run `python manage.py create_config` to create such a config file.
"""
from os.path import abspath, dirname, join
# Custom
ROOT_DIR = abspath(join(dirname(__file__), ".."))
# Flask
DEBUG = False
TESTING = False
# Flask-SQLALchemy
# Eve
ID_FIELD = "id"
AUTH_FIELD = "_author"
DATE_FORMAT = "%Y-%m-%dT%H:%M:%S"
BANDWIDTH_SAVER = False
RESOURCE_METHODS = ['GET', 'POST']
ITEM_METHODS = ['GET', 'PATCH', 'PUT', 'DELETE']
PUBLIC_METHODS = ['GET'] # This is the only way to make / public
XML = False
# Eve, file storage options
RETURN_MEDIA_AS_BASE64_STRING = False
EXTENDED_MEDIA_INFO = ['filename', 'size', 'content_url']
STORAGE_DIR = r'D:\Programmieren\amivapi\src\filedump'
STORAGE_URL = r'/storage'
|
c454e2ccafe0c8981ca0789edd2850cbde15c6a3
|
wallace/environments.py
|
wallace/environments.py
|
from sqlalchemy import ForeignKey, Column, String, desc
from .models import Node, Info
from information import State
class Environment(Node):
"""Defines an environment node.
Environments are nodes that have a state and that receive a transmission
from anyone that observes them.
"""
__tablename__ = "environment"
__mapper_args__ = {"polymorphic_identity": "environment"}
# the unique environment id
uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True)
@property
def state(self):
"""The state is the most recently created info of type State."""
return State\
.query\
.filter_by(origin_uuid=self.uuid)\
.order_by(desc(Info.creation_time))\
.first()
def get_observed(self, by_whom=None):
"""When observed, transmit the state."""
if not self.has_connection_to(by_whom):
self.connect_to(by_whom)
self.transmit(what=self.state, to_whom=by_whom)
def __repr__(self):
"""Print the environment in a nice format."""
return "Environment-{}-{}".format(self.uuid[:6], self.type)
|
from sqlalchemy import ForeignKey, Column, String, desc
from .models import Node, Info
from information import State
class Environment(Node):
"""Defines an environment node.
Environments are nodes that have a state and that receive a transmission
from anyone that observes them.
"""
__tablename__ = "environment"
__mapper_args__ = {"polymorphic_identity": "environment"}
# the unique environment id
uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True)
@property
def state(self):
"""The state is the most recently created info of type State."""
return State\
.query\
.filter_by(origin_uuid=self.uuid)\
.order_by(desc(Info.creation_time))\
.first()
def get_observed(self, by_whom=None):
"""When observed, transmit the state."""
self.transmit(what=self.state, to_whom=by_whom)
def __repr__(self):
"""Print the environment in a nice format."""
return "Environment-{}-{}".format(self.uuid[:6], self.type)
|
Remove side effect where observing connects
|
Remove side effect where observing connects
|
Python
|
mit
|
Dallinger/Dallinger,suchow/Wallace,berkeley-cocosci/Wallace,jcpeterson/Dallinger,Dallinger/Dallinger,berkeley-cocosci/Wallace,Dallinger/Dallinger,suchow/Wallace,jcpeterson/Dallinger,jcpeterson/Dallinger,jcpeterson/Dallinger,berkeley-cocosci/Wallace,Dallinger/Dallinger,jcpeterson/Dallinger,Dallinger/Dallinger,suchow/Wallace
|
from sqlalchemy import ForeignKey, Column, String, desc
from .models import Node, Info
from information import State
class Environment(Node):
"""Defines an environment node.
Environments are nodes that have a state and that receive a transmission
from anyone that observes them.
"""
__tablename__ = "environment"
__mapper_args__ = {"polymorphic_identity": "environment"}
# the unique environment id
uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True)
@property
def state(self):
"""The state is the most recently created info of type State."""
return State\
.query\
.filter_by(origin_uuid=self.uuid)\
.order_by(desc(Info.creation_time))\
.first()
def get_observed(self, by_whom=None):
"""When observed, transmit the state."""
if not self.has_connection_to(by_whom):
self.connect_to(by_whom)
self.transmit(what=self.state, to_whom=by_whom)
def __repr__(self):
"""Print the environment in a nice format."""
return "Environment-{}-{}".format(self.uuid[:6], self.type)
Remove side effect where observing connects
|
from sqlalchemy import ForeignKey, Column, String, desc
from .models import Node, Info
from information import State
class Environment(Node):
"""Defines an environment node.
Environments are nodes that have a state and that receive a transmission
from anyone that observes them.
"""
__tablename__ = "environment"
__mapper_args__ = {"polymorphic_identity": "environment"}
# the unique environment id
uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True)
@property
def state(self):
"""The state is the most recently created info of type State."""
return State\
.query\
.filter_by(origin_uuid=self.uuid)\
.order_by(desc(Info.creation_time))\
.first()
def get_observed(self, by_whom=None):
"""When observed, transmit the state."""
self.transmit(what=self.state, to_whom=by_whom)
def __repr__(self):
"""Print the environment in a nice format."""
return "Environment-{}-{}".format(self.uuid[:6], self.type)
|
<commit_before>from sqlalchemy import ForeignKey, Column, String, desc
from .models import Node, Info
from information import State
class Environment(Node):
"""Defines an environment node.
Environments are nodes that have a state and that receive a transmission
from anyone that observes them.
"""
__tablename__ = "environment"
__mapper_args__ = {"polymorphic_identity": "environment"}
# the unique environment id
uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True)
@property
def state(self):
"""The state is the most recently created info of type State."""
return State\
.query\
.filter_by(origin_uuid=self.uuid)\
.order_by(desc(Info.creation_time))\
.first()
def get_observed(self, by_whom=None):
"""When observed, transmit the state."""
if not self.has_connection_to(by_whom):
self.connect_to(by_whom)
self.transmit(what=self.state, to_whom=by_whom)
def __repr__(self):
"""Print the environment in a nice format."""
return "Environment-{}-{}".format(self.uuid[:6], self.type)
<commit_msg>Remove side effect where observing connects<commit_after>
|
from sqlalchemy import ForeignKey, Column, String, desc
from .models import Node, Info
from information import State
class Environment(Node):
"""Defines an environment node.
Environments are nodes that have a state and that receive a transmission
from anyone that observes them.
"""
__tablename__ = "environment"
__mapper_args__ = {"polymorphic_identity": "environment"}
# the unique environment id
uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True)
@property
def state(self):
"""The state is the most recently created info of type State."""
return State\
.query\
.filter_by(origin_uuid=self.uuid)\
.order_by(desc(Info.creation_time))\
.first()
def get_observed(self, by_whom=None):
"""When observed, transmit the state."""
self.transmit(what=self.state, to_whom=by_whom)
def __repr__(self):
"""Print the environment in a nice format."""
return "Environment-{}-{}".format(self.uuid[:6], self.type)
|
from sqlalchemy import ForeignKey, Column, String, desc
from .models import Node, Info
from information import State
class Environment(Node):
"""Defines an environment node.
Environments are nodes that have a state and that receive a transmission
from anyone that observes them.
"""
__tablename__ = "environment"
__mapper_args__ = {"polymorphic_identity": "environment"}
# the unique environment id
uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True)
@property
def state(self):
"""The state is the most recently created info of type State."""
return State\
.query\
.filter_by(origin_uuid=self.uuid)\
.order_by(desc(Info.creation_time))\
.first()
def get_observed(self, by_whom=None):
"""When observed, transmit the state."""
if not self.has_connection_to(by_whom):
self.connect_to(by_whom)
self.transmit(what=self.state, to_whom=by_whom)
def __repr__(self):
"""Print the environment in a nice format."""
return "Environment-{}-{}".format(self.uuid[:6], self.type)
Remove side effect where observing connectsfrom sqlalchemy import ForeignKey, Column, String, desc
from .models import Node, Info
from information import State
class Environment(Node):
"""Defines an environment node.
Environments are nodes that have a state and that receive a transmission
from anyone that observes them.
"""
__tablename__ = "environment"
__mapper_args__ = {"polymorphic_identity": "environment"}
# the unique environment id
uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True)
@property
def state(self):
"""The state is the most recently created info of type State."""
return State\
.query\
.filter_by(origin_uuid=self.uuid)\
.order_by(desc(Info.creation_time))\
.first()
def get_observed(self, by_whom=None):
"""When observed, transmit the state."""
self.transmit(what=self.state, to_whom=by_whom)
def __repr__(self):
"""Print the environment in a nice format."""
return "Environment-{}-{}".format(self.uuid[:6], self.type)
|
<commit_before>from sqlalchemy import ForeignKey, Column, String, desc
from .models import Node, Info
from information import State
class Environment(Node):
"""Defines an environment node.
Environments are nodes that have a state and that receive a transmission
from anyone that observes them.
"""
__tablename__ = "environment"
__mapper_args__ = {"polymorphic_identity": "environment"}
# the unique environment id
uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True)
@property
def state(self):
"""The state is the most recently created info of type State."""
return State\
.query\
.filter_by(origin_uuid=self.uuid)\
.order_by(desc(Info.creation_time))\
.first()
def get_observed(self, by_whom=None):
"""When observed, transmit the state."""
if not self.has_connection_to(by_whom):
self.connect_to(by_whom)
self.transmit(what=self.state, to_whom=by_whom)
def __repr__(self):
"""Print the environment in a nice format."""
return "Environment-{}-{}".format(self.uuid[:6], self.type)
<commit_msg>Remove side effect where observing connects<commit_after>from sqlalchemy import ForeignKey, Column, String, desc
from .models import Node, Info
from information import State
class Environment(Node):
"""Defines an environment node.
Environments are nodes that have a state and that receive a transmission
from anyone that observes them.
"""
__tablename__ = "environment"
__mapper_args__ = {"polymorphic_identity": "environment"}
# the unique environment id
uuid = Column(String(32), ForeignKey("node.uuid"), primary_key=True)
@property
def state(self):
"""The state is the most recently created info of type State."""
return State\
.query\
.filter_by(origin_uuid=self.uuid)\
.order_by(desc(Info.creation_time))\
.first()
def get_observed(self, by_whom=None):
"""When observed, transmit the state."""
self.transmit(what=self.state, to_whom=by_whom)
def __repr__(self):
"""Print the environment in a nice format."""
return "Environment-{}-{}".format(self.uuid[:6], self.type)
|
83efdc63bed6c280e62eae1fb3a741adc2ac730a
|
duralex/ForkReferenceVisitor.py
|
duralex/ForkReferenceVisitor.py
|
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.node_type
class ForkReferenceVisitor(AbstractVisitor):
def visit_node(self, node):
if duralex.tree.is_reference(node) and 'children' in node and len(node['children']) > 1:
ref_nodes = filter(lambda n: duralex.tree.is_reference(n), node['children'])
for i in range(1, len(ref_nodes)):
ref = ref_nodes[i]
fork = copy_node(node, recursive=False)
remove_node(node, ref)
push_node(fork, ref)
push_node(node['parent'], fork)
super(ForkReferenceVisitor, self).visit_node(node)
|
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.tree
class ForkReferenceVisitor(AbstractVisitor):
def visit_node(self, node):
if duralex.tree.is_reference(node) and 'children' in node and len(node['children']) > 1:
ref_nodes = filter(lambda n: duralex.tree.is_reference(n), node['children'])
for i in range(1, len(ref_nodes)):
ref = ref_nodes[i]
fork = copy_node(node, recursive=False)
remove_node(node, ref)
push_node(fork, ref)
push_node(node['parent'], fork)
super(ForkReferenceVisitor, self).visit_node(node)
|
Fix broken reference to node_type.
|
Fix broken reference to node_type.
|
Python
|
mit
|
Legilibre/duralex
|
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.node_type
class ForkReferenceVisitor(AbstractVisitor):
def visit_node(self, node):
if duralex.tree.is_reference(node) and 'children' in node and len(node['children']) > 1:
ref_nodes = filter(lambda n: duralex.tree.is_reference(n), node['children'])
for i in range(1, len(ref_nodes)):
ref = ref_nodes[i]
fork = copy_node(node, recursive=False)
remove_node(node, ref)
push_node(fork, ref)
push_node(node['parent'], fork)
super(ForkReferenceVisitor, self).visit_node(node)
Fix broken reference to node_type.
|
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.tree
class ForkReferenceVisitor(AbstractVisitor):
def visit_node(self, node):
if duralex.tree.is_reference(node) and 'children' in node and len(node['children']) > 1:
ref_nodes = filter(lambda n: duralex.tree.is_reference(n), node['children'])
for i in range(1, len(ref_nodes)):
ref = ref_nodes[i]
fork = copy_node(node, recursive=False)
remove_node(node, ref)
push_node(fork, ref)
push_node(node['parent'], fork)
super(ForkReferenceVisitor, self).visit_node(node)
|
<commit_before>from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.node_type
class ForkReferenceVisitor(AbstractVisitor):
def visit_node(self, node):
if duralex.tree.is_reference(node) and 'children' in node and len(node['children']) > 1:
ref_nodes = filter(lambda n: duralex.tree.is_reference(n), node['children'])
for i in range(1, len(ref_nodes)):
ref = ref_nodes[i]
fork = copy_node(node, recursive=False)
remove_node(node, ref)
push_node(fork, ref)
push_node(node['parent'], fork)
super(ForkReferenceVisitor, self).visit_node(node)
<commit_msg>Fix broken reference to node_type.<commit_after>
|
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.tree
class ForkReferenceVisitor(AbstractVisitor):
def visit_node(self, node):
if duralex.tree.is_reference(node) and 'children' in node and len(node['children']) > 1:
ref_nodes = filter(lambda n: duralex.tree.is_reference(n), node['children'])
for i in range(1, len(ref_nodes)):
ref = ref_nodes[i]
fork = copy_node(node, recursive=False)
remove_node(node, ref)
push_node(fork, ref)
push_node(node['parent'], fork)
super(ForkReferenceVisitor, self).visit_node(node)
|
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.node_type
class ForkReferenceVisitor(AbstractVisitor):
def visit_node(self, node):
if duralex.tree.is_reference(node) and 'children' in node and len(node['children']) > 1:
ref_nodes = filter(lambda n: duralex.tree.is_reference(n), node['children'])
for i in range(1, len(ref_nodes)):
ref = ref_nodes[i]
fork = copy_node(node, recursive=False)
remove_node(node, ref)
push_node(fork, ref)
push_node(node['parent'], fork)
super(ForkReferenceVisitor, self).visit_node(node)
Fix broken reference to node_type.from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.tree
class ForkReferenceVisitor(AbstractVisitor):
def visit_node(self, node):
if duralex.tree.is_reference(node) and 'children' in node and len(node['children']) > 1:
ref_nodes = filter(lambda n: duralex.tree.is_reference(n), node['children'])
for i in range(1, len(ref_nodes)):
ref = ref_nodes[i]
fork = copy_node(node, recursive=False)
remove_node(node, ref)
push_node(fork, ref)
push_node(node['parent'], fork)
super(ForkReferenceVisitor, self).visit_node(node)
|
<commit_before>from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.node_type
class ForkReferenceVisitor(AbstractVisitor):
def visit_node(self, node):
if duralex.tree.is_reference(node) and 'children' in node and len(node['children']) > 1:
ref_nodes = filter(lambda n: duralex.tree.is_reference(n), node['children'])
for i in range(1, len(ref_nodes)):
ref = ref_nodes[i]
fork = copy_node(node, recursive=False)
remove_node(node, ref)
push_node(fork, ref)
push_node(node['parent'], fork)
super(ForkReferenceVisitor, self).visit_node(node)
<commit_msg>Fix broken reference to node_type.<commit_after>from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
import duralex.tree
class ForkReferenceVisitor(AbstractVisitor):
def visit_node(self, node):
if duralex.tree.is_reference(node) and 'children' in node and len(node['children']) > 1:
ref_nodes = filter(lambda n: duralex.tree.is_reference(n), node['children'])
for i in range(1, len(ref_nodes)):
ref = ref_nodes[i]
fork = copy_node(node, recursive=False)
remove_node(node, ref)
push_node(fork, ref)
push_node(node['parent'], fork)
super(ForkReferenceVisitor, self).visit_node(node)
|
2d13b639f17fd7430191c45ee14f6d200228fd5a
|
geoportal/geoportailv3_geoportal/views/luxthemes.py
|
geoportal/geoportailv3_geoportal/views/luxthemes.py
|
from pyramid.view import view_config
from c2cgeoportal_commons.models import DBSession
from c2cgeoportal_commons.models.main import Theme
import logging
log = logging.getLogger(__name__)
class LuxThemes(object):
def __init__(self, request):
self.request = request
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(Theme).filter(
Theme.public == False).filter(
Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
|
import logging
import re
from c2cgeoportal_commons.models import DBSession, main
from c2cgeoportal_geoportal.lib.caching import get_region
from c2cgeoportal_geoportal.lib.wmstparsing import TimeInformation
from c2cgeoportal_geoportal.views.theme import Theme
from pyramid.view import view_config
from geoportailv3_geoportal.models import LuxLayerInternalWMS
log = logging.getLogger(__name__)
CACHE_REGION = get_region("std")
class LuxThemes(Theme):
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(main.Theme).filter(
main.Theme.public == False).filter(
main.Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
@view_config(route_name="themes", renderer="json")
def themes(self):
"""Fake capabilities for Internal WMS"""
return super().themes()
def _wms_layers(self, ogc_server):
"""Fake capabilities for Internal WMS"""
if ogc_server.name == "Internal WMS":
return self._wms_layers_internal(), set()
return super()._wms_layers(ogc_server)
@CACHE_REGION.cache_on_arguments()
def _wms_layers_internal(self):
"""Fake capabilities for Internal WMS"""
wms_layers = []
for layer in DBSession.query(LuxLayerInternalWMS):
wms_layers += layer.layers.split(",") if layer.layers else []
return {
"layers": {
name: {
"children": [],
"info": [],
}
for name in set(wms_layers)
}
}
|
Fix themes.json with internal WMS
|
Fix themes.json with internal WMS
|
Python
|
mit
|
Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3,Geoportail-Luxembourg/geoportailv3
|
from pyramid.view import view_config
from c2cgeoportal_commons.models import DBSession
from c2cgeoportal_commons.models.main import Theme
import logging
log = logging.getLogger(__name__)
class LuxThemes(object):
def __init__(self, request):
self.request = request
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(Theme).filter(
Theme.public == False).filter(
Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
Fix themes.json with internal WMS
|
import logging
import re
from c2cgeoportal_commons.models import DBSession, main
from c2cgeoportal_geoportal.lib.caching import get_region
from c2cgeoportal_geoportal.lib.wmstparsing import TimeInformation
from c2cgeoportal_geoportal.views.theme import Theme
from pyramid.view import view_config
from geoportailv3_geoportal.models import LuxLayerInternalWMS
log = logging.getLogger(__name__)
CACHE_REGION = get_region("std")
class LuxThemes(Theme):
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(main.Theme).filter(
main.Theme.public == False).filter(
main.Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
@view_config(route_name="themes", renderer="json")
def themes(self):
"""Fake capabilities for Internal WMS"""
return super().themes()
def _wms_layers(self, ogc_server):
"""Fake capabilities for Internal WMS"""
if ogc_server.name == "Internal WMS":
return self._wms_layers_internal(), set()
return super()._wms_layers(ogc_server)
@CACHE_REGION.cache_on_arguments()
def _wms_layers_internal(self):
"""Fake capabilities for Internal WMS"""
wms_layers = []
for layer in DBSession.query(LuxLayerInternalWMS):
wms_layers += layer.layers.split(",") if layer.layers else []
return {
"layers": {
name: {
"children": [],
"info": [],
}
for name in set(wms_layers)
}
}
|
<commit_before>from pyramid.view import view_config
from c2cgeoportal_commons.models import DBSession
from c2cgeoportal_commons.models.main import Theme
import logging
log = logging.getLogger(__name__)
class LuxThemes(object):
def __init__(self, request):
self.request = request
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(Theme).filter(
Theme.public == False).filter(
Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
<commit_msg>Fix themes.json with internal WMS<commit_after>
|
import logging
import re
from c2cgeoportal_commons.models import DBSession, main
from c2cgeoportal_geoportal.lib.caching import get_region
from c2cgeoportal_geoportal.lib.wmstparsing import TimeInformation
from c2cgeoportal_geoportal.views.theme import Theme
from pyramid.view import view_config
from geoportailv3_geoportal.models import LuxLayerInternalWMS
log = logging.getLogger(__name__)
CACHE_REGION = get_region("std")
class LuxThemes(Theme):
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(main.Theme).filter(
main.Theme.public == False).filter(
main.Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
@view_config(route_name="themes", renderer="json")
def themes(self):
"""Fake capabilities for Internal WMS"""
return super().themes()
def _wms_layers(self, ogc_server):
"""Fake capabilities for Internal WMS"""
if ogc_server.name == "Internal WMS":
return self._wms_layers_internal(), set()
return super()._wms_layers(ogc_server)
@CACHE_REGION.cache_on_arguments()
def _wms_layers_internal(self):
"""Fake capabilities for Internal WMS"""
wms_layers = []
for layer in DBSession.query(LuxLayerInternalWMS):
wms_layers += layer.layers.split(",") if layer.layers else []
return {
"layers": {
name: {
"children": [],
"info": [],
}
for name in set(wms_layers)
}
}
|
from pyramid.view import view_config
from c2cgeoportal_commons.models import DBSession
from c2cgeoportal_commons.models.main import Theme
import logging
log = logging.getLogger(__name__)
class LuxThemes(object):
def __init__(self, request):
self.request = request
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(Theme).filter(
Theme.public == False).filter(
Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
Fix themes.json with internal WMSimport logging
import re
from c2cgeoportal_commons.models import DBSession, main
from c2cgeoportal_geoportal.lib.caching import get_region
from c2cgeoportal_geoportal.lib.wmstparsing import TimeInformation
from c2cgeoportal_geoportal.views.theme import Theme
from pyramid.view import view_config
from geoportailv3_geoportal.models import LuxLayerInternalWMS
log = logging.getLogger(__name__)
CACHE_REGION = get_region("std")
class LuxThemes(Theme):
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(main.Theme).filter(
main.Theme.public == False).filter(
main.Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
@view_config(route_name="themes", renderer="json")
def themes(self):
"""Fake capabilities for Internal WMS"""
return super().themes()
def _wms_layers(self, ogc_server):
"""Fake capabilities for Internal WMS"""
if ogc_server.name == "Internal WMS":
return self._wms_layers_internal(), set()
return super()._wms_layers(ogc_server)
@CACHE_REGION.cache_on_arguments()
def _wms_layers_internal(self):
"""Fake capabilities for Internal WMS"""
wms_layers = []
for layer in DBSession.query(LuxLayerInternalWMS):
wms_layers += layer.layers.split(",") if layer.layers else []
return {
"layers": {
name: {
"children": [],
"info": [],
}
for name in set(wms_layers)
}
}
|
<commit_before>from pyramid.view import view_config
from c2cgeoportal_commons.models import DBSession
from c2cgeoportal_commons.models.main import Theme
import logging
log = logging.getLogger(__name__)
class LuxThemes(object):
def __init__(self, request):
self.request = request
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(Theme).filter(
Theme.public == False).filter(
Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
<commit_msg>Fix themes.json with internal WMS<commit_after>import logging
import re
from c2cgeoportal_commons.models import DBSession, main
from c2cgeoportal_geoportal.lib.caching import get_region
from c2cgeoportal_geoportal.lib.wmstparsing import TimeInformation
from c2cgeoportal_geoportal.views.theme import Theme
from pyramid.view import view_config
from geoportailv3_geoportal.models import LuxLayerInternalWMS
log = logging.getLogger(__name__)
CACHE_REGION = get_region("std")
class LuxThemes(Theme):
@view_config(route_name='isthemeprivate', renderer='json')
def is_theme_private(self):
theme = self.request.params.get('theme', '')
cnt = DBSession.query(main.Theme).filter(
main.Theme.public == False).filter(
main.Theme.name == theme).count() # noqa
if cnt == 1:
return {'name': theme, 'is_private': True}
return {'name': theme, 'is_private': False}
@view_config(route_name="themes", renderer="json")
def themes(self):
"""Fake capabilities for Internal WMS"""
return super().themes()
def _wms_layers(self, ogc_server):
"""Fake capabilities for Internal WMS"""
if ogc_server.name == "Internal WMS":
return self._wms_layers_internal(), set()
return super()._wms_layers(ogc_server)
@CACHE_REGION.cache_on_arguments()
def _wms_layers_internal(self):
"""Fake capabilities for Internal WMS"""
wms_layers = []
for layer in DBSession.query(LuxLayerInternalWMS):
wms_layers += layer.layers.split(",") if layer.layers else []
return {
"layers": {
name: {
"children": [],
"info": [],
}
for name in set(wms_layers)
}
}
|
5d2858d740eebfe180ceef22ae5cc80b902a5ccf
|
books/views.py
|
books/views.py
|
from django.core.urlresolvers import reverse
from django.http.response import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import ugettext as _
from books.forms import BookForm
from shared.models import BookType
def index(request):
book_list = BookType.objects.all()
args = {'book_list': book_list}
if request.session['success_msg']:
args['success_msg'] = {
'book_added': _("The book was added successfully!")
}[request.session['success_msg']]
return render(request, 'books/index.html', args)
def add_book(request):
if request.method == 'POST':
form = BookForm(request.POST)
if form.is_valid():
book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'],
issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'],
price=form.cleaned_data['price'] * 100)
book_type.save()
request.session['success_msg'] = 'book_added'
return HttpResponseRedirect(reverse('index'))
else:
form = BookForm()
return render(request, 'books/add.html', {'form': form})
def edit_book(request, book_id):
return HttpResponse("Hello world!")
def remove_book(request, book_id):
return HttpResponse("Hello world!")
|
from django.core.urlresolvers import reverse
from django.http.response import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import ugettext as _
from books.forms import BookForm
from shared.models import BookType
def index(request):
book_list = BookType.objects.all()
args = {'book_list': book_list}
if 'success_msg' in request.session:
args['success_msg'] = {
'book_added': _("The book was added successfully!")
}[request.session['success_msg']]
del request.session['success_msg']
return render(request, 'books/index.html', args)
def add_book(request):
if request.method == 'POST':
form = BookForm(request.POST)
if form.is_valid():
book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'],
issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'],
price=form.cleaned_data['price'] * 100)
book_type.save()
request.session['success_msg'] = 'book_added'
return HttpResponseRedirect(reverse('index'))
else:
form = BookForm()
return render(request, 'books/add.html', {'form': form})
def edit_book(request, book_id):
return HttpResponse("Hello world!")
def remove_book(request, book_id):
return HttpResponse("Hello world!")
|
Fix KeyError in alerts implementation
|
Fix KeyError in alerts implementation
- Fix for alert that wasn't dismissing after refreshing the page
|
Python
|
agpl-3.0
|
m4tx/egielda,m4tx/egielda,m4tx/egielda
|
from django.core.urlresolvers import reverse
from django.http.response import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import ugettext as _
from books.forms import BookForm
from shared.models import BookType
def index(request):
book_list = BookType.objects.all()
args = {'book_list': book_list}
if request.session['success_msg']:
args['success_msg'] = {
'book_added': _("The book was added successfully!")
}[request.session['success_msg']]
return render(request, 'books/index.html', args)
def add_book(request):
if request.method == 'POST':
form = BookForm(request.POST)
if form.is_valid():
book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'],
issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'],
price=form.cleaned_data['price'] * 100)
book_type.save()
request.session['success_msg'] = 'book_added'
return HttpResponseRedirect(reverse('index'))
else:
form = BookForm()
return render(request, 'books/add.html', {'form': form})
def edit_book(request, book_id):
return HttpResponse("Hello world!")
def remove_book(request, book_id):
return HttpResponse("Hello world!")
Fix KeyError in alerts implementation
- Fix for alert that wasn't dismissing after refreshing the page
|
from django.core.urlresolvers import reverse
from django.http.response import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import ugettext as _
from books.forms import BookForm
from shared.models import BookType
def index(request):
book_list = BookType.objects.all()
args = {'book_list': book_list}
if 'success_msg' in request.session:
args['success_msg'] = {
'book_added': _("The book was added successfully!")
}[request.session['success_msg']]
del request.session['success_msg']
return render(request, 'books/index.html', args)
def add_book(request):
if request.method == 'POST':
form = BookForm(request.POST)
if form.is_valid():
book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'],
issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'],
price=form.cleaned_data['price'] * 100)
book_type.save()
request.session['success_msg'] = 'book_added'
return HttpResponseRedirect(reverse('index'))
else:
form = BookForm()
return render(request, 'books/add.html', {'form': form})
def edit_book(request, book_id):
return HttpResponse("Hello world!")
def remove_book(request, book_id):
return HttpResponse("Hello world!")
|
<commit_before>from django.core.urlresolvers import reverse
from django.http.response import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import ugettext as _
from books.forms import BookForm
from shared.models import BookType
def index(request):
book_list = BookType.objects.all()
args = {'book_list': book_list}
if request.session['success_msg']:
args['success_msg'] = {
'book_added': _("The book was added successfully!")
}[request.session['success_msg']]
return render(request, 'books/index.html', args)
def add_book(request):
if request.method == 'POST':
form = BookForm(request.POST)
if form.is_valid():
book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'],
issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'],
price=form.cleaned_data['price'] * 100)
book_type.save()
request.session['success_msg'] = 'book_added'
return HttpResponseRedirect(reverse('index'))
else:
form = BookForm()
return render(request, 'books/add.html', {'form': form})
def edit_book(request, book_id):
return HttpResponse("Hello world!")
def remove_book(request, book_id):
return HttpResponse("Hello world!")
<commit_msg>Fix KeyError in alerts implementation
- Fix for alert that wasn't dismissing after refreshing the page<commit_after>
|
from django.core.urlresolvers import reverse
from django.http.response import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import ugettext as _
from books.forms import BookForm
from shared.models import BookType
def index(request):
book_list = BookType.objects.all()
args = {'book_list': book_list}
if 'success_msg' in request.session:
args['success_msg'] = {
'book_added': _("The book was added successfully!")
}[request.session['success_msg']]
del request.session['success_msg']
return render(request, 'books/index.html', args)
def add_book(request):
if request.method == 'POST':
form = BookForm(request.POST)
if form.is_valid():
book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'],
issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'],
price=form.cleaned_data['price'] * 100)
book_type.save()
request.session['success_msg'] = 'book_added'
return HttpResponseRedirect(reverse('index'))
else:
form = BookForm()
return render(request, 'books/add.html', {'form': form})
def edit_book(request, book_id):
return HttpResponse("Hello world!")
def remove_book(request, book_id):
return HttpResponse("Hello world!")
|
from django.core.urlresolvers import reverse
from django.http.response import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import ugettext as _
from books.forms import BookForm
from shared.models import BookType
def index(request):
book_list = BookType.objects.all()
args = {'book_list': book_list}
if request.session['success_msg']:
args['success_msg'] = {
'book_added': _("The book was added successfully!")
}[request.session['success_msg']]
return render(request, 'books/index.html', args)
def add_book(request):
if request.method == 'POST':
form = BookForm(request.POST)
if form.is_valid():
book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'],
issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'],
price=form.cleaned_data['price'] * 100)
book_type.save()
request.session['success_msg'] = 'book_added'
return HttpResponseRedirect(reverse('index'))
else:
form = BookForm()
return render(request, 'books/add.html', {'form': form})
def edit_book(request, book_id):
return HttpResponse("Hello world!")
def remove_book(request, book_id):
return HttpResponse("Hello world!")
Fix KeyError in alerts implementation
- Fix for alert that wasn't dismissing after refreshing the pagefrom django.core.urlresolvers import reverse
from django.http.response import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import ugettext as _
from books.forms import BookForm
from shared.models import BookType
def index(request):
book_list = BookType.objects.all()
args = {'book_list': book_list}
if 'success_msg' in request.session:
args['success_msg'] = {
'book_added': _("The book was added successfully!")
}[request.session['success_msg']]
del request.session['success_msg']
return render(request, 'books/index.html', args)
def add_book(request):
if request.method == 'POST':
form = BookForm(request.POST)
if form.is_valid():
book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'],
issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'],
price=form.cleaned_data['price'] * 100)
book_type.save()
request.session['success_msg'] = 'book_added'
return HttpResponseRedirect(reverse('index'))
else:
form = BookForm()
return render(request, 'books/add.html', {'form': form})
def edit_book(request, book_id):
return HttpResponse("Hello world!")
def remove_book(request, book_id):
return HttpResponse("Hello world!")
|
<commit_before>from django.core.urlresolvers import reverse
from django.http.response import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import ugettext as _
from books.forms import BookForm
from shared.models import BookType
def index(request):
book_list = BookType.objects.all()
args = {'book_list': book_list}
if request.session['success_msg']:
args['success_msg'] = {
'book_added': _("The book was added successfully!")
}[request.session['success_msg']]
return render(request, 'books/index.html', args)
def add_book(request):
if request.method == 'POST':
form = BookForm(request.POST)
if form.is_valid():
book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'],
issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'],
price=form.cleaned_data['price'] * 100)
book_type.save()
request.session['success_msg'] = 'book_added'
return HttpResponseRedirect(reverse('index'))
else:
form = BookForm()
return render(request, 'books/add.html', {'form': form})
def edit_book(request, book_id):
return HttpResponse("Hello world!")
def remove_book(request, book_id):
return HttpResponse("Hello world!")
<commit_msg>Fix KeyError in alerts implementation
- Fix for alert that wasn't dismissing after refreshing the page<commit_after>from django.core.urlresolvers import reverse
from django.http.response import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from django.utils.translation import ugettext as _
from books.forms import BookForm
from shared.models import BookType
def index(request):
book_list = BookType.objects.all()
args = {'book_list': book_list}
if 'success_msg' in request.session:
args['success_msg'] = {
'book_added': _("The book was added successfully!")
}[request.session['success_msg']]
del request.session['success_msg']
return render(request, 'books/index.html', args)
def add_book(request):
if request.method == 'POST':
form = BookForm(request.POST)
if form.is_valid():
book_type = BookType(publisher=form.cleaned_data['publisher'], title=form.cleaned_data['title'],
issue=form.cleaned_data['issue'], issue_year=form.cleaned_data['issue_year'],
price=form.cleaned_data['price'] * 100)
book_type.save()
request.session['success_msg'] = 'book_added'
return HttpResponseRedirect(reverse('index'))
else:
form = BookForm()
return render(request, 'books/add.html', {'form': form})
def edit_book(request, book_id):
return HttpResponse("Hello world!")
def remove_book(request, book_id):
return HttpResponse("Hello world!")
|
29c7e05a02366362db1801636e2b71e042ea6461
|
authorizenet/conf.py
|
authorizenet/conf.py
|
"""Application-specific settings for django-authorizenet"""
from django.conf import settings as django_settings
class Settings(object):
"""
Retrieves django.conf settings, using defaults from Default subclass
All usable settings are specified in settings attribute. Use an
``AUTHNET_`` prefix when specifying settings in django.conf.
"""
prefix = 'AUTHNET_'
settings = {'DEBUG', 'LOGIN_ID', 'TRANSACTION_KEY', 'CUSTOMER_MODEL',
'DELIM_CHAR', 'FORCE_TEST_REQUEST', 'EMAIL_CUSTOMER',
'MD5_HASH'}
class Default:
CUSTOMER_MODEL = getattr(
django_settings, 'AUTH_USER_MODEL', "auth.User")
DELIM_CHAR = "|"
FORCE_TEST_REQUEST = False
EMAIL_CUSTOMER = None
MD5_HASH = ""
def __init__(self):
self.defaults = Settings.Default()
def __getattr__(self, name):
if name not in self.settings:
raise AttributeError("Setting %s not understood" % name)
try:
return getattr(django_settings, self.prefix + name)
except AttributeError:
return getattr(self.defaults, name)
settings = Settings()
|
"""Application-specific settings for django-authorizenet"""
from django.conf import settings as django_settings
class Settings(object):
"""
Retrieves django.conf settings, using defaults from Default subclass
All usable settings are specified in settings attribute. Use an
``AUTHNET_`` prefix when specifying settings in django.conf.
"""
prefix = 'AUTHNET_'
settings = set(('DEBUG', 'LOGIN_ID', 'TRANSACTION_KEY', 'CUSTOMER_MODEL',
'DELIM_CHAR', 'FORCE_TEST_REQUEST', 'EMAIL_CUSTOMER',
'MD5_HASH'))
class Default:
CUSTOMER_MODEL = getattr(
django_settings, 'AUTH_USER_MODEL', "auth.User")
DELIM_CHAR = "|"
FORCE_TEST_REQUEST = False
EMAIL_CUSTOMER = None
MD5_HASH = ""
def __init__(self):
self.defaults = Settings.Default()
def __getattr__(self, name):
if name not in self.settings:
raise AttributeError("Setting %s not understood" % name)
try:
return getattr(django_settings, self.prefix + name)
except AttributeError:
return getattr(self.defaults, name)
settings = Settings()
|
Fix set notation for Python 2.6
|
Fix set notation for Python 2.6
|
Python
|
mit
|
zen4ever/django-authorizenet,zen4ever/django-authorizenet,zen4ever/django-authorizenet
|
"""Application-specific settings for django-authorizenet"""
from django.conf import settings as django_settings
class Settings(object):
"""
Retrieves django.conf settings, using defaults from Default subclass
All usable settings are specified in settings attribute. Use an
``AUTHNET_`` prefix when specifying settings in django.conf.
"""
prefix = 'AUTHNET_'
settings = {'DEBUG', 'LOGIN_ID', 'TRANSACTION_KEY', 'CUSTOMER_MODEL',
'DELIM_CHAR', 'FORCE_TEST_REQUEST', 'EMAIL_CUSTOMER',
'MD5_HASH'}
class Default:
CUSTOMER_MODEL = getattr(
django_settings, 'AUTH_USER_MODEL', "auth.User")
DELIM_CHAR = "|"
FORCE_TEST_REQUEST = False
EMAIL_CUSTOMER = None
MD5_HASH = ""
def __init__(self):
self.defaults = Settings.Default()
def __getattr__(self, name):
if name not in self.settings:
raise AttributeError("Setting %s not understood" % name)
try:
return getattr(django_settings, self.prefix + name)
except AttributeError:
return getattr(self.defaults, name)
settings = Settings()
Fix set notation for Python 2.6
|
"""Application-specific settings for django-authorizenet"""
from django.conf import settings as django_settings
class Settings(object):
"""
Retrieves django.conf settings, using defaults from Default subclass
All usable settings are specified in settings attribute. Use an
``AUTHNET_`` prefix when specifying settings in django.conf.
"""
prefix = 'AUTHNET_'
settings = set(('DEBUG', 'LOGIN_ID', 'TRANSACTION_KEY', 'CUSTOMER_MODEL',
'DELIM_CHAR', 'FORCE_TEST_REQUEST', 'EMAIL_CUSTOMER',
'MD5_HASH'))
class Default:
CUSTOMER_MODEL = getattr(
django_settings, 'AUTH_USER_MODEL', "auth.User")
DELIM_CHAR = "|"
FORCE_TEST_REQUEST = False
EMAIL_CUSTOMER = None
MD5_HASH = ""
def __init__(self):
self.defaults = Settings.Default()
def __getattr__(self, name):
if name not in self.settings:
raise AttributeError("Setting %s not understood" % name)
try:
return getattr(django_settings, self.prefix + name)
except AttributeError:
return getattr(self.defaults, name)
settings = Settings()
|
<commit_before>"""Application-specific settings for django-authorizenet"""
from django.conf import settings as django_settings
class Settings(object):
"""
Retrieves django.conf settings, using defaults from Default subclass
All usable settings are specified in settings attribute. Use an
``AUTHNET_`` prefix when specifying settings in django.conf.
"""
prefix = 'AUTHNET_'
settings = {'DEBUG', 'LOGIN_ID', 'TRANSACTION_KEY', 'CUSTOMER_MODEL',
'DELIM_CHAR', 'FORCE_TEST_REQUEST', 'EMAIL_CUSTOMER',
'MD5_HASH'}
class Default:
CUSTOMER_MODEL = getattr(
django_settings, 'AUTH_USER_MODEL', "auth.User")
DELIM_CHAR = "|"
FORCE_TEST_REQUEST = False
EMAIL_CUSTOMER = None
MD5_HASH = ""
def __init__(self):
self.defaults = Settings.Default()
def __getattr__(self, name):
if name not in self.settings:
raise AttributeError("Setting %s not understood" % name)
try:
return getattr(django_settings, self.prefix + name)
except AttributeError:
return getattr(self.defaults, name)
settings = Settings()
<commit_msg>Fix set notation for Python 2.6<commit_after>
|
"""Application-specific settings for django-authorizenet"""
from django.conf import settings as django_settings
class Settings(object):
"""
Retrieves django.conf settings, using defaults from Default subclass
All usable settings are specified in settings attribute. Use an
``AUTHNET_`` prefix when specifying settings in django.conf.
"""
prefix = 'AUTHNET_'
settings = set(('DEBUG', 'LOGIN_ID', 'TRANSACTION_KEY', 'CUSTOMER_MODEL',
'DELIM_CHAR', 'FORCE_TEST_REQUEST', 'EMAIL_CUSTOMER',
'MD5_HASH'))
class Default:
CUSTOMER_MODEL = getattr(
django_settings, 'AUTH_USER_MODEL', "auth.User")
DELIM_CHAR = "|"
FORCE_TEST_REQUEST = False
EMAIL_CUSTOMER = None
MD5_HASH = ""
def __init__(self):
self.defaults = Settings.Default()
def __getattr__(self, name):
if name not in self.settings:
raise AttributeError("Setting %s not understood" % name)
try:
return getattr(django_settings, self.prefix + name)
except AttributeError:
return getattr(self.defaults, name)
settings = Settings()
|
"""Application-specific settings for django-authorizenet"""
from django.conf import settings as django_settings
class Settings(object):
"""
Retrieves django.conf settings, using defaults from Default subclass
All usable settings are specified in settings attribute. Use an
``AUTHNET_`` prefix when specifying settings in django.conf.
"""
prefix = 'AUTHNET_'
settings = {'DEBUG', 'LOGIN_ID', 'TRANSACTION_KEY', 'CUSTOMER_MODEL',
'DELIM_CHAR', 'FORCE_TEST_REQUEST', 'EMAIL_CUSTOMER',
'MD5_HASH'}
class Default:
CUSTOMER_MODEL = getattr(
django_settings, 'AUTH_USER_MODEL', "auth.User")
DELIM_CHAR = "|"
FORCE_TEST_REQUEST = False
EMAIL_CUSTOMER = None
MD5_HASH = ""
def __init__(self):
self.defaults = Settings.Default()
def __getattr__(self, name):
if name not in self.settings:
raise AttributeError("Setting %s not understood" % name)
try:
return getattr(django_settings, self.prefix + name)
except AttributeError:
return getattr(self.defaults, name)
settings = Settings()
Fix set notation for Python 2.6"""Application-specific settings for django-authorizenet"""
from django.conf import settings as django_settings
class Settings(object):
"""
Retrieves django.conf settings, using defaults from Default subclass
All usable settings are specified in settings attribute. Use an
``AUTHNET_`` prefix when specifying settings in django.conf.
"""
prefix = 'AUTHNET_'
settings = set(('DEBUG', 'LOGIN_ID', 'TRANSACTION_KEY', 'CUSTOMER_MODEL',
'DELIM_CHAR', 'FORCE_TEST_REQUEST', 'EMAIL_CUSTOMER',
'MD5_HASH'))
class Default:
CUSTOMER_MODEL = getattr(
django_settings, 'AUTH_USER_MODEL', "auth.User")
DELIM_CHAR = "|"
FORCE_TEST_REQUEST = False
EMAIL_CUSTOMER = None
MD5_HASH = ""
def __init__(self):
self.defaults = Settings.Default()
def __getattr__(self, name):
if name not in self.settings:
raise AttributeError("Setting %s not understood" % name)
try:
return getattr(django_settings, self.prefix + name)
except AttributeError:
return getattr(self.defaults, name)
settings = Settings()
|
<commit_before>"""Application-specific settings for django-authorizenet"""
from django.conf import settings as django_settings
class Settings(object):
"""
Retrieves django.conf settings, using defaults from Default subclass
All usable settings are specified in settings attribute. Use an
``AUTHNET_`` prefix when specifying settings in django.conf.
"""
prefix = 'AUTHNET_'
settings = {'DEBUG', 'LOGIN_ID', 'TRANSACTION_KEY', 'CUSTOMER_MODEL',
'DELIM_CHAR', 'FORCE_TEST_REQUEST', 'EMAIL_CUSTOMER',
'MD5_HASH'}
class Default:
CUSTOMER_MODEL = getattr(
django_settings, 'AUTH_USER_MODEL', "auth.User")
DELIM_CHAR = "|"
FORCE_TEST_REQUEST = False
EMAIL_CUSTOMER = None
MD5_HASH = ""
def __init__(self):
self.defaults = Settings.Default()
def __getattr__(self, name):
if name not in self.settings:
raise AttributeError("Setting %s not understood" % name)
try:
return getattr(django_settings, self.prefix + name)
except AttributeError:
return getattr(self.defaults, name)
settings = Settings()
<commit_msg>Fix set notation for Python 2.6<commit_after>"""Application-specific settings for django-authorizenet"""
from django.conf import settings as django_settings
class Settings(object):
"""
Retrieves django.conf settings, using defaults from Default subclass
All usable settings are specified in settings attribute. Use an
``AUTHNET_`` prefix when specifying settings in django.conf.
"""
prefix = 'AUTHNET_'
settings = set(('DEBUG', 'LOGIN_ID', 'TRANSACTION_KEY', 'CUSTOMER_MODEL',
'DELIM_CHAR', 'FORCE_TEST_REQUEST', 'EMAIL_CUSTOMER',
'MD5_HASH'))
class Default:
CUSTOMER_MODEL = getattr(
django_settings, 'AUTH_USER_MODEL', "auth.User")
DELIM_CHAR = "|"
FORCE_TEST_REQUEST = False
EMAIL_CUSTOMER = None
MD5_HASH = ""
def __init__(self):
self.defaults = Settings.Default()
def __getattr__(self, name):
if name not in self.settings:
raise AttributeError("Setting %s not understood" % name)
try:
return getattr(django_settings, self.prefix + name)
except AttributeError:
return getattr(self.defaults, name)
settings = Settings()
|
643b47b2b805a045d9344e11e85ae4334ea79056
|
casia/conf/global_settings.py
|
casia/conf/global_settings.py
|
# -*- coding: utf-8 -*-
# This file is part of Casia - CAS server based on Django
# Copyright (C) 2013 Mateusz Małek
# Casia is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# You should have received a copy of the GNU Affero General Public License
# along with Casia. If not, see <http://www.gnu.org/licenses/>.
# Default Casia settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
TIME_ZONE = 'UTC'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'casia.conf.urls'
WSGI_APPLICATION = 'casia.core.wsgi.application'
|
# -*- coding: utf-8 -*-
# This file is part of Casia - CAS server based on Django
# Copyright (C) 2013 Mateusz Małek
# Casia is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# You should have received a copy of the GNU Affero General Public License
# along with Casia. If not, see <http://www.gnu.org/licenses/>.
# Default Casia settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
TIME_ZONE = 'UTC'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
)
ROOT_URLCONF = 'casia.conf.urls'
WSGI_APPLICATION = 'casia.core.wsgi.application'
|
Remove middleware classes which are currently unnecessary
|
Remove middleware classes which are currently unnecessary
|
Python
|
agpl-3.0
|
mkwm/casia,mkwm/casia
|
# -*- coding: utf-8 -*-
# This file is part of Casia - CAS server based on Django
# Copyright (C) 2013 Mateusz Małek
# Casia is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# You should have received a copy of the GNU Affero General Public License
# along with Casia. If not, see <http://www.gnu.org/licenses/>.
# Default Casia settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
TIME_ZONE = 'UTC'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'casia.conf.urls'
WSGI_APPLICATION = 'casia.core.wsgi.application'
Remove middleware classes which are currently unnecessary
|
# -*- coding: utf-8 -*-
# This file is part of Casia - CAS server based on Django
# Copyright (C) 2013 Mateusz Małek
# Casia is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# You should have received a copy of the GNU Affero General Public License
# along with Casia. If not, see <http://www.gnu.org/licenses/>.
# Default Casia settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
TIME_ZONE = 'UTC'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
)
ROOT_URLCONF = 'casia.conf.urls'
WSGI_APPLICATION = 'casia.core.wsgi.application'
|
<commit_before># -*- coding: utf-8 -*-
# This file is part of Casia - CAS server based on Django
# Copyright (C) 2013 Mateusz Małek
# Casia is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# You should have received a copy of the GNU Affero General Public License
# along with Casia. If not, see <http://www.gnu.org/licenses/>.
# Default Casia settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
TIME_ZONE = 'UTC'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'casia.conf.urls'
WSGI_APPLICATION = 'casia.core.wsgi.application'
<commit_msg>Remove middleware classes which are currently unnecessary<commit_after>
|
# -*- coding: utf-8 -*-
# This file is part of Casia - CAS server based on Django
# Copyright (C) 2013 Mateusz Małek
# Casia is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# You should have received a copy of the GNU Affero General Public License
# along with Casia. If not, see <http://www.gnu.org/licenses/>.
# Default Casia settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
TIME_ZONE = 'UTC'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
)
ROOT_URLCONF = 'casia.conf.urls'
WSGI_APPLICATION = 'casia.core.wsgi.application'
|
# -*- coding: utf-8 -*-
# This file is part of Casia - CAS server based on Django
# Copyright (C) 2013 Mateusz Małek
# Casia is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# You should have received a copy of the GNU Affero General Public License
# along with Casia. If not, see <http://www.gnu.org/licenses/>.
# Default Casia settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
TIME_ZONE = 'UTC'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'casia.conf.urls'
WSGI_APPLICATION = 'casia.core.wsgi.application'
Remove middleware classes which are currently unnecessary# -*- coding: utf-8 -*-
# This file is part of Casia - CAS server based on Django
# Copyright (C) 2013 Mateusz Małek
# Casia is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# You should have received a copy of the GNU Affero General Public License
# along with Casia. If not, see <http://www.gnu.org/licenses/>.
# Default Casia settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
TIME_ZONE = 'UTC'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
)
ROOT_URLCONF = 'casia.conf.urls'
WSGI_APPLICATION = 'casia.core.wsgi.application'
|
<commit_before># -*- coding: utf-8 -*-
# This file is part of Casia - CAS server based on Django
# Copyright (C) 2013 Mateusz Małek
# Casia is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# You should have received a copy of the GNU Affero General Public License
# along with Casia. If not, see <http://www.gnu.org/licenses/>.
# Default Casia settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
TIME_ZONE = 'UTC'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'casia.conf.urls'
WSGI_APPLICATION = 'casia.core.wsgi.application'
<commit_msg>Remove middleware classes which are currently unnecessary<commit_after># -*- coding: utf-8 -*-
# This file is part of Casia - CAS server based on Django
# Copyright (C) 2013 Mateusz Małek
# Casia is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
# You should have received a copy of the GNU Affero General Public License
# along with Casia. If not, see <http://www.gnu.org/licenses/>.
# Default Casia settings. Override these with settings in the module
# pointed-to by the DJANGO_SETTINGS_MODULE environment variable.
TIME_ZONE = 'UTC'
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
)
ROOT_URLCONF = 'casia.conf.urls'
WSGI_APPLICATION = 'casia.core.wsgi.application'
|
df37b65872bc1b5a21a1e74934b834472fc6ca7b
|
buffer/managers/updates.py
|
buffer/managers/updates.py
|
from buffer.models.update import Update
PATHS = {
'GET_PENDING': 'profiles/%s/updates/pending.json',
}
class Updates(list):
def __init__(self, api, profile_id):
self.api = api
self.profile_id = profile_id
@property
def pending(self):
url = PATHS['GET_PENDING'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
self.append(Update(api=self.api, raw_response=update))
return self
|
from buffer.models.update import Update
PATHS = {
'GET_PENDING': 'profiles/%s/updates/pending.json',
'GET_SENT': 'profiles/%s/updates/sent.json',
}
class Updates(list):
def __init__(self, api, profile_id):
self.api = api
self.profile_id = profile_id
self.__pending = []
self.__sent = []
@property
def pending(self):
pending_updates = []
url = paths['get_pending'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
pending_updates.append(update(api=self.api, raw_response=update))
self.__pending = pending_updates
return self.__pending
@property
def sent(self):
url = PATHS['GET_SENT'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
self.append(Update(api=self.api, raw_response=update))
return self
|
Improve proper using of property decorator and logic
|
Improve proper using of property decorator and logic
|
Python
|
mit
|
vtemian/buffpy,bufferapp/buffer-python
|
from buffer.models.update import Update
PATHS = {
'GET_PENDING': 'profiles/%s/updates/pending.json',
}
class Updates(list):
def __init__(self, api, profile_id):
self.api = api
self.profile_id = profile_id
@property
def pending(self):
url = PATHS['GET_PENDING'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
self.append(Update(api=self.api, raw_response=update))
return self
Improve proper using of property decorator and logic
|
from buffer.models.update import Update
PATHS = {
'GET_PENDING': 'profiles/%s/updates/pending.json',
'GET_SENT': 'profiles/%s/updates/sent.json',
}
class Updates(list):
def __init__(self, api, profile_id):
self.api = api
self.profile_id = profile_id
self.__pending = []
self.__sent = []
@property
def pending(self):
pending_updates = []
url = paths['get_pending'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
pending_updates.append(update(api=self.api, raw_response=update))
self.__pending = pending_updates
return self.__pending
@property
def sent(self):
url = PATHS['GET_SENT'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
self.append(Update(api=self.api, raw_response=update))
return self
|
<commit_before>from buffer.models.update import Update
PATHS = {
'GET_PENDING': 'profiles/%s/updates/pending.json',
}
class Updates(list):
def __init__(self, api, profile_id):
self.api = api
self.profile_id = profile_id
@property
def pending(self):
url = PATHS['GET_PENDING'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
self.append(Update(api=self.api, raw_response=update))
return self
<commit_msg>Improve proper using of property decorator and logic<commit_after>
|
from buffer.models.update import Update
PATHS = {
'GET_PENDING': 'profiles/%s/updates/pending.json',
'GET_SENT': 'profiles/%s/updates/sent.json',
}
class Updates(list):
def __init__(self, api, profile_id):
self.api = api
self.profile_id = profile_id
self.__pending = []
self.__sent = []
@property
def pending(self):
pending_updates = []
url = paths['get_pending'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
pending_updates.append(update(api=self.api, raw_response=update))
self.__pending = pending_updates
return self.__pending
@property
def sent(self):
url = PATHS['GET_SENT'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
self.append(Update(api=self.api, raw_response=update))
return self
|
from buffer.models.update import Update
PATHS = {
'GET_PENDING': 'profiles/%s/updates/pending.json',
}
class Updates(list):
def __init__(self, api, profile_id):
self.api = api
self.profile_id = profile_id
@property
def pending(self):
url = PATHS['GET_PENDING'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
self.append(Update(api=self.api, raw_response=update))
return self
Improve proper using of property decorator and logicfrom buffer.models.update import Update
PATHS = {
'GET_PENDING': 'profiles/%s/updates/pending.json',
'GET_SENT': 'profiles/%s/updates/sent.json',
}
class Updates(list):
def __init__(self, api, profile_id):
self.api = api
self.profile_id = profile_id
self.__pending = []
self.__sent = []
@property
def pending(self):
pending_updates = []
url = paths['get_pending'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
pending_updates.append(update(api=self.api, raw_response=update))
self.__pending = pending_updates
return self.__pending
@property
def sent(self):
url = PATHS['GET_SENT'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
self.append(Update(api=self.api, raw_response=update))
return self
|
<commit_before>from buffer.models.update import Update
PATHS = {
'GET_PENDING': 'profiles/%s/updates/pending.json',
}
class Updates(list):
def __init__(self, api, profile_id):
self.api = api
self.profile_id = profile_id
@property
def pending(self):
url = PATHS['GET_PENDING'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
self.append(Update(api=self.api, raw_response=update))
return self
<commit_msg>Improve proper using of property decorator and logic<commit_after>from buffer.models.update import Update
PATHS = {
'GET_PENDING': 'profiles/%s/updates/pending.json',
'GET_SENT': 'profiles/%s/updates/sent.json',
}
class Updates(list):
def __init__(self, api, profile_id):
self.api = api
self.profile_id = profile_id
self.__pending = []
self.__sent = []
@property
def pending(self):
pending_updates = []
url = paths['get_pending'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
pending_updates.append(update(api=self.api, raw_response=update))
self.__pending = pending_updates
return self.__pending
@property
def sent(self):
url = PATHS['GET_SENT'] % self.profile_id
response = self.api.get(url=url)
for update in response['updates']:
self.append(Update(api=self.api, raw_response=update))
return self
|
e45b1f417c535bb8fef1ed18c8736525bbd0acc6
|
appengine_config.py
|
appengine_config.py
|
"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Appstats URL.
# TODO: Drop this once it is the default.
appstats_stats_url = '/_ah/stats'
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.0')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
|
"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Appstats URL.
# TODO: Drop this once it is the default.
appstats_stats_url = '/_ah/stats'
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.1')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
|
Switch Django version from 1.0 to 1.1
|
Switch Django version from 1.0 to 1.1
|
Python
|
apache-2.0
|
fuzan/rietveld,ericmckean/rietveld,gavioto/rietveld,supriyantomaftuh/rietveld,gavioto/rietveld,nareshboddepalli/touchites-codereview,Koulio/rietveld,openlabs/cr.openlabs.co.in,nareshboddepalli/touchites-codereview,nbodepallictr/touchites-codereview,salomon1184/rietveld,openlabs/cr.openlabs.co.in,draem0507/rietveld,foolonhill/rietveld,arg0/rietveld,google-code-export/rietveld,andyzsf/rietveld,gco/rietveld,berkus/rietveld,gco/rietveld,fuzan/rietveld,rietveld-codereview/rietveld,xtypebee/rietveld,xtypebee/rietveld,ericmckean/rietveld,arg0/rietveld,aungzanbaw/rietveld,andyzsf/rietveld,kscharding/integral-solutions-smxq,sajingeo/rietveld,google-code-export/rietveld,nareshboddepalli/touchites-test,v3ss0n/rietveld,sajingeo/rietveld,dushmis/rietveld,robfig/rietveld,draem0507/rietveld,aungzanbaw/rietveld,kscharding/integral-solutions-smxq,v3ss0n/rietveld,salomon1184/rietveld,aungzanbaw/rietveld,ericmckean/rietveld,nareshboddepalli/touchites-test,gavioto/rietveld,DeanHere/rietveld,nareshboddepalli/touchites-test,rietveld-codereview/rietveld,google-code-export/rietveld,nbodepallictr/touchites-codereview,robfig/rietveld,kscharding/integral-solutions-smxq,robfig/rietveld,supriyantomaftuh/rietveld,foolonhill/rietveld,foolonhill/rietveld,berkus/rietveld,sajingeo/rietveld,supriyantomaftuh/rietveld,v3ss0n/rietveld,rietveld-codereview/rietveld,nbodepallictr/touchites-test,salomon1184/rietveld,nbodepallictr/touchites-codereview,Koulio/rietveld,dushmis/rietveld,arg0/rietveld,nbodepallictr/touchites-test,xtypebee/rietveld,dushmis/rietveld,openlabs/cr.openlabs.co.in,andyzsf/rietveld,berkus/rietveld,gco/rietveld,draem0507/rietveld,nbodepallictr/touchites-test,DeanHere/rietveld,rietveld-codereview/rietveld,nareshboddepalli/touchites-codereview,Koulio/rietveld,fuzan/rietveld,DeanHere/rietveld
|
"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Appstats URL.
# TODO: Drop this once it is the default.
appstats_stats_url = '/_ah/stats'
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.0')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
Switch Django version from 1.0 to 1.1
|
"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Appstats URL.
# TODO: Drop this once it is the default.
appstats_stats_url = '/_ah/stats'
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.1')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
|
<commit_before>"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Appstats URL.
# TODO: Drop this once it is the default.
appstats_stats_url = '/_ah/stats'
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.0')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
<commit_msg>Switch Django version from 1.0 to 1.1<commit_after>
|
"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Appstats URL.
# TODO: Drop this once it is the default.
appstats_stats_url = '/_ah/stats'
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.1')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
|
"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Appstats URL.
# TODO: Drop this once it is the default.
appstats_stats_url = '/_ah/stats'
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.0')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
Switch Django version from 1.0 to 1.1"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Appstats URL.
# TODO: Drop this once it is the default.
appstats_stats_url = '/_ah/stats'
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.1')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
|
<commit_before>"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Appstats URL.
# TODO: Drop this once it is the default.
appstats_stats_url = '/_ah/stats'
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.0')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
<commit_msg>Switch Django version from 1.0 to 1.1<commit_after>"""Configuration."""
import logging
import os
import re
from google.appengine.ext.appstats import recording
logging.info('Loading %s from %s', __name__, __file__)
# Custom webapp middleware to add Appstats.
def webapp_add_wsgi_middleware(app):
app = recording.appstats_wsgi_middleware(app)
return app
# Appstats URL.
# TODO: Drop this once it is the default.
appstats_stats_url = '/_ah/stats'
# Custom Appstats path normalization.
def appstats_normalize_path(path):
if path.startswith('/user/'):
return '/user/X'
if path.startswith('/user_popup/'):
return '/user_popup/X'
if path.startswith('/rss/'):
i = path.find('/', 5)
if i > 0:
return path[:i] + '/X'
return re.sub(r'\d+', 'X', path)
# Declare the Django version we need.
from google.appengine.dist import use_library
use_library('django', '1.1')
# Fail early if we can't import Django 1.x. Log identifying information.
import django
logging.info('django.__file__ = %r, django.VERSION = %r',
django.__file__, django.VERSION)
assert django.VERSION[0] >= 1, "This Django version is too old"
|
a4375a6ec5ca54b887527885235317986011801c
|
guesser.py
|
guesser.py
|
from synt.utils.redis_manager import RedisManager
from synt.utils.extractors import best_word_feats
from synt.utils.text import sanitize_text
MANAGER = RedisManager()
DEFAULT_CLASSIFIER = MANAGER.load_classifier()
def guess(text, classifier=DEFAULT_CLASSIFIER):
"""Takes a blob of text and returns the sentiment and confidence score."""
assert classifier, "Needs a classifier."
bag_of_words = best_word_feats(sanitize_text(text))
if bag_of_words:
guess = classifier.classify(bag_of_words)
prob = classifier.prob_classify(bag_of_words)
return (guess, [(prob.prob(sample),sample) for sample in prob.samples()])
|
from synt.utils.redis_manager import RedisManager
from synt.utils.extractors import best_word_feats
from synt.utils.text import sanitize_text
MANAGER = RedisManager()
DEFAULT_CLASSIFIER = MANAGER.load_classifier()
def guess(text, classifier=DEFAULT_CLASSIFIER):
"""Takes a blob of text and returns the sentiment and confidence score."""
assert classifier, "Needs a classifier."
bag_of_words = best_word_feats(sanitize_text(text))
if bag_of_words:
guess = classifier.classify(bag_of_words)
prob = classifier.prob_classify(bag_of_words)
#return a -1 .. 1 score
score = prob.prob('positive') - prob.prob('negative')
return score
|
Return a -1 .. 1 sentiment score.
|
Return a -1 .. 1 sentiment score.
|
Python
|
agpl-3.0
|
lrvick/synt
|
from synt.utils.redis_manager import RedisManager
from synt.utils.extractors import best_word_feats
from synt.utils.text import sanitize_text
MANAGER = RedisManager()
DEFAULT_CLASSIFIER = MANAGER.load_classifier()
def guess(text, classifier=DEFAULT_CLASSIFIER):
"""Takes a blob of text and returns the sentiment and confidence score."""
assert classifier, "Needs a classifier."
bag_of_words = best_word_feats(sanitize_text(text))
if bag_of_words:
guess = classifier.classify(bag_of_words)
prob = classifier.prob_classify(bag_of_words)
return (guess, [(prob.prob(sample),sample) for sample in prob.samples()])
Return a -1 .. 1 sentiment score.
|
from synt.utils.redis_manager import RedisManager
from synt.utils.extractors import best_word_feats
from synt.utils.text import sanitize_text
MANAGER = RedisManager()
DEFAULT_CLASSIFIER = MANAGER.load_classifier()
def guess(text, classifier=DEFAULT_CLASSIFIER):
"""Takes a blob of text and returns the sentiment and confidence score."""
assert classifier, "Needs a classifier."
bag_of_words = best_word_feats(sanitize_text(text))
if bag_of_words:
guess = classifier.classify(bag_of_words)
prob = classifier.prob_classify(bag_of_words)
#return a -1 .. 1 score
score = prob.prob('positive') - prob.prob('negative')
return score
|
<commit_before>from synt.utils.redis_manager import RedisManager
from synt.utils.extractors import best_word_feats
from synt.utils.text import sanitize_text
MANAGER = RedisManager()
DEFAULT_CLASSIFIER = MANAGER.load_classifier()
def guess(text, classifier=DEFAULT_CLASSIFIER):
"""Takes a blob of text and returns the sentiment and confidence score."""
assert classifier, "Needs a classifier."
bag_of_words = best_word_feats(sanitize_text(text))
if bag_of_words:
guess = classifier.classify(bag_of_words)
prob = classifier.prob_classify(bag_of_words)
return (guess, [(prob.prob(sample),sample) for sample in prob.samples()])
<commit_msg>Return a -1 .. 1 sentiment score.<commit_after>
|
from synt.utils.redis_manager import RedisManager
from synt.utils.extractors import best_word_feats
from synt.utils.text import sanitize_text
MANAGER = RedisManager()
DEFAULT_CLASSIFIER = MANAGER.load_classifier()
def guess(text, classifier=DEFAULT_CLASSIFIER):
"""Takes a blob of text and returns the sentiment and confidence score."""
assert classifier, "Needs a classifier."
bag_of_words = best_word_feats(sanitize_text(text))
if bag_of_words:
guess = classifier.classify(bag_of_words)
prob = classifier.prob_classify(bag_of_words)
#return a -1 .. 1 score
score = prob.prob('positive') - prob.prob('negative')
return score
|
from synt.utils.redis_manager import RedisManager
from synt.utils.extractors import best_word_feats
from synt.utils.text import sanitize_text
MANAGER = RedisManager()
DEFAULT_CLASSIFIER = MANAGER.load_classifier()
def guess(text, classifier=DEFAULT_CLASSIFIER):
"""Takes a blob of text and returns the sentiment and confidence score."""
assert classifier, "Needs a classifier."
bag_of_words = best_word_feats(sanitize_text(text))
if bag_of_words:
guess = classifier.classify(bag_of_words)
prob = classifier.prob_classify(bag_of_words)
return (guess, [(prob.prob(sample),sample) for sample in prob.samples()])
Return a -1 .. 1 sentiment score.from synt.utils.redis_manager import RedisManager
from synt.utils.extractors import best_word_feats
from synt.utils.text import sanitize_text
MANAGER = RedisManager()
DEFAULT_CLASSIFIER = MANAGER.load_classifier()
def guess(text, classifier=DEFAULT_CLASSIFIER):
"""Takes a blob of text and returns the sentiment and confidence score."""
assert classifier, "Needs a classifier."
bag_of_words = best_word_feats(sanitize_text(text))
if bag_of_words:
guess = classifier.classify(bag_of_words)
prob = classifier.prob_classify(bag_of_words)
#return a -1 .. 1 score
score = prob.prob('positive') - prob.prob('negative')
return score
|
<commit_before>from synt.utils.redis_manager import RedisManager
from synt.utils.extractors import best_word_feats
from synt.utils.text import sanitize_text
MANAGER = RedisManager()
DEFAULT_CLASSIFIER = MANAGER.load_classifier()
def guess(text, classifier=DEFAULT_CLASSIFIER):
"""Takes a blob of text and returns the sentiment and confidence score."""
assert classifier, "Needs a classifier."
bag_of_words = best_word_feats(sanitize_text(text))
if bag_of_words:
guess = classifier.classify(bag_of_words)
prob = classifier.prob_classify(bag_of_words)
return (guess, [(prob.prob(sample),sample) for sample in prob.samples()])
<commit_msg>Return a -1 .. 1 sentiment score.<commit_after>from synt.utils.redis_manager import RedisManager
from synt.utils.extractors import best_word_feats
from synt.utils.text import sanitize_text
MANAGER = RedisManager()
DEFAULT_CLASSIFIER = MANAGER.load_classifier()
def guess(text, classifier=DEFAULT_CLASSIFIER):
"""Takes a blob of text and returns the sentiment and confidence score."""
assert classifier, "Needs a classifier."
bag_of_words = best_word_feats(sanitize_text(text))
if bag_of_words:
guess = classifier.classify(bag_of_words)
prob = classifier.prob_classify(bag_of_words)
#return a -1 .. 1 score
score = prob.prob('positive') - prob.prob('negative')
return score
|
e01e0d7b1f3bb5d54d428c8f237b72a3c5170b7d
|
number_to_words_test.py
|
number_to_words_test.py
|
import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
if __name__ == '__main__':
unittest.main()
|
import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
|
Add tests for number 0 to 9
|
Add tests for number 0 to 9
|
Python
|
mit
|
ianfieldhouse/number_to_words
|
import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
if __name__ == '__main__':
unittest.main()
Add tests for number 0 to 9
|
import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
if __name__ == '__main__':
unittest.main()
<commit_msg>Add tests for number 0 to 9<commit_after>
|
import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
|
import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
if __name__ == '__main__':
unittest.main()
Add tests for number 0 to 9import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
if __name__ == '__main__':
unittest.main()
<commit_msg>Add tests for number 0 to 9<commit_after>import unittest
from number_to_words import NumberToWords
class TestNumberToWords(unittest.TestCase):
def setUp(self):
self.n2w = NumberToWords()
def tearDown(self):
self.n2w = None
def test_zero_and_single_digits(self):
NUMBERS = {
0: 'zero', 1: 'one', 2: 'two', 3: 'three', 4: 'four', 5: 'five',
6: 'six', 7: 'seven', 8: 'eight', 9: 'nine'
}
self.assert_numbers_equal_to_strings(NUMBERS)
def assert_numbers_equal_to_strings(self, numbers):
for number, string in numbers.iteritems():
self.assertEqual(string, self.n2w.convert(number))
if __name__ == '__main__':
unittest.main()
|
3b768fdc642471446092a08446ec8f2ab08281c3
|
clean.py
|
clean.py
|
import GutterColor.settings as settings
class Clean:
"""Clean up the cache and generated icons"""
def __init__(self, view):
pass
|
import GutterColor.settings as settings
from os import walk, remove, path, listdir
from shutil import rmtree
from threading import Thread
class Clean(Thread):
"""Clean up the cache and generated icons"""
def __init__(self, files):
Thread.__init__(self)
self.files = files
def run(self):
self.remove_folders()
self.remove_files()
def folder_ids(self, name):
"""Return all the open folder ids"""
name = name.split('/')[-1]
return int(name) if not name == 'icons' else None
def file_ids(self, name):
"""Return all file ids"""
name = name.split('/')[-1]
return int(name) if not name == 'icons' else None
def remove_folders(self):
"""Remove all the icon folders which are not currently open"""
# Get all the folder ids
folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)])))
# Delete the folders
for folder in folders:
if folder not in self.files:
rmtree(path.join(settings.ICON_PATH, str(folder)))
def remove_files(self):
"""Remove all the cached files which are not currently open"""
files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ]
for f in files:
if f == '.keep': pass
if int(f) not in self.files:
remove(path.join(settings.CACHE_PATH, f))
|
Add Clean class to remove files/folders.
|
Add Clean class to remove files/folders.
|
Python
|
mit
|
ggordan/GutterColor,ggordan/GutterColor
|
import GutterColor.settings as settings
class Clean:
"""Clean up the cache and generated icons"""
def __init__(self, view):
pass
Add Clean class to remove files/folders.
|
import GutterColor.settings as settings
from os import walk, remove, path, listdir
from shutil import rmtree
from threading import Thread
class Clean(Thread):
"""Clean up the cache and generated icons"""
def __init__(self, files):
Thread.__init__(self)
self.files = files
def run(self):
self.remove_folders()
self.remove_files()
def folder_ids(self, name):
"""Return all the open folder ids"""
name = name.split('/')[-1]
return int(name) if not name == 'icons' else None
def file_ids(self, name):
"""Return all file ids"""
name = name.split('/')[-1]
return int(name) if not name == 'icons' else None
def remove_folders(self):
"""Remove all the icon folders which are not currently open"""
# Get all the folder ids
folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)])))
# Delete the folders
for folder in folders:
if folder not in self.files:
rmtree(path.join(settings.ICON_PATH, str(folder)))
def remove_files(self):
"""Remove all the cached files which are not currently open"""
files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ]
for f in files:
if f == '.keep': pass
if int(f) not in self.files:
remove(path.join(settings.CACHE_PATH, f))
|
<commit_before>import GutterColor.settings as settings
class Clean:
"""Clean up the cache and generated icons"""
def __init__(self, view):
pass
<commit_msg>Add Clean class to remove files/folders.<commit_after>
|
import GutterColor.settings as settings
from os import walk, remove, path, listdir
from shutil import rmtree
from threading import Thread
class Clean(Thread):
"""Clean up the cache and generated icons"""
def __init__(self, files):
Thread.__init__(self)
self.files = files
def run(self):
self.remove_folders()
self.remove_files()
def folder_ids(self, name):
"""Return all the open folder ids"""
name = name.split('/')[-1]
return int(name) if not name == 'icons' else None
def file_ids(self, name):
"""Return all file ids"""
name = name.split('/')[-1]
return int(name) if not name == 'icons' else None
def remove_folders(self):
"""Remove all the icon folders which are not currently open"""
# Get all the folder ids
folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)])))
# Delete the folders
for folder in folders:
if folder not in self.files:
rmtree(path.join(settings.ICON_PATH, str(folder)))
def remove_files(self):
"""Remove all the cached files which are not currently open"""
files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ]
for f in files:
if f == '.keep': pass
if int(f) not in self.files:
remove(path.join(settings.CACHE_PATH, f))
|
import GutterColor.settings as settings
class Clean:
"""Clean up the cache and generated icons"""
def __init__(self, view):
pass
Add Clean class to remove files/folders.import GutterColor.settings as settings
from os import walk, remove, path, listdir
from shutil import rmtree
from threading import Thread
class Clean(Thread):
"""Clean up the cache and generated icons"""
def __init__(self, files):
Thread.__init__(self)
self.files = files
def run(self):
self.remove_folders()
self.remove_files()
def folder_ids(self, name):
"""Return all the open folder ids"""
name = name.split('/')[-1]
return int(name) if not name == 'icons' else None
def file_ids(self, name):
"""Return all file ids"""
name = name.split('/')[-1]
return int(name) if not name == 'icons' else None
def remove_folders(self):
"""Remove all the icon folders which are not currently open"""
# Get all the folder ids
folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)])))
# Delete the folders
for folder in folders:
if folder not in self.files:
rmtree(path.join(settings.ICON_PATH, str(folder)))
def remove_files(self):
"""Remove all the cached files which are not currently open"""
files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ]
for f in files:
if f == '.keep': pass
if int(f) not in self.files:
remove(path.join(settings.CACHE_PATH, f))
|
<commit_before>import GutterColor.settings as settings
class Clean:
"""Clean up the cache and generated icons"""
def __init__(self, view):
pass
<commit_msg>Add Clean class to remove files/folders.<commit_after>import GutterColor.settings as settings
from os import walk, remove, path, listdir
from shutil import rmtree
from threading import Thread
class Clean(Thread):
"""Clean up the cache and generated icons"""
def __init__(self, files):
Thread.__init__(self)
self.files = files
def run(self):
self.remove_folders()
self.remove_files()
def folder_ids(self, name):
"""Return all the open folder ids"""
name = name.split('/')[-1]
return int(name) if not name == 'icons' else None
def file_ids(self, name):
"""Return all file ids"""
name = name.split('/')[-1]
return int(name) if not name == 'icons' else None
def remove_folders(self):
"""Remove all the icon folders which are not currently open"""
# Get all the folder ids
folders = list(filter(None, map(self.folder_ids, [x[0] for x in walk(settings.ICON_PATH)])))
# Delete the folders
for folder in folders:
if folder not in self.files:
rmtree(path.join(settings.ICON_PATH, str(folder)))
def remove_files(self):
"""Remove all the cached files which are not currently open"""
files = [ f for f in listdir(settings.CACHE_PATH) if path.isfile(path.join(settings.CACHE_PATH,f)) ]
for f in files:
if f == '.keep': pass
if int(f) not in self.files:
remove(path.join(settings.CACHE_PATH, f))
|
88995b5e2bcd6f3e21d8810a97f3c38cc84e8189
|
pulldb/subscriptions.py
|
pulldb/subscriptions.py
|
# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
|
# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
from pulldb.users import user_key
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
def subscription_key(volume_key, create=False):
key = None
user = user_key()
subscription = Subscription.query(Subscription.volume==volume_key,
ancestor=user).get()
if subscription:
key = subscription.key
elif create:
subscription = Subscription(parent=user,
volume=volume_key)
subscription.put()
key = user.key
return key
|
Add basic subscription fetcher / creator
|
Add basic subscription fetcher / creator
|
Python
|
mit
|
xchewtoyx/pulldb
|
# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
Add basic subscription fetcher / creator
|
# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
from pulldb.users import user_key
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
def subscription_key(volume_key, create=False):
key = None
user = user_key()
subscription = Subscription.query(Subscription.volume==volume_key,
ancestor=user).get()
if subscription:
key = subscription.key
elif create:
subscription = Subscription(parent=user,
volume=volume_key)
subscription.put()
key = user.key
return key
|
<commit_before># Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
<commit_msg>Add basic subscription fetcher / creator<commit_after>
|
# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
from pulldb.users import user_key
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
def subscription_key(volume_key, create=False):
key = None
user = user_key()
subscription = Subscription.query(Subscription.volume==volume_key,
ancestor=user).get()
if subscription:
key = subscription.key
elif create:
subscription = Subscription(parent=user,
volume=volume_key)
subscription.put()
key = user.key
return key
|
# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
Add basic subscription fetcher / creator# Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
from pulldb.users import user_key
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
def subscription_key(volume_key, create=False):
key = None
user = user_key()
subscription = Subscription.query(Subscription.volume==volume_key,
ancestor=user).get()
if subscription:
key = subscription.key
elif create:
subscription = Subscription(parent=user,
volume=volume_key)
subscription.put()
key = user.key
return key
|
<commit_before># Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
<commit_msg>Add basic subscription fetcher / creator<commit_after># Copyright 2013 Russell Heilling
from google.appengine.ext import ndb
from pulldb.users import user_key
class Subscription(ndb.Model):
'''Subscription object in datastore.
Holds subscription data. Parent should be User.
'''
start_date = ndb.DateProperty()
volume = ndb.KeyProperty(kind='Volume')
def subscription_key(volume_key, create=False):
key = None
user = user_key()
subscription = Subscription.query(Subscription.volume==volume_key,
ancestor=user).get()
if subscription:
key = subscription.key
elif create:
subscription = Subscription(parent=user,
volume=volume_key)
subscription.put()
key = user.key
return key
|
6424d1998d10d4d5e1165e7c530d414e86e1067b
|
tests/example_project/tests/test_newman/helpers.py
|
tests/example_project/tests/test_newman/helpers.py
|
from djangosanetesting import SeleniumTestCase
class NewmanTestCase(SeleniumTestCase):
fixtures = ['newman_admin_user']
SUPERUSER_USERNAME = u"superman"
SUPERUSER_PASSWORD = u"xxx"
NEWMAN_URI = "/newman/"
def __init__(self):
super(NewmanTestCase, self).__init__()
self.elements = {
'navigation' : {
'logout' : '//a[@href="%slogout/"]' % self.NEWMAN_URI
},
'pages' : {
'login' : {
'submit' : "//input[@type='submit']"
}
}
}
def login_superuser(self):
self.selenium.open(self.NEWMAN_URI)
self.selenium.type("id_username", self.SUPERUSER_USERNAME)
self.selenium.type("id_password", self.SUPERUSER_PASSWORD)
self.selenium.click(self.elements['pages']['login']['submit'])
def logout(self):
self.selenium.click(self.elements['navigation']['logout'])
self.selenium.wait_for_page_to_load(30000)
self.selenium.is_text_present(u"Thanks for spending some quality time with the Web site today.")
|
from djangosanetesting import SeleniumTestCase
class NewmanTestCase(SeleniumTestCase):
fixtures = ['newman_admin_user']
SUPERUSER_USERNAME = u"superman"
SUPERUSER_PASSWORD = u"xxx"
NEWMAN_URI = "/newman/"
def __init__(self):
super(NewmanTestCase, self).__init__()
self.elements = {
'navigation' : {
'logout' : "//a[@class='icn logout']"
},
'pages' : {
'login' : {
'submit' : "//input[@type='submit']"
}
}
}
def login_superuser(self):
self.selenium.open(self.NEWMAN_URI)
self.selenium.type("id_username", self.SUPERUSER_USERNAME)
self.selenium.type("id_password", self.SUPERUSER_PASSWORD)
self.selenium.click(self.elements['pages']['login']['submit'])
def logout(self):
self.selenium.click(self.elements['navigation']['logout'])
self.selenium.wait_for_page_to_load(30000)
self.selenium.is_text_present(u"Thanks for spending some quality time with the Web site today.")
|
Use class for logout instead of href, to please IE8
|
Use class for logout instead of href, to please IE8
|
Python
|
bsd-3-clause
|
WhiskeyMedia/ella,MichalMaM/ella,petrlosa/ella,petrlosa/ella,WhiskeyMedia/ella,ella/ella,whalerock/ella,MichalMaM/ella,whalerock/ella,whalerock/ella
|
from djangosanetesting import SeleniumTestCase
class NewmanTestCase(SeleniumTestCase):
fixtures = ['newman_admin_user']
SUPERUSER_USERNAME = u"superman"
SUPERUSER_PASSWORD = u"xxx"
NEWMAN_URI = "/newman/"
def __init__(self):
super(NewmanTestCase, self).__init__()
self.elements = {
'navigation' : {
'logout' : '//a[@href="%slogout/"]' % self.NEWMAN_URI
},
'pages' : {
'login' : {
'submit' : "//input[@type='submit']"
}
}
}
def login_superuser(self):
self.selenium.open(self.NEWMAN_URI)
self.selenium.type("id_username", self.SUPERUSER_USERNAME)
self.selenium.type("id_password", self.SUPERUSER_PASSWORD)
self.selenium.click(self.elements['pages']['login']['submit'])
def logout(self):
self.selenium.click(self.elements['navigation']['logout'])
self.selenium.wait_for_page_to_load(30000)
self.selenium.is_text_present(u"Thanks for spending some quality time with the Web site today.")
Use class for logout instead of href, to please IE8
|
from djangosanetesting import SeleniumTestCase
class NewmanTestCase(SeleniumTestCase):
fixtures = ['newman_admin_user']
SUPERUSER_USERNAME = u"superman"
SUPERUSER_PASSWORD = u"xxx"
NEWMAN_URI = "/newman/"
def __init__(self):
super(NewmanTestCase, self).__init__()
self.elements = {
'navigation' : {
'logout' : "//a[@class='icn logout']"
},
'pages' : {
'login' : {
'submit' : "//input[@type='submit']"
}
}
}
def login_superuser(self):
self.selenium.open(self.NEWMAN_URI)
self.selenium.type("id_username", self.SUPERUSER_USERNAME)
self.selenium.type("id_password", self.SUPERUSER_PASSWORD)
self.selenium.click(self.elements['pages']['login']['submit'])
def logout(self):
self.selenium.click(self.elements['navigation']['logout'])
self.selenium.wait_for_page_to_load(30000)
self.selenium.is_text_present(u"Thanks for spending some quality time with the Web site today.")
|
<commit_before>from djangosanetesting import SeleniumTestCase
class NewmanTestCase(SeleniumTestCase):
fixtures = ['newman_admin_user']
SUPERUSER_USERNAME = u"superman"
SUPERUSER_PASSWORD = u"xxx"
NEWMAN_URI = "/newman/"
def __init__(self):
super(NewmanTestCase, self).__init__()
self.elements = {
'navigation' : {
'logout' : '//a[@href="%slogout/"]' % self.NEWMAN_URI
},
'pages' : {
'login' : {
'submit' : "//input[@type='submit']"
}
}
}
def login_superuser(self):
self.selenium.open(self.NEWMAN_URI)
self.selenium.type("id_username", self.SUPERUSER_USERNAME)
self.selenium.type("id_password", self.SUPERUSER_PASSWORD)
self.selenium.click(self.elements['pages']['login']['submit'])
def logout(self):
self.selenium.click(self.elements['navigation']['logout'])
self.selenium.wait_for_page_to_load(30000)
self.selenium.is_text_present(u"Thanks for spending some quality time with the Web site today.")
<commit_msg>Use class for logout instead of href, to please IE8<commit_after>
|
from djangosanetesting import SeleniumTestCase
class NewmanTestCase(SeleniumTestCase):
fixtures = ['newman_admin_user']
SUPERUSER_USERNAME = u"superman"
SUPERUSER_PASSWORD = u"xxx"
NEWMAN_URI = "/newman/"
def __init__(self):
super(NewmanTestCase, self).__init__()
self.elements = {
'navigation' : {
'logout' : "//a[@class='icn logout']"
},
'pages' : {
'login' : {
'submit' : "//input[@type='submit']"
}
}
}
def login_superuser(self):
self.selenium.open(self.NEWMAN_URI)
self.selenium.type("id_username", self.SUPERUSER_USERNAME)
self.selenium.type("id_password", self.SUPERUSER_PASSWORD)
self.selenium.click(self.elements['pages']['login']['submit'])
def logout(self):
self.selenium.click(self.elements['navigation']['logout'])
self.selenium.wait_for_page_to_load(30000)
self.selenium.is_text_present(u"Thanks for spending some quality time with the Web site today.")
|
from djangosanetesting import SeleniumTestCase
class NewmanTestCase(SeleniumTestCase):
fixtures = ['newman_admin_user']
SUPERUSER_USERNAME = u"superman"
SUPERUSER_PASSWORD = u"xxx"
NEWMAN_URI = "/newman/"
def __init__(self):
super(NewmanTestCase, self).__init__()
self.elements = {
'navigation' : {
'logout' : '//a[@href="%slogout/"]' % self.NEWMAN_URI
},
'pages' : {
'login' : {
'submit' : "//input[@type='submit']"
}
}
}
def login_superuser(self):
self.selenium.open(self.NEWMAN_URI)
self.selenium.type("id_username", self.SUPERUSER_USERNAME)
self.selenium.type("id_password", self.SUPERUSER_PASSWORD)
self.selenium.click(self.elements['pages']['login']['submit'])
def logout(self):
self.selenium.click(self.elements['navigation']['logout'])
self.selenium.wait_for_page_to_load(30000)
self.selenium.is_text_present(u"Thanks for spending some quality time with the Web site today.")
Use class for logout instead of href, to please IE8from djangosanetesting import SeleniumTestCase
class NewmanTestCase(SeleniumTestCase):
fixtures = ['newman_admin_user']
SUPERUSER_USERNAME = u"superman"
SUPERUSER_PASSWORD = u"xxx"
NEWMAN_URI = "/newman/"
def __init__(self):
super(NewmanTestCase, self).__init__()
self.elements = {
'navigation' : {
'logout' : "//a[@class='icn logout']"
},
'pages' : {
'login' : {
'submit' : "//input[@type='submit']"
}
}
}
def login_superuser(self):
self.selenium.open(self.NEWMAN_URI)
self.selenium.type("id_username", self.SUPERUSER_USERNAME)
self.selenium.type("id_password", self.SUPERUSER_PASSWORD)
self.selenium.click(self.elements['pages']['login']['submit'])
def logout(self):
self.selenium.click(self.elements['navigation']['logout'])
self.selenium.wait_for_page_to_load(30000)
self.selenium.is_text_present(u"Thanks for spending some quality time with the Web site today.")
|
<commit_before>from djangosanetesting import SeleniumTestCase
class NewmanTestCase(SeleniumTestCase):
fixtures = ['newman_admin_user']
SUPERUSER_USERNAME = u"superman"
SUPERUSER_PASSWORD = u"xxx"
NEWMAN_URI = "/newman/"
def __init__(self):
super(NewmanTestCase, self).__init__()
self.elements = {
'navigation' : {
'logout' : '//a[@href="%slogout/"]' % self.NEWMAN_URI
},
'pages' : {
'login' : {
'submit' : "//input[@type='submit']"
}
}
}
def login_superuser(self):
self.selenium.open(self.NEWMAN_URI)
self.selenium.type("id_username", self.SUPERUSER_USERNAME)
self.selenium.type("id_password", self.SUPERUSER_PASSWORD)
self.selenium.click(self.elements['pages']['login']['submit'])
def logout(self):
self.selenium.click(self.elements['navigation']['logout'])
self.selenium.wait_for_page_to_load(30000)
self.selenium.is_text_present(u"Thanks for spending some quality time with the Web site today.")
<commit_msg>Use class for logout instead of href, to please IE8<commit_after>from djangosanetesting import SeleniumTestCase
class NewmanTestCase(SeleniumTestCase):
fixtures = ['newman_admin_user']
SUPERUSER_USERNAME = u"superman"
SUPERUSER_PASSWORD = u"xxx"
NEWMAN_URI = "/newman/"
def __init__(self):
super(NewmanTestCase, self).__init__()
self.elements = {
'navigation' : {
'logout' : "//a[@class='icn logout']"
},
'pages' : {
'login' : {
'submit' : "//input[@type='submit']"
}
}
}
def login_superuser(self):
self.selenium.open(self.NEWMAN_URI)
self.selenium.type("id_username", self.SUPERUSER_USERNAME)
self.selenium.type("id_password", self.SUPERUSER_PASSWORD)
self.selenium.click(self.elements['pages']['login']['submit'])
def logout(self):
self.selenium.click(self.elements['navigation']['logout'])
self.selenium.wait_for_page_to_load(30000)
self.selenium.is_text_present(u"Thanks for spending some quality time with the Web site today.")
|
bcbe9da43a5e6564a33ec3d78098393cb5ecb3d0
|
tests/test_collector.py
|
tests/test_collector.py
|
"""Tests of coverage/collector.py and other collectors."""
import re
import coverage
from coverage.backward import StringIO
from tests.coveragetest import CoverageTest
class CollectorTest(CoverageTest):
"""Test specific aspects of the collection process."""
def test_should_trace_cache(self):
# The tracers should only invoke should_trace once for each file name.
# Make some files that invoke each other.
self.make_file("f1.py", """\
def f1(x, f):
return f(x)
""")
self.make_file("f2.py", """\
import f1
def func(x):
return f1.f1(x, otherfunc)
def otherfunc(x):
return x*x
for i in range(10):
func(i)
""")
debug_out = StringIO()
cov = coverage.coverage(
include=["f1.py"], debug=['trace'], debug_file=debug_out
)
# Import the python file, executing it.
self.start_import_stop(cov, "f2")
# Grab all the filenames mentioned in debug output, there should be no
# duplicates.
filenames = re.findall(r"'[^']+'", debug_out.getvalue())
self.assertEqual(len(filenames), len(set(filenames)))
|
"""Tests of coverage/collector.py and other collectors."""
import re
import coverage
from coverage.backward import StringIO
from tests.coveragetest import CoverageTest
class CollectorTest(CoverageTest):
"""Test specific aspects of the collection process."""
def test_should_trace_cache(self):
# The tracers should only invoke should_trace once for each file name.
# Make some files that invoke each other.
self.make_file("f1.py", """\
def f1(x, f):
return f(x)
""")
self.make_file("f2.py", """\
import f1
def func(x):
return f1.f1(x, otherfunc)
def otherfunc(x):
return x*x
for i in range(10):
func(i)
""")
debug_out = StringIO()
cov = coverage.coverage(
include=["f1.py"], debug=['trace'], debug_file=debug_out
)
# Import the python file, executing it.
self.start_import_stop(cov, "f2")
# Grab all the filenames mentioned in debug output, there should be no
# duplicates.
trace_lines = [
l for l in debug_out.getvalue().splitlines()
if l.startswith("Tracing ") or l.startswith("Not tracing ")
]
filenames = [re.search(r"'[^']+'", l).group() for l in trace_lines]
self.assertEqual(len(filenames), len(set(filenames)))
# Double-check that the tracing messages are in there somewhere.
self.assertTrue(len(filenames) > 5)
|
Make the should_trace_cache test a little more bullet-proof.
|
Make the should_trace_cache test a little more bullet-proof.
|
Python
|
apache-2.0
|
larsbutler/coveragepy,blueyed/coveragepy,blueyed/coveragepy,jayhetee/coveragepy,hugovk/coveragepy,7WebPages/coveragepy,nedbat/coveragepy,blueyed/coveragepy,larsbutler/coveragepy,larsbutler/coveragepy,larsbutler/coveragepy,nedbat/coveragepy,hugovk/coveragepy,nedbat/coveragepy,jayhetee/coveragepy,7WebPages/coveragepy,7WebPages/coveragepy,jayhetee/coveragepy,blueyed/coveragepy,jayhetee/coveragepy,blueyed/coveragepy,hugovk/coveragepy,hugovk/coveragepy,hugovk/coveragepy,nedbat/coveragepy,7WebPages/coveragepy,jayhetee/coveragepy,larsbutler/coveragepy,nedbat/coveragepy
|
"""Tests of coverage/collector.py and other collectors."""
import re
import coverage
from coverage.backward import StringIO
from tests.coveragetest import CoverageTest
class CollectorTest(CoverageTest):
"""Test specific aspects of the collection process."""
def test_should_trace_cache(self):
# The tracers should only invoke should_trace once for each file name.
# Make some files that invoke each other.
self.make_file("f1.py", """\
def f1(x, f):
return f(x)
""")
self.make_file("f2.py", """\
import f1
def func(x):
return f1.f1(x, otherfunc)
def otherfunc(x):
return x*x
for i in range(10):
func(i)
""")
debug_out = StringIO()
cov = coverage.coverage(
include=["f1.py"], debug=['trace'], debug_file=debug_out
)
# Import the python file, executing it.
self.start_import_stop(cov, "f2")
# Grab all the filenames mentioned in debug output, there should be no
# duplicates.
filenames = re.findall(r"'[^']+'", debug_out.getvalue())
self.assertEqual(len(filenames), len(set(filenames)))
Make the should_trace_cache test a little more bullet-proof.
|
"""Tests of coverage/collector.py and other collectors."""
import re
import coverage
from coverage.backward import StringIO
from tests.coveragetest import CoverageTest
class CollectorTest(CoverageTest):
"""Test specific aspects of the collection process."""
def test_should_trace_cache(self):
# The tracers should only invoke should_trace once for each file name.
# Make some files that invoke each other.
self.make_file("f1.py", """\
def f1(x, f):
return f(x)
""")
self.make_file("f2.py", """\
import f1
def func(x):
return f1.f1(x, otherfunc)
def otherfunc(x):
return x*x
for i in range(10):
func(i)
""")
debug_out = StringIO()
cov = coverage.coverage(
include=["f1.py"], debug=['trace'], debug_file=debug_out
)
# Import the python file, executing it.
self.start_import_stop(cov, "f2")
# Grab all the filenames mentioned in debug output, there should be no
# duplicates.
trace_lines = [
l for l in debug_out.getvalue().splitlines()
if l.startswith("Tracing ") or l.startswith("Not tracing ")
]
filenames = [re.search(r"'[^']+'", l).group() for l in trace_lines]
self.assertEqual(len(filenames), len(set(filenames)))
# Double-check that the tracing messages are in there somewhere.
self.assertTrue(len(filenames) > 5)
|
<commit_before>"""Tests of coverage/collector.py and other collectors."""
import re
import coverage
from coverage.backward import StringIO
from tests.coveragetest import CoverageTest
class CollectorTest(CoverageTest):
"""Test specific aspects of the collection process."""
def test_should_trace_cache(self):
# The tracers should only invoke should_trace once for each file name.
# Make some files that invoke each other.
self.make_file("f1.py", """\
def f1(x, f):
return f(x)
""")
self.make_file("f2.py", """\
import f1
def func(x):
return f1.f1(x, otherfunc)
def otherfunc(x):
return x*x
for i in range(10):
func(i)
""")
debug_out = StringIO()
cov = coverage.coverage(
include=["f1.py"], debug=['trace'], debug_file=debug_out
)
# Import the python file, executing it.
self.start_import_stop(cov, "f2")
# Grab all the filenames mentioned in debug output, there should be no
# duplicates.
filenames = re.findall(r"'[^']+'", debug_out.getvalue())
self.assertEqual(len(filenames), len(set(filenames)))
<commit_msg>Make the should_trace_cache test a little more bullet-proof.<commit_after>
|
"""Tests of coverage/collector.py and other collectors."""
import re
import coverage
from coverage.backward import StringIO
from tests.coveragetest import CoverageTest
class CollectorTest(CoverageTest):
"""Test specific aspects of the collection process."""
def test_should_trace_cache(self):
# The tracers should only invoke should_trace once for each file name.
# Make some files that invoke each other.
self.make_file("f1.py", """\
def f1(x, f):
return f(x)
""")
self.make_file("f2.py", """\
import f1
def func(x):
return f1.f1(x, otherfunc)
def otherfunc(x):
return x*x
for i in range(10):
func(i)
""")
debug_out = StringIO()
cov = coverage.coverage(
include=["f1.py"], debug=['trace'], debug_file=debug_out
)
# Import the python file, executing it.
self.start_import_stop(cov, "f2")
# Grab all the filenames mentioned in debug output, there should be no
# duplicates.
trace_lines = [
l for l in debug_out.getvalue().splitlines()
if l.startswith("Tracing ") or l.startswith("Not tracing ")
]
filenames = [re.search(r"'[^']+'", l).group() for l in trace_lines]
self.assertEqual(len(filenames), len(set(filenames)))
# Double-check that the tracing messages are in there somewhere.
self.assertTrue(len(filenames) > 5)
|
"""Tests of coverage/collector.py and other collectors."""
import re
import coverage
from coverage.backward import StringIO
from tests.coveragetest import CoverageTest
class CollectorTest(CoverageTest):
"""Test specific aspects of the collection process."""
def test_should_trace_cache(self):
# The tracers should only invoke should_trace once for each file name.
# Make some files that invoke each other.
self.make_file("f1.py", """\
def f1(x, f):
return f(x)
""")
self.make_file("f2.py", """\
import f1
def func(x):
return f1.f1(x, otherfunc)
def otherfunc(x):
return x*x
for i in range(10):
func(i)
""")
debug_out = StringIO()
cov = coverage.coverage(
include=["f1.py"], debug=['trace'], debug_file=debug_out
)
# Import the python file, executing it.
self.start_import_stop(cov, "f2")
# Grab all the filenames mentioned in debug output, there should be no
# duplicates.
filenames = re.findall(r"'[^']+'", debug_out.getvalue())
self.assertEqual(len(filenames), len(set(filenames)))
Make the should_trace_cache test a little more bullet-proof."""Tests of coverage/collector.py and other collectors."""
import re
import coverage
from coverage.backward import StringIO
from tests.coveragetest import CoverageTest
class CollectorTest(CoverageTest):
"""Test specific aspects of the collection process."""
def test_should_trace_cache(self):
# The tracers should only invoke should_trace once for each file name.
# Make some files that invoke each other.
self.make_file("f1.py", """\
def f1(x, f):
return f(x)
""")
self.make_file("f2.py", """\
import f1
def func(x):
return f1.f1(x, otherfunc)
def otherfunc(x):
return x*x
for i in range(10):
func(i)
""")
debug_out = StringIO()
cov = coverage.coverage(
include=["f1.py"], debug=['trace'], debug_file=debug_out
)
# Import the python file, executing it.
self.start_import_stop(cov, "f2")
# Grab all the filenames mentioned in debug output, there should be no
# duplicates.
trace_lines = [
l for l in debug_out.getvalue().splitlines()
if l.startswith("Tracing ") or l.startswith("Not tracing ")
]
filenames = [re.search(r"'[^']+'", l).group() for l in trace_lines]
self.assertEqual(len(filenames), len(set(filenames)))
# Double-check that the tracing messages are in there somewhere.
self.assertTrue(len(filenames) > 5)
|
<commit_before>"""Tests of coverage/collector.py and other collectors."""
import re
import coverage
from coverage.backward import StringIO
from tests.coveragetest import CoverageTest
class CollectorTest(CoverageTest):
"""Test specific aspects of the collection process."""
def test_should_trace_cache(self):
# The tracers should only invoke should_trace once for each file name.
# Make some files that invoke each other.
self.make_file("f1.py", """\
def f1(x, f):
return f(x)
""")
self.make_file("f2.py", """\
import f1
def func(x):
return f1.f1(x, otherfunc)
def otherfunc(x):
return x*x
for i in range(10):
func(i)
""")
debug_out = StringIO()
cov = coverage.coverage(
include=["f1.py"], debug=['trace'], debug_file=debug_out
)
# Import the python file, executing it.
self.start_import_stop(cov, "f2")
# Grab all the filenames mentioned in debug output, there should be no
# duplicates.
filenames = re.findall(r"'[^']+'", debug_out.getvalue())
self.assertEqual(len(filenames), len(set(filenames)))
<commit_msg>Make the should_trace_cache test a little more bullet-proof.<commit_after>"""Tests of coverage/collector.py and other collectors."""
import re
import coverage
from coverage.backward import StringIO
from tests.coveragetest import CoverageTest
class CollectorTest(CoverageTest):
"""Test specific aspects of the collection process."""
def test_should_trace_cache(self):
# The tracers should only invoke should_trace once for each file name.
# Make some files that invoke each other.
self.make_file("f1.py", """\
def f1(x, f):
return f(x)
""")
self.make_file("f2.py", """\
import f1
def func(x):
return f1.f1(x, otherfunc)
def otherfunc(x):
return x*x
for i in range(10):
func(i)
""")
debug_out = StringIO()
cov = coverage.coverage(
include=["f1.py"], debug=['trace'], debug_file=debug_out
)
# Import the python file, executing it.
self.start_import_stop(cov, "f2")
# Grab all the filenames mentioned in debug output, there should be no
# duplicates.
trace_lines = [
l for l in debug_out.getvalue().splitlines()
if l.startswith("Tracing ") or l.startswith("Not tracing ")
]
filenames = [re.search(r"'[^']+'", l).group() for l in trace_lines]
self.assertEqual(len(filenames), len(set(filenames)))
# Double-check that the tracing messages are in there somewhere.
self.assertTrue(len(filenames) > 5)
|
d5be5401a1666f6a4caa2604c9918345f6202b70
|
tests/testapp/models.py
|
tests/testapp/models.py
|
from django.db import models
from django.utils.timezone import now
from towel import deletion
from towel.managers import SearchManager
from towel.modelview import ModelViewURLs
class PersonManager(SearchManager):
search_fields = ('family_name', 'given_name')
class Person(models.Model):
created = models.DateTimeField(default=now)
family_name = models.CharField(max_length=100)
given_name = models.CharField(max_length=100)
objects = PersonManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return u'%s %s' % (self.given_name, self.family_name)
def get_absolute_url(self):
return self.urls.url('detail')
class EmailManager(SearchManager):
search_fields = ('person__family_name', 'person__given_name', 'email')
class EmailAddress(deletion.Model):
person = models.ForeignKey(Person)
email = models.EmailField()
objects = EmailManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return self.email
def get_absolute_url(self):
return self.urls.url('detail')
|
from django.db import models
from django.utils.timezone import now
from towel import deletion
from towel.managers import SearchManager
from towel.modelview import ModelViewURLs
class PersonManager(SearchManager):
search_fields = ('family_name', 'given_name')
class Person(models.Model):
created = models.DateTimeField(default=now)
family_name = models.CharField(max_length=100)
given_name = models.CharField(max_length=100)
objects = PersonManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return u'%s %s' % (self.given_name, self.family_name)
def get_absolute_url(self):
return self.urls['detail']
class EmailManager(SearchManager):
search_fields = ('person__family_name', 'person__given_name', 'email')
class EmailAddress(deletion.Model):
person = models.ForeignKey(Person)
email = models.EmailField()
objects = EmailManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return self.email
def get_absolute_url(self):
return self.urls['detail']
|
Use the item access variant instead
|
Use the item access variant instead
|
Python
|
bsd-3-clause
|
matthiask/towel,matthiask/towel,matthiask/towel,matthiask/towel
|
from django.db import models
from django.utils.timezone import now
from towel import deletion
from towel.managers import SearchManager
from towel.modelview import ModelViewURLs
class PersonManager(SearchManager):
search_fields = ('family_name', 'given_name')
class Person(models.Model):
created = models.DateTimeField(default=now)
family_name = models.CharField(max_length=100)
given_name = models.CharField(max_length=100)
objects = PersonManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return u'%s %s' % (self.given_name, self.family_name)
def get_absolute_url(self):
return self.urls.url('detail')
class EmailManager(SearchManager):
search_fields = ('person__family_name', 'person__given_name', 'email')
class EmailAddress(deletion.Model):
person = models.ForeignKey(Person)
email = models.EmailField()
objects = EmailManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return self.email
def get_absolute_url(self):
return self.urls.url('detail')
Use the item access variant instead
|
from django.db import models
from django.utils.timezone import now
from towel import deletion
from towel.managers import SearchManager
from towel.modelview import ModelViewURLs
class PersonManager(SearchManager):
search_fields = ('family_name', 'given_name')
class Person(models.Model):
created = models.DateTimeField(default=now)
family_name = models.CharField(max_length=100)
given_name = models.CharField(max_length=100)
objects = PersonManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return u'%s %s' % (self.given_name, self.family_name)
def get_absolute_url(self):
return self.urls['detail']
class EmailManager(SearchManager):
search_fields = ('person__family_name', 'person__given_name', 'email')
class EmailAddress(deletion.Model):
person = models.ForeignKey(Person)
email = models.EmailField()
objects = EmailManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return self.email
def get_absolute_url(self):
return self.urls['detail']
|
<commit_before>from django.db import models
from django.utils.timezone import now
from towel import deletion
from towel.managers import SearchManager
from towel.modelview import ModelViewURLs
class PersonManager(SearchManager):
search_fields = ('family_name', 'given_name')
class Person(models.Model):
created = models.DateTimeField(default=now)
family_name = models.CharField(max_length=100)
given_name = models.CharField(max_length=100)
objects = PersonManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return u'%s %s' % (self.given_name, self.family_name)
def get_absolute_url(self):
return self.urls.url('detail')
class EmailManager(SearchManager):
search_fields = ('person__family_name', 'person__given_name', 'email')
class EmailAddress(deletion.Model):
person = models.ForeignKey(Person)
email = models.EmailField()
objects = EmailManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return self.email
def get_absolute_url(self):
return self.urls.url('detail')
<commit_msg>Use the item access variant instead<commit_after>
|
from django.db import models
from django.utils.timezone import now
from towel import deletion
from towel.managers import SearchManager
from towel.modelview import ModelViewURLs
class PersonManager(SearchManager):
search_fields = ('family_name', 'given_name')
class Person(models.Model):
created = models.DateTimeField(default=now)
family_name = models.CharField(max_length=100)
given_name = models.CharField(max_length=100)
objects = PersonManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return u'%s %s' % (self.given_name, self.family_name)
def get_absolute_url(self):
return self.urls['detail']
class EmailManager(SearchManager):
search_fields = ('person__family_name', 'person__given_name', 'email')
class EmailAddress(deletion.Model):
person = models.ForeignKey(Person)
email = models.EmailField()
objects = EmailManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return self.email
def get_absolute_url(self):
return self.urls['detail']
|
from django.db import models
from django.utils.timezone import now
from towel import deletion
from towel.managers import SearchManager
from towel.modelview import ModelViewURLs
class PersonManager(SearchManager):
search_fields = ('family_name', 'given_name')
class Person(models.Model):
created = models.DateTimeField(default=now)
family_name = models.CharField(max_length=100)
given_name = models.CharField(max_length=100)
objects = PersonManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return u'%s %s' % (self.given_name, self.family_name)
def get_absolute_url(self):
return self.urls.url('detail')
class EmailManager(SearchManager):
search_fields = ('person__family_name', 'person__given_name', 'email')
class EmailAddress(deletion.Model):
person = models.ForeignKey(Person)
email = models.EmailField()
objects = EmailManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return self.email
def get_absolute_url(self):
return self.urls.url('detail')
Use the item access variant insteadfrom django.db import models
from django.utils.timezone import now
from towel import deletion
from towel.managers import SearchManager
from towel.modelview import ModelViewURLs
class PersonManager(SearchManager):
search_fields = ('family_name', 'given_name')
class Person(models.Model):
created = models.DateTimeField(default=now)
family_name = models.CharField(max_length=100)
given_name = models.CharField(max_length=100)
objects = PersonManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return u'%s %s' % (self.given_name, self.family_name)
def get_absolute_url(self):
return self.urls['detail']
class EmailManager(SearchManager):
search_fields = ('person__family_name', 'person__given_name', 'email')
class EmailAddress(deletion.Model):
person = models.ForeignKey(Person)
email = models.EmailField()
objects = EmailManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return self.email
def get_absolute_url(self):
return self.urls['detail']
|
<commit_before>from django.db import models
from django.utils.timezone import now
from towel import deletion
from towel.managers import SearchManager
from towel.modelview import ModelViewURLs
class PersonManager(SearchManager):
search_fields = ('family_name', 'given_name')
class Person(models.Model):
created = models.DateTimeField(default=now)
family_name = models.CharField(max_length=100)
given_name = models.CharField(max_length=100)
objects = PersonManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return u'%s %s' % (self.given_name, self.family_name)
def get_absolute_url(self):
return self.urls.url('detail')
class EmailManager(SearchManager):
search_fields = ('person__family_name', 'person__given_name', 'email')
class EmailAddress(deletion.Model):
person = models.ForeignKey(Person)
email = models.EmailField()
objects = EmailManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return self.email
def get_absolute_url(self):
return self.urls.url('detail')
<commit_msg>Use the item access variant instead<commit_after>from django.db import models
from django.utils.timezone import now
from towel import deletion
from towel.managers import SearchManager
from towel.modelview import ModelViewURLs
class PersonManager(SearchManager):
search_fields = ('family_name', 'given_name')
class Person(models.Model):
created = models.DateTimeField(default=now)
family_name = models.CharField(max_length=100)
given_name = models.CharField(max_length=100)
objects = PersonManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return u'%s %s' % (self.given_name, self.family_name)
def get_absolute_url(self):
return self.urls['detail']
class EmailManager(SearchManager):
search_fields = ('person__family_name', 'person__given_name', 'email')
class EmailAddress(deletion.Model):
person = models.ForeignKey(Person)
email = models.EmailField()
objects = EmailManager()
urls = ModelViewURLs(lambda obj: {'pk': obj.pk})
def __unicode__(self):
return self.email
def get_absolute_url(self):
return self.urls['detail']
|
115bb7cf36bad5d38ac3b0be9a0bab7823c3b003
|
IATISimpleTester/lib/helpers.py
|
IATISimpleTester/lib/helpers.py
|
from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
explanation = re.sub(r'\*([^\*]+)\*', r'<strong>\1</strong>', explanation)
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
|
from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
|
Remove this emboldening thing again
|
Remove this emboldening thing again
|
Python
|
mit
|
pwyf/data-quality-tester,pwyf/data-quality-tester,pwyf/data-quality-tester,pwyf/data-quality-tester
|
from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
explanation = re.sub(r'\*([^\*]+)\*', r'<strong>\1</strong>', explanation)
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
Remove this emboldening thing again
|
from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
|
<commit_before>from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
explanation = re.sub(r'\*([^\*]+)\*', r'<strong>\1</strong>', explanation)
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
<commit_msg>Remove this emboldening thing again<commit_after>
|
from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
|
from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
explanation = re.sub(r'\*([^\*]+)\*', r'<strong>\1</strong>', explanation)
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
Remove this emboldening thing againfrom collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
|
<commit_before>from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
explanation = re.sub(r'\*([^\*]+)\*', r'<strong>\1</strong>', explanation)
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
<commit_msg>Remove this emboldening thing again<commit_after>from collections import defaultdict
import re
from lxml import etree
from IATISimpleTester import app
# given an expression list and the name of an expression,
# select it,
def select_expression(expression_list, expression_name, default_expression_name=None):
expression_dicts = {x["id"]: x for x in expression_list}
if expression_name not in expression_dicts:
expression_name = default_expression_name
return expression_name, expression_dicts.get(expression_name)
def slugify(inp):
return inp.lower().replace(' ', '-')
def pprint(explanation):
explanation = explanation.strip()
if len(explanation) > 0:
explanation = explanation[0].upper() + explanation[1:]
explanation = explanation.replace('\n', '<br>') + '.'
return re.sub(r'`([^`]*)`', r'<code>\1</code>', explanation)
|
a9d3f47098bc7499d62d4866883fa45622f01b74
|
app/main/errors.py
|
app/main/errors.py
|
# coding=utf-8
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
@main.app_errorhandler(404)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/404.html", **template_data), 404
@main.app_errorhandler(500)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/500.html", **template_data), 500
|
# coding=utf-8
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
from dmutils.apiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def page_not_found(e):
return _render_error_page(500)
def _render_error_page(status_code):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
template_data = get_template_data(main, {})
return render_template(templates[status_code], **template_data), status_code
|
Add API error handling similar to supplier app
|
Add API error handling similar to supplier app
Currently 404s returned by the API are resulting in 500s on the buyer
app for invalid supplier requests. This change takes the model used in
the supplier frontend to automatically handle uncaught APIErrors. It is
not identical to the supplier app version because the default template
data is generated in a different way.
|
Python
|
mit
|
alphagov/digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,alphagov/digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend
|
# coding=utf-8
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
@main.app_errorhandler(404)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/404.html", **template_data), 404
@main.app_errorhandler(500)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/500.html", **template_data), 500
Add API error handling similar to supplier app
Currently 404s returned by the API are resulting in 500s on the buyer
app for invalid supplier requests. This change takes the model used in
the supplier frontend to automatically handle uncaught APIErrors. It is
not identical to the supplier app version because the default template
data is generated in a different way.
|
# coding=utf-8
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
from dmutils.apiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def page_not_found(e):
return _render_error_page(500)
def _render_error_page(status_code):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
template_data = get_template_data(main, {})
return render_template(templates[status_code], **template_data), status_code
|
<commit_before># coding=utf-8
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
@main.app_errorhandler(404)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/404.html", **template_data), 404
@main.app_errorhandler(500)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/500.html", **template_data), 500
<commit_msg>Add API error handling similar to supplier app
Currently 404s returned by the API are resulting in 500s on the buyer
app for invalid supplier requests. This change takes the model used in
the supplier frontend to automatically handle uncaught APIErrors. It is
not identical to the supplier app version because the default template
data is generated in a different way.<commit_after>
|
# coding=utf-8
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
from dmutils.apiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def page_not_found(e):
return _render_error_page(500)
def _render_error_page(status_code):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
template_data = get_template_data(main, {})
return render_template(templates[status_code], **template_data), status_code
|
# coding=utf-8
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
@main.app_errorhandler(404)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/404.html", **template_data), 404
@main.app_errorhandler(500)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/500.html", **template_data), 500
Add API error handling similar to supplier app
Currently 404s returned by the API are resulting in 500s on the buyer
app for invalid supplier requests. This change takes the model used in
the supplier frontend to automatically handle uncaught APIErrors. It is
not identical to the supplier app version because the default template
data is generated in a different way.# coding=utf-8
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
from dmutils.apiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def page_not_found(e):
return _render_error_page(500)
def _render_error_page(status_code):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
template_data = get_template_data(main, {})
return render_template(templates[status_code], **template_data), status_code
|
<commit_before># coding=utf-8
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
@main.app_errorhandler(404)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/404.html", **template_data), 404
@main.app_errorhandler(500)
def page_not_found(e):
template_data = get_template_data(main, {})
return render_template("errors/500.html", **template_data), 500
<commit_msg>Add API error handling similar to supplier app
Currently 404s returned by the API are resulting in 500s on the buyer
app for invalid supplier requests. This change takes the model used in
the supplier frontend to automatically handle uncaught APIErrors. It is
not identical to the supplier app version because the default template
data is generated in a different way.<commit_after># coding=utf-8
from flask import render_template, current_app, request
from . import main
from ..helpers.search_helpers import get_template_data
from dmutils.apiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def page_not_found(e):
return _render_error_page(500)
def _render_error_page(status_code):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
template_data = get_template_data(main, {})
return render_template(templates[status_code], **template_data), status_code
|
e66e2f19611e4f7bca9be400b13238e249b1b3d2
|
cadorsfeed/fetch.py
|
cadorsfeed/fetch.py
|
import mechanize
import re
def fetchLatest():
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
latestDate = br["txt_ReportDate"]
return latestDate
def fetchReport(reportDate):
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
br["txt_ReportDate"] = reportDate
response2 = br.submit(name="btn_SearchTop")
data = response2.get_data()
data_filtered = re.sub("""<input type="hidden" name="__VIEWSTATE" id="__VIEWSTATE" value="[A-Za-z0-9/+=]*" />""",
"<!-- viewstate field stripped -->",
data)
return data_filtered.decode("utf-8")
|
from werkzeug.exceptions import NotFound, InternalServerError
import mechanize
import re
def fetchLatest():
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
latestDate = br["txt_ReportDate"]
return latestDate
def fetchReport(reportDate):
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
br["txt_ReportDate"] = reportDate
response2 = br.submit(name="btn_SearchTop")
print response2.geturl()
if not response2.geturl().startswith("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/rpt.aspx"):
raise InternalServerError()
data = response2.get_data()
if re.search("There were no results for the search criteria you entered",
data):
raise NotFound()
data_filtered = re.sub("""<input type="hidden" name="__VIEWSTATE" id="__VIEWSTATE" value="[A-Za-z0-9/+=]*" />""",
"<!-- viewstate field stripped -->",
data)
return data_filtered.decode("utf-8")
|
Raise exceptions for common errors rather than returning invalid data.
|
Raise exceptions for common errors rather than returning invalid data.
|
Python
|
mit
|
kurtraschke/cadors-parse,kurtraschke/cadors-parse
|
import mechanize
import re
def fetchLatest():
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
latestDate = br["txt_ReportDate"]
return latestDate
def fetchReport(reportDate):
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
br["txt_ReportDate"] = reportDate
response2 = br.submit(name="btn_SearchTop")
data = response2.get_data()
data_filtered = re.sub("""<input type="hidden" name="__VIEWSTATE" id="__VIEWSTATE" value="[A-Za-z0-9/+=]*" />""",
"<!-- viewstate field stripped -->",
data)
return data_filtered.decode("utf-8")
Raise exceptions for common errors rather than returning invalid data.
|
from werkzeug.exceptions import NotFound, InternalServerError
import mechanize
import re
def fetchLatest():
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
latestDate = br["txt_ReportDate"]
return latestDate
def fetchReport(reportDate):
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
br["txt_ReportDate"] = reportDate
response2 = br.submit(name="btn_SearchTop")
print response2.geturl()
if not response2.geturl().startswith("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/rpt.aspx"):
raise InternalServerError()
data = response2.get_data()
if re.search("There were no results for the search criteria you entered",
data):
raise NotFound()
data_filtered = re.sub("""<input type="hidden" name="__VIEWSTATE" id="__VIEWSTATE" value="[A-Za-z0-9/+=]*" />""",
"<!-- viewstate field stripped -->",
data)
return data_filtered.decode("utf-8")
|
<commit_before>import mechanize
import re
def fetchLatest():
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
latestDate = br["txt_ReportDate"]
return latestDate
def fetchReport(reportDate):
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
br["txt_ReportDate"] = reportDate
response2 = br.submit(name="btn_SearchTop")
data = response2.get_data()
data_filtered = re.sub("""<input type="hidden" name="__VIEWSTATE" id="__VIEWSTATE" value="[A-Za-z0-9/+=]*" />""",
"<!-- viewstate field stripped -->",
data)
return data_filtered.decode("utf-8")
<commit_msg>Raise exceptions for common errors rather than returning invalid data.<commit_after>
|
from werkzeug.exceptions import NotFound, InternalServerError
import mechanize
import re
def fetchLatest():
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
latestDate = br["txt_ReportDate"]
return latestDate
def fetchReport(reportDate):
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
br["txt_ReportDate"] = reportDate
response2 = br.submit(name="btn_SearchTop")
print response2.geturl()
if not response2.geturl().startswith("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/rpt.aspx"):
raise InternalServerError()
data = response2.get_data()
if re.search("There were no results for the search criteria you entered",
data):
raise NotFound()
data_filtered = re.sub("""<input type="hidden" name="__VIEWSTATE" id="__VIEWSTATE" value="[A-Za-z0-9/+=]*" />""",
"<!-- viewstate field stripped -->",
data)
return data_filtered.decode("utf-8")
|
import mechanize
import re
def fetchLatest():
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
latestDate = br["txt_ReportDate"]
return latestDate
def fetchReport(reportDate):
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
br["txt_ReportDate"] = reportDate
response2 = br.submit(name="btn_SearchTop")
data = response2.get_data()
data_filtered = re.sub("""<input type="hidden" name="__VIEWSTATE" id="__VIEWSTATE" value="[A-Za-z0-9/+=]*" />""",
"<!-- viewstate field stripped -->",
data)
return data_filtered.decode("utf-8")
Raise exceptions for common errors rather than returning invalid data.from werkzeug.exceptions import NotFound, InternalServerError
import mechanize
import re
def fetchLatest():
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
latestDate = br["txt_ReportDate"]
return latestDate
def fetchReport(reportDate):
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
br["txt_ReportDate"] = reportDate
response2 = br.submit(name="btn_SearchTop")
print response2.geturl()
if not response2.geturl().startswith("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/rpt.aspx"):
raise InternalServerError()
data = response2.get_data()
if re.search("There were no results for the search criteria you entered",
data):
raise NotFound()
data_filtered = re.sub("""<input type="hidden" name="__VIEWSTATE" id="__VIEWSTATE" value="[A-Za-z0-9/+=]*" />""",
"<!-- viewstate field stripped -->",
data)
return data_filtered.decode("utf-8")
|
<commit_before>import mechanize
import re
def fetchLatest():
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
latestDate = br["txt_ReportDate"]
return latestDate
def fetchReport(reportDate):
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
br["txt_ReportDate"] = reportDate
response2 = br.submit(name="btn_SearchTop")
data = response2.get_data()
data_filtered = re.sub("""<input type="hidden" name="__VIEWSTATE" id="__VIEWSTATE" value="[A-Za-z0-9/+=]*" />""",
"<!-- viewstate field stripped -->",
data)
return data_filtered.decode("utf-8")
<commit_msg>Raise exceptions for common errors rather than returning invalid data.<commit_after>from werkzeug.exceptions import NotFound, InternalServerError
import mechanize
import re
def fetchLatest():
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
latestDate = br["txt_ReportDate"]
return latestDate
def fetchReport(reportDate):
br = mechanize.Browser()
br.open("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/nrpt.aspx?lang=eng")
br.select_form(name="pageForm")
br["txt_ReportDate"] = reportDate
response2 = br.submit(name="btn_SearchTop")
print response2.geturl()
if not response2.geturl().startswith("http://wwwapps.tc.gc.ca/Saf-Sec-Sur/2/cadors-screaq/rpt.aspx"):
raise InternalServerError()
data = response2.get_data()
if re.search("There were no results for the search criteria you entered",
data):
raise NotFound()
data_filtered = re.sub("""<input type="hidden" name="__VIEWSTATE" id="__VIEWSTATE" value="[A-Za-z0-9/+=]*" />""",
"<!-- viewstate field stripped -->",
data)
return data_filtered.decode("utf-8")
|
91d7e27882c4317199f2de99964da4ef3a2e3950
|
edx_data_research/web_app/__init__.py
|
edx_data_research/web_app/__init__.py
|
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.mongoengine import MongoEngine
from flask.ext.security import MongoEngineUserDatastore, Security
# Create app
app = Flask(__name__)
app.config.from_object('config')
# Create mail object
mail = Mail(app)
|
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.mongoengine import MongoEngine
from flask.ext.security import MongoEngineUserDatastore, Security
# Create app
app = Flask(__name__)
app.config.from_object('config')
# Create mail object
mail = Mail(app)
# Create database connection object
db = MongoEngine(app)
from edx_data_research.web_app.models import User, Role
# Setup Flask-Security
user_datastore = MongoEngineUserDatastore(db, User, Role)
security = Security(app, user_datastore)
|
Define flask security object for login stuff
|
Define flask security object for login stuff
|
Python
|
mit
|
McGillX/edx_data_research,McGillX/edx_data_research,McGillX/edx_data_research
|
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.mongoengine import MongoEngine
from flask.ext.security import MongoEngineUserDatastore, Security
# Create app
app = Flask(__name__)
app.config.from_object('config')
# Create mail object
mail = Mail(app)
Define flask security object for login stuff
|
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.mongoengine import MongoEngine
from flask.ext.security import MongoEngineUserDatastore, Security
# Create app
app = Flask(__name__)
app.config.from_object('config')
# Create mail object
mail = Mail(app)
# Create database connection object
db = MongoEngine(app)
from edx_data_research.web_app.models import User, Role
# Setup Flask-Security
user_datastore = MongoEngineUserDatastore(db, User, Role)
security = Security(app, user_datastore)
|
<commit_before>from flask import Flask
from flask.ext.mail import Mail
from flask.ext.mongoengine import MongoEngine
from flask.ext.security import MongoEngineUserDatastore, Security
# Create app
app = Flask(__name__)
app.config.from_object('config')
# Create mail object
mail = Mail(app)
<commit_msg>Define flask security object for login stuff<commit_after>
|
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.mongoengine import MongoEngine
from flask.ext.security import MongoEngineUserDatastore, Security
# Create app
app = Flask(__name__)
app.config.from_object('config')
# Create mail object
mail = Mail(app)
# Create database connection object
db = MongoEngine(app)
from edx_data_research.web_app.models import User, Role
# Setup Flask-Security
user_datastore = MongoEngineUserDatastore(db, User, Role)
security = Security(app, user_datastore)
|
from flask import Flask
from flask.ext.mail import Mail
from flask.ext.mongoengine import MongoEngine
from flask.ext.security import MongoEngineUserDatastore, Security
# Create app
app = Flask(__name__)
app.config.from_object('config')
# Create mail object
mail = Mail(app)
Define flask security object for login stufffrom flask import Flask
from flask.ext.mail import Mail
from flask.ext.mongoengine import MongoEngine
from flask.ext.security import MongoEngineUserDatastore, Security
# Create app
app = Flask(__name__)
app.config.from_object('config')
# Create mail object
mail = Mail(app)
# Create database connection object
db = MongoEngine(app)
from edx_data_research.web_app.models import User, Role
# Setup Flask-Security
user_datastore = MongoEngineUserDatastore(db, User, Role)
security = Security(app, user_datastore)
|
<commit_before>from flask import Flask
from flask.ext.mail import Mail
from flask.ext.mongoengine import MongoEngine
from flask.ext.security import MongoEngineUserDatastore, Security
# Create app
app = Flask(__name__)
app.config.from_object('config')
# Create mail object
mail = Mail(app)
<commit_msg>Define flask security object for login stuff<commit_after>from flask import Flask
from flask.ext.mail import Mail
from flask.ext.mongoengine import MongoEngine
from flask.ext.security import MongoEngineUserDatastore, Security
# Create app
app = Flask(__name__)
app.config.from_object('config')
# Create mail object
mail = Mail(app)
# Create database connection object
db = MongoEngine(app)
from edx_data_research.web_app.models import User, Role
# Setup Flask-Security
user_datastore = MongoEngineUserDatastore(db, User, Role)
security = Security(app, user_datastore)
|
3384d2d933b9038f88b9ac0ac1c41545fa7c65c8
|
utils/swift_build_support/swift_build_support/debug.py
|
utils/swift_build_support/swift_build_support/debug.py
|
# swift_build_support/debug.py - Print information on the build -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# Convenient functions for printing out information on the build process.
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
import sys
from . import shell
def print_xcodebuild_versions(file=sys.stdout):
"""
Print the host machine's `xcodebuild` version, as well as version
information for all available SDKs.
"""
version = shell.capture(
['xcodebuild', '-version'],
dry_run=False, echo=False, optional=True).rstrip()
sdks = shell.capture(
['xcodebuild', '-version', '-sdk'],
dry_run=False, echo=False, optional=True).rstrip()
fmt = """\
{version}
--- SDK versions ---
{sdks}
"""
print(fmt.format(version=version, sdks=sdks), file=file)
file.flush()
|
# swift_build_support/debug.py - Print information on the build -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# Convenient functions for printing out information on the build process.
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
import sys
from . import shell
def print_xcodebuild_versions(file=sys.stdout):
"""
Print the host machine's `xcodebuild` version, as well as version
information for all available SDKs.
"""
version = shell.capture(
['xcodebuild', '-version'], dry_run=False, echo=False).rstrip()
sdks = shell.capture(
['xcodebuild', '-version', '-sdk'], dry_run=False, echo=False).rstrip()
fmt = """\
{version}
--- SDK versions ---
{sdks}
"""
print(fmt.format(version=version, sdks=sdks), file=file)
file.flush()
|
Make --show-sdks fail if calling `xcodebuild` failed
|
[build-script] Make --show-sdks fail if calling `xcodebuild` failed
|
Python
|
apache-2.0
|
airspeedswift/swift,stephentyrone/swift,xwu/swift,shahmishal/swift,frootloops/swift,lorentey/swift,huonw/swift,gottesmm/swift,rudkx/swift,hooman/swift,jtbandes/swift,tjw/swift,tjw/swift,Jnosh/swift,jckarter/swift,karwa/swift,ben-ng/swift,felix91gr/swift,kperryua/swift,karwa/swift,devincoughlin/swift,tkremenek/swift,manavgabhawala/swift,brentdax/swift,calebd/swift,xedin/swift,austinzheng/swift,sschiau/swift,shajrawi/swift,calebd/swift,shajrawi/swift,xedin/swift,alblue/swift,manavgabhawala/swift,ben-ng/swift,jmgc/swift,alblue/swift,ben-ng/swift,karwa/swift,tardieu/swift,Jnosh/swift,roambotics/swift,modocache/swift,brentdax/swift,brentdax/swift,xedin/swift,tinysun212/swift-windows,felix91gr/swift,brentdax/swift,harlanhaskins/swift,JaSpa/swift,alblue/swift,apple/swift,benlangmuir/swift,deyton/swift,xwu/swift,gribozavr/swift,kperryua/swift,uasys/swift,frootloops/swift,apple/swift,atrick/swift,glessard/swift,lorentey/swift,deyton/swift,natecook1000/swift,JGiola/swift,ahoppen/swift,stephentyrone/swift,benlangmuir/swift,djwbrown/swift,ben-ng/swift,OscarSwanros/swift,harlanhaskins/swift,bitjammer/swift,allevato/swift,swiftix/swift,devincoughlin/swift,jckarter/swift,CodaFi/swift,airspeedswift/swift,gregomni/swift,jckarter/swift,atrick/swift,airspeedswift/swift,uasys/swift,swiftix/swift,harlanhaskins/swift,bitjammer/swift,felix91gr/swift,practicalswift/swift,hughbe/swift,jopamer/swift,uasys/swift,jtbandes/swift,kstaring/swift,xwu/swift,devincoughlin/swift,danielmartin/swift,parkera/swift,JaSpa/swift,tkremenek/swift,tardieu/swift,alblue/swift,manavgabhawala/swift,OscarSwanros/swift,huonw/swift,gregomni/swift,swiftix/swift,sschiau/swift,Jnosh/swift,ben-ng/swift,ahoppen/swift,codestergit/swift,danielmartin/swift,gregomni/swift,gribozavr/swift,natecook1000/swift,austinzheng/swift,shajrawi/swift,gregomni/swift,JaSpa/swift,codestergit/swift,JGiola/swift,hooman/swift,OscarSwanros/swift,karwa/swift,devincoughlin/swift,tinysun212/swift-windows,gmilos/swift,jtbandes/swift,hughbe/swift,r
eturn/swift,rudkx/swift,alblue/swift,manavgabhawala/swift,JaSpa/swift,manavgabhawala/swift,austinzheng/swift,therealbnut/swift,huonw/swift,jopamer/swift,JGiola/swift,tjw/swift,zisko/swift,codestergit/swift,hooman/swift,IngmarStein/swift,nathawes/swift,djwbrown/swift,milseman/swift,kperryua/swift,jmgc/swift,arvedviehweger/swift,stephentyrone/swift,aschwaighofer/swift,kperryua/swift,alblue/swift,kperryua/swift,therealbnut/swift,aschwaighofer/swift,benlangmuir/swift,brentdax/swift,austinzheng/swift,jtbandes/swift,xedin/swift,return/swift,glessard/swift,tkremenek/swift,karwa/swift,practicalswift/swift,brentdax/swift,kstaring/swift,karwa/swift,zisko/swift,hooman/swift,kstaring/swift,zisko/swift,amraboelela/swift,shahmishal/swift,OscarSwanros/swift,sschiau/swift,xedin/swift,deyton/swift,zisko/swift,benlangmuir/swift,jopamer/swift,therealbnut/swift,milseman/swift,danielmartin/swift,lorentey/swift,tjw/swift,gmilos/swift,roambotics/swift,djwbrown/swift,practicalswift/swift,hooman/swift,danielmartin/swift,modocache/swift,russbishop/swift,parkera/swift,aschwaighofer/swift,arvedviehweger/swift,apple/swift,brentdax/swift,CodaFi/swift,deyton/swift,IngmarStein/swift,JaSpa/swift,uasys/swift,apple/swift,russbishop/swift,aschwaighofer/swift,deyton/swift,IngmarStein/swift,tjw/swift,parkera/swift,aschwaighofer/swift,jmgc/swift,karwa/swift,ahoppen/swift,lorentey/swift,modocache/swift,jtbandes/swift,tinysun212/swift-windows,bitjammer/swift,djwbrown/swift,gmilos/swift,hughbe/swift,tinysun212/swift-windows,airspeedswift/swift,rudkx/swift,atrick/swift,Jnosh/swift,jmgc/swift,shajrawi/swift,manavgabhawala/swift,russbishop/swift,Jnosh/swift,modocache/swift,frootloops/swift,gottesmm/swift,gottesmm/swift,xwu/swift,tkremenek/swift,sschiau/swift,aschwaighofer/swift,calebd/swift,rudkx/swift,nathawes/swift,CodaFi/swift,swiftix/swift,codestergit/swift,ahoppen/swift,amraboelela/swift,shajrawi/swift,uasys/swift,parkera/swift,kstaring/swift,benlangmuir/swift,modocache/swift,harlanhaskins/swift,shahmisha
l/swift,xwu/swift,xwu/swift,tardieu/swift,glessard/swift,shahmishal/swift,shahmishal/swift,allevato/swift,natecook1000/swift,amraboelela/swift,jckarter/swift,felix91gr/swift,allevato/swift,milseman/swift,jopamer/swift,manavgabhawala/swift,devincoughlin/swift,return/swift,felix91gr/swift,jtbandes/swift,harlanhaskins/swift,bitjammer/swift,sschiau/swift,zisko/swift,lorentey/swift,gribozavr/swift,amraboelela/swift,IngmarStein/swift,tardieu/swift,felix91gr/swift,djwbrown/swift,stephentyrone/swift,JGiola/swift,shajrawi/swift,harlanhaskins/swift,xedin/swift,roambotics/swift,gottesmm/swift,parkera/swift,lorentey/swift,parkera/swift,russbishop/swift,CodaFi/swift,return/swift,gmilos/swift,OscarSwanros/swift,alblue/swift,natecook1000/swift,codestergit/swift,jopamer/swift,airspeedswift/swift,ben-ng/swift,modocache/swift,bitjammer/swift,ahoppen/swift,gmilos/swift,glessard/swift,CodaFi/swift,practicalswift/swift,harlanhaskins/swift,russbishop/swift,practicalswift/swift,parkera/swift,deyton/swift,jckarter/swift,atrick/swift,devincoughlin/swift,gregomni/swift,return/swift,lorentey/swift,milseman/swift,airspeedswift/swift,calebd/swift,tinysun212/swift-windows,gribozavr/swift,xedin/swift,amraboelela/swift,tardieu/swift,roambotics/swift,tardieu/swift,arvedviehweger/swift,frootloops/swift,OscarSwanros/swift,therealbnut/swift,arvedviehweger/swift,sschiau/swift,codestergit/swift,kstaring/swift,apple/swift,arvedviehweger/swift,milseman/swift,JGiola/swift,therealbnut/swift,natecook1000/swift,jckarter/swift,practicalswift/swift,tkremenek/swift,atrick/swift,ben-ng/swift,calebd/swift,russbishop/swift,airspeedswift/swift,xedin/swift,danielmartin/swift,Jnosh/swift,gmilos/swift,natecook1000/swift,arvedviehweger/swift,shahmishal/swift,jopamer/swift,huonw/swift,hughbe/swift,tardieu/swift,stephentyrone/swift,tjw/swift,gottesmm/swift,practicalswift/swift,bitjammer/swift,JaSpa/swift,sschiau/swift,jmgc/swift,nathawes/swift,amraboelela/swift,apple/swift,tinysun212/swift-windows,hughbe/swift,gregomni/sw
ift,frootloops/swift,tkremenek/swift,modocache/swift,jopamer/swift,swiftix/swift,djwbrown/swift,austinzheng/swift,tjw/swift,IngmarStein/swift,gmilos/swift,allevato/swift,rudkx/swift,shajrawi/swift,kperryua/swift,uasys/swift,tkremenek/swift,austinzheng/swift,zisko/swift,austinzheng/swift,IngmarStein/swift,devincoughlin/swift,uasys/swift,ahoppen/swift,nathawes/swift,huonw/swift,nathawes/swift,jmgc/swift,bitjammer/swift,swiftix/swift,nathawes/swift,allevato/swift,gribozavr/swift,gribozavr/swift,gottesmm/swift,devincoughlin/swift,tinysun212/swift-windows,natecook1000/swift,huonw/swift,jckarter/swift,danielmartin/swift,swiftix/swift,hughbe/swift,deyton/swift,return/swift,benlangmuir/swift,milseman/swift,shahmishal/swift,xwu/swift,atrick/swift,stephentyrone/swift,roambotics/swift,CodaFi/swift,russbishop/swift,OscarSwanros/swift,hooman/swift,zisko/swift,aschwaighofer/swift,hooman/swift,CodaFi/swift,practicalswift/swift,glessard/swift,kperryua/swift,hughbe/swift,Jnosh/swift,therealbnut/swift,arvedviehweger/swift,gribozavr/swift,rudkx/swift,codestergit/swift,djwbrown/swift,jtbandes/swift,frootloops/swift,kstaring/swift,jmgc/swift,danielmartin/swift,glessard/swift,allevato/swift,return/swift,amraboelela/swift,calebd/swift,huonw/swift,kstaring/swift,nathawes/swift,sschiau/swift,gribozavr/swift,roambotics/swift,stephentyrone/swift,allevato/swift,felix91gr/swift,gottesmm/swift,milseman/swift,JGiola/swift,parkera/swift,karwa/swift,shajrawi/swift,calebd/swift,lorentey/swift,IngmarStein/swift,shahmishal/swift,therealbnut/swift,JaSpa/swift,frootloops/swift
|
# swift_build_support/debug.py - Print information on the build -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# Convenient functions for printing out information on the build process.
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
import sys
from . import shell
def print_xcodebuild_versions(file=sys.stdout):
"""
Print the host machine's `xcodebuild` version, as well as version
information for all available SDKs.
"""
version = shell.capture(
['xcodebuild', '-version'],
dry_run=False, echo=False, optional=True).rstrip()
sdks = shell.capture(
['xcodebuild', '-version', '-sdk'],
dry_run=False, echo=False, optional=True).rstrip()
fmt = """\
{version}
--- SDK versions ---
{sdks}
"""
print(fmt.format(version=version, sdks=sdks), file=file)
file.flush()
[build-script] Make --show-sdks fail if calling `xcodebuild` failed
|
# swift_build_support/debug.py - Print information on the build -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# Convenient functions for printing out information on the build process.
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
import sys
from . import shell
def print_xcodebuild_versions(file=sys.stdout):
"""
Print the host machine's `xcodebuild` version, as well as version
information for all available SDKs.
"""
version = shell.capture(
['xcodebuild', '-version'], dry_run=False, echo=False).rstrip()
sdks = shell.capture(
['xcodebuild', '-version', '-sdk'], dry_run=False, echo=False).rstrip()
fmt = """\
{version}
--- SDK versions ---
{sdks}
"""
print(fmt.format(version=version, sdks=sdks), file=file)
file.flush()
|
<commit_before># swift_build_support/debug.py - Print information on the build -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# Convenient functions for printing out information on the build process.
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
import sys
from . import shell
def print_xcodebuild_versions(file=sys.stdout):
"""
Print the host machine's `xcodebuild` version, as well as version
information for all available SDKs.
"""
version = shell.capture(
['xcodebuild', '-version'],
dry_run=False, echo=False, optional=True).rstrip()
sdks = shell.capture(
['xcodebuild', '-version', '-sdk'],
dry_run=False, echo=False, optional=True).rstrip()
fmt = """\
{version}
--- SDK versions ---
{sdks}
"""
print(fmt.format(version=version, sdks=sdks), file=file)
file.flush()
<commit_msg>[build-script] Make --show-sdks fail if calling `xcodebuild` failed<commit_after>
|
# swift_build_support/debug.py - Print information on the build -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# Convenient functions for printing out information on the build process.
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
import sys
from . import shell
def print_xcodebuild_versions(file=sys.stdout):
"""
Print the host machine's `xcodebuild` version, as well as version
information for all available SDKs.
"""
version = shell.capture(
['xcodebuild', '-version'], dry_run=False, echo=False).rstrip()
sdks = shell.capture(
['xcodebuild', '-version', '-sdk'], dry_run=False, echo=False).rstrip()
fmt = """\
{version}
--- SDK versions ---
{sdks}
"""
print(fmt.format(version=version, sdks=sdks), file=file)
file.flush()
|
# swift_build_support/debug.py - Print information on the build -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# Convenient functions for printing out information on the build process.
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
import sys
from . import shell
def print_xcodebuild_versions(file=sys.stdout):
"""
Print the host machine's `xcodebuild` version, as well as version
information for all available SDKs.
"""
version = shell.capture(
['xcodebuild', '-version'],
dry_run=False, echo=False, optional=True).rstrip()
sdks = shell.capture(
['xcodebuild', '-version', '-sdk'],
dry_run=False, echo=False, optional=True).rstrip()
fmt = """\
{version}
--- SDK versions ---
{sdks}
"""
print(fmt.format(version=version, sdks=sdks), file=file)
file.flush()
[build-script] Make --show-sdks fail if calling `xcodebuild` failed# swift_build_support/debug.py - Print information on the build -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# Convenient functions for printing out information on the build process.
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
import sys
from . import shell
def print_xcodebuild_versions(file=sys.stdout):
"""
Print the host machine's `xcodebuild` version, as well as version
information for all available SDKs.
"""
version = shell.capture(
['xcodebuild', '-version'], dry_run=False, echo=False).rstrip()
sdks = shell.capture(
['xcodebuild', '-version', '-sdk'], dry_run=False, echo=False).rstrip()
fmt = """\
{version}
--- SDK versions ---
{sdks}
"""
print(fmt.format(version=version, sdks=sdks), file=file)
file.flush()
|
<commit_before># swift_build_support/debug.py - Print information on the build -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# Convenient functions for printing out information on the build process.
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
import sys
from . import shell
def print_xcodebuild_versions(file=sys.stdout):
"""
Print the host machine's `xcodebuild` version, as well as version
information for all available SDKs.
"""
version = shell.capture(
['xcodebuild', '-version'],
dry_run=False, echo=False, optional=True).rstrip()
sdks = shell.capture(
['xcodebuild', '-version', '-sdk'],
dry_run=False, echo=False, optional=True).rstrip()
fmt = """\
{version}
--- SDK versions ---
{sdks}
"""
print(fmt.format(version=version, sdks=sdks), file=file)
file.flush()
<commit_msg>[build-script] Make --show-sdks fail if calling `xcodebuild` failed<commit_after># swift_build_support/debug.py - Print information on the build -*- python -*-
#
# This source file is part of the Swift.org open source project
#
# Copyright (c) 2014 - 2016 Apple Inc. and the Swift project authors
# Licensed under Apache License v2.0 with Runtime Library Exception
#
# See http://swift.org/LICENSE.txt for license information
# See http://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
#
# ----------------------------------------------------------------------------
#
# Convenient functions for printing out information on the build process.
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import print_function
import sys
from . import shell
def print_xcodebuild_versions(file=sys.stdout):
"""
Print the host machine's `xcodebuild` version, as well as version
information for all available SDKs.
"""
version = shell.capture(
['xcodebuild', '-version'], dry_run=False, echo=False).rstrip()
sdks = shell.capture(
['xcodebuild', '-version', '-sdk'], dry_run=False, echo=False).rstrip()
fmt = """\
{version}
--- SDK versions ---
{sdks}
"""
print(fmt.format(version=version, sdks=sdks), file=file)
file.flush()
|
e6e525746613505e5f6be49a92901bb95a4e2199
|
k8s/models/pod_disruption_budget.py
|
k8s/models/pod_disruption_budget.py
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from .common import ObjectMeta
from ..base import Model
from ..fields import Field, ListField
class PodDisruptionBudgetMatchExpressions(Model):
key = Field(six.text_type)
operator = Field(six.text_type)
values = ListField(six.text_type)
class PodDisruptionBudgetSelector(Model):
matchExpressions = Field(PodDisruptionBudgetMatchExpressions)
matchLabels = Field(dict)
class PodDisruptionBudgetSpec(Model):
minAvailable = Field(six.text_type)
maxUnavailable = Field(six.text_type)
selector = Field(PodDisruptionBudgetSelector)
class PodDisruptionBudget(Model):
class Meta:
url_template = "/apis/autoscaling/v1/namespaces/{namespace}/poddisruptionbudget/{name}"
metadata = Field(ObjectMeta)
spec = Field(PodDisruptionBudgetSpec)
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from .common import ObjectMeta
from ..base import Model
from ..fields import Field, ListField
class LabelSelectorRequirement(Model):
key = Field(six.text_type)
operator = Field(six.text_type)
values = ListField(six.text_type)
class LabelSelector(Model):
matchExpressions = Field(LabelSelectorRequirement)
matchLabels = Field(dict)
class PodDisruptionBudgetSpec(Model):
minAvailable = Field(six.text_type)
maxUnavailable = Field(six.text_type)
selector = Field(LabelSelector)
class PodDisruptionBudget(Model):
class Meta:
url_template = "/apis/autoscaling/v1/namespaces/{namespace}/poddisruptionbudget/{name}"
metadata = Field(ObjectMeta)
spec = Field(PodDisruptionBudgetSpec)
|
Fix issues with class naming
|
Fix issues with class naming
|
Python
|
apache-2.0
|
fiaas/k8s
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from .common import ObjectMeta
from ..base import Model
from ..fields import Field, ListField
class PodDisruptionBudgetMatchExpressions(Model):
key = Field(six.text_type)
operator = Field(six.text_type)
values = ListField(six.text_type)
class PodDisruptionBudgetSelector(Model):
matchExpressions = Field(PodDisruptionBudgetMatchExpressions)
matchLabels = Field(dict)
class PodDisruptionBudgetSpec(Model):
minAvailable = Field(six.text_type)
maxUnavailable = Field(six.text_type)
selector = Field(PodDisruptionBudgetSelector)
class PodDisruptionBudget(Model):
class Meta:
url_template = "/apis/autoscaling/v1/namespaces/{namespace}/poddisruptionbudget/{name}"
metadata = Field(ObjectMeta)
spec = Field(PodDisruptionBudgetSpec)
Fix issues with class naming
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from .common import ObjectMeta
from ..base import Model
from ..fields import Field, ListField
class LabelSelectorRequirement(Model):
key = Field(six.text_type)
operator = Field(six.text_type)
values = ListField(six.text_type)
class LabelSelector(Model):
matchExpressions = Field(LabelSelectorRequirement)
matchLabels = Field(dict)
class PodDisruptionBudgetSpec(Model):
minAvailable = Field(six.text_type)
maxUnavailable = Field(six.text_type)
selector = Field(LabelSelector)
class PodDisruptionBudget(Model):
class Meta:
url_template = "/apis/autoscaling/v1/namespaces/{namespace}/poddisruptionbudget/{name}"
metadata = Field(ObjectMeta)
spec = Field(PodDisruptionBudgetSpec)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from .common import ObjectMeta
from ..base import Model
from ..fields import Field, ListField
class PodDisruptionBudgetMatchExpressions(Model):
key = Field(six.text_type)
operator = Field(six.text_type)
values = ListField(six.text_type)
class PodDisruptionBudgetSelector(Model):
matchExpressions = Field(PodDisruptionBudgetMatchExpressions)
matchLabels = Field(dict)
class PodDisruptionBudgetSpec(Model):
minAvailable = Field(six.text_type)
maxUnavailable = Field(six.text_type)
selector = Field(PodDisruptionBudgetSelector)
class PodDisruptionBudget(Model):
class Meta:
url_template = "/apis/autoscaling/v1/namespaces/{namespace}/poddisruptionbudget/{name}"
metadata = Field(ObjectMeta)
spec = Field(PodDisruptionBudgetSpec)
<commit_msg>Fix issues with class naming<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from .common import ObjectMeta
from ..base import Model
from ..fields import Field, ListField
class LabelSelectorRequirement(Model):
key = Field(six.text_type)
operator = Field(six.text_type)
values = ListField(six.text_type)
class LabelSelector(Model):
matchExpressions = Field(LabelSelectorRequirement)
matchLabels = Field(dict)
class PodDisruptionBudgetSpec(Model):
minAvailable = Field(six.text_type)
maxUnavailable = Field(six.text_type)
selector = Field(LabelSelector)
class PodDisruptionBudget(Model):
class Meta:
url_template = "/apis/autoscaling/v1/namespaces/{namespace}/poddisruptionbudget/{name}"
metadata = Field(ObjectMeta)
spec = Field(PodDisruptionBudgetSpec)
|
#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from .common import ObjectMeta
from ..base import Model
from ..fields import Field, ListField
class PodDisruptionBudgetMatchExpressions(Model):
key = Field(six.text_type)
operator = Field(six.text_type)
values = ListField(six.text_type)
class PodDisruptionBudgetSelector(Model):
matchExpressions = Field(PodDisruptionBudgetMatchExpressions)
matchLabels = Field(dict)
class PodDisruptionBudgetSpec(Model):
minAvailable = Field(six.text_type)
maxUnavailable = Field(six.text_type)
selector = Field(PodDisruptionBudgetSelector)
class PodDisruptionBudget(Model):
class Meta:
url_template = "/apis/autoscaling/v1/namespaces/{namespace}/poddisruptionbudget/{name}"
metadata = Field(ObjectMeta)
spec = Field(PodDisruptionBudgetSpec)
Fix issues with class naming#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from .common import ObjectMeta
from ..base import Model
from ..fields import Field, ListField
class LabelSelectorRequirement(Model):
key = Field(six.text_type)
operator = Field(six.text_type)
values = ListField(six.text_type)
class LabelSelector(Model):
matchExpressions = Field(LabelSelectorRequirement)
matchLabels = Field(dict)
class PodDisruptionBudgetSpec(Model):
minAvailable = Field(six.text_type)
maxUnavailable = Field(six.text_type)
selector = Field(LabelSelector)
class PodDisruptionBudget(Model):
class Meta:
url_template = "/apis/autoscaling/v1/namespaces/{namespace}/poddisruptionbudget/{name}"
metadata = Field(ObjectMeta)
spec = Field(PodDisruptionBudgetSpec)
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from .common import ObjectMeta
from ..base import Model
from ..fields import Field, ListField
class PodDisruptionBudgetMatchExpressions(Model):
key = Field(six.text_type)
operator = Field(six.text_type)
values = ListField(six.text_type)
class PodDisruptionBudgetSelector(Model):
matchExpressions = Field(PodDisruptionBudgetMatchExpressions)
matchLabels = Field(dict)
class PodDisruptionBudgetSpec(Model):
minAvailable = Field(six.text_type)
maxUnavailable = Field(six.text_type)
selector = Field(PodDisruptionBudgetSelector)
class PodDisruptionBudget(Model):
class Meta:
url_template = "/apis/autoscaling/v1/namespaces/{namespace}/poddisruptionbudget/{name}"
metadata = Field(ObjectMeta)
spec = Field(PodDisruptionBudgetSpec)
<commit_msg>Fix issues with class naming<commit_after>#!/usr/bin/env python
# -*- coding: utf-8
from __future__ import absolute_import
import six
from .common import ObjectMeta
from ..base import Model
from ..fields import Field, ListField
class LabelSelectorRequirement(Model):
key = Field(six.text_type)
operator = Field(six.text_type)
values = ListField(six.text_type)
class LabelSelector(Model):
matchExpressions = Field(LabelSelectorRequirement)
matchLabels = Field(dict)
class PodDisruptionBudgetSpec(Model):
minAvailable = Field(six.text_type)
maxUnavailable = Field(six.text_type)
selector = Field(LabelSelector)
class PodDisruptionBudget(Model):
class Meta:
url_template = "/apis/autoscaling/v1/namespaces/{namespace}/poddisruptionbudget/{name}"
metadata = Field(ObjectMeta)
spec = Field(PodDisruptionBudgetSpec)
|
027f016c6168325cb0e8b66adb1c10461399e0e1
|
katagawa/sql/__init__.py
|
katagawa/sql/__init__.py
|
"""
SQL generators for Katagawa.
"""
import abc
import typing
class Token(abc.ABC):
"""
Base class for a token.
"""
__slots__ = ()
def __init__(self, subtokens: typing.List['Token']):
"""
:param subtokens: Any subtokens this token has.
"""
self.subtokens = subtokens
@abc.abstractproperty
def name(self):
"""
Returns the name of the token.
This is a unique identifier, but is not always related to the actual SQL underneath it.
"""
@abc.abstractmethod
def generate_sql(self):
"""
Generate SQL from this statement.
:return: The generated SQL.
"""
class Aliased(Token):
"""
Mixin class for an aliased token.
"""
__slots__ = ("alias",)
def __init__(self, subtokens: typing.List['Token'], alias: str):
"""
:param subtokens: Any subtokens this token has.
:param alias: The alias this token has.
"""
super().__init__(subtokens)
self.alias = alias
|
"""
SQL generators for Katagawa.
"""
import abc
import typing
class Token(abc.ABC):
"""
Base class for a token.
"""
__slots__ = ()
def __init__(self, subtokens: typing.List['Token']=None):
"""
:param subtokens: Any subtokens this token has.
"""
if subtokens is None:
subtokens = []
self.subtokens = subtokens
def consume_tokens(self, name) -> typing.List['Token']:
"""
Consumes tokens from the current subtokens and returns a new list of these tokens.
This will remove the tokens from the current subtokens.
:param name: The name of the token to consume.
:return: A list of :class:`Token` that match the type.
"""
returned = []
for item in self.subtokens[:]:
if item.name == name:
returned.append(item)
self.subtokens.remove(item)
return returned
@abc.abstractproperty
def name(self):
"""
Returns the name of the token.
This is a unique identifier, but is not always related to the actual SQL underneath it.
"""
@abc.abstractmethod
def generate_sql(self):
"""
Generate SQL from this statement.
:return: The generated SQL.
"""
class Aliased(abc.ABC, Token):
"""
Mixin class for an aliased token.
"""
__slots__ = ("alias",)
def __init__(self, subtokens: typing.List['Token'], alias: str=None):
"""
:param subtokens: Any subtokens this token has.
:param alias: The alias this token has.
"""
super().__init__(subtokens)
self.alias = alias
|
Add consume_tokens function to Token.
|
Add consume_tokens function to Token.
|
Python
|
mit
|
SunDwarf/asyncqlio
|
"""
SQL generators for Katagawa.
"""
import abc
import typing
class Token(abc.ABC):
"""
Base class for a token.
"""
__slots__ = ()
def __init__(self, subtokens: typing.List['Token']):
"""
:param subtokens: Any subtokens this token has.
"""
self.subtokens = subtokens
@abc.abstractproperty
def name(self):
"""
Returns the name of the token.
This is a unique identifier, but is not always related to the actual SQL underneath it.
"""
@abc.abstractmethod
def generate_sql(self):
"""
Generate SQL from this statement.
:return: The generated SQL.
"""
class Aliased(Token):
"""
Mixin class for an aliased token.
"""
__slots__ = ("alias",)
def __init__(self, subtokens: typing.List['Token'], alias: str):
"""
:param subtokens: Any subtokens this token has.
:param alias: The alias this token has.
"""
super().__init__(subtokens)
self.alias = alias
Add consume_tokens function to Token.
|
"""
SQL generators for Katagawa.
"""
import abc
import typing
class Token(abc.ABC):
"""
Base class for a token.
"""
__slots__ = ()
def __init__(self, subtokens: typing.List['Token']=None):
"""
:param subtokens: Any subtokens this token has.
"""
if subtokens is None:
subtokens = []
self.subtokens = subtokens
def consume_tokens(self, name) -> typing.List['Token']:
"""
Consumes tokens from the current subtokens and returns a new list of these tokens.
This will remove the tokens from the current subtokens.
:param name: The name of the token to consume.
:return: A list of :class:`Token` that match the type.
"""
returned = []
for item in self.subtokens[:]:
if item.name == name:
returned.append(item)
self.subtokens.remove(item)
return returned
@abc.abstractproperty
def name(self):
"""
Returns the name of the token.
This is a unique identifier, but is not always related to the actual SQL underneath it.
"""
@abc.abstractmethod
def generate_sql(self):
"""
Generate SQL from this statement.
:return: The generated SQL.
"""
class Aliased(abc.ABC, Token):
"""
Mixin class for an aliased token.
"""
__slots__ = ("alias",)
def __init__(self, subtokens: typing.List['Token'], alias: str=None):
"""
:param subtokens: Any subtokens this token has.
:param alias: The alias this token has.
"""
super().__init__(subtokens)
self.alias = alias
|
<commit_before>"""
SQL generators for Katagawa.
"""
import abc
import typing
class Token(abc.ABC):
"""
Base class for a token.
"""
__slots__ = ()
def __init__(self, subtokens: typing.List['Token']):
"""
:param subtokens: Any subtokens this token has.
"""
self.subtokens = subtokens
@abc.abstractproperty
def name(self):
"""
Returns the name of the token.
This is a unique identifier, but is not always related to the actual SQL underneath it.
"""
@abc.abstractmethod
def generate_sql(self):
"""
Generate SQL from this statement.
:return: The generated SQL.
"""
class Aliased(Token):
"""
Mixin class for an aliased token.
"""
__slots__ = ("alias",)
def __init__(self, subtokens: typing.List['Token'], alias: str):
"""
:param subtokens: Any subtokens this token has.
:param alias: The alias this token has.
"""
super().__init__(subtokens)
self.alias = alias
<commit_msg>Add consume_tokens function to Token.<commit_after>
|
"""
SQL generators for Katagawa.
"""
import abc
import typing
class Token(abc.ABC):
"""
Base class for a token.
"""
__slots__ = ()
def __init__(self, subtokens: typing.List['Token']=None):
"""
:param subtokens: Any subtokens this token has.
"""
if subtokens is None:
subtokens = []
self.subtokens = subtokens
def consume_tokens(self, name) -> typing.List['Token']:
"""
Consumes tokens from the current subtokens and returns a new list of these tokens.
This will remove the tokens from the current subtokens.
:param name: The name of the token to consume.
:return: A list of :class:`Token` that match the type.
"""
returned = []
for item in self.subtokens[:]:
if item.name == name:
returned.append(item)
self.subtokens.remove(item)
return returned
@abc.abstractproperty
def name(self):
"""
Returns the name of the token.
This is a unique identifier, but is not always related to the actual SQL underneath it.
"""
@abc.abstractmethod
def generate_sql(self):
"""
Generate SQL from this statement.
:return: The generated SQL.
"""
class Aliased(abc.ABC, Token):
"""
Mixin class for an aliased token.
"""
__slots__ = ("alias",)
def __init__(self, subtokens: typing.List['Token'], alias: str=None):
"""
:param subtokens: Any subtokens this token has.
:param alias: The alias this token has.
"""
super().__init__(subtokens)
self.alias = alias
|
"""
SQL generators for Katagawa.
"""
import abc
import typing
class Token(abc.ABC):
"""
Base class for a token.
"""
__slots__ = ()
def __init__(self, subtokens: typing.List['Token']):
"""
:param subtokens: Any subtokens this token has.
"""
self.subtokens = subtokens
@abc.abstractproperty
def name(self):
"""
Returns the name of the token.
This is a unique identifier, but is not always related to the actual SQL underneath it.
"""
@abc.abstractmethod
def generate_sql(self):
"""
Generate SQL from this statement.
:return: The generated SQL.
"""
class Aliased(Token):
"""
Mixin class for an aliased token.
"""
__slots__ = ("alias",)
def __init__(self, subtokens: typing.List['Token'], alias: str):
"""
:param subtokens: Any subtokens this token has.
:param alias: The alias this token has.
"""
super().__init__(subtokens)
self.alias = alias
Add consume_tokens function to Token."""
SQL generators for Katagawa.
"""
import abc
import typing
class Token(abc.ABC):
"""
Base class for a token.
"""
__slots__ = ()
def __init__(self, subtokens: typing.List['Token']=None):
"""
:param subtokens: Any subtokens this token has.
"""
if subtokens is None:
subtokens = []
self.subtokens = subtokens
def consume_tokens(self, name) -> typing.List['Token']:
"""
Consumes tokens from the current subtokens and returns a new list of these tokens.
This will remove the tokens from the current subtokens.
:param name: The name of the token to consume.
:return: A list of :class:`Token` that match the type.
"""
returned = []
for item in self.subtokens[:]:
if item.name == name:
returned.append(item)
self.subtokens.remove(item)
return returned
@abc.abstractproperty
def name(self):
"""
Returns the name of the token.
This is a unique identifier, but is not always related to the actual SQL underneath it.
"""
@abc.abstractmethod
def generate_sql(self):
"""
Generate SQL from this statement.
:return: The generated SQL.
"""
class Aliased(abc.ABC, Token):
"""
Mixin class for an aliased token.
"""
__slots__ = ("alias",)
def __init__(self, subtokens: typing.List['Token'], alias: str=None):
"""
:param subtokens: Any subtokens this token has.
:param alias: The alias this token has.
"""
super().__init__(subtokens)
self.alias = alias
|
<commit_before>"""
SQL generators for Katagawa.
"""
import abc
import typing
class Token(abc.ABC):
"""
Base class for a token.
"""
__slots__ = ()
def __init__(self, subtokens: typing.List['Token']):
"""
:param subtokens: Any subtokens this token has.
"""
self.subtokens = subtokens
@abc.abstractproperty
def name(self):
"""
Returns the name of the token.
This is a unique identifier, but is not always related to the actual SQL underneath it.
"""
@abc.abstractmethod
def generate_sql(self):
"""
Generate SQL from this statement.
:return: The generated SQL.
"""
class Aliased(Token):
"""
Mixin class for an aliased token.
"""
__slots__ = ("alias",)
def __init__(self, subtokens: typing.List['Token'], alias: str):
"""
:param subtokens: Any subtokens this token has.
:param alias: The alias this token has.
"""
super().__init__(subtokens)
self.alias = alias
<commit_msg>Add consume_tokens function to Token.<commit_after>"""
SQL generators for Katagawa.
"""
import abc
import typing
class Token(abc.ABC):
"""
Base class for a token.
"""
__slots__ = ()
def __init__(self, subtokens: typing.List['Token']=None):
"""
:param subtokens: Any subtokens this token has.
"""
if subtokens is None:
subtokens = []
self.subtokens = subtokens
def consume_tokens(self, name) -> typing.List['Token']:
"""
Consumes tokens from the current subtokens and returns a new list of these tokens.
This will remove the tokens from the current subtokens.
:param name: The name of the token to consume.
:return: A list of :class:`Token` that match the type.
"""
returned = []
for item in self.subtokens[:]:
if item.name == name:
returned.append(item)
self.subtokens.remove(item)
return returned
@abc.abstractproperty
def name(self):
"""
Returns the name of the token.
This is a unique identifier, but is not always related to the actual SQL underneath it.
"""
@abc.abstractmethod
def generate_sql(self):
"""
Generate SQL from this statement.
:return: The generated SQL.
"""
class Aliased(abc.ABC, Token):
"""
Mixin class for an aliased token.
"""
__slots__ = ("alias",)
def __init__(self, subtokens: typing.List['Token'], alias: str=None):
"""
:param subtokens: Any subtokens this token has.
:param alias: The alias this token has.
"""
super().__init__(subtokens)
self.alias = alias
|
9c16b71ecbb38115f107c7baba56304fb9630ec5
|
ocds/export/__init__.py
|
ocds/export/__init__.py
|
from .models import (
Release,
ReleasePackage,
Record,
RecordPackage
)
from .schema import Tender
from .helpers import (
mode_test,
get_ocid
)
def release_tender(tender, prefix):
""" returns Release object created from `tender` with ocid `prefix` """
date = tender.get('dateModified', '')
ocid = get_ocid(prefix, tender['tenderID'])
return Release(dict(tender=tender, ocid=ocid, date=date))
def release_tenders(tenders, prefix):
""" returns list of Release object created from `tenders` with amendment info and ocid `prefix` """
prev_tender = next(tenders)
for tender in tenders:
yield Tender.with_diff(prev_tender, tender)
prev_tender = tender
def package_tenders(tenders, params):
data = {}
for field in ReleasePackage._fields:
if field in params:
data[field] = params.get(field, '')
data['releases'] = [release_tender(tender, params.get('prefix')) for tender in tenders]
return ReleasePackage(dict(**data)).serialize()
|
from .models import (
Release,
ReleasePackage,
Record,
RecordPackage
)
from .schema import Tender
from .helpers import (
mode_test,
get_ocid
)
def release_tender(tender, prefix):
""" returns Release object created from `tender` with ocid `prefix` """
date = tender.get('dateModified', '')
ocid = get_ocid(prefix, tender['tenderID'])
return Release(dict(tender=tender, ocid=ocid, date=date))
def release_tenders(tenders, prefix):
""" returns list of Release object created from `tenders` with amendment info and ocid `prefix` """
prev_tender = next(tenders)
for tender in tenders:
data = {}
for field in ['tender', 'awards', 'contracts']:
model = getattr(Release, field).model_class
if field in tender:
data[field] = model.fromDiff(prev_tender.get(field, ''), new_tender.get(field, ''))
elif field == 'tender':
data['tender'] = model.fromDiff(prev_tender, tender)
yield Release(data)
prev_tender = tender
def package_tenders(tenders, params):
data = {}
for field in ReleasePackage._fields:
if field in params:
data[field] = params.get(field, '')
data['releases'] = [release_tender(tender, params.get('prefix')) for tender in tenders]
return ReleasePackage(dict(**data)).serialize()
|
Update helpers for generating releases
|
Update helpers for generating releases
|
Python
|
apache-2.0
|
yshalenyk/openprocurement.ocds.export,yshalenyk/openprocurement.ocds.export,yshalenyk/ocds.export
|
from .models import (
Release,
ReleasePackage,
Record,
RecordPackage
)
from .schema import Tender
from .helpers import (
mode_test,
get_ocid
)
def release_tender(tender, prefix):
""" returns Release object created from `tender` with ocid `prefix` """
date = tender.get('dateModified', '')
ocid = get_ocid(prefix, tender['tenderID'])
return Release(dict(tender=tender, ocid=ocid, date=date))
def release_tenders(tenders, prefix):
""" returns list of Release object created from `tenders` with amendment info and ocid `prefix` """
prev_tender = next(tenders)
for tender in tenders:
yield Tender.with_diff(prev_tender, tender)
prev_tender = tender
def package_tenders(tenders, params):
data = {}
for field in ReleasePackage._fields:
if field in params:
data[field] = params.get(field, '')
data['releases'] = [release_tender(tender, params.get('prefix')) for tender in tenders]
return ReleasePackage(dict(**data)).serialize()
Update helpers for generating releases
|
from .models import (
Release,
ReleasePackage,
Record,
RecordPackage
)
from .schema import Tender
from .helpers import (
mode_test,
get_ocid
)
def release_tender(tender, prefix):
""" returns Release object created from `tender` with ocid `prefix` """
date = tender.get('dateModified', '')
ocid = get_ocid(prefix, tender['tenderID'])
return Release(dict(tender=tender, ocid=ocid, date=date))
def release_tenders(tenders, prefix):
""" returns list of Release object created from `tenders` with amendment info and ocid `prefix` """
prev_tender = next(tenders)
for tender in tenders:
data = {}
for field in ['tender', 'awards', 'contracts']:
model = getattr(Release, field).model_class
if field in tender:
data[field] = model.fromDiff(prev_tender.get(field, ''), new_tender.get(field, ''))
elif field == 'tender':
data['tender'] = model.fromDiff(prev_tender, tender)
yield Release(data)
prev_tender = tender
def package_tenders(tenders, params):
data = {}
for field in ReleasePackage._fields:
if field in params:
data[field] = params.get(field, '')
data['releases'] = [release_tender(tender, params.get('prefix')) for tender in tenders]
return ReleasePackage(dict(**data)).serialize()
|
<commit_before>from .models import (
Release,
ReleasePackage,
Record,
RecordPackage
)
from .schema import Tender
from .helpers import (
mode_test,
get_ocid
)
def release_tender(tender, prefix):
""" returns Release object created from `tender` with ocid `prefix` """
date = tender.get('dateModified', '')
ocid = get_ocid(prefix, tender['tenderID'])
return Release(dict(tender=tender, ocid=ocid, date=date))
def release_tenders(tenders, prefix):
""" returns list of Release object created from `tenders` with amendment info and ocid `prefix` """
prev_tender = next(tenders)
for tender in tenders:
yield Tender.with_diff(prev_tender, tender)
prev_tender = tender
def package_tenders(tenders, params):
data = {}
for field in ReleasePackage._fields:
if field in params:
data[field] = params.get(field, '')
data['releases'] = [release_tender(tender, params.get('prefix')) for tender in tenders]
return ReleasePackage(dict(**data)).serialize()
<commit_msg>Update helpers for generating releases<commit_after>
|
from .models import (
Release,
ReleasePackage,
Record,
RecordPackage
)
from .schema import Tender
from .helpers import (
mode_test,
get_ocid
)
def release_tender(tender, prefix):
""" returns Release object created from `tender` with ocid `prefix` """
date = tender.get('dateModified', '')
ocid = get_ocid(prefix, tender['tenderID'])
return Release(dict(tender=tender, ocid=ocid, date=date))
def release_tenders(tenders, prefix):
""" returns list of Release object created from `tenders` with amendment info and ocid `prefix` """
prev_tender = next(tenders)
for tender in tenders:
data = {}
for field in ['tender', 'awards', 'contracts']:
model = getattr(Release, field).model_class
if field in tender:
data[field] = model.fromDiff(prev_tender.get(field, ''), new_tender.get(field, ''))
elif field == 'tender':
data['tender'] = model.fromDiff(prev_tender, tender)
yield Release(data)
prev_tender = tender
def package_tenders(tenders, params):
data = {}
for field in ReleasePackage._fields:
if field in params:
data[field] = params.get(field, '')
data['releases'] = [release_tender(tender, params.get('prefix')) for tender in tenders]
return ReleasePackage(dict(**data)).serialize()
|
from .models import (
Release,
ReleasePackage,
Record,
RecordPackage
)
from .schema import Tender
from .helpers import (
mode_test,
get_ocid
)
def release_tender(tender, prefix):
""" returns Release object created from `tender` with ocid `prefix` """
date = tender.get('dateModified', '')
ocid = get_ocid(prefix, tender['tenderID'])
return Release(dict(tender=tender, ocid=ocid, date=date))
def release_tenders(tenders, prefix):
""" returns list of Release object created from `tenders` with amendment info and ocid `prefix` """
prev_tender = next(tenders)
for tender in tenders:
yield Tender.with_diff(prev_tender, tender)
prev_tender = tender
def package_tenders(tenders, params):
data = {}
for field in ReleasePackage._fields:
if field in params:
data[field] = params.get(field, '')
data['releases'] = [release_tender(tender, params.get('prefix')) for tender in tenders]
return ReleasePackage(dict(**data)).serialize()
Update helpers for generating releasesfrom .models import (
Release,
ReleasePackage,
Record,
RecordPackage
)
from .schema import Tender
from .helpers import (
mode_test,
get_ocid
)
def release_tender(tender, prefix):
""" returns Release object created from `tender` with ocid `prefix` """
date = tender.get('dateModified', '')
ocid = get_ocid(prefix, tender['tenderID'])
return Release(dict(tender=tender, ocid=ocid, date=date))
def release_tenders(tenders, prefix):
""" returns list of Release object created from `tenders` with amendment info and ocid `prefix` """
prev_tender = next(tenders)
for tender in tenders:
data = {}
for field in ['tender', 'awards', 'contracts']:
model = getattr(Release, field).model_class
if field in tender:
data[field] = model.fromDiff(prev_tender.get(field, ''), new_tender.get(field, ''))
elif field == 'tender':
data['tender'] = model.fromDiff(prev_tender, tender)
yield Release(data)
prev_tender = tender
def package_tenders(tenders, params):
data = {}
for field in ReleasePackage._fields:
if field in params:
data[field] = params.get(field, '')
data['releases'] = [release_tender(tender, params.get('prefix')) for tender in tenders]
return ReleasePackage(dict(**data)).serialize()
|
<commit_before>from .models import (
Release,
ReleasePackage,
Record,
RecordPackage
)
from .schema import Tender
from .helpers import (
mode_test,
get_ocid
)
def release_tender(tender, prefix):
""" returns Release object created from `tender` with ocid `prefix` """
date = tender.get('dateModified', '')
ocid = get_ocid(prefix, tender['tenderID'])
return Release(dict(tender=tender, ocid=ocid, date=date))
def release_tenders(tenders, prefix):
""" returns list of Release object created from `tenders` with amendment info and ocid `prefix` """
prev_tender = next(tenders)
for tender in tenders:
yield Tender.with_diff(prev_tender, tender)
prev_tender = tender
def package_tenders(tenders, params):
data = {}
for field in ReleasePackage._fields:
if field in params:
data[field] = params.get(field, '')
data['releases'] = [release_tender(tender, params.get('prefix')) for tender in tenders]
return ReleasePackage(dict(**data)).serialize()
<commit_msg>Update helpers for generating releases<commit_after>from .models import (
Release,
ReleasePackage,
Record,
RecordPackage
)
from .schema import Tender
from .helpers import (
mode_test,
get_ocid
)
def release_tender(tender, prefix):
""" returns Release object created from `tender` with ocid `prefix` """
date = tender.get('dateModified', '')
ocid = get_ocid(prefix, tender['tenderID'])
return Release(dict(tender=tender, ocid=ocid, date=date))
def release_tenders(tenders, prefix):
""" returns list of Release object created from `tenders` with amendment info and ocid `prefix` """
prev_tender = next(tenders)
for tender in tenders:
data = {}
for field in ['tender', 'awards', 'contracts']:
model = getattr(Release, field).model_class
if field in tender:
data[field] = model.fromDiff(prev_tender.get(field, ''), new_tender.get(field, ''))
elif field == 'tender':
data['tender'] = model.fromDiff(prev_tender, tender)
yield Release(data)
prev_tender = tender
def package_tenders(tenders, params):
data = {}
for field in ReleasePackage._fields:
if field in params:
data[field] = params.get(field, '')
data['releases'] = [release_tender(tender, params.get('prefix')) for tender in tenders]
return ReleasePackage(dict(**data)).serialize()
|
fb3abf0d1cf27d23c78dd8101dd0c54cf589c2ef
|
corehq/apps/locations/resources/v0_6.py
|
corehq/apps/locations/resources/v0_6.py
|
from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.objects.filter(is_archived=False).all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
|
from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.active_objects.all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
|
Use objects manager that automatically filters out archived forms
|
Use objects manager that automatically filters out archived forms
Co-authored-by: Ethan Soergel <c1732a0c832c5c8cbfae77286e6475129315f488@users.noreply.github.com>
|
Python
|
bsd-3-clause
|
dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq,dimagi/commcare-hq
|
from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.objects.filter(is_archived=False).all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
Use objects manager that automatically filters out archived forms
Co-authored-by: Ethan Soergel <c1732a0c832c5c8cbfae77286e6475129315f488@users.noreply.github.com>
|
from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.active_objects.all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
|
<commit_before>from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.objects.filter(is_archived=False).all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
<commit_msg>Use objects manager that automatically filters out archived forms
Co-authored-by: Ethan Soergel <c1732a0c832c5c8cbfae77286e6475129315f488@users.noreply.github.com><commit_after>
|
from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.active_objects.all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
|
from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.objects.filter(is_archived=False).all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
Use objects manager that automatically filters out archived forms
Co-authored-by: Ethan Soergel <c1732a0c832c5c8cbfae77286e6475129315f488@users.noreply.github.com>from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.active_objects.all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
|
<commit_before>from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.objects.filter(is_archived=False).all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
<commit_msg>Use objects manager that automatically filters out archived forms
Co-authored-by: Ethan Soergel <c1732a0c832c5c8cbfae77286e6475129315f488@users.noreply.github.com><commit_after>from corehq.apps.api.resources.auth import RequirePermissionAuthentication
from corehq.apps.locations.models import SQLLocation
from corehq.apps.users.models import HqPermissions
from corehq.apps.locations.resources import v0_5
class LocationResource(v0_5.LocationResource):
resource_name = 'location'
class Meta:
queryset = SQLLocation.active_objects.all()
detail_uri_name = 'location_id'
authentication = RequirePermissionAuthentication(HqPermissions.edit_locations)
allowed_methods = ['get']
include_resource_uri = False
fields = {
'domain',
'location_id',
'name',
'site_code',
'last_modified',
'latitude',
'longitude',
'location_data',
}
filtering = {
"domain": ('exact',),
}
def dehydrate(self, bundle):
if bundle.obj.parent:
bundle.data['parent_location_id'] = bundle.obj.parent.location_id
else:
bundle.data['parent_location_id'] = ''
bundle.data['location_type_name'] = bundle.obj.location_type.name
bundle.data['location_type_code'] = bundle.obj.location_type.code
return bundle
|
8c6d68bcf9f3f6932ea22bbe0ce0944a3cd20662
|
class4/exercise5.py
|
class4/exercise5.py
|
# Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
|
# Use Netmiko to enter into configuration mode on pynet-rtr2.
# Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
|
Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
|
Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
|
Python
|
apache-2.0
|
linkdebian/pynet_course
|
# Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
|
# Use Netmiko to enter into configuration mode on pynet-rtr2.
# Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
|
<commit_before># Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
<commit_msg>Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).<commit_after>
|
# Use Netmiko to enter into configuration mode on pynet-rtr2.
# Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
|
# Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).# Use Netmiko to enter into configuration mode on pynet-rtr2.
# Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
|
<commit_before># Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
<commit_msg>Use Netmiko to enter into configuration mode on pynet-rtr2. Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).<commit_after># Use Netmiko to enter into configuration mode on pynet-rtr2.
# Also use Netmiko to verify your state (i.e. that you are currently in configuration mode).
from getpass import getpass
import time
from netmiko import ConnectHandler
password = getpass()
pynet_rtr2 = {'device_type': 'cisco_ios', 'ip': '50.76.53.27', 'username': 'pyclass', 'password': password, 'port': 8022}
ssh_connection = ConnectHandler(**pynet_rtr2)
time.sleep(2)
ssh_connection.config_mode()
output = ssh_connection.find_prompt()
print "The current state of the prompt is %s" % output
|
577e2a03c15e4e489d0df4c3c2a2bea8b9aa54b6
|
fluent_utils/softdeps/any_urlfield.py
|
fluent_utils/softdeps/any_urlfield.py
|
"""
Optional integration with django-any-urlfield
"""
from __future__ import absolute_import
from django.db import models
from fluent_utils.django_compat import is_installed
if is_installed('any_urlfield'):
from any_urlfield.models import AnyUrlField as BaseUrlField
else:
BaseUrlField = models.URLField
# subclassing here so South or Django migrations detect a single class.
class AnyUrlField(BaseUrlField):
"""
A CharField that can either refer to a CMS page ID, or external URL.
If *django-any-urlfield* is not installed, only regular URLs can be used.
"""
def __init__(self, *args, **kwargs):
if 'max_length' not in kwargs:
kwargs['max_length'] = 300 # Standardize
super(AnyUrlField, self).__init__(*args, **kwargs)
def south_field_triple(self):
# Masquerade as normal URLField, so the soft-dependency also exists in the migrations.
from south.modelsinspector import introspector
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
args, kwargs = introspector(self)
return (path, args, kwargs)
def deconstruct(self):
# For Django 1.7 migrations, masquerade as normal URLField too
name, path, args, kwargs = super(AnyUrlField, self).deconstruct()
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
return name, path, args, kwargs
|
"""
Optional integration with django-any-urlfield
"""
from __future__ import absolute_import
from django.db import models
from fluent_utils.django_compat import is_installed
if is_installed('any_urlfield'):
from any_urlfield.models import AnyUrlField as BaseUrlField
else:
BaseUrlField = models.URLField
# subclassing here so South or Django migrations detect a single class.
class AnyUrlField(BaseUrlField):
"""
A CharField that can either refer to a CMS page ID, or external URL.
If *django-any-urlfield* is not installed, only regular URLs can be used.
"""
def __init__(self, *args, **kwargs):
if 'max_length' not in kwargs:
kwargs['max_length'] = 300 # Standardize
super(AnyUrlField, self).__init__(*args, **kwargs)
def south_field_triple(self):
# Masquerade as normal URLField, so the soft-dependency also exists in the migrations.
from south.modelsinspector import introspector
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
args, kwargs = introspector(self)
return (path, args, kwargs)
def deconstruct(self):
# For Django 1.7 migrations, masquerade as normal URLField too
name, path, args, kwargs = super(AnyUrlField, self).deconstruct()
path = "django.db.models.{}".format(models.URLField.__name__)
return name, path, args, kwargs
|
Fix AnyUrlField migration issue on Django 1.11.
|
Fix AnyUrlField migration issue on Django 1.11.
|
Python
|
apache-2.0
|
edoburu/django-fluent-utils
|
"""
Optional integration with django-any-urlfield
"""
from __future__ import absolute_import
from django.db import models
from fluent_utils.django_compat import is_installed
if is_installed('any_urlfield'):
from any_urlfield.models import AnyUrlField as BaseUrlField
else:
BaseUrlField = models.URLField
# subclassing here so South or Django migrations detect a single class.
class AnyUrlField(BaseUrlField):
"""
A CharField that can either refer to a CMS page ID, or external URL.
If *django-any-urlfield* is not installed, only regular URLs can be used.
"""
def __init__(self, *args, **kwargs):
if 'max_length' not in kwargs:
kwargs['max_length'] = 300 # Standardize
super(AnyUrlField, self).__init__(*args, **kwargs)
def south_field_triple(self):
# Masquerade as normal URLField, so the soft-dependency also exists in the migrations.
from south.modelsinspector import introspector
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
args, kwargs = introspector(self)
return (path, args, kwargs)
def deconstruct(self):
# For Django 1.7 migrations, masquerade as normal URLField too
name, path, args, kwargs = super(AnyUrlField, self).deconstruct()
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
return name, path, args, kwargs
Fix AnyUrlField migration issue on Django 1.11.
|
"""
Optional integration with django-any-urlfield
"""
from __future__ import absolute_import
from django.db import models
from fluent_utils.django_compat import is_installed
if is_installed('any_urlfield'):
from any_urlfield.models import AnyUrlField as BaseUrlField
else:
BaseUrlField = models.URLField
# subclassing here so South or Django migrations detect a single class.
class AnyUrlField(BaseUrlField):
"""
A CharField that can either refer to a CMS page ID, or external URL.
If *django-any-urlfield* is not installed, only regular URLs can be used.
"""
def __init__(self, *args, **kwargs):
if 'max_length' not in kwargs:
kwargs['max_length'] = 300 # Standardize
super(AnyUrlField, self).__init__(*args, **kwargs)
def south_field_triple(self):
# Masquerade as normal URLField, so the soft-dependency also exists in the migrations.
from south.modelsinspector import introspector
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
args, kwargs = introspector(self)
return (path, args, kwargs)
def deconstruct(self):
# For Django 1.7 migrations, masquerade as normal URLField too
name, path, args, kwargs = super(AnyUrlField, self).deconstruct()
path = "django.db.models.{}".format(models.URLField.__name__)
return name, path, args, kwargs
|
<commit_before>"""
Optional integration with django-any-urlfield
"""
from __future__ import absolute_import
from django.db import models
from fluent_utils.django_compat import is_installed
if is_installed('any_urlfield'):
from any_urlfield.models import AnyUrlField as BaseUrlField
else:
BaseUrlField = models.URLField
# subclassing here so South or Django migrations detect a single class.
class AnyUrlField(BaseUrlField):
"""
A CharField that can either refer to a CMS page ID, or external URL.
If *django-any-urlfield* is not installed, only regular URLs can be used.
"""
def __init__(self, *args, **kwargs):
if 'max_length' not in kwargs:
kwargs['max_length'] = 300 # Standardize
super(AnyUrlField, self).__init__(*args, **kwargs)
def south_field_triple(self):
# Masquerade as normal URLField, so the soft-dependency also exists in the migrations.
from south.modelsinspector import introspector
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
args, kwargs = introspector(self)
return (path, args, kwargs)
def deconstruct(self):
# For Django 1.7 migrations, masquerade as normal URLField too
name, path, args, kwargs = super(AnyUrlField, self).deconstruct()
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
return name, path, args, kwargs
<commit_msg>Fix AnyUrlField migration issue on Django 1.11.<commit_after>
|
"""
Optional integration with django-any-urlfield
"""
from __future__ import absolute_import
from django.db import models
from fluent_utils.django_compat import is_installed
if is_installed('any_urlfield'):
from any_urlfield.models import AnyUrlField as BaseUrlField
else:
BaseUrlField = models.URLField
# subclassing here so South or Django migrations detect a single class.
class AnyUrlField(BaseUrlField):
"""
A CharField that can either refer to a CMS page ID, or external URL.
If *django-any-urlfield* is not installed, only regular URLs can be used.
"""
def __init__(self, *args, **kwargs):
if 'max_length' not in kwargs:
kwargs['max_length'] = 300 # Standardize
super(AnyUrlField, self).__init__(*args, **kwargs)
def south_field_triple(self):
# Masquerade as normal URLField, so the soft-dependency also exists in the migrations.
from south.modelsinspector import introspector
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
args, kwargs = introspector(self)
return (path, args, kwargs)
def deconstruct(self):
# For Django 1.7 migrations, masquerade as normal URLField too
name, path, args, kwargs = super(AnyUrlField, self).deconstruct()
path = "django.db.models.{}".format(models.URLField.__name__)
return name, path, args, kwargs
|
"""
Optional integration with django-any-urlfield
"""
from __future__ import absolute_import
from django.db import models
from fluent_utils.django_compat import is_installed
if is_installed('any_urlfield'):
from any_urlfield.models import AnyUrlField as BaseUrlField
else:
BaseUrlField = models.URLField
# subclassing here so South or Django migrations detect a single class.
class AnyUrlField(BaseUrlField):
"""
A CharField that can either refer to a CMS page ID, or external URL.
If *django-any-urlfield* is not installed, only regular URLs can be used.
"""
def __init__(self, *args, **kwargs):
if 'max_length' not in kwargs:
kwargs['max_length'] = 300 # Standardize
super(AnyUrlField, self).__init__(*args, **kwargs)
def south_field_triple(self):
# Masquerade as normal URLField, so the soft-dependency also exists in the migrations.
from south.modelsinspector import introspector
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
args, kwargs = introspector(self)
return (path, args, kwargs)
def deconstruct(self):
# For Django 1.7 migrations, masquerade as normal URLField too
name, path, args, kwargs = super(AnyUrlField, self).deconstruct()
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
return name, path, args, kwargs
Fix AnyUrlField migration issue on Django 1.11."""
Optional integration with django-any-urlfield
"""
from __future__ import absolute_import
from django.db import models
from fluent_utils.django_compat import is_installed
if is_installed('any_urlfield'):
from any_urlfield.models import AnyUrlField as BaseUrlField
else:
BaseUrlField = models.URLField
# subclassing here so South or Django migrations detect a single class.
class AnyUrlField(BaseUrlField):
"""
A CharField that can either refer to a CMS page ID, or external URL.
If *django-any-urlfield* is not installed, only regular URLs can be used.
"""
def __init__(self, *args, **kwargs):
if 'max_length' not in kwargs:
kwargs['max_length'] = 300 # Standardize
super(AnyUrlField, self).__init__(*args, **kwargs)
def south_field_triple(self):
# Masquerade as normal URLField, so the soft-dependency also exists in the migrations.
from south.modelsinspector import introspector
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
args, kwargs = introspector(self)
return (path, args, kwargs)
def deconstruct(self):
# For Django 1.7 migrations, masquerade as normal URLField too
name, path, args, kwargs = super(AnyUrlField, self).deconstruct()
path = "django.db.models.{}".format(models.URLField.__name__)
return name, path, args, kwargs
|
<commit_before>"""
Optional integration with django-any-urlfield
"""
from __future__ import absolute_import
from django.db import models
from fluent_utils.django_compat import is_installed
if is_installed('any_urlfield'):
from any_urlfield.models import AnyUrlField as BaseUrlField
else:
BaseUrlField = models.URLField
# subclassing here so South or Django migrations detect a single class.
class AnyUrlField(BaseUrlField):
"""
A CharField that can either refer to a CMS page ID, or external URL.
If *django-any-urlfield* is not installed, only regular URLs can be used.
"""
def __init__(self, *args, **kwargs):
if 'max_length' not in kwargs:
kwargs['max_length'] = 300 # Standardize
super(AnyUrlField, self).__init__(*args, **kwargs)
def south_field_triple(self):
# Masquerade as normal URLField, so the soft-dependency also exists in the migrations.
from south.modelsinspector import introspector
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
args, kwargs = introspector(self)
return (path, args, kwargs)
def deconstruct(self):
# For Django 1.7 migrations, masquerade as normal URLField too
name, path, args, kwargs = super(AnyUrlField, self).deconstruct()
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
return name, path, args, kwargs
<commit_msg>Fix AnyUrlField migration issue on Django 1.11.<commit_after>"""
Optional integration with django-any-urlfield
"""
from __future__ import absolute_import
from django.db import models
from fluent_utils.django_compat import is_installed
if is_installed('any_urlfield'):
from any_urlfield.models import AnyUrlField as BaseUrlField
else:
BaseUrlField = models.URLField
# subclassing here so South or Django migrations detect a single class.
class AnyUrlField(BaseUrlField):
"""
A CharField that can either refer to a CMS page ID, or external URL.
If *django-any-urlfield* is not installed, only regular URLs can be used.
"""
def __init__(self, *args, **kwargs):
if 'max_length' not in kwargs:
kwargs['max_length'] = 300 # Standardize
super(AnyUrlField, self).__init__(*args, **kwargs)
def south_field_triple(self):
# Masquerade as normal URLField, so the soft-dependency also exists in the migrations.
from south.modelsinspector import introspector
path = "{0}.{1}".format(models.URLField.__module__, models.URLField.__name__)
args, kwargs = introspector(self)
return (path, args, kwargs)
def deconstruct(self):
# For Django 1.7 migrations, masquerade as normal URLField too
name, path, args, kwargs = super(AnyUrlField, self).deconstruct()
path = "django.db.models.{}".format(models.URLField.__name__)
return name, path, args, kwargs
|
d981b34dc18236cf857d1249629b6437005e073f
|
openmc/capi/__init__.py
|
openmc/capi/__init__.py
|
"""
This module provides bindings to C functions defined by OpenMC shared library.
When the :mod:`openmc` package is imported, the OpenMC shared library is
automatically loaded. Calls to the OpenMC library can then be via functions or
objects in the :mod:`openmc.capi` subpackage, for example:
.. code-block:: python
openmc.capi.init()
openmc.capi.run()
openmc.capi.finalize()
"""
from ctypes import CDLL
import os
import sys
from warnings import warn
import pkg_resources
# Determine shared-library suffix
if sys.platform == 'darwin':
_suffix = 'dylib'
else:
_suffix = 'so'
if os.environ.get('READTHEDOCS', None) != 'True':
# Open shared library
_filename = pkg_resources.resource_filename(
__name__, 'libopenmc.{}'.format(_suffix))
_dll = CDLL(_filename)
else:
# For documentation builds, we don't actually have the shared library
# available. Instead, we create a mock object so that when the modules
# within the openmc.capi package try to configure arguments and return
# values for symbols, no errors occur
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock
_dll = Mock()
from .error import *
from .core import *
from .nuclide import *
from .material import *
from .cell import *
from .filter import *
from .tally import *
from .settings import settings
warn("The Python bindings to OpenMC's C API are still unstable "
"and may change substantially in future releases.", FutureWarning)
|
"""
This module provides bindings to C functions defined by OpenMC shared library.
When the :mod:`openmc` package is imported, the OpenMC shared library is
automatically loaded. Calls to the OpenMC library can then be via functions or
objects in the :mod:`openmc.capi` subpackage, for example:
.. code-block:: python
openmc.capi.init()
openmc.capi.run()
openmc.capi.finalize()
"""
from ctypes import CDLL
import os
import sys
import pkg_resources
# Determine shared-library suffix
if sys.platform == 'darwin':
_suffix = 'dylib'
else:
_suffix = 'so'
if os.environ.get('READTHEDOCS', None) != 'True':
# Open shared library
_filename = pkg_resources.resource_filename(
__name__, 'libopenmc.{}'.format(_suffix))
_dll = CDLL(_filename)
else:
# For documentation builds, we don't actually have the shared library
# available. Instead, we create a mock object so that when the modules
# within the openmc.capi package try to configure arguments and return
# values for symbols, no errors occur
from unittest.mock import Mock
_dll = Mock()
from .error import *
from .core import *
from .nuclide import *
from .material import *
from .cell import *
from .filter import *
from .tally import *
from .settings import settings
|
Remove FutureWarning for capi import
|
Remove FutureWarning for capi import
|
Python
|
mit
|
mit-crpg/openmc,shikhar413/openmc,smharper/openmc,amandalund/openmc,amandalund/openmc,wbinventor/openmc,walshjon/openmc,amandalund/openmc,paulromano/openmc,mit-crpg/openmc,walshjon/openmc,paulromano/openmc,liangjg/openmc,mit-crpg/openmc,johnnyliu27/openmc,amandalund/openmc,walshjon/openmc,shikhar413/openmc,shikhar413/openmc,mit-crpg/openmc,smharper/openmc,johnnyliu27/openmc,paulromano/openmc,smharper/openmc,wbinventor/openmc,wbinventor/openmc,liangjg/openmc,johnnyliu27/openmc,shikhar413/openmc,paulromano/openmc,walshjon/openmc,johnnyliu27/openmc,liangjg/openmc,liangjg/openmc,wbinventor/openmc,smharper/openmc
|
"""
This module provides bindings to C functions defined by OpenMC shared library.
When the :mod:`openmc` package is imported, the OpenMC shared library is
automatically loaded. Calls to the OpenMC library can then be via functions or
objects in the :mod:`openmc.capi` subpackage, for example:
.. code-block:: python
openmc.capi.init()
openmc.capi.run()
openmc.capi.finalize()
"""
from ctypes import CDLL
import os
import sys
from warnings import warn
import pkg_resources
# Determine shared-library suffix
if sys.platform == 'darwin':
_suffix = 'dylib'
else:
_suffix = 'so'
if os.environ.get('READTHEDOCS', None) != 'True':
# Open shared library
_filename = pkg_resources.resource_filename(
__name__, 'libopenmc.{}'.format(_suffix))
_dll = CDLL(_filename)
else:
# For documentation builds, we don't actually have the shared library
# available. Instead, we create a mock object so that when the modules
# within the openmc.capi package try to configure arguments and return
# values for symbols, no errors occur
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock
_dll = Mock()
from .error import *
from .core import *
from .nuclide import *
from .material import *
from .cell import *
from .filter import *
from .tally import *
from .settings import settings
warn("The Python bindings to OpenMC's C API are still unstable "
"and may change substantially in future releases.", FutureWarning)
Remove FutureWarning for capi import
|
"""
This module provides bindings to C functions defined by OpenMC shared library.
When the :mod:`openmc` package is imported, the OpenMC shared library is
automatically loaded. Calls to the OpenMC library can then be via functions or
objects in the :mod:`openmc.capi` subpackage, for example:
.. code-block:: python
openmc.capi.init()
openmc.capi.run()
openmc.capi.finalize()
"""
from ctypes import CDLL
import os
import sys
import pkg_resources
# Determine shared-library suffix
if sys.platform == 'darwin':
_suffix = 'dylib'
else:
_suffix = 'so'
if os.environ.get('READTHEDOCS', None) != 'True':
# Open shared library
_filename = pkg_resources.resource_filename(
__name__, 'libopenmc.{}'.format(_suffix))
_dll = CDLL(_filename)
else:
# For documentation builds, we don't actually have the shared library
# available. Instead, we create a mock object so that when the modules
# within the openmc.capi package try to configure arguments and return
# values for symbols, no errors occur
from unittest.mock import Mock
_dll = Mock()
from .error import *
from .core import *
from .nuclide import *
from .material import *
from .cell import *
from .filter import *
from .tally import *
from .settings import settings
|
<commit_before>"""
This module provides bindings to C functions defined by OpenMC shared library.
When the :mod:`openmc` package is imported, the OpenMC shared library is
automatically loaded. Calls to the OpenMC library can then be via functions or
objects in the :mod:`openmc.capi` subpackage, for example:
.. code-block:: python
openmc.capi.init()
openmc.capi.run()
openmc.capi.finalize()
"""
from ctypes import CDLL
import os
import sys
from warnings import warn
import pkg_resources
# Determine shared-library suffix
if sys.platform == 'darwin':
_suffix = 'dylib'
else:
_suffix = 'so'
if os.environ.get('READTHEDOCS', None) != 'True':
# Open shared library
_filename = pkg_resources.resource_filename(
__name__, 'libopenmc.{}'.format(_suffix))
_dll = CDLL(_filename)
else:
# For documentation builds, we don't actually have the shared library
# available. Instead, we create a mock object so that when the modules
# within the openmc.capi package try to configure arguments and return
# values for symbols, no errors occur
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock
_dll = Mock()
from .error import *
from .core import *
from .nuclide import *
from .material import *
from .cell import *
from .filter import *
from .tally import *
from .settings import settings
warn("The Python bindings to OpenMC's C API are still unstable "
"and may change substantially in future releases.", FutureWarning)
<commit_msg>Remove FutureWarning for capi import<commit_after>
|
"""
This module provides bindings to C functions defined by OpenMC shared library.
When the :mod:`openmc` package is imported, the OpenMC shared library is
automatically loaded. Calls to the OpenMC library can then be via functions or
objects in the :mod:`openmc.capi` subpackage, for example:
.. code-block:: python
openmc.capi.init()
openmc.capi.run()
openmc.capi.finalize()
"""
from ctypes import CDLL
import os
import sys
import pkg_resources
# Determine shared-library suffix
if sys.platform == 'darwin':
_suffix = 'dylib'
else:
_suffix = 'so'
if os.environ.get('READTHEDOCS', None) != 'True':
# Open shared library
_filename = pkg_resources.resource_filename(
__name__, 'libopenmc.{}'.format(_suffix))
_dll = CDLL(_filename)
else:
# For documentation builds, we don't actually have the shared library
# available. Instead, we create a mock object so that when the modules
# within the openmc.capi package try to configure arguments and return
# values for symbols, no errors occur
from unittest.mock import Mock
_dll = Mock()
from .error import *
from .core import *
from .nuclide import *
from .material import *
from .cell import *
from .filter import *
from .tally import *
from .settings import settings
|
"""
This module provides bindings to C functions defined by OpenMC shared library.
When the :mod:`openmc` package is imported, the OpenMC shared library is
automatically loaded. Calls to the OpenMC library can then be via functions or
objects in the :mod:`openmc.capi` subpackage, for example:
.. code-block:: python
openmc.capi.init()
openmc.capi.run()
openmc.capi.finalize()
"""
from ctypes import CDLL
import os
import sys
from warnings import warn
import pkg_resources
# Determine shared-library suffix
if sys.platform == 'darwin':
_suffix = 'dylib'
else:
_suffix = 'so'
if os.environ.get('READTHEDOCS', None) != 'True':
# Open shared library
_filename = pkg_resources.resource_filename(
__name__, 'libopenmc.{}'.format(_suffix))
_dll = CDLL(_filename)
else:
# For documentation builds, we don't actually have the shared library
# available. Instead, we create a mock object so that when the modules
# within the openmc.capi package try to configure arguments and return
# values for symbols, no errors occur
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock
_dll = Mock()
from .error import *
from .core import *
from .nuclide import *
from .material import *
from .cell import *
from .filter import *
from .tally import *
from .settings import settings
warn("The Python bindings to OpenMC's C API are still unstable "
"and may change substantially in future releases.", FutureWarning)
Remove FutureWarning for capi import"""
This module provides bindings to C functions defined by OpenMC shared library.
When the :mod:`openmc` package is imported, the OpenMC shared library is
automatically loaded. Calls to the OpenMC library can then be via functions or
objects in the :mod:`openmc.capi` subpackage, for example:
.. code-block:: python
openmc.capi.init()
openmc.capi.run()
openmc.capi.finalize()
"""
from ctypes import CDLL
import os
import sys
import pkg_resources
# Determine shared-library suffix
if sys.platform == 'darwin':
_suffix = 'dylib'
else:
_suffix = 'so'
if os.environ.get('READTHEDOCS', None) != 'True':
# Open shared library
_filename = pkg_resources.resource_filename(
__name__, 'libopenmc.{}'.format(_suffix))
_dll = CDLL(_filename)
else:
# For documentation builds, we don't actually have the shared library
# available. Instead, we create a mock object so that when the modules
# within the openmc.capi package try to configure arguments and return
# values for symbols, no errors occur
from unittest.mock import Mock
_dll = Mock()
from .error import *
from .core import *
from .nuclide import *
from .material import *
from .cell import *
from .filter import *
from .tally import *
from .settings import settings
|
<commit_before>"""
This module provides bindings to C functions defined by OpenMC shared library.
When the :mod:`openmc` package is imported, the OpenMC shared library is
automatically loaded. Calls to the OpenMC library can then be via functions or
objects in the :mod:`openmc.capi` subpackage, for example:
.. code-block:: python
openmc.capi.init()
openmc.capi.run()
openmc.capi.finalize()
"""
from ctypes import CDLL
import os
import sys
from warnings import warn
import pkg_resources
# Determine shared-library suffix
if sys.platform == 'darwin':
_suffix = 'dylib'
else:
_suffix = 'so'
if os.environ.get('READTHEDOCS', None) != 'True':
# Open shared library
_filename = pkg_resources.resource_filename(
__name__, 'libopenmc.{}'.format(_suffix))
_dll = CDLL(_filename)
else:
# For documentation builds, we don't actually have the shared library
# available. Instead, we create a mock object so that when the modules
# within the openmc.capi package try to configure arguments and return
# values for symbols, no errors occur
try:
from unittest.mock import Mock
except ImportError:
from mock import Mock
_dll = Mock()
from .error import *
from .core import *
from .nuclide import *
from .material import *
from .cell import *
from .filter import *
from .tally import *
from .settings import settings
warn("The Python bindings to OpenMC's C API are still unstable "
"and may change substantially in future releases.", FutureWarning)
<commit_msg>Remove FutureWarning for capi import<commit_after>"""
This module provides bindings to C functions defined by OpenMC shared library.
When the :mod:`openmc` package is imported, the OpenMC shared library is
automatically loaded. Calls to the OpenMC library can then be via functions or
objects in the :mod:`openmc.capi` subpackage, for example:
.. code-block:: python
openmc.capi.init()
openmc.capi.run()
openmc.capi.finalize()
"""
from ctypes import CDLL
import os
import sys
import pkg_resources
# Determine shared-library suffix
if sys.platform == 'darwin':
_suffix = 'dylib'
else:
_suffix = 'so'
if os.environ.get('READTHEDOCS', None) != 'True':
# Open shared library
_filename = pkg_resources.resource_filename(
__name__, 'libopenmc.{}'.format(_suffix))
_dll = CDLL(_filename)
else:
# For documentation builds, we don't actually have the shared library
# available. Instead, we create a mock object so that when the modules
# within the openmc.capi package try to configure arguments and return
# values for symbols, no errors occur
from unittest.mock import Mock
_dll = Mock()
from .error import *
from .core import *
from .nuclide import *
from .material import *
from .cell import *
from .filter import *
from .tally import *
from .settings import settings
|
56f27099a8f7be39a6d8848a9378af6ed48f528f
|
bongo/apps/frontend/tests/templatetags_tests.py
|
bongo/apps/frontend/tests/templatetags_tests.py
|
from django.test import TestCase
from django.conf import settings
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, article.__dict__)
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
|
from django.test import TestCase
from django.conf import settings
from django.utils.html import escape
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, escape(article.__dict__))
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
|
Fix broken test (not the intermittent one, this was just a dumb thing)
|
Fix broken test (not the intermittent one, this was just a dumb thing)
|
Python
|
mit
|
BowdoinOrient/bongo,BowdoinOrient/bongo,BowdoinOrient/bongo,BowdoinOrient/bongo
|
from django.test import TestCase
from django.conf import settings
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, article.__dict__)
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
Fix broken test (not the intermittent one, this was just a dumb thing)
|
from django.test import TestCase
from django.conf import settings
from django.utils.html import escape
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, escape(article.__dict__))
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
|
<commit_before>from django.test import TestCase
from django.conf import settings
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, article.__dict__)
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
<commit_msg>Fix broken test (not the intermittent one, this was just a dumb thing)<commit_after>
|
from django.test import TestCase
from django.conf import settings
from django.utils.html import escape
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, escape(article.__dict__))
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
|
from django.test import TestCase
from django.conf import settings
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, article.__dict__)
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
Fix broken test (not the intermittent one, this was just a dumb thing)from django.test import TestCase
from django.conf import settings
from django.utils.html import escape
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, escape(article.__dict__))
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
|
<commit_before>from django.test import TestCase
from django.conf import settings
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, article.__dict__)
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
<commit_msg>Fix broken test (not the intermittent one, this was just a dumb thing)<commit_after>from django.test import TestCase
from django.conf import settings
from django.utils.html import escape
from django.template import Context, Template
from bongo.apps.bongo.tests import factories
def render_template(string, context=None):
context = Context(context) if context else None
return Template(string).render(context)
class TemplateTagsTestCase(TestCase):
def test_dump_templatetag(self):
"""Test the dump() template tag (print object representation)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load dump %}{{ article | dump }}",
context = {
"article": article
}
)
self.assertEqual(rendered, escape(article.__dict__))
def test_class_name_templatetag(self):
"""Test the class_name() template tag (print object's class name)"""
article = factories.PostFactory.create()
rendered = render_template(
"{% load class_name %}{{ article | class_name }}",
context = {
"article": article
}
)
self.assertEqual(rendered, "Post")
|
347f4440deb7b0cce9fd0dcb6e26dbda340f437c
|
planetstack/openstack_observer/steps/sync_images.py
|
planetstack/openstack_observer/steps/sync_images.py
|
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
for f in os.listdir(images_path):
if os.path.isfile(os.path.join(images_path ,f)):
available_images[f] = os.path.join(images_path ,f)
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
|
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
if os.path.exists(images_path):
for f in os.listdir(images_path):
filename = os.path.join(images_path, f)
if os.path.isfile(filename):
available_images[f] = filename
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
|
Check the existence of the images_path
|
Check the existence of the images_path
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' BEG TRACEBACK
Traceback (most recent call last):
File "/opt/xos/observer/event_loop.py", line 349, in sync
failed_objects = sync_step(failed=list(self.failed_step_objects), deletion=deletion)
File "/opt/xos/observer/openstacksyncstep.py", line 14, in __call__
return self.call(**args)
File "/opt/xos/observer/syncstep.py", line 97, in call
pending = self.fetch_pending(deletion)
File "/opt/xos/observer/steps/sync_images.py", line 22, in fetch_pending
for f in os.listdir(images_path):
OSError: [Errno 2] No such file or directory: '/opt/xos/images'
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' END TRACEBACK
Signed-off-by: S.Çağlar Onur <acf5ae661bb0a9f738c88a741b1d35ac69ab5408@10ur.org>
|
Python
|
apache-2.0
|
jermowery/xos,cboling/xos,cboling/xos,xmaruto/mcord,jermowery/xos,cboling/xos,xmaruto/mcord,xmaruto/mcord,xmaruto/mcord,jermowery/xos,cboling/xos,jermowery/xos,cboling/xos
|
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
for f in os.listdir(images_path):
if os.path.isfile(os.path.join(images_path ,f)):
available_images[f] = os.path.join(images_path ,f)
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
Check the existence of the images_path
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' BEG TRACEBACK
Traceback (most recent call last):
File "/opt/xos/observer/event_loop.py", line 349, in sync
failed_objects = sync_step(failed=list(self.failed_step_objects), deletion=deletion)
File "/opt/xos/observer/openstacksyncstep.py", line 14, in __call__
return self.call(**args)
File "/opt/xos/observer/syncstep.py", line 97, in call
pending = self.fetch_pending(deletion)
File "/opt/xos/observer/steps/sync_images.py", line 22, in fetch_pending
for f in os.listdir(images_path):
OSError: [Errno 2] No such file or directory: '/opt/xos/images'
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' END TRACEBACK
Signed-off-by: S.Çağlar Onur <acf5ae661bb0a9f738c88a741b1d35ac69ab5408@10ur.org>
|
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
if os.path.exists(images_path):
for f in os.listdir(images_path):
filename = os.path.join(images_path, f)
if os.path.isfile(filename):
available_images[f] = filename
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
|
<commit_before>import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
for f in os.listdir(images_path):
if os.path.isfile(os.path.join(images_path ,f)):
available_images[f] = os.path.join(images_path ,f)
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
<commit_msg>Check the existence of the images_path
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' BEG TRACEBACK
Traceback (most recent call last):
File "/opt/xos/observer/event_loop.py", line 349, in sync
failed_objects = sync_step(failed=list(self.failed_step_objects), deletion=deletion)
File "/opt/xos/observer/openstacksyncstep.py", line 14, in __call__
return self.call(**args)
File "/opt/xos/observer/syncstep.py", line 97, in call
pending = self.fetch_pending(deletion)
File "/opt/xos/observer/steps/sync_images.py", line 22, in fetch_pending
for f in os.listdir(images_path):
OSError: [Errno 2] No such file or directory: '/opt/xos/images'
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' END TRACEBACK
Signed-off-by: S.Çağlar Onur <acf5ae661bb0a9f738c88a741b1d35ac69ab5408@10ur.org><commit_after>
|
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
if os.path.exists(images_path):
for f in os.listdir(images_path):
filename = os.path.join(images_path, f)
if os.path.isfile(filename):
available_images[f] = filename
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
|
import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
for f in os.listdir(images_path):
if os.path.isfile(os.path.join(images_path ,f)):
available_images[f] = os.path.join(images_path ,f)
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
Check the existence of the images_path
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' BEG TRACEBACK
Traceback (most recent call last):
File "/opt/xos/observer/event_loop.py", line 349, in sync
failed_objects = sync_step(failed=list(self.failed_step_objects), deletion=deletion)
File "/opt/xos/observer/openstacksyncstep.py", line 14, in __call__
return self.call(**args)
File "/opt/xos/observer/syncstep.py", line 97, in call
pending = self.fetch_pending(deletion)
File "/opt/xos/observer/steps/sync_images.py", line 22, in fetch_pending
for f in os.listdir(images_path):
OSError: [Errno 2] No such file or directory: '/opt/xos/images'
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' END TRACEBACK
Signed-off-by: S.Çağlar Onur <acf5ae661bb0a9f738c88a741b1d35ac69ab5408@10ur.org>import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
if os.path.exists(images_path):
for f in os.listdir(images_path):
filename = os.path.join(images_path, f)
if os.path.isfile(filename):
available_images[f] = filename
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
|
<commit_before>import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
for f in os.listdir(images_path):
if os.path.isfile(os.path.join(images_path ,f)):
available_images[f] = os.path.join(images_path ,f)
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
<commit_msg>Check the existence of the images_path
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' BEG TRACEBACK
Traceback (most recent call last):
File "/opt/xos/observer/event_loop.py", line 349, in sync
failed_objects = sync_step(failed=list(self.failed_step_objects), deletion=deletion)
File "/opt/xos/observer/openstacksyncstep.py", line 14, in __call__
return self.call(**args)
File "/opt/xos/observer/syncstep.py", line 97, in call
pending = self.fetch_pending(deletion)
File "/opt/xos/observer/steps/sync_images.py", line 22, in fetch_pending
for f in os.listdir(images_path):
OSError: [Errno 2] No such file or directory: '/opt/xos/images'
ERROR:planetstack.log:[Errno 2] No such file or directory: '/opt/xos/images' END TRACEBACK
Signed-off-by: S.Çağlar Onur <acf5ae661bb0a9f738c88a741b1d35ac69ab5408@10ur.org><commit_after>import os
import base64
from django.db.models import F, Q
from xos.config import Config
from observer.openstacksyncstep import OpenStackSyncStep
from core.models.image import Image
class SyncImages(OpenStackSyncStep):
provides=[Image]
requested_interval=0
observes=Image
def fetch_pending(self, deleted):
# Images come from the back end
# You can't delete them
if (deleted):
return []
# get list of images on disk
images_path = Config().observer_images_directory
available_images = {}
if os.path.exists(images_path):
for f in os.listdir(images_path):
filename = os.path.join(images_path, f)
if os.path.isfile(filename):
available_images[f] = filename
images = Image.objects.all()
image_names = [image.name for image in images]
for image_name in available_images:
#remove file extension
clean_name = ".".join(image_name.split('.')[:-1])
if clean_name not in image_names:
image = Image(name=clean_name,
disk_format='raw',
container_format='bare',
path = available_images[image_name])
image.save()
return Image.objects.filter(Q(enacted__lt=F('updated')) | Q(enacted=None))
def sync_record(self, image):
image.save()
|
7805f06446f31ac483ba219147f4053661e86647
|
penchy/jobs/__init__.py
|
penchy/jobs/__init__.py
|
from job import *
from jvms import *
from tools import *
from filters import *
from workloads import *
# all job elements that are interesting for the user have to be enumerated here
__all__ = [
# job
'Job',
'JVMNodeConfiguration',
# jvm
'JVM',
'ValgrindJVM',
# filters
# workloads
'Dacapo',
'ScalaBench',
# tools
'Tamiflex',
'HProf'
]
|
from job import *
import jvms
import tools
import filters
import workloads
from dependency import Edge
JVM = jvms.JVM
# all job elements that are interesting for the user have to be enumerated here
__all__ = [
# job
'Job',
'JVMNodeConfiguration',
# dependencies
'Edge',
# jvms
'JVM',
# modules
'jvms'
'filters'
'workloads'
'tools'
]
|
Restructure jobs interface to match job description docs.
|
Restructure jobs interface to match job description docs.
Signed-off-by: Michael Markert <5eb998b7ac86da375651a4cd767b88c9dad25896@googlemail.com>
|
Python
|
mit
|
fhirschmann/penchy,fhirschmann/penchy
|
from job import *
from jvms import *
from tools import *
from filters import *
from workloads import *
# all job elements that are interesting for the user have to be enumerated here
__all__ = [
# job
'Job',
'JVMNodeConfiguration',
# jvm
'JVM',
'ValgrindJVM',
# filters
# workloads
'Dacapo',
'ScalaBench',
# tools
'Tamiflex',
'HProf'
]
Restructure jobs interface to match job description docs.
Signed-off-by: Michael Markert <5eb998b7ac86da375651a4cd767b88c9dad25896@googlemail.com>
|
from job import *
import jvms
import tools
import filters
import workloads
from dependency import Edge
JVM = jvms.JVM
# all job elements that are interesting for the user have to be enumerated here
__all__ = [
# job
'Job',
'JVMNodeConfiguration',
# dependencies
'Edge',
# jvms
'JVM',
# modules
'jvms'
'filters'
'workloads'
'tools'
]
|
<commit_before>from job import *
from jvms import *
from tools import *
from filters import *
from workloads import *
# all job elements that are interesting for the user have to be enumerated here
__all__ = [
# job
'Job',
'JVMNodeConfiguration',
# jvm
'JVM',
'ValgrindJVM',
# filters
# workloads
'Dacapo',
'ScalaBench',
# tools
'Tamiflex',
'HProf'
]
<commit_msg>Restructure jobs interface to match job description docs.
Signed-off-by: Michael Markert <5eb998b7ac86da375651a4cd767b88c9dad25896@googlemail.com><commit_after>
|
from job import *
import jvms
import tools
import filters
import workloads
from dependency import Edge
JVM = jvms.JVM
# all job elements that are interesting for the user have to be enumerated here
__all__ = [
# job
'Job',
'JVMNodeConfiguration',
# dependencies
'Edge',
# jvms
'JVM',
# modules
'jvms'
'filters'
'workloads'
'tools'
]
|
from job import *
from jvms import *
from tools import *
from filters import *
from workloads import *
# all job elements that are interesting for the user have to be enumerated here
__all__ = [
# job
'Job',
'JVMNodeConfiguration',
# jvm
'JVM',
'ValgrindJVM',
# filters
# workloads
'Dacapo',
'ScalaBench',
# tools
'Tamiflex',
'HProf'
]
Restructure jobs interface to match job description docs.
Signed-off-by: Michael Markert <5eb998b7ac86da375651a4cd767b88c9dad25896@googlemail.com>from job import *
import jvms
import tools
import filters
import workloads
from dependency import Edge
JVM = jvms.JVM
# all job elements that are interesting for the user have to be enumerated here
__all__ = [
# job
'Job',
'JVMNodeConfiguration',
# dependencies
'Edge',
# jvms
'JVM',
# modules
'jvms'
'filters'
'workloads'
'tools'
]
|
<commit_before>from job import *
from jvms import *
from tools import *
from filters import *
from workloads import *
# all job elements that are interesting for the user have to be enumerated here
__all__ = [
# job
'Job',
'JVMNodeConfiguration',
# jvm
'JVM',
'ValgrindJVM',
# filters
# workloads
'Dacapo',
'ScalaBench',
# tools
'Tamiflex',
'HProf'
]
<commit_msg>Restructure jobs interface to match job description docs.
Signed-off-by: Michael Markert <5eb998b7ac86da375651a4cd767b88c9dad25896@googlemail.com><commit_after>from job import *
import jvms
import tools
import filters
import workloads
from dependency import Edge
JVM = jvms.JVM
# all job elements that are interesting for the user have to be enumerated here
__all__ = [
# job
'Job',
'JVMNodeConfiguration',
# dependencies
'Edge',
# jvms
'JVM',
# modules
'jvms'
'filters'
'workloads'
'tools'
]
|
e95d3b9a9075481f22938dd0c577606947900568
|
jumpgate/common/aes.py
|
jumpgate/common/aes.py
|
import base64
from Crypto import Cipher
from jumpgate import config
BLOCK_SIZE = 32
PADDING = '#'
def pad(string):
return string + (BLOCK_SIZE - len(string) % BLOCK_SIZE) * PADDING
def create_cypher():
return Cipher.AES.new(pad(config.CONF['secret_key']))
def encode_aes(string):
cipher = create_cypher()
return base64.b64encode(cipher.encrypt(pad(string)))
def decode_aes(encrypted_string):
cipher = create_cypher()
return cipher.decrypt(base64.b64decode(encrypted_string)).rstrip(PADDING)
|
import base64
from Crypto.Cipher import AES
from jumpgate import config
BLOCK_SIZE = 32
PADDING = '#'
def pad(string):
return string + (BLOCK_SIZE - len(string) % BLOCK_SIZE) * PADDING
def create_cypher():
return AES.new(pad(config.CONF['secret_key']))
def encode_aes(string):
cipher = create_cypher()
return base64.b64encode(cipher.encrypt(pad(string)))
def decode_aes(encrypted_string):
cipher = create_cypher()
return cipher.decrypt(base64.b64decode(encrypted_string)).rstrip(PADDING)
|
Adjust AES import a bit
|
Adjust AES import a bit
|
Python
|
mit
|
softlayer/jumpgate,myxemhoho/mutil-cloud-manage-paltform,HOQTEC/MCP,wpf710/app-proxy,HOQTEC/MCP,softlayer/jumpgate,myxemhoho/mutil-cloud-manage-paltform,wpf710/app-proxy
|
import base64
from Crypto import Cipher
from jumpgate import config
BLOCK_SIZE = 32
PADDING = '#'
def pad(string):
return string + (BLOCK_SIZE - len(string) % BLOCK_SIZE) * PADDING
def create_cypher():
return Cipher.AES.new(pad(config.CONF['secret_key']))
def encode_aes(string):
cipher = create_cypher()
return base64.b64encode(cipher.encrypt(pad(string)))
def decode_aes(encrypted_string):
cipher = create_cypher()
return cipher.decrypt(base64.b64decode(encrypted_string)).rstrip(PADDING)
Adjust AES import a bit
|
import base64
from Crypto.Cipher import AES
from jumpgate import config
BLOCK_SIZE = 32
PADDING = '#'
def pad(string):
return string + (BLOCK_SIZE - len(string) % BLOCK_SIZE) * PADDING
def create_cypher():
return AES.new(pad(config.CONF['secret_key']))
def encode_aes(string):
cipher = create_cypher()
return base64.b64encode(cipher.encrypt(pad(string)))
def decode_aes(encrypted_string):
cipher = create_cypher()
return cipher.decrypt(base64.b64decode(encrypted_string)).rstrip(PADDING)
|
<commit_before>import base64
from Crypto import Cipher
from jumpgate import config
BLOCK_SIZE = 32
PADDING = '#'
def pad(string):
return string + (BLOCK_SIZE - len(string) % BLOCK_SIZE) * PADDING
def create_cypher():
return Cipher.AES.new(pad(config.CONF['secret_key']))
def encode_aes(string):
cipher = create_cypher()
return base64.b64encode(cipher.encrypt(pad(string)))
def decode_aes(encrypted_string):
cipher = create_cypher()
return cipher.decrypt(base64.b64decode(encrypted_string)).rstrip(PADDING)
<commit_msg>Adjust AES import a bit<commit_after>
|
import base64
from Crypto.Cipher import AES
from jumpgate import config
BLOCK_SIZE = 32
PADDING = '#'
def pad(string):
return string + (BLOCK_SIZE - len(string) % BLOCK_SIZE) * PADDING
def create_cypher():
return AES.new(pad(config.CONF['secret_key']))
def encode_aes(string):
cipher = create_cypher()
return base64.b64encode(cipher.encrypt(pad(string)))
def decode_aes(encrypted_string):
cipher = create_cypher()
return cipher.decrypt(base64.b64decode(encrypted_string)).rstrip(PADDING)
|
import base64
from Crypto import Cipher
from jumpgate import config
BLOCK_SIZE = 32
PADDING = '#'
def pad(string):
return string + (BLOCK_SIZE - len(string) % BLOCK_SIZE) * PADDING
def create_cypher():
return Cipher.AES.new(pad(config.CONF['secret_key']))
def encode_aes(string):
cipher = create_cypher()
return base64.b64encode(cipher.encrypt(pad(string)))
def decode_aes(encrypted_string):
cipher = create_cypher()
return cipher.decrypt(base64.b64decode(encrypted_string)).rstrip(PADDING)
Adjust AES import a bitimport base64
from Crypto.Cipher import AES
from jumpgate import config
BLOCK_SIZE = 32
PADDING = '#'
def pad(string):
return string + (BLOCK_SIZE - len(string) % BLOCK_SIZE) * PADDING
def create_cypher():
return AES.new(pad(config.CONF['secret_key']))
def encode_aes(string):
cipher = create_cypher()
return base64.b64encode(cipher.encrypt(pad(string)))
def decode_aes(encrypted_string):
cipher = create_cypher()
return cipher.decrypt(base64.b64decode(encrypted_string)).rstrip(PADDING)
|
<commit_before>import base64
from Crypto import Cipher
from jumpgate import config
BLOCK_SIZE = 32
PADDING = '#'
def pad(string):
return string + (BLOCK_SIZE - len(string) % BLOCK_SIZE) * PADDING
def create_cypher():
return Cipher.AES.new(pad(config.CONF['secret_key']))
def encode_aes(string):
cipher = create_cypher()
return base64.b64encode(cipher.encrypt(pad(string)))
def decode_aes(encrypted_string):
cipher = create_cypher()
return cipher.decrypt(base64.b64decode(encrypted_string)).rstrip(PADDING)
<commit_msg>Adjust AES import a bit<commit_after>import base64
from Crypto.Cipher import AES
from jumpgate import config
BLOCK_SIZE = 32
PADDING = '#'
def pad(string):
return string + (BLOCK_SIZE - len(string) % BLOCK_SIZE) * PADDING
def create_cypher():
return AES.new(pad(config.CONF['secret_key']))
def encode_aes(string):
cipher = create_cypher()
return base64.b64encode(cipher.encrypt(pad(string)))
def decode_aes(encrypted_string):
cipher = create_cypher()
return cipher.decrypt(base64.b64decode(encrypted_string)).rstrip(PADDING)
|
73f2c06ad1e94d8af34764640b89de5b3fba36c5
|
cron_descriptor/GetText.py
|
cron_descriptor/GetText.py
|
# Copyright (C) 2016 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import locale
import gettext
import os
import logging
class GetText(object):
"""
Handles language translations and Initializes global _() function
"""
def __init__(self):
"""Initialize GetText
"""
code, encoding = locale.getlocale()
try:
filename = os.path.join('locale', '{}.mo'.format(code))
trans = gettext.GNUTranslations(open(filename, "rb"))
logging.debug('{} Loaded'.format(filename))
except IOError:
trans = gettext.NullTranslations()
trans.install()
|
# Copyright (C) 2016 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import locale
import gettext
import os
import logging
class GetText(object):
"""
Handles language translations and Initializes global _() function
"""
def __init__(self):
"""Initialize GetText
"""
code, encoding = locale.getlocale()
try:
filename = os.path.join(os.path.abspath(__file__),
'locale', '{}.mo'.format(code))
trans = gettext.GNUTranslations(open(filename, "rb"))
logging.debug('{} Loaded'.format(filename))
except IOError:
trans = gettext.NullTranslations()
trans.install()
|
Fix error on loading other languages
|
Fix error on loading other languages
|
Python
|
mit
|
Salamek/cron-descriptor,Salamek/cron-descriptor
|
# Copyright (C) 2016 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import locale
import gettext
import os
import logging
class GetText(object):
"""
Handles language translations and Initializes global _() function
"""
def __init__(self):
"""Initialize GetText
"""
code, encoding = locale.getlocale()
try:
filename = os.path.join('locale', '{}.mo'.format(code))
trans = gettext.GNUTranslations(open(filename, "rb"))
logging.debug('{} Loaded'.format(filename))
except IOError:
trans = gettext.NullTranslations()
trans.install()
Fix error on loading other languages
|
# Copyright (C) 2016 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import locale
import gettext
import os
import logging
class GetText(object):
"""
Handles language translations and Initializes global _() function
"""
def __init__(self):
"""Initialize GetText
"""
code, encoding = locale.getlocale()
try:
filename = os.path.join(os.path.abspath(__file__),
'locale', '{}.mo'.format(code))
trans = gettext.GNUTranslations(open(filename, "rb"))
logging.debug('{} Loaded'.format(filename))
except IOError:
trans = gettext.NullTranslations()
trans.install()
|
<commit_before># Copyright (C) 2016 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import locale
import gettext
import os
import logging
class GetText(object):
"""
Handles language translations and Initializes global _() function
"""
def __init__(self):
"""Initialize GetText
"""
code, encoding = locale.getlocale()
try:
filename = os.path.join('locale', '{}.mo'.format(code))
trans = gettext.GNUTranslations(open(filename, "rb"))
logging.debug('{} Loaded'.format(filename))
except IOError:
trans = gettext.NullTranslations()
trans.install()
<commit_msg>Fix error on loading other languages<commit_after>
|
# Copyright (C) 2016 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import locale
import gettext
import os
import logging
class GetText(object):
"""
Handles language translations and Initializes global _() function
"""
def __init__(self):
"""Initialize GetText
"""
code, encoding = locale.getlocale()
try:
filename = os.path.join(os.path.abspath(__file__),
'locale', '{}.mo'.format(code))
trans = gettext.GNUTranslations(open(filename, "rb"))
logging.debug('{} Loaded'.format(filename))
except IOError:
trans = gettext.NullTranslations()
trans.install()
|
# Copyright (C) 2016 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import locale
import gettext
import os
import logging
class GetText(object):
"""
Handles language translations and Initializes global _() function
"""
def __init__(self):
"""Initialize GetText
"""
code, encoding = locale.getlocale()
try:
filename = os.path.join('locale', '{}.mo'.format(code))
trans = gettext.GNUTranslations(open(filename, "rb"))
logging.debug('{} Loaded'.format(filename))
except IOError:
trans = gettext.NullTranslations()
trans.install()
Fix error on loading other languages# Copyright (C) 2016 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import locale
import gettext
import os
import logging
class GetText(object):
"""
Handles language translations and Initializes global _() function
"""
def __init__(self):
"""Initialize GetText
"""
code, encoding = locale.getlocale()
try:
filename = os.path.join(os.path.abspath(__file__),
'locale', '{}.mo'.format(code))
trans = gettext.GNUTranslations(open(filename, "rb"))
logging.debug('{} Loaded'.format(filename))
except IOError:
trans = gettext.NullTranslations()
trans.install()
|
<commit_before># Copyright (C) 2016 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import locale
import gettext
import os
import logging
class GetText(object):
"""
Handles language translations and Initializes global _() function
"""
def __init__(self):
"""Initialize GetText
"""
code, encoding = locale.getlocale()
try:
filename = os.path.join('locale', '{}.mo'.format(code))
trans = gettext.GNUTranslations(open(filename, "rb"))
logging.debug('{} Loaded'.format(filename))
except IOError:
trans = gettext.NullTranslations()
trans.install()
<commit_msg>Fix error on loading other languages<commit_after># Copyright (C) 2016 Adam Schubert <adam.schubert@sg1-game.net>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import locale
import gettext
import os
import logging
class GetText(object):
"""
Handles language translations and Initializes global _() function
"""
def __init__(self):
"""Initialize GetText
"""
code, encoding = locale.getlocale()
try:
filename = os.path.join(os.path.abspath(__file__),
'locale', '{}.mo'.format(code))
trans = gettext.GNUTranslations(open(filename, "rb"))
logging.debug('{} Loaded'.format(filename))
except IOError:
trans = gettext.NullTranslations()
trans.install()
|
4d2d940d672c6af14916cf4c4cecf2a5bb6de4ef
|
libqtile/layout/hybridlayoutdemo.py
|
libqtile/layout/hybridlayoutdemo.py
|
from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
|
from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
def request_rectangle(self, r, windows):
return (r, Rect())
|
Add request_rectange to HybridLayoutDemo - no clue why this never was here, but it stops it actually working
|
Add request_rectange to HybridLayoutDemo - no clue why this never was here, but it stops it actually working
|
Python
|
mit
|
dequis/qtile,farebord/qtile,de-vri-es/qtile,StephenBarnes/qtile,ramnes/qtile,ramnes/qtile,flacjacket/qtile,tych0/qtile,farebord/qtile,kseistrup/qtile,kopchik/qtile,aniruddhkanojia/qtile,kiniou/qtile,jdowner/qtile,kynikos/qtile,soulchainer/qtile,soulchainer/qtile,kynikos/qtile,andrewyoung1991/qtile,jdowner/qtile,apinsard/qtile,kiniou/qtile,rxcomm/qtile,de-vri-es/qtile,nxnfufunezn/qtile,rxcomm/qtile,aniruddhkanojia/qtile,encukou/qtile,zordsdavini/qtile,StephenBarnes/qtile,tych0/qtile,qtile/qtile,qtile/qtile,frostidaho/qtile,kopchik/qtile,cortesi/qtile,w1ndy/qtile,EndPointCorp/qtile,xplv/qtile,frostidaho/qtile,bavardage/qtile,EndPointCorp/qtile,kseistrup/qtile,flacjacket/qtile,cortesi/qtile,dequis/qtile,encukou/qtile,zordsdavini/qtile,w1ndy/qtile,xplv/qtile,himaaaatti/qtile,apinsard/qtile,andrewyoung1991/qtile,nxnfufunezn/qtile,himaaaatti/qtile
|
from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
Add request_rectange to HybridLayoutDemo - no clue why this never was here, but it stops it actually working
|
from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
def request_rectangle(self, r, windows):
return (r, Rect())
|
<commit_before>from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
<commit_msg>Add request_rectange to HybridLayoutDemo - no clue why this never was here, but it stops it actually working<commit_after>
|
from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
def request_rectangle(self, r, windows):
return (r, Rect())
|
from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
Add request_rectange to HybridLayoutDemo - no clue why this never was here, but it stops it actually workingfrom base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
def request_rectangle(self, r, windows):
return (r, Rect())
|
<commit_before>from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
<commit_msg>Add request_rectange to HybridLayoutDemo - no clue why this never was here, but it stops it actually working<commit_after>from base import SubLayout, Rect
from sublayouts import VerticalStack, Floating
from subtile import SubTile
class HybridLayoutDemo(SubLayout):
def _init_sublayouts(self):
class TopWindow(VerticalStack):
def filter_windows(self, windows):
windows = [w for w in windows if w.name == "htop"]
return ([windows[0],] if len(windows) else [])
def request_rectangle(self, r, windows):
if windows:
return r.split_horizontal(height=300)
else:
return (Rect(), r)
self.sublayouts.append(Floating(self.clientStack,
self.theme,
parent=self,
)
)
self.sublayouts.append(TopWindow(self.clientStack,
self.theme,
parent=self,
autohide=True,
)
)
self.sublayouts.append(SubTile(self.clientStack,
self.theme,
parent=self,
master_windows = 2,
)
)
def filter(self, client):
return True
def request_rectangle(self, r, windows):
return (r, Rect())
|
521ebf29990de4d997c90f4168ea300d75776cfc
|
components/utilities.py
|
components/utilities.py
|
"""Utilities for general operations."""
def IsNumeric(num_str):
try:
val = int(num_str)
except ValueError:
return False
else:
return True
|
"""Utilities for general operations."""
def IsNumeric(num_str):
try:
val = int(num_str)
except ValueError:
return False
else:
return True
def GuaranteeUnicode(obj):
if type(obj) == unicode:
return obj
elif type(obj) == str:
return unicode(obj, "utf-8")
else:
return unicode(str(obj), "utf-8")
|
Add GuranteeUnicode function which always returns a unicode object
|
Add GuranteeUnicode function which always returns a unicode object
|
Python
|
mit
|
lnishan/SQLGitHub
|
"""Utilities for general operations."""
def IsNumeric(num_str):
try:
val = int(num_str)
except ValueError:
return False
else:
return True
Add GuranteeUnicode function which always returns a unicode object
|
"""Utilities for general operations."""
def IsNumeric(num_str):
try:
val = int(num_str)
except ValueError:
return False
else:
return True
def GuaranteeUnicode(obj):
if type(obj) == unicode:
return obj
elif type(obj) == str:
return unicode(obj, "utf-8")
else:
return unicode(str(obj), "utf-8")
|
<commit_before>"""Utilities for general operations."""
def IsNumeric(num_str):
try:
val = int(num_str)
except ValueError:
return False
else:
return True
<commit_msg>Add GuranteeUnicode function which always returns a unicode object<commit_after>
|
"""Utilities for general operations."""
def IsNumeric(num_str):
try:
val = int(num_str)
except ValueError:
return False
else:
return True
def GuaranteeUnicode(obj):
if type(obj) == unicode:
return obj
elif type(obj) == str:
return unicode(obj, "utf-8")
else:
return unicode(str(obj), "utf-8")
|
"""Utilities for general operations."""
def IsNumeric(num_str):
try:
val = int(num_str)
except ValueError:
return False
else:
return True
Add GuranteeUnicode function which always returns a unicode object"""Utilities for general operations."""
def IsNumeric(num_str):
try:
val = int(num_str)
except ValueError:
return False
else:
return True
def GuaranteeUnicode(obj):
if type(obj) == unicode:
return obj
elif type(obj) == str:
return unicode(obj, "utf-8")
else:
return unicode(str(obj), "utf-8")
|
<commit_before>"""Utilities for general operations."""
def IsNumeric(num_str):
try:
val = int(num_str)
except ValueError:
return False
else:
return True
<commit_msg>Add GuranteeUnicode function which always returns a unicode object<commit_after>"""Utilities for general operations."""
def IsNumeric(num_str):
try:
val = int(num_str)
except ValueError:
return False
else:
return True
def GuaranteeUnicode(obj):
if type(obj) == unicode:
return obj
elif type(obj) == str:
return unicode(obj, "utf-8")
else:
return unicode(str(obj), "utf-8")
|
fce5152e8d902821c9b521402667ac87f9e9a17b
|
checks.d/system_core.py
|
checks.d/system_core.py
|
import psutil
from checks import AgentCheck
class SystemCore(AgentCheck):
def check(self, instance):
cpu_times = psutil.cpu_times(percpu=True)
for i, cpu in enumerate(cpu_times):
for key, value in cpu._asdict().iteritems():
self.rate(
"system.core.{0}".format(key),
100.0 * value,
tags=["core:{0}".format(i)]
)
|
import psutil
from checks import AgentCheck
class SystemCore(AgentCheck):
def check(self, instance):
cpu_times = psutil.cpu_times(percpu=True)
self.gauge("system.core.count", len(cpu_times))
for i, cpu in enumerate(cpu_times):
for key, value in cpu._asdict().iteritems():
self.rate(
"system.core.{0}".format(key),
100.0 * value,
tags=["core:{0}".format(i)]
)
|
Send the core count as a metric
|
Send the core count as a metric
|
Python
|
bsd-3-clause
|
truthbk/dd-agent,tebriel/dd-agent,indeedops/dd-agent,mderomph-coolblue/dd-agent,packetloop/dd-agent,PagerDuty/dd-agent,joelvanvelden/dd-agent,indeedops/dd-agent,jyogi/purvar-agent,manolama/dd-agent,tebriel/dd-agent,packetloop/dd-agent,amalakar/dd-agent,a20012251/dd-agent,jraede/dd-agent,AniruddhaSAtre/dd-agent,brettlangdon/dd-agent,polynomial/dd-agent,gphat/dd-agent,urosgruber/dd-agent,c960657/dd-agent,darron/dd-agent,jvassev/dd-agent,Shopify/dd-agent,jyogi/purvar-agent,yuecong/dd-agent,darron/dd-agent,gphat/dd-agent,brettlangdon/dd-agent,eeroniemi/dd-agent,zendesk/dd-agent,JohnLZeller/dd-agent,manolama/dd-agent,citrusleaf/dd-agent,Wattpad/dd-agent,benmccann/dd-agent,huhongbo/dd-agent,c960657/dd-agent,indeedops/dd-agent,lookout/dd-agent,Mashape/dd-agent,darron/dd-agent,polynomial/dd-agent,amalakar/dd-agent,guruxu/dd-agent,benmccann/dd-agent,pmav99/praktoras,zendesk/dd-agent,eeroniemi/dd-agent,jraede/dd-agent,yuecong/dd-agent,pmav99/praktoras,cberry777/dd-agent,jvassev/dd-agent,manolama/dd-agent,AntoCard/powerdns-recursor_check,tebriel/dd-agent,jamesandariese/dd-agent,ess/dd-agent,relateiq/dd-agent,joelvanvelden/dd-agent,joelvanvelden/dd-agent,yuecong/dd-agent,brettlangdon/dd-agent,darron/dd-agent,brettlangdon/dd-agent,jamesandariese/dd-agent,guruxu/dd-agent,Mashape/dd-agent,remh/dd-agent,jvassev/dd-agent,huhongbo/dd-agent,relateiq/dd-agent,PagerDuty/dd-agent,guruxu/dd-agent,jshum/dd-agent,takus/dd-agent,ess/dd-agent,urosgruber/dd-agent,huhongbo/dd-agent,AntoCard/powerdns-recursor_check,jyogi/purvar-agent,truthbk/dd-agent,a20012251/dd-agent,Wattpad/dd-agent,Wattpad/dd-agent,Mashape/dd-agent,takus/dd-agent,jamesandariese/dd-agent,a20012251/dd-agent,PagerDuty/dd-agent,truthbk/dd-agent,urosgruber/dd-agent,AniruddhaSAtre/dd-agent,AniruddhaSAtre/dd-agent,jshum/dd-agent,cberry777/dd-agent,AniruddhaSAtre/dd-agent,a20012251/dd-agent,Wattpad/dd-agent,indeedops/dd-agent,citrusleaf/dd-agent,truthbk/dd-agent,jraede/dd-agent,brettlangdon/dd-agent,amalakar/dd-agent,relateiq/dd-age
nt,GabrielNicolasAvellaneda/dd-agent,Wattpad/dd-agent,gphat/dd-agent,yuecong/dd-agent,cberry777/dd-agent,eeroniemi/dd-agent,jraede/dd-agent,mderomph-coolblue/dd-agent,c960657/dd-agent,lookout/dd-agent,Shopify/dd-agent,tebriel/dd-agent,AntoCard/powerdns-recursor_check,AniruddhaSAtre/dd-agent,huhongbo/dd-agent,oneandoneis2/dd-agent,jyogi/purvar-agent,tebriel/dd-agent,JohnLZeller/dd-agent,takus/dd-agent,yuecong/dd-agent,Shopify/dd-agent,gphat/dd-agent,indeedops/dd-agent,relateiq/dd-agent,takus/dd-agent,joelvanvelden/dd-agent,joelvanvelden/dd-agent,GabrielNicolasAvellaneda/dd-agent,remh/dd-agent,zendesk/dd-agent,polynomial/dd-agent,cberry777/dd-agent,guruxu/dd-agent,packetloop/dd-agent,c960657/dd-agent,citrusleaf/dd-agent,pmav99/praktoras,manolama/dd-agent,amalakar/dd-agent,lookout/dd-agent,darron/dd-agent,lookout/dd-agent,c960657/dd-agent,GabrielNicolasAvellaneda/dd-agent,polynomial/dd-agent,ess/dd-agent,AntoCard/powerdns-recursor_check,jyogi/purvar-agent,JohnLZeller/dd-agent,manolama/dd-agent,packetloop/dd-agent,PagerDuty/dd-agent,benmccann/dd-agent,mderomph-coolblue/dd-agent,JohnLZeller/dd-agent,cberry777/dd-agent,GabrielNicolasAvellaneda/dd-agent,jvassev/dd-agent,PagerDuty/dd-agent,citrusleaf/dd-agent,eeroniemi/dd-agent,eeroniemi/dd-agent,urosgruber/dd-agent,guruxu/dd-agent,a20012251/dd-agent,lookout/dd-agent,AntoCard/powerdns-recursor_check,jraede/dd-agent,remh/dd-agent,jshum/dd-agent,pmav99/praktoras,relateiq/dd-agent,remh/dd-agent,pfmooney/dd-agent,oneandoneis2/dd-agent,zendesk/dd-agent,Mashape/dd-agent,oneandoneis2/dd-agent,pmav99/praktoras,benmccann/dd-agent,jshum/dd-agent,mderomph-coolblue/dd-agent,ess/dd-agent,oneandoneis2/dd-agent,mderomph-coolblue/dd-agent,remh/dd-agent,Shopify/dd-agent,ess/dd-agent,jshum/dd-agent,Mashape/dd-agent,packetloop/dd-agent,jvassev/dd-agent,gphat/dd-agent,pfmooney/dd-agent,huhongbo/dd-agent,oneandoneis2/dd-agent,zendesk/dd-agent,pfmooney/dd-agent,takus/dd-agent,pfmooney/dd-agent,amalakar/dd-agent,pfmooney/dd-agent,JohnLZeller/dd-a
gent,polynomial/dd-agent,truthbk/dd-agent,urosgruber/dd-agent,citrusleaf/dd-agent,Shopify/dd-agent,GabrielNicolasAvellaneda/dd-agent,benmccann/dd-agent,jamesandariese/dd-agent,jamesandariese/dd-agent
|
import psutil
from checks import AgentCheck
class SystemCore(AgentCheck):
def check(self, instance):
cpu_times = psutil.cpu_times(percpu=True)
for i, cpu in enumerate(cpu_times):
for key, value in cpu._asdict().iteritems():
self.rate(
"system.core.{0}".format(key),
100.0 * value,
tags=["core:{0}".format(i)]
)
Send the core count as a metric
|
import psutil
from checks import AgentCheck
class SystemCore(AgentCheck):
def check(self, instance):
cpu_times = psutil.cpu_times(percpu=True)
self.gauge("system.core.count", len(cpu_times))
for i, cpu in enumerate(cpu_times):
for key, value in cpu._asdict().iteritems():
self.rate(
"system.core.{0}".format(key),
100.0 * value,
tags=["core:{0}".format(i)]
)
|
<commit_before>import psutil
from checks import AgentCheck
class SystemCore(AgentCheck):
def check(self, instance):
cpu_times = psutil.cpu_times(percpu=True)
for i, cpu in enumerate(cpu_times):
for key, value in cpu._asdict().iteritems():
self.rate(
"system.core.{0}".format(key),
100.0 * value,
tags=["core:{0}".format(i)]
)
<commit_msg>Send the core count as a metric<commit_after>
|
import psutil
from checks import AgentCheck
class SystemCore(AgentCheck):
def check(self, instance):
cpu_times = psutil.cpu_times(percpu=True)
self.gauge("system.core.count", len(cpu_times))
for i, cpu in enumerate(cpu_times):
for key, value in cpu._asdict().iteritems():
self.rate(
"system.core.{0}".format(key),
100.0 * value,
tags=["core:{0}".format(i)]
)
|
import psutil
from checks import AgentCheck
class SystemCore(AgentCheck):
def check(self, instance):
cpu_times = psutil.cpu_times(percpu=True)
for i, cpu in enumerate(cpu_times):
for key, value in cpu._asdict().iteritems():
self.rate(
"system.core.{0}".format(key),
100.0 * value,
tags=["core:{0}".format(i)]
)
Send the core count as a metricimport psutil
from checks import AgentCheck
class SystemCore(AgentCheck):
def check(self, instance):
cpu_times = psutil.cpu_times(percpu=True)
self.gauge("system.core.count", len(cpu_times))
for i, cpu in enumerate(cpu_times):
for key, value in cpu._asdict().iteritems():
self.rate(
"system.core.{0}".format(key),
100.0 * value,
tags=["core:{0}".format(i)]
)
|
<commit_before>import psutil
from checks import AgentCheck
class SystemCore(AgentCheck):
def check(self, instance):
cpu_times = psutil.cpu_times(percpu=True)
for i, cpu in enumerate(cpu_times):
for key, value in cpu._asdict().iteritems():
self.rate(
"system.core.{0}".format(key),
100.0 * value,
tags=["core:{0}".format(i)]
)
<commit_msg>Send the core count as a metric<commit_after>import psutil
from checks import AgentCheck
class SystemCore(AgentCheck):
def check(self, instance):
cpu_times = psutil.cpu_times(percpu=True)
self.gauge("system.core.count", len(cpu_times))
for i, cpu in enumerate(cpu_times):
for key, value in cpu._asdict().iteritems():
self.rate(
"system.core.{0}".format(key),
100.0 * value,
tags=["core:{0}".format(i)]
)
|
f7d792d684e6c74f4a3e508bc29bbe2bacc458f0
|
cms/templatetags/cms.py
|
cms/templatetags/cms.py
|
from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}">{month_day}<span class="say-no-more">-{year}</span></time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month_day=format(date, 'm-d'),
year=format(date, 'Y')
))
|
from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}"><span class="say-no-more">{year}-</span>{month}-{day}</time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month=format(date, 'm'),
day=format(date, 'd'),
year=format(date, 'Y'),
))
|
Fix order of date parts
|
Fix order of date parts
|
Python
|
apache-2.0
|
willingc/pythondotorg,lebronhkh/pythondotorg,malemburg/pythondotorg,SujaySKumar/pythondotorg,python/pythondotorg,python/pythondotorg,willingc/pythondotorg,demvher/pythondotorg,SujaySKumar/pythondotorg,lsk112233/Clone-test-repo,Mariatta/pythondotorg,ahua/pythondotorg,malemburg/pythondotorg,lepture/pythondotorg,Mariatta/pythondotorg,lebronhkh/pythondotorg,python/pythondotorg,demvher/pythondotorg,SujaySKumar/pythondotorg,manhhomienbienthuy/pythondotorg,lsk112233/Clone-test-repo,malemburg/pythondotorg,malemburg/pythondotorg,manhhomienbienthuy/pythondotorg,demvher/pythondotorg,ahua/pythondotorg,manhhomienbienthuy/pythondotorg,demvher/pythondotorg,lepture/pythondotorg,berkerpeksag/pythondotorg,Mariatta/pythondotorg,SujaySKumar/pythondotorg,lepture/pythondotorg,berkerpeksag/pythondotorg,fe11x/pythondotorg,lepture/pythondotorg,proevo/pythondotorg,berkerpeksag/pythondotorg,fe11x/pythondotorg,proevo/pythondotorg,fe11x/pythondotorg,berkerpeksag/pythondotorg,berkerpeksag/pythondotorg,python/pythondotorg,SujaySKumar/pythondotorg,lepture/pythondotorg,proevo/pythondotorg,manhhomienbienthuy/pythondotorg,willingc/pythondotorg,Mariatta/pythondotorg,lebronhkh/pythondotorg,lsk112233/Clone-test-repo,fe11x/pythondotorg,ahua/pythondotorg,ahua/pythondotorg,ahua/pythondotorg,demvher/pythondotorg,lebronhkh/pythondotorg,lsk112233/Clone-test-repo,lebronhkh/pythondotorg,lsk112233/Clone-test-repo,fe11x/pythondotorg,willingc/pythondotorg,proevo/pythondotorg
|
from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}">{month_day}<span class="say-no-more">-{year}</span></time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month_day=format(date, 'm-d'),
year=format(date, 'Y')
))
Fix order of date parts
|
from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}"><span class="say-no-more">{year}-</span>{month}-{day}</time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month=format(date, 'm'),
day=format(date, 'd'),
year=format(date, 'Y'),
))
|
<commit_before>from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}">{month_day}<span class="say-no-more">-{year}</span></time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month_day=format(date, 'm-d'),
year=format(date, 'Y')
))
<commit_msg>Fix order of date parts<commit_after>
|
from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}"><span class="say-no-more">{year}-</span>{month}-{day}</time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month=format(date, 'm'),
day=format(date, 'd'),
year=format(date, 'Y'),
))
|
from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}">{month_day}<span class="say-no-more">-{year}</span></time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month_day=format(date, 'm-d'),
year=format(date, 'Y')
))
Fix order of date partsfrom django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}"><span class="say-no-more">{year}-</span>{month}-{day}</time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month=format(date, 'm'),
day=format(date, 'd'),
year=format(date, 'Y'),
))
|
<commit_before>from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}">{month_day}<span class="say-no-more">-{year}</span></time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month_day=format(date, 'm-d'),
year=format(date, 'Y')
))
<commit_msg>Fix order of date parts<commit_after>from django import template
from django.utils.dateformat import format
from django.utils.safestring import mark_safe
register = template.Library()
@register.filter(is_safe=True)
def iso_time_tag(date):
""" Returns an ISO date, with the year tagged in a say-no-more span.
This allows the date representation to shrink to just MM-DD. """
date_templ = '<time datetime="{timestamp}"><span class="say-no-more">{year}-</span>{month}-{day}</time>'
return mark_safe(date_templ.format(
timestamp=format(date, 'c'),
month=format(date, 'm'),
day=format(date, 'd'),
year=format(date, 'Y'),
))
|
5e2281a9d8f7585cb7c35d6fed2d4db5236a3ef2
|
cmis_web/__openerp__.py
|
cmis_web/__openerp__.py
|
# -*- coding: utf-8 -*-
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "CMIS Web interface",
'summary': """
Embeddable CMIS Web components""",
'author': 'ACSONE SA/NV',
'website': "http://alfodoo.org",
'category': 'Uncategorized',
'version': '9.0.1.0.0',
'license': 'AGPL-3',
'depends': [
'web',
'cmis'
],
'qweb': [
"static/src/xml/*.xml",
],
'data': [
'views/cmis_web.xml'
],
}
|
# -*- coding: utf-8 -*-
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "CMIS Web interface",
'summary': """
Embeddable CMIS Web components""",
'author': 'ACSONE SA/NV',
'website': "http://alfodoo.org",
'category': 'Uncategorized',
'version': '9.0.1.0.0',
'license': 'AGPL-3',
'depends': [
'web',
'cmis_field'
],
'qweb': [
"static/src/xml/*.xml",
],
'data': [
'views/cmis_web.xml'
],
}
|
Fix dependency cmis_web -> cmis_field
|
Fix dependency cmis_web -> cmis_field
|
Python
|
agpl-3.0
|
acsone/alfodoo,acsone/alfodoo,acsone/alfodoo
|
# -*- coding: utf-8 -*-
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "CMIS Web interface",
'summary': """
Embeddable CMIS Web components""",
'author': 'ACSONE SA/NV',
'website': "http://alfodoo.org",
'category': 'Uncategorized',
'version': '9.0.1.0.0',
'license': 'AGPL-3',
'depends': [
'web',
'cmis'
],
'qweb': [
"static/src/xml/*.xml",
],
'data': [
'views/cmis_web.xml'
],
}
Fix dependency cmis_web -> cmis_field
|
# -*- coding: utf-8 -*-
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "CMIS Web interface",
'summary': """
Embeddable CMIS Web components""",
'author': 'ACSONE SA/NV',
'website': "http://alfodoo.org",
'category': 'Uncategorized',
'version': '9.0.1.0.0',
'license': 'AGPL-3',
'depends': [
'web',
'cmis_field'
],
'qweb': [
"static/src/xml/*.xml",
],
'data': [
'views/cmis_web.xml'
],
}
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "CMIS Web interface",
'summary': """
Embeddable CMIS Web components""",
'author': 'ACSONE SA/NV',
'website': "http://alfodoo.org",
'category': 'Uncategorized',
'version': '9.0.1.0.0',
'license': 'AGPL-3',
'depends': [
'web',
'cmis'
],
'qweb': [
"static/src/xml/*.xml",
],
'data': [
'views/cmis_web.xml'
],
}
<commit_msg>Fix dependency cmis_web -> cmis_field<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "CMIS Web interface",
'summary': """
Embeddable CMIS Web components""",
'author': 'ACSONE SA/NV',
'website': "http://alfodoo.org",
'category': 'Uncategorized',
'version': '9.0.1.0.0',
'license': 'AGPL-3',
'depends': [
'web',
'cmis_field'
],
'qweb': [
"static/src/xml/*.xml",
],
'data': [
'views/cmis_web.xml'
],
}
|
# -*- coding: utf-8 -*-
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "CMIS Web interface",
'summary': """
Embeddable CMIS Web components""",
'author': 'ACSONE SA/NV',
'website': "http://alfodoo.org",
'category': 'Uncategorized',
'version': '9.0.1.0.0',
'license': 'AGPL-3',
'depends': [
'web',
'cmis'
],
'qweb': [
"static/src/xml/*.xml",
],
'data': [
'views/cmis_web.xml'
],
}
Fix dependency cmis_web -> cmis_field# -*- coding: utf-8 -*-
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "CMIS Web interface",
'summary': """
Embeddable CMIS Web components""",
'author': 'ACSONE SA/NV',
'website': "http://alfodoo.org",
'category': 'Uncategorized',
'version': '9.0.1.0.0',
'license': 'AGPL-3',
'depends': [
'web',
'cmis_field'
],
'qweb': [
"static/src/xml/*.xml",
],
'data': [
'views/cmis_web.xml'
],
}
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "CMIS Web interface",
'summary': """
Embeddable CMIS Web components""",
'author': 'ACSONE SA/NV',
'website': "http://alfodoo.org",
'category': 'Uncategorized',
'version': '9.0.1.0.0',
'license': 'AGPL-3',
'depends': [
'web',
'cmis'
],
'qweb': [
"static/src/xml/*.xml",
],
'data': [
'views/cmis_web.xml'
],
}
<commit_msg>Fix dependency cmis_web -> cmis_field<commit_after># -*- coding: utf-8 -*-
# Copyright 2016 ACSONE SA/NV (<http://acsone.eu>)
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl.html).
{
'name': "CMIS Web interface",
'summary': """
Embeddable CMIS Web components""",
'author': 'ACSONE SA/NV',
'website': "http://alfodoo.org",
'category': 'Uncategorized',
'version': '9.0.1.0.0',
'license': 'AGPL-3',
'depends': [
'web',
'cmis_field'
],
'qweb': [
"static/src/xml/*.xml",
],
'data': [
'views/cmis_web.xml'
],
}
|
0b37e71972d067bd06bfab1d7a0c0f47badefb17
|
scout/markers/models.py
|
scout/markers/models.py
|
from django.db import models
from model_utils import Choices
# Allowed marker pin colours: (stored value, human-readable label).
MARKER_COLOURS = Choices(
    ('blue', 'Blue'),
    ('red', 'Red')
)
class Marker(models.Model):
    # Human-readable marker name.
    name = models.CharField(max_length=255)
    address = models.TextField(blank=True)
    # Coordinates are stored as free-form strings, not numeric fields.
    lat = models.CharField(max_length=255, blank=True)
    long = models.CharField(max_length=255, blank=True)
    marker_colour = models.CharField(max_length=10, choices=MARKER_COLOURS, default=MARKER_COLOURS.blue)
    def to_dict(self):
        """Return a plain-dict representation of the marker.

        Note: the 'address' field is not included in this representation.
        """
        return {
            'name': self.name,
            'lat': self.lat,
            'long': self.long,
            'marker_colour': self.marker_colour
        }
|
from django.db import models
from model_utils import Choices
# Allowed marker pin colours: (stored value, human-readable label).
MARKER_COLOURS = Choices(
    ('blue', 'Blue'),
    ('red', 'Red')
)
class Marker(models.Model):
    # Human-readable marker name.
    name = models.CharField(max_length=255)
    address = models.TextField(blank=True)
    # Coordinates are stored as free-form strings, not numeric fields.
    lat = models.CharField(max_length=255, blank=True)
    long = models.CharField(max_length=255, blank=True)
    marker_colour = models.CharField(max_length=10, choices=MARKER_COLOURS, default=MARKER_COLOURS.blue)
    def to_dict(self):
        """Return a plain-dict representation of the marker,
        including its address, for API/JSON consumption."""
        return {
            'name': self.name,
            'lat': self.lat,
            'long': self.long,
            'address': self.address,
            'marker_colour': self.marker_colour
        }
|
Return the address from the marker.
|
Return the address from the marker.
|
Python
|
mit
|
meizon/scout,meizon/scout,meizon/scout,meizon/scout
|
from django.db import models
from model_utils import Choices
MARKER_COLOURS = Choices(
('blue', 'Blue'),
('red', 'Red')
)
class Marker(models.Model):
name = models.CharField(max_length=255)
address = models.TextField(blank=True)
lat = models.CharField(max_length=255, blank=True)
long = models.CharField(max_length=255, blank=True)
marker_colour = models.CharField(max_length=10, choices=MARKER_COLOURS, default=MARKER_COLOURS.blue)
def to_dict(self):
return {
'name': self.name,
'lat': self.lat,
'long': self.long,
'marker_colour': self.marker_colour
}Return the address from the marker.
|
from django.db import models
from model_utils import Choices
MARKER_COLOURS = Choices(
('blue', 'Blue'),
('red', 'Red')
)
class Marker(models.Model):
name = models.CharField(max_length=255)
address = models.TextField(blank=True)
lat = models.CharField(max_length=255, blank=True)
long = models.CharField(max_length=255, blank=True)
marker_colour = models.CharField(max_length=10, choices=MARKER_COLOURS, default=MARKER_COLOURS.blue)
def to_dict(self):
return {
'name': self.name,
'lat': self.lat,
'long': self.long,
'address': self.address,
'marker_colour': self.marker_colour
}
|
<commit_before>from django.db import models
from model_utils import Choices
MARKER_COLOURS = Choices(
('blue', 'Blue'),
('red', 'Red')
)
class Marker(models.Model):
name = models.CharField(max_length=255)
address = models.TextField(blank=True)
lat = models.CharField(max_length=255, blank=True)
long = models.CharField(max_length=255, blank=True)
marker_colour = models.CharField(max_length=10, choices=MARKER_COLOURS, default=MARKER_COLOURS.blue)
def to_dict(self):
return {
'name': self.name,
'lat': self.lat,
'long': self.long,
'marker_colour': self.marker_colour
}<commit_msg>Return the address from the marker.<commit_after>
|
from django.db import models
from model_utils import Choices
MARKER_COLOURS = Choices(
('blue', 'Blue'),
('red', 'Red')
)
class Marker(models.Model):
name = models.CharField(max_length=255)
address = models.TextField(blank=True)
lat = models.CharField(max_length=255, blank=True)
long = models.CharField(max_length=255, blank=True)
marker_colour = models.CharField(max_length=10, choices=MARKER_COLOURS, default=MARKER_COLOURS.blue)
def to_dict(self):
return {
'name': self.name,
'lat': self.lat,
'long': self.long,
'address': self.address,
'marker_colour': self.marker_colour
}
|
from django.db import models
from model_utils import Choices
MARKER_COLOURS = Choices(
('blue', 'Blue'),
('red', 'Red')
)
class Marker(models.Model):
name = models.CharField(max_length=255)
address = models.TextField(blank=True)
lat = models.CharField(max_length=255, blank=True)
long = models.CharField(max_length=255, blank=True)
marker_colour = models.CharField(max_length=10, choices=MARKER_COLOURS, default=MARKER_COLOURS.blue)
def to_dict(self):
return {
'name': self.name,
'lat': self.lat,
'long': self.long,
'marker_colour': self.marker_colour
}Return the address from the marker.from django.db import models
from model_utils import Choices
MARKER_COLOURS = Choices(
('blue', 'Blue'),
('red', 'Red')
)
class Marker(models.Model):
name = models.CharField(max_length=255)
address = models.TextField(blank=True)
lat = models.CharField(max_length=255, blank=True)
long = models.CharField(max_length=255, blank=True)
marker_colour = models.CharField(max_length=10, choices=MARKER_COLOURS, default=MARKER_COLOURS.blue)
def to_dict(self):
return {
'name': self.name,
'lat': self.lat,
'long': self.long,
'address': self.address,
'marker_colour': self.marker_colour
}
|
<commit_before>from django.db import models
from model_utils import Choices
MARKER_COLOURS = Choices(
('blue', 'Blue'),
('red', 'Red')
)
class Marker(models.Model):
name = models.CharField(max_length=255)
address = models.TextField(blank=True)
lat = models.CharField(max_length=255, blank=True)
long = models.CharField(max_length=255, blank=True)
marker_colour = models.CharField(max_length=10, choices=MARKER_COLOURS, default=MARKER_COLOURS.blue)
def to_dict(self):
return {
'name': self.name,
'lat': self.lat,
'long': self.long,
'marker_colour': self.marker_colour
}<commit_msg>Return the address from the marker.<commit_after>from django.db import models
from model_utils import Choices
MARKER_COLOURS = Choices(
('blue', 'Blue'),
('red', 'Red')
)
class Marker(models.Model):
name = models.CharField(max_length=255)
address = models.TextField(blank=True)
lat = models.CharField(max_length=255, blank=True)
long = models.CharField(max_length=255, blank=True)
marker_colour = models.CharField(max_length=10, choices=MARKER_COLOURS, default=MARKER_COLOURS.blue)
def to_dict(self):
return {
'name': self.name,
'lat': self.lat,
'long': self.long,
'address': self.address,
'marker_colour': self.marker_colour
}
|
ce77cbeb6fcb71b49c669188b38e43fb75e4d729
|
pyinfra_cli/__main__.py
|
pyinfra_cli/__main__.py
|
# pyinfra
# File: pyinfra_cli/__main__.py
# Desc: bootstrap stuff for the pyinfra CLI and provide its entry point
import signal
import sys
import click
from colorama import init as colorama_init
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Init colorama for Windows ANSI color support
colorama_init()
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Silence click's unicode_literals warning
click.disable_unicode_literals_warning = True  # noqa
# Handle ctrl+c: print a message and exit cleanly
def _signal_handler(signum, frame):
    print('Exiting upon user request!')
    sys.exit(0)
signal.signal(signal.SIGINT, _signal_handler)  # noqa
def execute_pyinfra():
    """CLI entry point: dispatch pre-0.4 style invocations (detected by
    the -i flag) to the legacy docopt runner, everything else to click."""
    # Legacy support for pyinfra <0.4 using docopt
    if '-i' in sys.argv:
        run_main_with_legacy_arguments(main)
    else:
        cli()
|
# pyinfra
# File: pyinfra_cli/__main__.py
# Desc: bootstrap stuff for the pyinfra CLI and provide its entry point
import signal
import sys
import click
import gevent
from colorama import init as colorama_init
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Init colorama for Windows ANSI color support
colorama_init()
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Silence click's unicode_literals warning
click.disable_unicode_literals_warning = True  # noqa
def _handle_interrupt(signum, frame):
    """SIGINT handler: print a farewell message and exit the process."""
    click.echo('Exiting upon user request!')
    sys.exit(0)
# NOTE(review): calling gevent.signal(...) as a function is deprecated in
# newer gevent releases (gevent.signal_handler replaces it) — confirm the
# range of gevent versions this must support.
gevent.signal(signal.SIGINT, gevent.kill)  # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt)  # print the message and exit main
def execute_pyinfra():
    """CLI entry point: dispatch pre-0.4 style invocations (detected by
    the -i flag) to the legacy docopt runner, everything else to click."""
    # Legacy support for pyinfra <0.4 using docopt
    if '-i' in sys.argv:
        run_main_with_legacy_arguments(main)
    else:
        cli()
|
Kill all running greenlets when ctrl+c (works a charm in Python 3, not so much in 2).
|
Kill all running greenlets when ctrl+c (works a charm in Python 3, not so much in 2).
|
Python
|
mit
|
Fizzadar/pyinfra,Fizzadar/pyinfra
|
# pyinfra
# File: pyinfra_cli/__main__.py
# Desc: bootstrap stuff for the pyinfra CLI and provide it's entry point
import signal
import sys
import click
from colorama import init as colorama_init
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Init colorama for Windows ANSI color support
colorama_init()
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Handle ctrl+c
def _signal_handler(signum, frame):
print('Exiting upon user request!')
sys.exit(0)
signal.signal(signal.SIGINT, _signal_handler) # noqa
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
Kill all running greenlets when ctrl+c (works a charm in Python 3, not so much in 2).
|
# pyinfra
# File: pyinfra_cli/__main__.py
# Desc: bootstrap stuff for the pyinfra CLI and provide it's entry point
import signal
import sys
import click
import gevent
from colorama import init as colorama_init
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Init colorama for Windows ANSI color support
colorama_init()
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
gevent.signal(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
|
<commit_before># pyinfra
# File: pyinfra_cli/__main__.py
# Desc: bootstrap stuff for the pyinfra CLI and provide it's entry point
import signal
import sys
import click
from colorama import init as colorama_init
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Init colorama for Windows ANSI color support
colorama_init()
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Handle ctrl+c
def _signal_handler(signum, frame):
print('Exiting upon user request!')
sys.exit(0)
signal.signal(signal.SIGINT, _signal_handler) # noqa
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
<commit_msg>Kill all running greenlets when ctrl+c (works a charm in Python 3, not so much in 2).<commit_after>
|
# pyinfra
# File: pyinfra_cli/__main__.py
# Desc: bootstrap stuff for the pyinfra CLI and provide it's entry point
import signal
import sys
import click
import gevent
from colorama import init as colorama_init
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Init colorama for Windows ANSI color support
colorama_init()
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
gevent.signal(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
|
# pyinfra
# File: pyinfra_cli/__main__.py
# Desc: bootstrap stuff for the pyinfra CLI and provide it's entry point
import signal
import sys
import click
from colorama import init as colorama_init
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Init colorama for Windows ANSI color support
colorama_init()
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Handle ctrl+c
def _signal_handler(signum, frame):
print('Exiting upon user request!')
sys.exit(0)
signal.signal(signal.SIGINT, _signal_handler) # noqa
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
Kill all running greenlets when ctrl+c (works a charm in Python 3, not so much in 2).# pyinfra
# File: pyinfra_cli/__main__.py
# Desc: bootstrap stuff for the pyinfra CLI and provide it's entry point
import signal
import sys
import click
import gevent
from colorama import init as colorama_init
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Init colorama for Windows ANSI color support
colorama_init()
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
gevent.signal(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
|
<commit_before># pyinfra
# File: pyinfra_cli/__main__.py
# Desc: bootstrap stuff for the pyinfra CLI and provide it's entry point
import signal
import sys
import click
from colorama import init as colorama_init
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Init colorama for Windows ANSI color support
colorama_init()
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
# Handle ctrl+c
def _signal_handler(signum, frame):
print('Exiting upon user request!')
sys.exit(0)
signal.signal(signal.SIGINT, _signal_handler) # noqa
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
<commit_msg>Kill all running greenlets when ctrl+c (works a charm in Python 3, not so much in 2).<commit_after># pyinfra
# File: pyinfra_cli/__main__.py
# Desc: bootstrap stuff for the pyinfra CLI and provide it's entry point
import signal
import sys
import click
import gevent
from colorama import init as colorama_init
from .legacy import run_main_with_legacy_arguments
from .main import cli, main
# Init colorama for Windows ANSI color support
colorama_init()
# Don't write out deploy.pyc/config.pyc etc
sys.dont_write_bytecode = True
# Make sure imported files (deploy.py/etc) behave as if imported from the cwd
sys.path.append('.')
# Shut it click
click.disable_unicode_literals_warning = True # noqa
def _handle_interrupt(signum, frame):
click.echo('Exiting upon user request!')
sys.exit(0)
gevent.signal(signal.SIGINT, gevent.kill) # kill any greenlets on ctrl+c
signal.signal(signal.SIGINT, _handle_interrupt) # print the message and exit main
def execute_pyinfra():
# Legacy support for pyinfra <0.4 using docopt
if '-i' in sys.argv:
run_main_with_legacy_arguments(main)
else:
cli()
|
299aa432b3183e9db418f0735511330763c8141b
|
botbot/fileinfo.py
|
botbot/fileinfo.py
|
"""File information"""
import os
import time
import pwd
import stat
import hashlib
from .config import CONFIG
def get_file_hash(path):
    """Return the md5 hex digest of the file at *path*.

    Returns None when *path* is a directory (directories have no content
    hash) and '' when the file cannot be opened due to permissions.
    """
    def reader(fo):
        """Generator which feeds bytes to the md5 hasher"""
        while True:
            b = fo.read(128)
            if len(b) > 0:
                yield b
            else:
                # PEP 479: raising StopIteration inside a generator is a
                # RuntimeError on Python 3.7+; a plain return ends iteration.
                return
    hasher = hashlib.new('md5')
    if os.path.isdir(path):
        return
    else:
        try:
            with open(path, mode='br') as infile:
                for b in reader(infile):
                    hasher.update(b)
                digest = hasher.hexdigest()
            return digest
        except PermissionError:
            # Unreadable file: signal with an empty digest rather than raising.
            return ''
def FileInfo(fd, link=False, important=False):
    """Build a dict of metadata about the file or directory at path *fd*.

    :param fd: path to inspect
    :param link: passed to os.stat as follow_symlinks
    :param important: NOTE(review): this parameter is never read —
        importance is derived from the file extension below; confirm
        whether it was meant to override that.
    """
    stats = os.stat(fd, follow_symlinks=link)
    return {
        'path': os.path.abspath(fd),
        'mode': stats.st_mode,
        'uid': stats.st_uid,
        'username': pwd.getpwuid(stats.st_uid).pw_name,
        # NOTE(review): st_ctime is metadata-change time on Unix, not
        # content-modification time (st_mtime) — confirm which is intended.
        'lastmod': int(stats.st_ctime),
        'lastcheck': 0,
        'isfile': os.path.isfile(fd),
        'isdir': not os.path.isfile(fd),
        'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
        'md5sum': get_file_hash(fd),
        'problems': set()
    }
|
"""File information"""
import os
import pwd
import hashlib
from .config import CONFIG
def reader(fo):
    """Generator which yields 128-byte chunks from file object *fo*
    until it is exhausted (feeds bytes to the md5 hasher)."""
    while True:
        b = fo.read(128)
        if len(b) > 0:
            yield b
        else:
            # PEP 479: raising StopIteration inside a generator is a
            # RuntimeError on Python 3.7+; a plain return ends iteration.
            return
def get_file_hash(path):
    """Return the md5 hex digest of the file at *path*.

    Directories yield None (no content hash); files that cannot be
    opened due to permissions yield ''.
    """
    if os.path.isdir(path):
        return
    md5 = hashlib.new('md5')
    try:
        with open(path, mode='br') as src:
            for chunk in reader(src):
                md5.update(chunk)
            return md5.hexdigest()
    except PermissionError:
        # Unreadable file: empty digest instead of an exception.
        return ''
def FileInfo(fd, link=False, important=False):
"""Hold information about a file"""
stats = os.stat(fd, follow_symlinks=link)
return {
'path': os.path.abspath(fd),
'mode': stats.st_mode,
'uid': stats.st_uid,
'username': pwd.getpwuid(stats.st_uid).pw_name,
'size': stats.st_size,
'lastmod': int(stats.st_ctime),
'lastcheck': 0,
'isfile': os.path.isfile(fd),
'isdir': not os.path.isfile(fd),
'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
'md5sum': get_file_hash(fd),
'problems': set()
}
|
Move reader() generator out of file hasher
|
Move reader() generator out of file hasher
|
Python
|
mit
|
jackstanek/BotBot,jackstanek/BotBot
|
"""File information"""
import os
import time
import pwd
import stat
import hashlib
from .config import CONFIG
def get_file_hash(path):
"""Get md5 hash of a file"""
def reader(fo):
"""Generator which feeds bytes to the md5 hasher"""
while True:
b = fo.read(128)
if len(b) > 0:
yield b
else:
raise StopIteration()
hasher = hashlib.new('md5')
if os.path.isdir(path):
return
else:
try:
with open(path, mode='br') as infile:
for b in reader(infile):
hasher.update(b)
digest = hasher.hexdigest()
return digest
except PermissionError:
return ''
def FileInfo(fd, link=False, important=False):
"""Hold information about a file"""
stats = os.stat(fd, follow_symlinks=link)
return {
'path': os.path.abspath(fd),
'mode': stats.st_mode,
'uid': stats.st_uid,
'username': pwd.getpwuid(stats.st_uid).pw_name,
'size': stats.st_size,
'lastmod': int(stats.st_ctime),
'lastcheck': 0,
'isfile': os.path.isfile(fd),
'isdir': not os.path.isfile(fd),
'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
'md5sum': get_file_hash(fd),
'problems': set()
}
Move reader() generator out of file hasher
|
"""File information"""
import os
import pwd
import hashlib
from .config import CONFIG
def reader(fo):
"""Generator which feeds bytes to the md5 hasher"""
while True:
b = fo.read(128)
if len(b) > 0:
yield b
else:
raise StopIteration()
def get_file_hash(path):
"""Get md5 hash of a file"""
hasher = hashlib.new('md5')
if os.path.isdir(path):
return
else:
try:
with open(path, mode='br') as infile:
for b in reader(infile):
hasher.update(b)
digest = hasher.hexdigest()
return digest
except PermissionError:
return ''
def FileInfo(fd, link=False, important=False):
"""Hold information about a file"""
stats = os.stat(fd, follow_symlinks=link)
return {
'path': os.path.abspath(fd),
'mode': stats.st_mode,
'uid': stats.st_uid,
'username': pwd.getpwuid(stats.st_uid).pw_name,
'size': stats.st_size,
'lastmod': int(stats.st_ctime),
'lastcheck': 0,
'isfile': os.path.isfile(fd),
'isdir': not os.path.isfile(fd),
'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
'md5sum': get_file_hash(fd),
'problems': set()
}
|
<commit_before>"""File information"""
import os
import time
import pwd
import stat
import hashlib
from .config import CONFIG
def get_file_hash(path):
"""Get md5 hash of a file"""
def reader(fo):
"""Generator which feeds bytes to the md5 hasher"""
while True:
b = fo.read(128)
if len(b) > 0:
yield b
else:
raise StopIteration()
hasher = hashlib.new('md5')
if os.path.isdir(path):
return
else:
try:
with open(path, mode='br') as infile:
for b in reader(infile):
hasher.update(b)
digest = hasher.hexdigest()
return digest
except PermissionError:
return ''
def FileInfo(fd, link=False, important=False):
"""Hold information about a file"""
stats = os.stat(fd, follow_symlinks=link)
return {
'path': os.path.abspath(fd),
'mode': stats.st_mode,
'uid': stats.st_uid,
'username': pwd.getpwuid(stats.st_uid).pw_name,
'size': stats.st_size,
'lastmod': int(stats.st_ctime),
'lastcheck': 0,
'isfile': os.path.isfile(fd),
'isdir': not os.path.isfile(fd),
'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
'md5sum': get_file_hash(fd),
'problems': set()
}
<commit_msg>Move reader() generator out of file hasher<commit_after>
|
"""File information"""
import os
import pwd
import hashlib
from .config import CONFIG
def reader(fo):
"""Generator which feeds bytes to the md5 hasher"""
while True:
b = fo.read(128)
if len(b) > 0:
yield b
else:
raise StopIteration()
def get_file_hash(path):
"""Get md5 hash of a file"""
hasher = hashlib.new('md5')
if os.path.isdir(path):
return
else:
try:
with open(path, mode='br') as infile:
for b in reader(infile):
hasher.update(b)
digest = hasher.hexdigest()
return digest
except PermissionError:
return ''
def FileInfo(fd, link=False, important=False):
"""Hold information about a file"""
stats = os.stat(fd, follow_symlinks=link)
return {
'path': os.path.abspath(fd),
'mode': stats.st_mode,
'uid': stats.st_uid,
'username': pwd.getpwuid(stats.st_uid).pw_name,
'size': stats.st_size,
'lastmod': int(stats.st_ctime),
'lastcheck': 0,
'isfile': os.path.isfile(fd),
'isdir': not os.path.isfile(fd),
'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
'md5sum': get_file_hash(fd),
'problems': set()
}
|
"""File information"""
import os
import time
import pwd
import stat
import hashlib
from .config import CONFIG
def get_file_hash(path):
"""Get md5 hash of a file"""
def reader(fo):
"""Generator which feeds bytes to the md5 hasher"""
while True:
b = fo.read(128)
if len(b) > 0:
yield b
else:
raise StopIteration()
hasher = hashlib.new('md5')
if os.path.isdir(path):
return
else:
try:
with open(path, mode='br') as infile:
for b in reader(infile):
hasher.update(b)
digest = hasher.hexdigest()
return digest
except PermissionError:
return ''
def FileInfo(fd, link=False, important=False):
"""Hold information about a file"""
stats = os.stat(fd, follow_symlinks=link)
return {
'path': os.path.abspath(fd),
'mode': stats.st_mode,
'uid': stats.st_uid,
'username': pwd.getpwuid(stats.st_uid).pw_name,
'size': stats.st_size,
'lastmod': int(stats.st_ctime),
'lastcheck': 0,
'isfile': os.path.isfile(fd),
'isdir': not os.path.isfile(fd),
'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
'md5sum': get_file_hash(fd),
'problems': set()
}
Move reader() generator out of file hasher"""File information"""
import os
import pwd
import hashlib
from .config import CONFIG
def reader(fo):
"""Generator which feeds bytes to the md5 hasher"""
while True:
b = fo.read(128)
if len(b) > 0:
yield b
else:
raise StopIteration()
def get_file_hash(path):
"""Get md5 hash of a file"""
hasher = hashlib.new('md5')
if os.path.isdir(path):
return
else:
try:
with open(path, mode='br') as infile:
for b in reader(infile):
hasher.update(b)
digest = hasher.hexdigest()
return digest
except PermissionError:
return ''
def FileInfo(fd, link=False, important=False):
"""Hold information about a file"""
stats = os.stat(fd, follow_symlinks=link)
return {
'path': os.path.abspath(fd),
'mode': stats.st_mode,
'uid': stats.st_uid,
'username': pwd.getpwuid(stats.st_uid).pw_name,
'size': stats.st_size,
'lastmod': int(stats.st_ctime),
'lastcheck': 0,
'isfile': os.path.isfile(fd),
'isdir': not os.path.isfile(fd),
'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
'md5sum': get_file_hash(fd),
'problems': set()
}
|
<commit_before>"""File information"""
import os
import time
import pwd
import stat
import hashlib
from .config import CONFIG
def get_file_hash(path):
"""Get md5 hash of a file"""
def reader(fo):
"""Generator which feeds bytes to the md5 hasher"""
while True:
b = fo.read(128)
if len(b) > 0:
yield b
else:
raise StopIteration()
hasher = hashlib.new('md5')
if os.path.isdir(path):
return
else:
try:
with open(path, mode='br') as infile:
for b in reader(infile):
hasher.update(b)
digest = hasher.hexdigest()
return digest
except PermissionError:
return ''
def FileInfo(fd, link=False, important=False):
"""Hold information about a file"""
stats = os.stat(fd, follow_symlinks=link)
return {
'path': os.path.abspath(fd),
'mode': stats.st_mode,
'uid': stats.st_uid,
'username': pwd.getpwuid(stats.st_uid).pw_name,
'size': stats.st_size,
'lastmod': int(stats.st_ctime),
'lastcheck': 0,
'isfile': os.path.isfile(fd),
'isdir': not os.path.isfile(fd),
'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
'md5sum': get_file_hash(fd),
'problems': set()
}
<commit_msg>Move reader() generator out of file hasher<commit_after>"""File information"""
import os
import pwd
import hashlib
from .config import CONFIG
def reader(fo):
"""Generator which feeds bytes to the md5 hasher"""
while True:
b = fo.read(128)
if len(b) > 0:
yield b
else:
raise StopIteration()
def get_file_hash(path):
"""Get md5 hash of a file"""
hasher = hashlib.new('md5')
if os.path.isdir(path):
return
else:
try:
with open(path, mode='br') as infile:
for b in reader(infile):
hasher.update(b)
digest = hasher.hexdigest()
return digest
except PermissionError:
return ''
def FileInfo(fd, link=False, important=False):
    """Build a metadata dict describing the file at path *fd*.

    fd: path of the file or directory to inspect.
    link: forwarded to os.stat() as follow_symlinks; the default of
        False means a symlink is stat'ed itself, not its target.
    important: currently unused -- the 'important' entry is derived
        from CONFIG instead.  NOTE(review): confirm whether this
        parameter was meant to override the config lookup.
    """
    stats = os.stat(fd, follow_symlinks=link)
    return {
        'path': os.path.abspath(fd),
        'mode': stats.st_mode,
        'uid': stats.st_uid,
        # Resolve the numeric uid to a login name via the pwd database.
        'username': pwd.getpwuid(stats.st_uid).pw_name,
        'size': stats.st_size,
        # NOTE(review): st_ctime is metadata-change time on Unix, not
        # content-modification time (st_mtime) -- confirm intended.
        'lastmod': int(stats.st_ctime),
        'lastcheck': 0,  # epoch of last integrity check; 0 = never checked
        'isfile': os.path.isfile(fd),
        'isdir': not os.path.isfile(fd),
        # Flag files whose extension is listed in the fileinfo/important
        # configuration value.
        'important': os.path.splitext(fd)[1] in CONFIG.get('fileinfo', 'important'),
        'md5sum': get_file_hash(fd),
        'problems': set()
    }
|
405e34b7573e3af78051741feb32e7589e49dfb9
|
controllers/main.py
|
controllers/main.py
|
# -*- coding: utf-8 -*-
import logging
import simplejson
import os
import base64
import openerp
from ..helpers.zebra import zebra
class PrintController(openerp.addons.web.http.Controller):
_cp_path = '/printer_proxy'
@openerp.addons.web.http.jsonrequest
def output(self, request, format="epl2", **kwargs):
'''Print the passed-in data. Corresponds to "printer_proxy.print"'''
if format.lower() == "epl2":
return self.output_epl2(request, **kwargs)
return {'success': False, 'error': "Format '%s' not recognized" % format}
def output_epl2(self, request, printer_name='zebra_python_unittest', data=[], raw=False, test=False):
'''Print the passed-in EPL2 data.'''
printer = zebra(printer_name)
printer.setup(direct_thermal=True)
for datum in data:
if not raw:
datum = base64.b64decode(datum)
printer.output(datum)
return {'success': True}
|
# -*- coding: utf-8 -*-
import logging
import simplejson
import os
import base64
import openerp
from ..helpers.zebra import zebra
class PrintController(openerp.addons.web.http.Controller):
_cp_path = '/printer_proxy'
@openerp.addons.web.http.jsonrequest
def output(self, request, format="epl2", **kwargs):
'''Print the passed-in data. Corresponds to "printer_proxy.print"'''
if format.lower() == "epl2":
return self.output_epl2(request, **kwargs)
return {'success': False, 'error': "Format '%s' not recognized" % format}
def output_epl2(self, request, printer_name='zebra_python_unittest', data=[], raw=False, test=False):
'''Print the passed-in EPL2 data.'''
printer = zebra(printer_name)
for datum in data:
if not raw:
datum = base64.b64decode(datum)
printer.output(datum)
return {'success': True}
|
Remove line that causes a form feed upon every call to PrintController.output_epl.
|
Remove line that causes a form feed upon every call to PrintController.output_epl.
|
Python
|
agpl-3.0
|
ryepdx/printer_proxy,ryepdx/printer_proxy
|
# -*- coding: utf-8 -*-
import logging
import simplejson
import os
import base64
import openerp
from ..helpers.zebra import zebra
class PrintController(openerp.addons.web.http.Controller):
_cp_path = '/printer_proxy'
@openerp.addons.web.http.jsonrequest
def output(self, request, format="epl2", **kwargs):
'''Print the passed-in data. Corresponds to "printer_proxy.print"'''
if format.lower() == "epl2":
return self.output_epl2(request, **kwargs)
return {'success': False, 'error': "Format '%s' not recognized" % format}
def output_epl2(self, request, printer_name='zebra_python_unittest', data=[], raw=False, test=False):
'''Print the passed-in EPL2 data.'''
printer = zebra(printer_name)
printer.setup(direct_thermal=True)
for datum in data:
if not raw:
datum = base64.b64decode(datum)
printer.output(datum)
return {'success': True}
Remove line that causes a form feed upon every call to PrintController.output_epl.
|
# -*- coding: utf-8 -*-
import logging
import simplejson
import os
import base64
import openerp
from ..helpers.zebra import zebra
class PrintController(openerp.addons.web.http.Controller):
_cp_path = '/printer_proxy'
@openerp.addons.web.http.jsonrequest
def output(self, request, format="epl2", **kwargs):
'''Print the passed-in data. Corresponds to "printer_proxy.print"'''
if format.lower() == "epl2":
return self.output_epl2(request, **kwargs)
return {'success': False, 'error': "Format '%s' not recognized" % format}
def output_epl2(self, request, printer_name='zebra_python_unittest', data=[], raw=False, test=False):
'''Print the passed-in EPL2 data.'''
printer = zebra(printer_name)
for datum in data:
if not raw:
datum = base64.b64decode(datum)
printer.output(datum)
return {'success': True}
|
<commit_before># -*- coding: utf-8 -*-
import logging
import simplejson
import os
import base64
import openerp
from ..helpers.zebra import zebra
class PrintController(openerp.addons.web.http.Controller):
_cp_path = '/printer_proxy'
@openerp.addons.web.http.jsonrequest
def output(self, request, format="epl2", **kwargs):
'''Print the passed-in data. Corresponds to "printer_proxy.print"'''
if format.lower() == "epl2":
return self.output_epl2(request, **kwargs)
return {'success': False, 'error': "Format '%s' not recognized" % format}
def output_epl2(self, request, printer_name='zebra_python_unittest', data=[], raw=False, test=False):
'''Print the passed-in EPL2 data.'''
printer = zebra(printer_name)
printer.setup(direct_thermal=True)
for datum in data:
if not raw:
datum = base64.b64decode(datum)
printer.output(datum)
return {'success': True}
<commit_msg>Remove line that causes a form feed upon every call to PrintController.output_epl.<commit_after>
|
# -*- coding: utf-8 -*-
import logging
import simplejson
import os
import base64
import openerp
from ..helpers.zebra import zebra
class PrintController(openerp.addons.web.http.Controller):
    """JSON-RPC controller that forwards label data to a Zebra printer."""
    _cp_path = '/printer_proxy'

    @openerp.addons.web.http.jsonrequest
    def output(self, request, format="epl2", **kwargs):
        '''Print the passed-in data. Corresponds to "printer_proxy.print"'''
        if format.lower() == "epl2":
            return self.output_epl2(request, **kwargs)
        return {'success': False, 'error': "Format '%s' not recognized" % format}

    def output_epl2(self, request, printer_name='zebra_python_unittest', data=None, raw=False, test=False):
        '''Print the passed-in EPL2 data.

        data: list of label payloads, base64-encoded unless *raw* is true.
        '''
        # None sentinel instead of a mutable default list: a literal []
        # default is shared across every call of the method.
        if data is None:
            data = []
        printer = zebra(printer_name)
        for datum in data:
            if not raw:
                # Payloads arrive base64-encoded over JSON; decode first.
                datum = base64.b64decode(datum)
            printer.output(datum)
        return {'success': True}
|
# -*- coding: utf-8 -*-
import logging
import simplejson
import os
import base64
import openerp
from ..helpers.zebra import zebra
class PrintController(openerp.addons.web.http.Controller):
_cp_path = '/printer_proxy'
@openerp.addons.web.http.jsonrequest
def output(self, request, format="epl2", **kwargs):
'''Print the passed-in data. Corresponds to "printer_proxy.print"'''
if format.lower() == "epl2":
return self.output_epl2(request, **kwargs)
return {'success': False, 'error': "Format '%s' not recognized" % format}
def output_epl2(self, request, printer_name='zebra_python_unittest', data=[], raw=False, test=False):
'''Print the passed-in EPL2 data.'''
printer = zebra(printer_name)
printer.setup(direct_thermal=True)
for datum in data:
if not raw:
datum = base64.b64decode(datum)
printer.output(datum)
return {'success': True}
Remove line that causes a form feed upon every call to PrintController.output_epl.# -*- coding: utf-8 -*-
import logging
import simplejson
import os
import base64
import openerp
from ..helpers.zebra import zebra
class PrintController(openerp.addons.web.http.Controller):
_cp_path = '/printer_proxy'
@openerp.addons.web.http.jsonrequest
def output(self, request, format="epl2", **kwargs):
'''Print the passed-in data. Corresponds to "printer_proxy.print"'''
if format.lower() == "epl2":
return self.output_epl2(request, **kwargs)
return {'success': False, 'error': "Format '%s' not recognized" % format}
def output_epl2(self, request, printer_name='zebra_python_unittest', data=[], raw=False, test=False):
'''Print the passed-in EPL2 data.'''
printer = zebra(printer_name)
for datum in data:
if not raw:
datum = base64.b64decode(datum)
printer.output(datum)
return {'success': True}
|
<commit_before># -*- coding: utf-8 -*-
import logging
import simplejson
import os
import base64
import openerp
from ..helpers.zebra import zebra
class PrintController(openerp.addons.web.http.Controller):
_cp_path = '/printer_proxy'
@openerp.addons.web.http.jsonrequest
def output(self, request, format="epl2", **kwargs):
'''Print the passed-in data. Corresponds to "printer_proxy.print"'''
if format.lower() == "epl2":
return self.output_epl2(request, **kwargs)
return {'success': False, 'error': "Format '%s' not recognized" % format}
def output_epl2(self, request, printer_name='zebra_python_unittest', data=[], raw=False, test=False):
'''Print the passed-in EPL2 data.'''
printer = zebra(printer_name)
printer.setup(direct_thermal=True)
for datum in data:
if not raw:
datum = base64.b64decode(datum)
printer.output(datum)
return {'success': True}
<commit_msg>Remove line that causes a form feed upon every call to PrintController.output_epl.<commit_after># -*- coding: utf-8 -*-
import logging
import simplejson
import os
import base64
import openerp
from ..helpers.zebra import zebra
class PrintController(openerp.addons.web.http.Controller):
_cp_path = '/printer_proxy'
@openerp.addons.web.http.jsonrequest
def output(self, request, format="epl2", **kwargs):
'''Print the passed-in data. Corresponds to "printer_proxy.print"'''
if format.lower() == "epl2":
return self.output_epl2(request, **kwargs)
return {'success': False, 'error': "Format '%s' not recognized" % format}
def output_epl2(self, request, printer_name='zebra_python_unittest', data=[], raw=False, test=False):
'''Print the passed-in EPL2 data.'''
printer = zebra(printer_name)
for datum in data:
if not raw:
datum = base64.b64decode(datum)
printer.output(datum)
return {'success': True}
|
6dbc6caf4af75610cae75d4473fef25f6b405232
|
pymysql/tests/test_nextset.py
|
pymysql/tests/test_nextset.py
|
from pymysql.tests import base
from pymysql import util
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestNextset(base.PyMySQLTestCase):
def setUp(self):
super(TestNextset, self).setUp()
self.con = self.connections[0]
def test_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
r = cur.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur))
self.assertIsNone(cur.nextset())
def test_skip_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
cur.execute("SELECT 42")
self.assertEqual([(42,)], list(cur))
@unittest.expectedFailure
def test_multi_cursor(self):
cur1 = self.con.cursor()
cur2 = self.con.cursor()
cur1.execute("SELECT 1; SELECT 2;")
cur2.execute("SELECT 42")
self.assertEqual([(1,)], list(cur1))
self.assertEqual([(42,)], list(cur2))
r = cur1.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur1))
self.assertIsNone(cur1.nextset())
|
from pymysql.tests import base
from pymysql import util
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestNextset(base.PyMySQLTestCase):
def setUp(self):
super(TestNextset, self).setUp()
self.con = self.connections[0]
def test_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
r = cur.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur))
self.assertIsNone(cur.nextset())
def test_skip_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
cur.execute("SELECT 42")
self.assertEqual([(42,)], list(cur))
@unittest.expectedFailure
def test_multi_cursor(self):
cur1 = self.con.cursor()
cur2 = self.con.cursor()
cur1.execute("SELECT 1; SELECT 2;")
cur2.execute("SELECT 42")
self.assertEqual([(1,)], list(cur1))
self.assertEqual([(42,)], list(cur2))
r = cur1.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur1))
self.assertIsNone(cur1.nextset())
#TODO: How about SSCursor and nextset?
# It's very hard to implement correctly...
|
Add comment about SScursor and nextset.
|
Add comment about SScursor and nextset.
|
Python
|
mit
|
nju520/PyMySQL,methane/PyMySQL,aio-libs/aiomysql,pymysql/pymysql,pulsar314/Tornado-MySQL,yeyinzhu3211/PyMySQL,wraziens/PyMySQL,wraziens/PyMySQL,jheld/PyMySQL,mosquito/Tornado-MySQL,lzedl/PyMySQL,Ting-y/PyMySQL,jwjohns/PyMySQL,PyMySQL/Tornado-MySQL,modulexcite/PyMySQL,xjzhou/PyMySQL,anson-tang/PyMySQL,Geoion/Tornado-MySQL,MartinThoma/PyMySQL,NunoEdgarGub1/PyMySQL,PyMySQL/PyMySQL,lzedl/PyMySQL,yeyinzhu3211/PyMySQL,xjzhou/PyMySQL,eibanez/PyMySQL,boneyao/PyMySQL,eibanez/PyMySQL,DashaChuk/PyMySQL
|
from pymysql.tests import base
from pymysql import util
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestNextset(base.PyMySQLTestCase):
def setUp(self):
super(TestNextset, self).setUp()
self.con = self.connections[0]
def test_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
r = cur.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur))
self.assertIsNone(cur.nextset())
def test_skip_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
cur.execute("SELECT 42")
self.assertEqual([(42,)], list(cur))
@unittest.expectedFailure
def test_multi_cursor(self):
cur1 = self.con.cursor()
cur2 = self.con.cursor()
cur1.execute("SELECT 1; SELECT 2;")
cur2.execute("SELECT 42")
self.assertEqual([(1,)], list(cur1))
self.assertEqual([(42,)], list(cur2))
r = cur1.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur1))
self.assertIsNone(cur1.nextset())
Add comment about SScursor and nextset.
|
from pymysql.tests import base
from pymysql import util
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestNextset(base.PyMySQLTestCase):
def setUp(self):
super(TestNextset, self).setUp()
self.con = self.connections[0]
def test_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
r = cur.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur))
self.assertIsNone(cur.nextset())
def test_skip_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
cur.execute("SELECT 42")
self.assertEqual([(42,)], list(cur))
@unittest.expectedFailure
def test_multi_cursor(self):
cur1 = self.con.cursor()
cur2 = self.con.cursor()
cur1.execute("SELECT 1; SELECT 2;")
cur2.execute("SELECT 42")
self.assertEqual([(1,)], list(cur1))
self.assertEqual([(42,)], list(cur2))
r = cur1.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur1))
self.assertIsNone(cur1.nextset())
#TODO: How about SSCursor and nextset?
# It's very hard to implement correctly...
|
<commit_before>from pymysql.tests import base
from pymysql import util
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestNextset(base.PyMySQLTestCase):
def setUp(self):
super(TestNextset, self).setUp()
self.con = self.connections[0]
def test_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
r = cur.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur))
self.assertIsNone(cur.nextset())
def test_skip_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
cur.execute("SELECT 42")
self.assertEqual([(42,)], list(cur))
@unittest.expectedFailure
def test_multi_cursor(self):
cur1 = self.con.cursor()
cur2 = self.con.cursor()
cur1.execute("SELECT 1; SELECT 2;")
cur2.execute("SELECT 42")
self.assertEqual([(1,)], list(cur1))
self.assertEqual([(42,)], list(cur2))
r = cur1.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur1))
self.assertIsNone(cur1.nextset())
<commit_msg>Add comment about SScursor and nextset.<commit_after>
|
from pymysql.tests import base
from pymysql import util
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestNextset(base.PyMySQLTestCase):
    """Tests for cursor.nextset() over multi-statement queries."""
    def setUp(self):
        super(TestNextset, self).setUp()
        # Reuse the first live connection provided by the base test case.
        self.con = self.connections[0]
    def test_nextset(self):
        # Two result sets from one execute(); nextset() advances to the
        # second, and returns None once the sets are exhausted.
        cur = self.con.cursor()
        cur.execute("SELECT 1; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        r = cur.nextset()
        self.assertTrue(r)
        self.assertEqual([(2,)], list(cur))
        self.assertIsNone(cur.nextset())
    def test_skip_nextset(self):
        # A fresh execute() must discard any unread remaining result sets.
        cur = self.con.cursor()
        cur.execute("SELECT 1; SELECT 2;")
        self.assertEqual([(1,)], list(cur))
        cur.execute("SELECT 42")
        self.assertEqual([(42,)], list(cur))
    @unittest.expectedFailure
    def test_multi_cursor(self):
        # Known limitation: interleaving two cursors on one connection
        # does not preserve each cursor's pending result sets.
        cur1 = self.con.cursor()
        cur2 = self.con.cursor()
        cur1.execute("SELECT 1; SELECT 2;")
        cur2.execute("SELECT 42")
        self.assertEqual([(1,)], list(cur1))
        self.assertEqual([(42,)], list(cur2))
        r = cur1.nextset()
        self.assertTrue(r)
        self.assertEqual([(2,)], list(cur1))
        self.assertIsNone(cur1.nextset())
#TODO: How about SSCursor and nextset?
# It's very hard to implement correctly...
|
from pymysql.tests import base
from pymysql import util
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestNextset(base.PyMySQLTestCase):
def setUp(self):
super(TestNextset, self).setUp()
self.con = self.connections[0]
def test_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
r = cur.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur))
self.assertIsNone(cur.nextset())
def test_skip_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
cur.execute("SELECT 42")
self.assertEqual([(42,)], list(cur))
@unittest.expectedFailure
def test_multi_cursor(self):
cur1 = self.con.cursor()
cur2 = self.con.cursor()
cur1.execute("SELECT 1; SELECT 2;")
cur2.execute("SELECT 42")
self.assertEqual([(1,)], list(cur1))
self.assertEqual([(42,)], list(cur2))
r = cur1.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur1))
self.assertIsNone(cur1.nextset())
Add comment about SScursor and nextset.from pymysql.tests import base
from pymysql import util
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestNextset(base.PyMySQLTestCase):
def setUp(self):
super(TestNextset, self).setUp()
self.con = self.connections[0]
def test_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
r = cur.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur))
self.assertIsNone(cur.nextset())
def test_skip_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
cur.execute("SELECT 42")
self.assertEqual([(42,)], list(cur))
@unittest.expectedFailure
def test_multi_cursor(self):
cur1 = self.con.cursor()
cur2 = self.con.cursor()
cur1.execute("SELECT 1; SELECT 2;")
cur2.execute("SELECT 42")
self.assertEqual([(1,)], list(cur1))
self.assertEqual([(42,)], list(cur2))
r = cur1.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur1))
self.assertIsNone(cur1.nextset())
#TODO: How about SSCursor and nextset?
# It's very hard to implement correctly...
|
<commit_before>from pymysql.tests import base
from pymysql import util
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestNextset(base.PyMySQLTestCase):
def setUp(self):
super(TestNextset, self).setUp()
self.con = self.connections[0]
def test_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
r = cur.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur))
self.assertIsNone(cur.nextset())
def test_skip_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
cur.execute("SELECT 42")
self.assertEqual([(42,)], list(cur))
@unittest.expectedFailure
def test_multi_cursor(self):
cur1 = self.con.cursor()
cur2 = self.con.cursor()
cur1.execute("SELECT 1; SELECT 2;")
cur2.execute("SELECT 42")
self.assertEqual([(1,)], list(cur1))
self.assertEqual([(42,)], list(cur2))
r = cur1.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur1))
self.assertIsNone(cur1.nextset())
<commit_msg>Add comment about SScursor and nextset.<commit_after>from pymysql.tests import base
from pymysql import util
try:
import unittest2 as unittest
except ImportError:
import unittest
class TestNextset(base.PyMySQLTestCase):
def setUp(self):
super(TestNextset, self).setUp()
self.con = self.connections[0]
def test_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
r = cur.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur))
self.assertIsNone(cur.nextset())
def test_skip_nextset(self):
cur = self.con.cursor()
cur.execute("SELECT 1; SELECT 2;")
self.assertEqual([(1,)], list(cur))
cur.execute("SELECT 42")
self.assertEqual([(42,)], list(cur))
@unittest.expectedFailure
def test_multi_cursor(self):
cur1 = self.con.cursor()
cur2 = self.con.cursor()
cur1.execute("SELECT 1; SELECT 2;")
cur2.execute("SELECT 42")
self.assertEqual([(1,)], list(cur1))
self.assertEqual([(42,)], list(cur2))
r = cur1.nextset()
self.assertTrue(r)
self.assertEqual([(2,)], list(cur1))
self.assertIsNone(cur1.nextset())
#TODO: How about SSCursor and nextset?
# It's very hard to implement correctly...
|
465b34b2252a4a516aba8f8ea6adac13980ba39c
|
config/test/__init__.py
|
config/test/__init__.py
|
from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
subprocess.call(shlex.split(env.get('TEST_COMMAND')))
sys.exit(0)
def generate(env):
import os
cmd = 'python tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigFinishCB(run_tests)
def exists(): return 1
|
from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
sys.exit(subprocess.call(shlex.split(env.get('TEST_COMMAND'))))
def generate(env):
import os
cmd = 'python tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigFinishCB(run_tests)
def exists(): return 1
|
Return exit code when running tests
|
Return exit code when running tests
|
Python
|
lgpl-2.1
|
CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang
|
from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
subprocess.call(shlex.split(env.get('TEST_COMMAND')))
sys.exit(0)
def generate(env):
import os
cmd = 'python tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigFinishCB(run_tests)
def exists(): return 1
Return exit code when running tests
|
from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
sys.exit(subprocess.call(shlex.split(env.get('TEST_COMMAND'))))
def generate(env):
import os
cmd = 'python tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigFinishCB(run_tests)
def exists(): return 1
|
<commit_before>from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
subprocess.call(shlex.split(env.get('TEST_COMMAND')))
sys.exit(0)
def generate(env):
import os
cmd = 'python tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigFinishCB(run_tests)
def exists(): return 1
<commit_msg>Return exit code when running tests<commit_after>
|
from SCons.Script import *
def run_tests(env):
    """Run the configured test command and exit with its return code."""
    import shlex
    import subprocess
    import sys
    command_line = env.get('TEST_COMMAND')
    exit_code = subprocess.call(shlex.split(command_line))
    sys.exit(exit_code)


def generate(env):
    """Declare the TEST_COMMAND variable and hook test running into config."""
    import os
    cmd = ('python tests/testHarness -C tests --diff-failed --view-failed '
           '--view-unfiltered --save-failed --build')
    if 'DOCKBOT_MASTER_PORT' in os.environ:
        cmd += ' --no-color'
    env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
    if 'test' in COMMAND_LINE_TARGETS:
        env.CBAddConfigFinishCB(run_tests)


def exists():
    return 1
|
from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
subprocess.call(shlex.split(env.get('TEST_COMMAND')))
sys.exit(0)
def generate(env):
import os
cmd = 'python tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigFinishCB(run_tests)
def exists(): return 1
Return exit code when running testsfrom SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
sys.exit(subprocess.call(shlex.split(env.get('TEST_COMMAND'))))
def generate(env):
import os
cmd = 'python tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigFinishCB(run_tests)
def exists(): return 1
|
<commit_before>from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
subprocess.call(shlex.split(env.get('TEST_COMMAND')))
sys.exit(0)
def generate(env):
import os
cmd = 'python tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigFinishCB(run_tests)
def exists(): return 1
<commit_msg>Return exit code when running tests<commit_after>from SCons.Script import *
def run_tests(env):
import shlex
import subprocess
import sys
sys.exit(subprocess.call(shlex.split(env.get('TEST_COMMAND'))))
def generate(env):
import os
cmd = 'python tests/testHarness -C tests --diff-failed --view-failed ' \
'--view-unfiltered --save-failed --build'
if 'DOCKBOT_MASTER_PORT' in os.environ: cmd += ' --no-color'
env.CBAddVariables(('TEST_COMMAND', '`test` target command line', cmd))
if 'test' in COMMAND_LINE_TARGETS: env.CBAddConfigFinishCB(run_tests)
def exists(): return 1
|
1b066f793c6c3f8f8b1e9df2659922e2a5dbaf3a
|
challenge_5/python/alexbotello/FindTheDifference.py
|
challenge_5/python/alexbotello/FindTheDifference.py
|
from collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = dict(Counter(s))
t = dict(Counter(t))
for key in t.keys():
if key not in s.keys():
s[key] = 0
if s[key] - t[key] <= -1:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))
|
from collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = Counter(s)
t = Counter(t)
for key in t.keys():
if s[key] != t[key]:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))
|
Refactor to remove nested for loop
|
Refactor to remove nested for loop
|
Python
|
mit
|
mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges,mindm/2017Challenges
|
from collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = dict(Counter(s))
t = dict(Counter(t))
for key in t.keys():
if key not in s.keys():
s[key] = 0
if s[key] - t[key] <= -1:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))Refactor to remove nested for loop
|
from collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = Counter(s)
t = Counter(t)
for key in t.keys():
if s[key] != t[key]:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))
|
<commit_before>from collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = dict(Counter(s))
t = dict(Counter(t))
for key in t.keys():
if key not in s.keys():
s[key] = 0
if s[key] - t[key] <= -1:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))<commit_msg>Refactor to remove nested for loop<commit_after>
|
from collections import Counter
class Solution:
    def findTheDifference(self, s, t):
        """Return the first character of *t* whose count differs from *s*."""
        source_counts = Counter(s)
        target_counts = Counter(t)
        # Counter is a dict subclass: a missing key looks up as 0, so a
        # character that appears only in t still compares unequal.
        for ch in target_counts:
            if source_counts[ch] != target_counts[ch]:
                return ch
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))
|
from collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = dict(Counter(s))
t = dict(Counter(t))
for key in t.keys():
if key not in s.keys():
s[key] = 0
if s[key] - t[key] <= -1:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))Refactor to remove nested for loopfrom collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = Counter(s)
t = Counter(t)
for key in t.keys():
if s[key] != t[key]:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))
|
<commit_before>from collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = dict(Counter(s))
t = dict(Counter(t))
for key in t.keys():
if key not in s.keys():
s[key] = 0
if s[key] - t[key] <= -1:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))<commit_msg>Refactor to remove nested for loop<commit_after>from collections import Counter
class Solution:
def findTheDifference(self, s, t):
s = Counter(s)
t = Counter(t)
for key in t.keys():
if s[key] != t[key]:
return key
if __name__ == '__main__':
test_case = Solution()
s, t = [input() for _ in range(2)]
print('\n' + test_case.findTheDifference(s, t))
|
2b8869bb508f4fb67867385f3058372bde664ca5
|
CheckProxy/CheckProxy.py
|
CheckProxy/CheckProxy.py
|
import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
|
import discord
import requests
from discord.ext import commands
class checkproxy:
    """Cog for proxy checking"""
    def __init__(self, bot):
        self.bot = bot

    @commands.command(pass_context=True)
    async def checkproxy(self, ctx, proxy):
        """Checks the provided proxy."""
        p = proxy
        # Same proxy for both schemes; requests picks one by URL scheme.
        pr = {
            'http': p,
            'https': p
        }
        try:
            # 5s timeout keeps the bot from hanging on dead proxies.
            r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr, timeout=5)
            if r.status_code == 200:
                await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
            if r.status_code == 403:
                await self.bot.say(':x: 403 Forbidden, proxy is banned.')
        except requests.exceptions.Timeout:
            # BUGFIX: requests.exceptions has no lowercase `timeout`
            # attribute; the old clause raised AttributeError instead of
            # catching timeouts.  Timeout must also stay before its parent
            # class RequestException below.
            await self.bot.say(':x: Timed out checking proxy.')
        except requests.exceptions.RequestException:
            await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
        if not ctx.message.channel.is_private:
            await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
|
Add 5s timeout to checkproxy (in an effort to prevent bot hanging
|
Add 5s timeout to checkproxy (in an effort to prevent bot hanging
|
Python
|
agpl-3.0
|
FrostTheFox/RocketMap-cogs
|
import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
Add 5s timeout to checkproxy (in an effort to prevent bot hanging
|
import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr, timeout=5)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.timeout:
await self.bot.say(':x: Timed out checking proxy.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
|
<commit_before>import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
<commit_msg>Add 5s timeout to checkproxy (in an effort to prevent bot hanging<commit_after>
|
import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr, timeout=5)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.timeout:
await self.bot.say(':x: Timed out checking proxy.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
|
import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
Add 5s timeout to checkproxy (in an effort to prevent bot hangingimport discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr, timeout=5)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.timeout:
await self.bot.say(':x: Timed out checking proxy.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
|
<commit_before>import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
<commit_msg>Add 5s timeout to checkproxy (in an effort to prevent bot hanging<commit_after>import discord
import requests
from discord.ext import commands
class checkproxy:
"""Cog for proxy checking"""
def __init__(self, bot):
self.bot = bot
@commands.command(pass_context=True)
async def checkproxy(self, ctx, proxy):
"""Checks the provided proxy."""
p = proxy
pr = {
'http': p,
'https': p
}
try:
r = requests.get('https://pgorelease.nianticlabs.com/plfe/version', proxies=pr, timeout=5)
if r.status_code == 200:
await self.bot.say(':white_check_mark: 200 OK, proxy is not banned.')
if r.status_code == 403:
await self.bot.say(':x: 403 Forbidden, proxy is banned.')
except requests.exceptions.timeout:
await self.bot.say(':x: Timed out checking proxy.')
except requests.exceptions.RequestException as e:
await self.bot.say('Something is wrong with your proxy. Make sure to put the port as well as remove http or https from your input. Authentication is not supported right now.')
if not ctx.message.channel.is_private:
await self.bot.delete_message(ctx.message)
def setup(bot):
bot.add_cog(checkproxy(bot))
|
85fd83e57173aaf00e61169812e3929d5d946896
|
health_check/contrib/celery/backends.py
|
health_check/contrib/celery/backends.py
|
from datetime import datetime, timedelta
from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=datetime.now() + timedelta(seconds=timeout)
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
|
from datetime import timedelta
from django.conf import settings
from django.utils import timezone
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timezone.now() + timedelta(seconds=timeout)
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
|
Send timezone-aware datetime for task expiry
|
Send timezone-aware datetime for task expiry
Needed because this task would consistantly fail if django is set to a
later-than-UTC timezone, due to celery thinking the task expired the instant
it's sent.
|
Python
|
mit
|
KristianOellegaard/django-health-check,KristianOellegaard/django-health-check
|
from datetime import datetime, timedelta
from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=datetime.now() + timedelta(seconds=timeout)
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
Send timezone-aware datetime for task expiry
Needed because this task would consistantly fail if django is set to a
later-than-UTC timezone, due to celery thinking the task expired the instant
it's sent.
|
from datetime import timedelta
from django.conf import settings
from django.utils import timezone
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timezone.now() + timedelta(seconds=timeout)
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
|
<commit_before>from datetime import datetime, timedelta
from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=datetime.now() + timedelta(seconds=timeout)
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
<commit_msg>Send timezone-aware datetime for task expiry
Needed because this task would consistantly fail if django is set to a
later-than-UTC timezone, due to celery thinking the task expired the instant
it's sent.<commit_after>
|
from datetime import timedelta
from django.conf import settings
from django.utils import timezone
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timezone.now() + timedelta(seconds=timeout)
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
|
from datetime import datetime, timedelta
from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=datetime.now() + timedelta(seconds=timeout)
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
Send timezone-aware datetime for task expiry
Needed because this task would consistantly fail if django is set to a
later-than-UTC timezone, due to celery thinking the task expired the instant
it's sent.from datetime import timedelta
from django.conf import settings
from django.utils import timezone
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timezone.now() + timedelta(seconds=timeout)
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
|
<commit_before>from datetime import datetime, timedelta
from django.conf import settings
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=datetime.now() + timedelta(seconds=timeout)
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
<commit_msg>Send timezone-aware datetime for task expiry
Needed because this task would consistantly fail if django is set to a
later-than-UTC timezone, due to celery thinking the task expired the instant
it's sent.<commit_after>from datetime import timedelta
from django.conf import settings
from django.utils import timezone
from health_check.backends import BaseHealthCheckBackend
from health_check.exceptions import (
ServiceReturnedUnexpectedResult, ServiceUnavailable
)
from .tasks import add
class CeleryHealthCheck(BaseHealthCheckBackend):
def check_status(self):
timeout = getattr(settings, 'HEALTHCHECK_CELERY_TIMEOUT', 3)
try:
result = add.apply_async(
args=[4, 4],
expires=timezone.now() + timedelta(seconds=timeout)
)
result.get(timeout=timeout)
if result.result != 8:
self.add_error(ServiceReturnedUnexpectedResult("Celery returned wrong result"))
except IOError as e:
self.add_error(ServiceUnavailable("IOError"), e)
except BaseException as e:
self.add_error(ServiceUnavailable("Unknown error"), e)
|
6bc4f24c8bdd2be0875fba7cb98a81ff86caa5c3
|
tests/behave/environment.py
|
tests/behave/environment.py
|
# -*- coding: utf-8 -*-
u"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from toolium.behave.environment import before_scenario, after_scenario, after_all
|
# -*- coding: utf-8 -*-
u"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from toolium.behave.environment import before_all as toolium_before_all, before_scenario, after_scenario, after_all
from toolium.config_files import ConfigFiles
def before_all(context):
config_files = ConfigFiles()
config_files.set_config_directory(os.path.join(get_root_path(), 'conf'))
config_files.set_output_directory(os.path.join(get_root_path(), 'output'))
config_files.set_config_properties_filenames('properties.cfg', 'local-properties.cfg')
context.config_files = config_files
toolium_before_all(context)
def get_root_path():
"""Returns absolute path of the project root folder
:returns: root folder path
"""
return os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
|
Add before_all method to set configuration files
|
Add before_all method to set configuration files
|
Python
|
apache-2.0
|
Telefonica/toolium-examples
|
# -*- coding: utf-8 -*-
u"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from toolium.behave.environment import before_scenario, after_scenario, after_all
Add before_all method to set configuration files
|
# -*- coding: utf-8 -*-
u"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from toolium.behave.environment import before_all as toolium_before_all, before_scenario, after_scenario, after_all
from toolium.config_files import ConfigFiles
def before_all(context):
config_files = ConfigFiles()
config_files.set_config_directory(os.path.join(get_root_path(), 'conf'))
config_files.set_output_directory(os.path.join(get_root_path(), 'output'))
config_files.set_config_properties_filenames('properties.cfg', 'local-properties.cfg')
context.config_files = config_files
toolium_before_all(context)
def get_root_path():
"""Returns absolute path of the project root folder
:returns: root folder path
"""
return os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
|
<commit_before># -*- coding: utf-8 -*-
u"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from toolium.behave.environment import before_scenario, after_scenario, after_all
<commit_msg>Add before_all method to set configuration files<commit_after>
|
# -*- coding: utf-8 -*-
u"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from toolium.behave.environment import before_all as toolium_before_all, before_scenario, after_scenario, after_all
from toolium.config_files import ConfigFiles
def before_all(context):
config_files = ConfigFiles()
config_files.set_config_directory(os.path.join(get_root_path(), 'conf'))
config_files.set_output_directory(os.path.join(get_root_path(), 'output'))
config_files.set_config_properties_filenames('properties.cfg', 'local-properties.cfg')
context.config_files = config_files
toolium_before_all(context)
def get_root_path():
"""Returns absolute path of the project root folder
:returns: root folder path
"""
return os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
|
# -*- coding: utf-8 -*-
u"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from toolium.behave.environment import before_scenario, after_scenario, after_all
Add before_all method to set configuration files# -*- coding: utf-8 -*-
u"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from toolium.behave.environment import before_all as toolium_before_all, before_scenario, after_scenario, after_all
from toolium.config_files import ConfigFiles
def before_all(context):
config_files = ConfigFiles()
config_files.set_config_directory(os.path.join(get_root_path(), 'conf'))
config_files.set_output_directory(os.path.join(get_root_path(), 'output'))
config_files.set_config_properties_filenames('properties.cfg', 'local-properties.cfg')
context.config_files = config_files
toolium_before_all(context)
def get_root_path():
"""Returns absolute path of the project root folder
:returns: root folder path
"""
return os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
|
<commit_before># -*- coding: utf-8 -*-
u"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from toolium.behave.environment import before_scenario, after_scenario, after_all
<commit_msg>Add before_all method to set configuration files<commit_after># -*- coding: utf-8 -*-
u"""
Copyright 2015 Telefónica Investigación y Desarrollo, S.A.U.
This file is part of Toolium.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
from toolium.behave.environment import before_all as toolium_before_all, before_scenario, after_scenario, after_all
from toolium.config_files import ConfigFiles
def before_all(context):
config_files = ConfigFiles()
config_files.set_config_directory(os.path.join(get_root_path(), 'conf'))
config_files.set_output_directory(os.path.join(get_root_path(), 'output'))
config_files.set_config_properties_filenames('properties.cfg', 'local-properties.cfg')
context.config_files = config_files
toolium_before_all(context)
def get_root_path():
"""Returns absolute path of the project root folder
:returns: root folder path
"""
return os.path.dirname(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
|
3d04ab5df773be611bbbf790196e587d0da3c5e4
|
colored_logging.py
|
colored_logging.py
|
""" Formatter for the logging module, coloring terminal output according to error criticity. """
import enum
import logging
import sys
Colors = enum.Enum("Colors", ("RED", "GREEN", "YELLOW", "BLUE"))
LEVEL_COLOR_MAPPING = {logging.WARNING: Colors.YELLOW,
logging.ERROR: Colors.RED,
logging.CRITICAL: Colors.RED}
LEVEL_BOLD_MAPPING = {logging.WARNING: False,
logging.ERROR: False,
logging.CRITICAL: True}
class ColoredFormatter(logging.Formatter):
def format(self, record):
message = super().format(record)
if sys.stderr.isatty():
try:
color_code = LEVEL_COLOR_MAPPING[record.levelno].value
bold = LEVEL_BOLD_MAPPING[record.levelno]
except KeyError:
pass
else:
message = "\033[%u;%um%s\033[0m" % (int(bold), 30 + color_code, message)
return message
|
""" Formatter for the logging module, coloring terminal output according to error criticity. """
import enum
import logging
import sys
Colors = enum.Enum("Colors", ("RED", "GREEN", "YELLOW", "BLUE"))
LEVEL_COLOR_MAPPING = {logging.WARNING: Colors.YELLOW,
logging.ERROR: Colors.RED,
logging.CRITICAL: Colors.RED}
LEVEL_BOLD_MAPPING = {logging.WARNING: False,
logging.ERROR: False,
logging.CRITICAL: True}
class ColoredFormatter(logging.Formatter):
def format(self, record):
message = super().format(record)
if sys.stderr.isatty() and not sys.platform.startswith("win32"):
try:
color_code = LEVEL_COLOR_MAPPING[record.levelno].value
bold = LEVEL_BOLD_MAPPING[record.levelno]
except KeyError:
pass
else:
message = "\033[%u;%um%s\033[0m" % (int(bold), 30 + color_code, message)
return message
|
Disable colored logging for Windows
|
Disable colored logging for Windows
|
Python
|
mpl-2.0
|
desbma/sacad,desbma/sacad
|
""" Formatter for the logging module, coloring terminal output according to error criticity. """
import enum
import logging
import sys
Colors = enum.Enum("Colors", ("RED", "GREEN", "YELLOW", "BLUE"))
LEVEL_COLOR_MAPPING = {logging.WARNING: Colors.YELLOW,
logging.ERROR: Colors.RED,
logging.CRITICAL: Colors.RED}
LEVEL_BOLD_MAPPING = {logging.WARNING: False,
logging.ERROR: False,
logging.CRITICAL: True}
class ColoredFormatter(logging.Formatter):
def format(self, record):
message = super().format(record)
if sys.stderr.isatty():
try:
color_code = LEVEL_COLOR_MAPPING[record.levelno].value
bold = LEVEL_BOLD_MAPPING[record.levelno]
except KeyError:
pass
else:
message = "\033[%u;%um%s\033[0m" % (int(bold), 30 + color_code, message)
return message
Disable colored logging for Windows
|
""" Formatter for the logging module, coloring terminal output according to error criticity. """
import enum
import logging
import sys
Colors = enum.Enum("Colors", ("RED", "GREEN", "YELLOW", "BLUE"))
LEVEL_COLOR_MAPPING = {logging.WARNING: Colors.YELLOW,
logging.ERROR: Colors.RED,
logging.CRITICAL: Colors.RED}
LEVEL_BOLD_MAPPING = {logging.WARNING: False,
logging.ERROR: False,
logging.CRITICAL: True}
class ColoredFormatter(logging.Formatter):
def format(self, record):
message = super().format(record)
if sys.stderr.isatty() and not sys.platform.startswith("win32"):
try:
color_code = LEVEL_COLOR_MAPPING[record.levelno].value
bold = LEVEL_BOLD_MAPPING[record.levelno]
except KeyError:
pass
else:
message = "\033[%u;%um%s\033[0m" % (int(bold), 30 + color_code, message)
return message
|
<commit_before>""" Formatter for the logging module, coloring terminal output according to error criticity. """
import enum
import logging
import sys
Colors = enum.Enum("Colors", ("RED", "GREEN", "YELLOW", "BLUE"))
LEVEL_COLOR_MAPPING = {logging.WARNING: Colors.YELLOW,
logging.ERROR: Colors.RED,
logging.CRITICAL: Colors.RED}
LEVEL_BOLD_MAPPING = {logging.WARNING: False,
logging.ERROR: False,
logging.CRITICAL: True}
class ColoredFormatter(logging.Formatter):
def format(self, record):
message = super().format(record)
if sys.stderr.isatty():
try:
color_code = LEVEL_COLOR_MAPPING[record.levelno].value
bold = LEVEL_BOLD_MAPPING[record.levelno]
except KeyError:
pass
else:
message = "\033[%u;%um%s\033[0m" % (int(bold), 30 + color_code, message)
return message
<commit_msg>Disable colored logging for Windows<commit_after>
|
""" Formatter for the logging module, coloring terminal output according to error criticity. """
import enum
import logging
import sys
Colors = enum.Enum("Colors", ("RED", "GREEN", "YELLOW", "BLUE"))
LEVEL_COLOR_MAPPING = {logging.WARNING: Colors.YELLOW,
logging.ERROR: Colors.RED,
logging.CRITICAL: Colors.RED}
LEVEL_BOLD_MAPPING = {logging.WARNING: False,
logging.ERROR: False,
logging.CRITICAL: True}
class ColoredFormatter(logging.Formatter):
def format(self, record):
message = super().format(record)
if sys.stderr.isatty() and not sys.platform.startswith("win32"):
try:
color_code = LEVEL_COLOR_MAPPING[record.levelno].value
bold = LEVEL_BOLD_MAPPING[record.levelno]
except KeyError:
pass
else:
message = "\033[%u;%um%s\033[0m" % (int(bold), 30 + color_code, message)
return message
|
""" Formatter for the logging module, coloring terminal output according to error criticity. """
import enum
import logging
import sys
Colors = enum.Enum("Colors", ("RED", "GREEN", "YELLOW", "BLUE"))
LEVEL_COLOR_MAPPING = {logging.WARNING: Colors.YELLOW,
logging.ERROR: Colors.RED,
logging.CRITICAL: Colors.RED}
LEVEL_BOLD_MAPPING = {logging.WARNING: False,
logging.ERROR: False,
logging.CRITICAL: True}
class ColoredFormatter(logging.Formatter):
def format(self, record):
message = super().format(record)
if sys.stderr.isatty():
try:
color_code = LEVEL_COLOR_MAPPING[record.levelno].value
bold = LEVEL_BOLD_MAPPING[record.levelno]
except KeyError:
pass
else:
message = "\033[%u;%um%s\033[0m" % (int(bold), 30 + color_code, message)
return message
Disable colored logging for Windows""" Formatter for the logging module, coloring terminal output according to error criticity. """
import enum
import logging
import sys
Colors = enum.Enum("Colors", ("RED", "GREEN", "YELLOW", "BLUE"))
LEVEL_COLOR_MAPPING = {logging.WARNING: Colors.YELLOW,
logging.ERROR: Colors.RED,
logging.CRITICAL: Colors.RED}
LEVEL_BOLD_MAPPING = {logging.WARNING: False,
logging.ERROR: False,
logging.CRITICAL: True}
class ColoredFormatter(logging.Formatter):
def format(self, record):
message = super().format(record)
if sys.stderr.isatty() and not sys.platform.startswith("win32"):
try:
color_code = LEVEL_COLOR_MAPPING[record.levelno].value
bold = LEVEL_BOLD_MAPPING[record.levelno]
except KeyError:
pass
else:
message = "\033[%u;%um%s\033[0m" % (int(bold), 30 + color_code, message)
return message
|
<commit_before>""" Formatter for the logging module, coloring terminal output according to error criticity. """
import enum
import logging
import sys
Colors = enum.Enum("Colors", ("RED", "GREEN", "YELLOW", "BLUE"))
LEVEL_COLOR_MAPPING = {logging.WARNING: Colors.YELLOW,
logging.ERROR: Colors.RED,
logging.CRITICAL: Colors.RED}
LEVEL_BOLD_MAPPING = {logging.WARNING: False,
logging.ERROR: False,
logging.CRITICAL: True}
class ColoredFormatter(logging.Formatter):
def format(self, record):
message = super().format(record)
if sys.stderr.isatty():
try:
color_code = LEVEL_COLOR_MAPPING[record.levelno].value
bold = LEVEL_BOLD_MAPPING[record.levelno]
except KeyError:
pass
else:
message = "\033[%u;%um%s\033[0m" % (int(bold), 30 + color_code, message)
return message
<commit_msg>Disable colored logging for Windows<commit_after>""" Formatter for the logging module, coloring terminal output according to error criticity. """
import enum
import logging
import sys
Colors = enum.Enum("Colors", ("RED", "GREEN", "YELLOW", "BLUE"))
LEVEL_COLOR_MAPPING = {logging.WARNING: Colors.YELLOW,
logging.ERROR: Colors.RED,
logging.CRITICAL: Colors.RED}
LEVEL_BOLD_MAPPING = {logging.WARNING: False,
logging.ERROR: False,
logging.CRITICAL: True}
class ColoredFormatter(logging.Formatter):
def format(self, record):
message = super().format(record)
if sys.stderr.isatty() and not sys.platform.startswith("win32"):
try:
color_code = LEVEL_COLOR_MAPPING[record.levelno].value
bold = LEVEL_BOLD_MAPPING[record.levelno]
except KeyError:
pass
else:
message = "\033[%u;%um%s\033[0m" % (int(bold), 30 + color_code, message)
return message
|
5383572c1cb21ae83aec422eda5aede0ed073438
|
lib/ansiblelint/rules/NoFormattingInWhenRule.py
|
lib/ansiblelint/rules/NoFormattingInWhenRule.py
|
from ansiblelint import AnsibleLintRule
try:
from types import StringTypes
except ImportError:
# Python3 removed types.StringTypes
StringTypes = str,
class NoFormattingInWhenRule(AnsibleLintRule):
id = 'CINCH0001'
shortdesc = 'No Jinja2 in when'
description = '"when" lines should not include Jinja2 variables'
tags = ['deprecated']
def _is_valid(self, when):
if not isinstance(when, StringTypes):
return True
return when.find('{{') == -1 and when.find('}}') == -1
def matchplay(self, file, play):
errors = []
if isinstance(play, dict):
if 'roles' not in play:
return errors
for role in play['roles']:
if self.matchtask(file, role):
errors.append(({'when': role},
'role "when" clause has Jinja2 templates'))
if isinstance(play, list):
for play_item in play:
sub_errors = self.matchplay(file, play_item)
if sub_errors:
errors = errors + sub_errors
return errors
def matchtask(self, file, task):
return 'when' in task and not self._is_valid(task['when'])
|
from ansiblelint import AnsibleLintRule
try:
from types import StringTypes
except ImportError:
# Python3 removed types.StringTypes
StringTypes = str,
class NoFormattingInWhenRule(AnsibleLintRule):
id = 'ANSIBLE0019'
shortdesc = 'No Jinja2 in when'
description = '"when" lines should not include Jinja2 variables'
tags = ['deprecated']
def _is_valid(self, when):
if not isinstance(when, StringTypes):
return True
return when.find('{{') == -1 and when.find('}}') == -1
def matchplay(self, file, play):
errors = []
if isinstance(play, dict):
if 'roles' not in play:
return errors
for role in play['roles']:
if self.matchtask(file, role):
errors.append(({'when': role},
'role "when" clause has Jinja2 templates'))
if isinstance(play, list):
for play_item in play:
sub_errors = self.matchplay(file, play_item)
if sub_errors:
errors = errors + sub_errors
return errors
def matchtask(self, file, task):
return 'when' in task and not self._is_valid(task['when'])
|
Update ID to match others
|
Update ID to match others
|
Python
|
mit
|
dataxu/ansible-lint,MatrixCrawler/ansible-lint,willthames/ansible-lint
|
from ansiblelint import AnsibleLintRule
try:
from types import StringTypes
except ImportError:
# Python3 removed types.StringTypes
StringTypes = str,
class NoFormattingInWhenRule(AnsibleLintRule):
id = 'CINCH0001'
shortdesc = 'No Jinja2 in when'
description = '"when" lines should not include Jinja2 variables'
tags = ['deprecated']
def _is_valid(self, when):
if not isinstance(when, StringTypes):
return True
return when.find('{{') == -1 and when.find('}}') == -1
def matchplay(self, file, play):
errors = []
if isinstance(play, dict):
if 'roles' not in play:
return errors
for role in play['roles']:
if self.matchtask(file, role):
errors.append(({'when': role},
'role "when" clause has Jinja2 templates'))
if isinstance(play, list):
for play_item in play:
sub_errors = self.matchplay(file, play_item)
if sub_errors:
errors = errors + sub_errors
return errors
def matchtask(self, file, task):
return 'when' in task and not self._is_valid(task['when'])
Update ID to match others
|
from ansiblelint import AnsibleLintRule
try:
from types import StringTypes
except ImportError:
# Python3 removed types.StringTypes
StringTypes = str,
class NoFormattingInWhenRule(AnsibleLintRule):
id = 'ANSIBLE0019'
shortdesc = 'No Jinja2 in when'
description = '"when" lines should not include Jinja2 variables'
tags = ['deprecated']
def _is_valid(self, when):
if not isinstance(when, StringTypes):
return True
return when.find('{{') == -1 and when.find('}}') == -1
def matchplay(self, file, play):
errors = []
if isinstance(play, dict):
if 'roles' not in play:
return errors
for role in play['roles']:
if self.matchtask(file, role):
errors.append(({'when': role},
'role "when" clause has Jinja2 templates'))
if isinstance(play, list):
for play_item in play:
sub_errors = self.matchplay(file, play_item)
if sub_errors:
errors = errors + sub_errors
return errors
def matchtask(self, file, task):
return 'when' in task and not self._is_valid(task['when'])
|
<commit_before>from ansiblelint import AnsibleLintRule
try:
from types import StringTypes
except ImportError:
# Python3 removed types.StringTypes
StringTypes = str,
class NoFormattingInWhenRule(AnsibleLintRule):
id = 'CINCH0001'
shortdesc = 'No Jinja2 in when'
description = '"when" lines should not include Jinja2 variables'
tags = ['deprecated']
def _is_valid(self, when):
if not isinstance(when, StringTypes):
return True
return when.find('{{') == -1 and when.find('}}') == -1
def matchplay(self, file, play):
errors = []
if isinstance(play, dict):
if 'roles' not in play:
return errors
for role in play['roles']:
if self.matchtask(file, role):
errors.append(({'when': role},
'role "when" clause has Jinja2 templates'))
if isinstance(play, list):
for play_item in play:
sub_errors = self.matchplay(file, play_item)
if sub_errors:
errors = errors + sub_errors
return errors
def matchtask(self, file, task):
return 'when' in task and not self._is_valid(task['when'])
<commit_msg>Update ID to match others<commit_after>
|
from ansiblelint import AnsibleLintRule
try:
from types import StringTypes
except ImportError:
# Python3 removed types.StringTypes
StringTypes = str,
class NoFormattingInWhenRule(AnsibleLintRule):
id = 'ANSIBLE0019'
shortdesc = 'No Jinja2 in when'
description = '"when" lines should not include Jinja2 variables'
tags = ['deprecated']
def _is_valid(self, when):
if not isinstance(when, StringTypes):
return True
return when.find('{{') == -1 and when.find('}}') == -1
def matchplay(self, file, play):
errors = []
if isinstance(play, dict):
if 'roles' not in play:
return errors
for role in play['roles']:
if self.matchtask(file, role):
errors.append(({'when': role},
'role "when" clause has Jinja2 templates'))
if isinstance(play, list):
for play_item in play:
sub_errors = self.matchplay(file, play_item)
if sub_errors:
errors = errors + sub_errors
return errors
def matchtask(self, file, task):
return 'when' in task and not self._is_valid(task['when'])
|
from ansiblelint import AnsibleLintRule
try:
from types import StringTypes
except ImportError:
# Python3 removed types.StringTypes
StringTypes = str,
class NoFormattingInWhenRule(AnsibleLintRule):
id = 'CINCH0001'
shortdesc = 'No Jinja2 in when'
description = '"when" lines should not include Jinja2 variables'
tags = ['deprecated']
def _is_valid(self, when):
if not isinstance(when, StringTypes):
return True
return when.find('{{') == -1 and when.find('}}') == -1
def matchplay(self, file, play):
errors = []
if isinstance(play, dict):
if 'roles' not in play:
return errors
for role in play['roles']:
if self.matchtask(file, role):
errors.append(({'when': role},
'role "when" clause has Jinja2 templates'))
if isinstance(play, list):
for play_item in play:
sub_errors = self.matchplay(file, play_item)
if sub_errors:
errors = errors + sub_errors
return errors
def matchtask(self, file, task):
return 'when' in task and not self._is_valid(task['when'])
Update ID to match othersfrom ansiblelint import AnsibleLintRule
try:
from types import StringTypes
except ImportError:
# Python3 removed types.StringTypes
StringTypes = str,
class NoFormattingInWhenRule(AnsibleLintRule):
id = 'ANSIBLE0019'
shortdesc = 'No Jinja2 in when'
description = '"when" lines should not include Jinja2 variables'
tags = ['deprecated']
def _is_valid(self, when):
if not isinstance(when, StringTypes):
return True
return when.find('{{') == -1 and when.find('}}') == -1
def matchplay(self, file, play):
errors = []
if isinstance(play, dict):
if 'roles' not in play:
return errors
for role in play['roles']:
if self.matchtask(file, role):
errors.append(({'when': role},
'role "when" clause has Jinja2 templates'))
if isinstance(play, list):
for play_item in play:
sub_errors = self.matchplay(file, play_item)
if sub_errors:
errors = errors + sub_errors
return errors
def matchtask(self, file, task):
return 'when' in task and not self._is_valid(task['when'])
|
<commit_before>from ansiblelint import AnsibleLintRule
try:
from types import StringTypes
except ImportError:
# Python3 removed types.StringTypes
StringTypes = str,
class NoFormattingInWhenRule(AnsibleLintRule):
id = 'CINCH0001'
shortdesc = 'No Jinja2 in when'
description = '"when" lines should not include Jinja2 variables'
tags = ['deprecated']
def _is_valid(self, when):
if not isinstance(when, StringTypes):
return True
return when.find('{{') == -1 and when.find('}}') == -1
def matchplay(self, file, play):
errors = []
if isinstance(play, dict):
if 'roles' not in play:
return errors
for role in play['roles']:
if self.matchtask(file, role):
errors.append(({'when': role},
'role "when" clause has Jinja2 templates'))
if isinstance(play, list):
for play_item in play:
sub_errors = self.matchplay(file, play_item)
if sub_errors:
errors = errors + sub_errors
return errors
def matchtask(self, file, task):
return 'when' in task and not self._is_valid(task['when'])
<commit_msg>Update ID to match others<commit_after>from ansiblelint import AnsibleLintRule
try:
from types import StringTypes
except ImportError:
# Python3 removed types.StringTypes
StringTypes = str,
class NoFormattingInWhenRule(AnsibleLintRule):
id = 'ANSIBLE0019'
shortdesc = 'No Jinja2 in when'
description = '"when" lines should not include Jinja2 variables'
tags = ['deprecated']
def _is_valid(self, when):
if not isinstance(when, StringTypes):
return True
return when.find('{{') == -1 and when.find('}}') == -1
def matchplay(self, file, play):
errors = []
if isinstance(play, dict):
if 'roles' not in play:
return errors
for role in play['roles']:
if self.matchtask(file, role):
errors.append(({'when': role},
'role "when" clause has Jinja2 templates'))
if isinstance(play, list):
for play_item in play:
sub_errors = self.matchplay(file, play_item)
if sub_errors:
errors = errors + sub_errors
return errors
def matchtask(self, file, task):
return 'when' in task and not self._is_valid(task['when'])
|
d0f022f393152a6850f6f33f3b1ad88cc2492b24
|
dockwidgets.py
|
dockwidgets.py
|
from PyQt5.QtWidgets import QDockWidget, QTreeView
class WorkerDockWidget(QDockWidget):
def __init__(self, title="Dock Widget", parent=None, flags=None):
super().__init__(title)
self.workerTree = QTreeView(self)
|
from PyQt5.QtWidgets import QDockWidget, QTreeWidget, QWidget, QGridLayout, QFormLayout, QPushButton, QComboBox, QSizePolicy, QFrame
from PyQt5.QtCore import Qt
class WorkerDockWidget(QDockWidget):
def __init__(self):
super().__init__("Workers")
#self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
# Create main widget for content and layout of Dockwidget
self.mainWidget = QWidget()
self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
self.mainWidgetLayout = QGridLayout(self.mainWidget)
self.mainWidgetLayout.setSizeConstraint(QGridLayout.SetDefaultConstraint)
# - Create frame for button and entry
self.newConnWidget = QWidget(self.mainWidget)
#self.newConnWidget.setFrameStyle(QFrame.Panel | QFrame.Raised)
self.newConnWidget.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
self.newConnWidgetLayout = QFormLayout(self.newConnWidget)
self.newConnWidgetLayout.setContentsMargins(0, 0, 0, 0)
self.newConnButton = QPushButton(self.newConnWidget)
self.newConnButton.setText("Connect")
self.newConnButton.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
self.newConnWidgetLayout.setWidget(0, QFormLayout.LabelRole, self.newConnButton)
self.newConnCombobox = QComboBox(self.newConnWidget)
self.newConnCombobox.setEditable(True)
self.newConnCombobox.setToolTip("Enter ip to connect to.\nEntry format: ip:port\n(if port is omitted, default is used)")
self.newConnCombobox.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
self.newConnWidgetLayout.setWidget(0, QFormLayout.FieldRole, self.newConnCombobox)
self.mainWidgetLayout.addWidget(self.newConnWidget, 0, 0, 1, 1)
# - Add worker treeview to content
self.workerTree = QTreeWidget(self.mainWidget)
self.workerTree.setColumnCount(1)
self.workerTree.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.mainWidgetLayout.addWidget(self.workerTree, 1, 0, 1, 1)
# Set dockwidget content to main widget
self.setWidget(self.mainWidget)
|
Add some UI and logic for handling workers
|
Add some UI and logic for handling workers
|
Python
|
mit
|
DrLuke/gpnshader
|
from PyQt5.QtWidgets import QDockWidget, QTreeView
class WorkerDockWidget(QDockWidget):
def __init__(self, title="Dock Widget", parent=None, flags=None):
super().__init__(title)
self.workerTree = QTreeView(self)
Add some UI and logic for handling workers
|
from PyQt5.QtWidgets import QDockWidget, QTreeWidget, QWidget, QGridLayout, QFormLayout, QPushButton, QComboBox, QSizePolicy, QFrame
from PyQt5.QtCore import Qt
class WorkerDockWidget(QDockWidget):
def __init__(self):
super().__init__("Workers")
#self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
# Create main widget for content and layout of Dockwidget
self.mainWidget = QWidget()
self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
self.mainWidgetLayout = QGridLayout(self.mainWidget)
self.mainWidgetLayout.setSizeConstraint(QGridLayout.SetDefaultConstraint)
# - Create frame for button and entry
self.newConnWidget = QWidget(self.mainWidget)
#self.newConnWidget.setFrameStyle(QFrame.Panel | QFrame.Raised)
self.newConnWidget.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
self.newConnWidgetLayout = QFormLayout(self.newConnWidget)
self.newConnWidgetLayout.setContentsMargins(0, 0, 0, 0)
self.newConnButton = QPushButton(self.newConnWidget)
self.newConnButton.setText("Connect")
self.newConnButton.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
self.newConnWidgetLayout.setWidget(0, QFormLayout.LabelRole, self.newConnButton)
self.newConnCombobox = QComboBox(self.newConnWidget)
self.newConnCombobox.setEditable(True)
self.newConnCombobox.setToolTip("Enter ip to connect to.\nEntry format: ip:port\n(if port is omitted, default is used)")
self.newConnCombobox.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
self.newConnWidgetLayout.setWidget(0, QFormLayout.FieldRole, self.newConnCombobox)
self.mainWidgetLayout.addWidget(self.newConnWidget, 0, 0, 1, 1)
# - Add worker treeview to content
self.workerTree = QTreeWidget(self.mainWidget)
self.workerTree.setColumnCount(1)
self.workerTree.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.mainWidgetLayout.addWidget(self.workerTree, 1, 0, 1, 1)
# Set dockwidget content to main widget
self.setWidget(self.mainWidget)
|
<commit_before>from PyQt5.QtWidgets import QDockWidget, QTreeView
class WorkerDockWidget(QDockWidget):
def __init__(self, title="Dock Widget", parent=None, flags=None):
super().__init__(title)
self.workerTree = QTreeView(self)
<commit_msg>Add some UI and logic for handling workers<commit_after>
|
from PyQt5.QtWidgets import QDockWidget, QTreeWidget, QWidget, QGridLayout, QFormLayout, QPushButton, QComboBox, QSizePolicy, QFrame
from PyQt5.QtCore import Qt
class WorkerDockWidget(QDockWidget):
def __init__(self):
super().__init__("Workers")
#self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
# Create main widget for content and layout of Dockwidget
self.mainWidget = QWidget()
self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
self.mainWidgetLayout = QGridLayout(self.mainWidget)
self.mainWidgetLayout.setSizeConstraint(QGridLayout.SetDefaultConstraint)
# - Create frame for button and entry
self.newConnWidget = QWidget(self.mainWidget)
#self.newConnWidget.setFrameStyle(QFrame.Panel | QFrame.Raised)
self.newConnWidget.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
self.newConnWidgetLayout = QFormLayout(self.newConnWidget)
self.newConnWidgetLayout.setContentsMargins(0, 0, 0, 0)
self.newConnButton = QPushButton(self.newConnWidget)
self.newConnButton.setText("Connect")
self.newConnButton.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
self.newConnWidgetLayout.setWidget(0, QFormLayout.LabelRole, self.newConnButton)
self.newConnCombobox = QComboBox(self.newConnWidget)
self.newConnCombobox.setEditable(True)
self.newConnCombobox.setToolTip("Enter ip to connect to.\nEntry format: ip:port\n(if port is omitted, default is used)")
self.newConnCombobox.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
self.newConnWidgetLayout.setWidget(0, QFormLayout.FieldRole, self.newConnCombobox)
self.mainWidgetLayout.addWidget(self.newConnWidget, 0, 0, 1, 1)
# - Add worker treeview to content
self.workerTree = QTreeWidget(self.mainWidget)
self.workerTree.setColumnCount(1)
self.workerTree.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.mainWidgetLayout.addWidget(self.workerTree, 1, 0, 1, 1)
# Set dockwidget content to main widget
self.setWidget(self.mainWidget)
|
from PyQt5.QtWidgets import QDockWidget, QTreeView
class WorkerDockWidget(QDockWidget):
def __init__(self, title="Dock Widget", parent=None, flags=None):
super().__init__(title)
self.workerTree = QTreeView(self)
Add some UI and logic for handling workersfrom PyQt5.QtWidgets import QDockWidget, QTreeWidget, QWidget, QGridLayout, QFormLayout, QPushButton, QComboBox, QSizePolicy, QFrame
from PyQt5.QtCore import Qt
class WorkerDockWidget(QDockWidget):
def __init__(self):
super().__init__("Workers")
#self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
# Create main widget for content and layout of Dockwidget
self.mainWidget = QWidget()
self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
self.mainWidgetLayout = QGridLayout(self.mainWidget)
self.mainWidgetLayout.setSizeConstraint(QGridLayout.SetDefaultConstraint)
# - Create frame for button and entry
self.newConnWidget = QWidget(self.mainWidget)
#self.newConnWidget.setFrameStyle(QFrame.Panel | QFrame.Raised)
self.newConnWidget.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
self.newConnWidgetLayout = QFormLayout(self.newConnWidget)
self.newConnWidgetLayout.setContentsMargins(0, 0, 0, 0)
self.newConnButton = QPushButton(self.newConnWidget)
self.newConnButton.setText("Connect")
self.newConnButton.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
self.newConnWidgetLayout.setWidget(0, QFormLayout.LabelRole, self.newConnButton)
self.newConnCombobox = QComboBox(self.newConnWidget)
self.newConnCombobox.setEditable(True)
self.newConnCombobox.setToolTip("Enter ip to connect to.\nEntry format: ip:port\n(if port is omitted, default is used)")
self.newConnCombobox.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
self.newConnWidgetLayout.setWidget(0, QFormLayout.FieldRole, self.newConnCombobox)
self.mainWidgetLayout.addWidget(self.newConnWidget, 0, 0, 1, 1)
# - Add worker treeview to content
self.workerTree = QTreeWidget(self.mainWidget)
self.workerTree.setColumnCount(1)
self.workerTree.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.mainWidgetLayout.addWidget(self.workerTree, 1, 0, 1, 1)
# Set dockwidget content to main widget
self.setWidget(self.mainWidget)
|
<commit_before>from PyQt5.QtWidgets import QDockWidget, QTreeView
class WorkerDockWidget(QDockWidget):
def __init__(self, title="Dock Widget", parent=None, flags=None):
super().__init__(title)
self.workerTree = QTreeView(self)
<commit_msg>Add some UI and logic for handling workers<commit_after>from PyQt5.QtWidgets import QDockWidget, QTreeWidget, QWidget, QGridLayout, QFormLayout, QPushButton, QComboBox, QSizePolicy, QFrame
from PyQt5.QtCore import Qt
class WorkerDockWidget(QDockWidget):
def __init__(self):
super().__init__("Workers")
#self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
# Create main widget for content and layout of Dockwidget
self.mainWidget = QWidget()
self.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
self.mainWidgetLayout = QGridLayout(self.mainWidget)
self.mainWidgetLayout.setSizeConstraint(QGridLayout.SetDefaultConstraint)
# - Create frame for button and entry
self.newConnWidget = QWidget(self.mainWidget)
#self.newConnWidget.setFrameStyle(QFrame.Panel | QFrame.Raised)
self.newConnWidget.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Preferred)
self.newConnWidgetLayout = QFormLayout(self.newConnWidget)
self.newConnWidgetLayout.setContentsMargins(0, 0, 0, 0)
self.newConnButton = QPushButton(self.newConnWidget)
self.newConnButton.setText("Connect")
self.newConnButton.setSizePolicy(QSizePolicy.Minimum, QSizePolicy.Fixed)
self.newConnWidgetLayout.setWidget(0, QFormLayout.LabelRole, self.newConnButton)
self.newConnCombobox = QComboBox(self.newConnWidget)
self.newConnCombobox.setEditable(True)
self.newConnCombobox.setToolTip("Enter ip to connect to.\nEntry format: ip:port\n(if port is omitted, default is used)")
self.newConnCombobox.setSizePolicy(QSizePolicy.Preferred, QSizePolicy.Fixed)
self.newConnWidgetLayout.setWidget(0, QFormLayout.FieldRole, self.newConnCombobox)
self.mainWidgetLayout.addWidget(self.newConnWidget, 0, 0, 1, 1)
# - Add worker treeview to content
self.workerTree = QTreeWidget(self.mainWidget)
self.workerTree.setColumnCount(1)
self.workerTree.setSizePolicy(QSizePolicy.Expanding, QSizePolicy.Expanding)
self.mainWidgetLayout.addWidget(self.workerTree, 1, 0, 1, 1)
# Set dockwidget content to main widget
self.setWidget(self.mainWidget)
|
29110323469d20ff1e481ab2267812afd8e0a3a4
|
more/chameleon/main.py
|
more/chameleon/main.py
|
import morepath
import chameleon
class ChameleonApp(morepath.App):
pass
@ChameleonApp.setting_section(section='chameleon')
def get_setting_section():
return {'auto_reload': False}
@ChameleonApp.template_engine(extension='.pt')
def get_chameleon_render(path, original_render, settings):
config = settings.chameleon.__dict__
template = chameleon.PageTemplateFile(path, **config)
def render(content, request):
variables = {'request': request}
variables.update(content)
return original_render(template.render(**variables), request)
return render
|
import os
import morepath
import chameleon
class ChameleonApp(morepath.App):
pass
@ChameleonApp.setting_section(section='chameleon')
def get_setting_section():
return {'auto_reload': False}
@ChameleonApp.template_engine(extension='.pt')
def get_chameleon_render(name, original_render, registry, search_path):
config = registry.settings.chameleon.__dict__
template = chameleon.PageTemplateFile(os.path.join(search_path, name),
**config)
def render(content, request):
variables = {'request': request}
variables.update(content)
return original_render(template.render(**variables), request)
return render
|
Adjust to modifications in Morepath. But now to enable real explicit file support.
|
Adjust to modifications in Morepath. But now to enable real
explicit file support.
|
Python
|
bsd-3-clause
|
morepath/more.chameleon
|
import morepath
import chameleon
class ChameleonApp(morepath.App):
pass
@ChameleonApp.setting_section(section='chameleon')
def get_setting_section():
return {'auto_reload': False}
@ChameleonApp.template_engine(extension='.pt')
def get_chameleon_render(path, original_render, settings):
config = settings.chameleon.__dict__
template = chameleon.PageTemplateFile(path, **config)
def render(content, request):
variables = {'request': request}
variables.update(content)
return original_render(template.render(**variables), request)
return render
Adjust to modifications in Morepath. But now to enable real
explicit file support.
|
import os
import morepath
import chameleon
class ChameleonApp(morepath.App):
pass
@ChameleonApp.setting_section(section='chameleon')
def get_setting_section():
return {'auto_reload': False}
@ChameleonApp.template_engine(extension='.pt')
def get_chameleon_render(name, original_render, registry, search_path):
config = registry.settings.chameleon.__dict__
template = chameleon.PageTemplateFile(os.path.join(search_path, name),
**config)
def render(content, request):
variables = {'request': request}
variables.update(content)
return original_render(template.render(**variables), request)
return render
|
<commit_before>import morepath
import chameleon
class ChameleonApp(morepath.App):
pass
@ChameleonApp.setting_section(section='chameleon')
def get_setting_section():
return {'auto_reload': False}
@ChameleonApp.template_engine(extension='.pt')
def get_chameleon_render(path, original_render, settings):
config = settings.chameleon.__dict__
template = chameleon.PageTemplateFile(path, **config)
def render(content, request):
variables = {'request': request}
variables.update(content)
return original_render(template.render(**variables), request)
return render
<commit_msg>Adjust to modifications in Morepath. But now to enable real
explicit file support.<commit_after>
|
import os
import morepath
import chameleon
class ChameleonApp(morepath.App):
pass
@ChameleonApp.setting_section(section='chameleon')
def get_setting_section():
return {'auto_reload': False}
@ChameleonApp.template_engine(extension='.pt')
def get_chameleon_render(name, original_render, registry, search_path):
config = registry.settings.chameleon.__dict__
template = chameleon.PageTemplateFile(os.path.join(search_path, name),
**config)
def render(content, request):
variables = {'request': request}
variables.update(content)
return original_render(template.render(**variables), request)
return render
|
import morepath
import chameleon
class ChameleonApp(morepath.App):
pass
@ChameleonApp.setting_section(section='chameleon')
def get_setting_section():
return {'auto_reload': False}
@ChameleonApp.template_engine(extension='.pt')
def get_chameleon_render(path, original_render, settings):
config = settings.chameleon.__dict__
template = chameleon.PageTemplateFile(path, **config)
def render(content, request):
variables = {'request': request}
variables.update(content)
return original_render(template.render(**variables), request)
return render
Adjust to modifications in Morepath. But now to enable real
explicit file support.import os
import morepath
import chameleon
class ChameleonApp(morepath.App):
pass
@ChameleonApp.setting_section(section='chameleon')
def get_setting_section():
return {'auto_reload': False}
@ChameleonApp.template_engine(extension='.pt')
def get_chameleon_render(name, original_render, registry, search_path):
config = registry.settings.chameleon.__dict__
template = chameleon.PageTemplateFile(os.path.join(search_path, name),
**config)
def render(content, request):
variables = {'request': request}
variables.update(content)
return original_render(template.render(**variables), request)
return render
|
<commit_before>import morepath
import chameleon
class ChameleonApp(morepath.App):
pass
@ChameleonApp.setting_section(section='chameleon')
def get_setting_section():
return {'auto_reload': False}
@ChameleonApp.template_engine(extension='.pt')
def get_chameleon_render(path, original_render, settings):
config = settings.chameleon.__dict__
template = chameleon.PageTemplateFile(path, **config)
def render(content, request):
variables = {'request': request}
variables.update(content)
return original_render(template.render(**variables), request)
return render
<commit_msg>Adjust to modifications in Morepath. But now to enable real
explicit file support.<commit_after>import os
import morepath
import chameleon
class ChameleonApp(morepath.App):
pass
@ChameleonApp.setting_section(section='chameleon')
def get_setting_section():
return {'auto_reload': False}
@ChameleonApp.template_engine(extension='.pt')
def get_chameleon_render(name, original_render, registry, search_path):
config = registry.settings.chameleon.__dict__
template = chameleon.PageTemplateFile(os.path.join(search_path, name),
**config)
def render(content, request):
variables = {'request': request}
variables.update(content)
return original_render(template.render(**variables), request)
return render
|
1cf5660d9661646b3d8731986d7581ad27582d77
|
djclick/params.py
|
djclick/params.py
|
import click
from django.core.exceptions import ObjectDoesNotExist
class ModelInstance(click.ParamType):
def __init__(self, qs):
from django.db import models
if isinstance(qs, type) and issubclass(qs, models.Model):
qs = qs.objects.all()
self.qs = qs
self.name = '{}.{}'.format(
qs.model._meta.app_label,
qs.model.__name__,
)
def convert(self, value, param, ctx):
try:
return self.qs.get(pk=value)
except ObjectDoesNotExist:
msg = 'could not find {} with pk={}'.format(self.name, value)
self.fail(msg, param, ctx)
|
import click
from django.core.exceptions import ObjectDoesNotExist
class ModelInstance(click.ParamType):
def __init__(self, qs):
from django.db import models
if isinstance(qs, type) and issubclass(qs, models.Model):
qs = qs.objects.all()
self.qs = qs
self.name = '{}.{}'.format(
qs.model._meta.app_label,
qs.model.__name__,
)
def convert(self, value, param, ctx):
try:
return self.qs.get(pk=value)
except ObjectDoesNotExist:
pass
# call `fail` outside of exception context to avoid nested exception
# handling on Python 3
msg = 'could not find {} with pk={}'.format(self.name, value)
self.fail(msg, param, ctx)
|
Fix failing test on Python 3
|
Fix failing test on Python 3
|
Python
|
mit
|
GaretJax/django-click
|
import click
from django.core.exceptions import ObjectDoesNotExist
class ModelInstance(click.ParamType):
def __init__(self, qs):
from django.db import models
if isinstance(qs, type) and issubclass(qs, models.Model):
qs = qs.objects.all()
self.qs = qs
self.name = '{}.{}'.format(
qs.model._meta.app_label,
qs.model.__name__,
)
def convert(self, value, param, ctx):
try:
return self.qs.get(pk=value)
except ObjectDoesNotExist:
msg = 'could not find {} with pk={}'.format(self.name, value)
self.fail(msg, param, ctx)
Fix failing test on Python 3
|
import click
from django.core.exceptions import ObjectDoesNotExist
class ModelInstance(click.ParamType):
def __init__(self, qs):
from django.db import models
if isinstance(qs, type) and issubclass(qs, models.Model):
qs = qs.objects.all()
self.qs = qs
self.name = '{}.{}'.format(
qs.model._meta.app_label,
qs.model.__name__,
)
def convert(self, value, param, ctx):
try:
return self.qs.get(pk=value)
except ObjectDoesNotExist:
pass
# call `fail` outside of exception context to avoid nested exception
# handling on Python 3
msg = 'could not find {} with pk={}'.format(self.name, value)
self.fail(msg, param, ctx)
|
<commit_before>import click
from django.core.exceptions import ObjectDoesNotExist
class ModelInstance(click.ParamType):
def __init__(self, qs):
from django.db import models
if isinstance(qs, type) and issubclass(qs, models.Model):
qs = qs.objects.all()
self.qs = qs
self.name = '{}.{}'.format(
qs.model._meta.app_label,
qs.model.__name__,
)
def convert(self, value, param, ctx):
try:
return self.qs.get(pk=value)
except ObjectDoesNotExist:
msg = 'could not find {} with pk={}'.format(self.name, value)
self.fail(msg, param, ctx)
<commit_msg>Fix failing test on Python 3<commit_after>
|
import click
from django.core.exceptions import ObjectDoesNotExist
class ModelInstance(click.ParamType):
def __init__(self, qs):
from django.db import models
if isinstance(qs, type) and issubclass(qs, models.Model):
qs = qs.objects.all()
self.qs = qs
self.name = '{}.{}'.format(
qs.model._meta.app_label,
qs.model.__name__,
)
def convert(self, value, param, ctx):
try:
return self.qs.get(pk=value)
except ObjectDoesNotExist:
pass
# call `fail` outside of exception context to avoid nested exception
# handling on Python 3
msg = 'could not find {} with pk={}'.format(self.name, value)
self.fail(msg, param, ctx)
|
import click
from django.core.exceptions import ObjectDoesNotExist
class ModelInstance(click.ParamType):
def __init__(self, qs):
from django.db import models
if isinstance(qs, type) and issubclass(qs, models.Model):
qs = qs.objects.all()
self.qs = qs
self.name = '{}.{}'.format(
qs.model._meta.app_label,
qs.model.__name__,
)
def convert(self, value, param, ctx):
try:
return self.qs.get(pk=value)
except ObjectDoesNotExist:
msg = 'could not find {} with pk={}'.format(self.name, value)
self.fail(msg, param, ctx)
Fix failing test on Python 3import click
from django.core.exceptions import ObjectDoesNotExist
class ModelInstance(click.ParamType):
def __init__(self, qs):
from django.db import models
if isinstance(qs, type) and issubclass(qs, models.Model):
qs = qs.objects.all()
self.qs = qs
self.name = '{}.{}'.format(
qs.model._meta.app_label,
qs.model.__name__,
)
def convert(self, value, param, ctx):
try:
return self.qs.get(pk=value)
except ObjectDoesNotExist:
pass
# call `fail` outside of exception context to avoid nested exception
# handling on Python 3
msg = 'could not find {} with pk={}'.format(self.name, value)
self.fail(msg, param, ctx)
|
<commit_before>import click
from django.core.exceptions import ObjectDoesNotExist
class ModelInstance(click.ParamType):
def __init__(self, qs):
from django.db import models
if isinstance(qs, type) and issubclass(qs, models.Model):
qs = qs.objects.all()
self.qs = qs
self.name = '{}.{}'.format(
qs.model._meta.app_label,
qs.model.__name__,
)
def convert(self, value, param, ctx):
try:
return self.qs.get(pk=value)
except ObjectDoesNotExist:
msg = 'could not find {} with pk={}'.format(self.name, value)
self.fail(msg, param, ctx)
<commit_msg>Fix failing test on Python 3<commit_after>import click
from django.core.exceptions import ObjectDoesNotExist
class ModelInstance(click.ParamType):
def __init__(self, qs):
from django.db import models
if isinstance(qs, type) and issubclass(qs, models.Model):
qs = qs.objects.all()
self.qs = qs
self.name = '{}.{}'.format(
qs.model._meta.app_label,
qs.model.__name__,
)
def convert(self, value, param, ctx):
try:
return self.qs.get(pk=value)
except ObjectDoesNotExist:
pass
# call `fail` outside of exception context to avoid nested exception
# handling on Python 3
msg = 'could not find {} with pk={}'.format(self.name, value)
self.fail(msg, param, ctx)
|
20279983ce2817bf7e75490d85823126ca2c1aed
|
pande_gas/features/basic.py
|
pande_gas/features/basic.py
|
"""
Basic molecular features.
"""
__author__ = "Steven Kearnes"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "BSD 3-clause"
from rdkit.Chem import Descriptors
from pande_gas.features import Featurizer
class MolecularWeight(Featurizer):
"""
Molecular weight.
"""
name = ['mw', 'molecular_weight']
def _featurize(self, mol):
"""
Calculate molecular weight.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
wt = Descriptors.ExactMolWt(mol)
wt = [wt]
return wt
class SimpleDescriptors(Featurizer):
"""
RDKit descriptors.
See http://rdkit.org/docs/GettingStartedInPython.html
#list-of-available-descriptors.
"""
name = 'descriptors'
def __init__(self):
self.descriptors = []
self.functions = []
for descriptor, function in Descriptors.descList:
self.descriptors.append(descriptor)
self.functions.append(function)
def _featurize(self, mol):
"""
Calculate RDKit descriptors.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
descriptors = []
for function in self.functions:
descriptors.append(function(mol))
return descriptors
|
"""
Basic molecular features.
"""
__author__ = "Steven Kearnes"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "BSD 3-clause"
from rdkit.Chem import Descriptors
from pande_gas.features import Featurizer
class MolecularWeight(Featurizer):
"""
Molecular weight.
"""
name = ['mw', 'molecular_weight']
def _featurize(self, mol):
"""
Calculate molecular weight.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
wt = Descriptors.ExactMolWt(mol)
wt = [wt]
return wt
class SimpleDescriptors(Featurizer):
"""
RDKit descriptors.
See http://rdkit.org/docs/GettingStartedInPython.html
#list-of-available-descriptors.
"""
name = 'descriptors'
def __init__(self):
self.descriptors = []
self.functions = []
for descriptor, function in Descriptors.descList:
self.descriptors.append(descriptor)
self.functions.append(function)
def _featurize(self, mol):
"""
Calculate RDKit descriptors.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
rval = []
for function in self.functions:
rval.append(function(mol))
return rval
|
Rename descriptors -> rval to avoid confusion
|
Rename descriptors -> rval to avoid confusion
|
Python
|
bsd-3-clause
|
rbharath/pande-gas,rbharath/pande-gas
|
"""
Basic molecular features.
"""
__author__ = "Steven Kearnes"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "BSD 3-clause"
from rdkit.Chem import Descriptors
from pande_gas.features import Featurizer
class MolecularWeight(Featurizer):
"""
Molecular weight.
"""
name = ['mw', 'molecular_weight']
def _featurize(self, mol):
"""
Calculate molecular weight.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
wt = Descriptors.ExactMolWt(mol)
wt = [wt]
return wt
class SimpleDescriptors(Featurizer):
"""
RDKit descriptors.
See http://rdkit.org/docs/GettingStartedInPython.html
#list-of-available-descriptors.
"""
name = 'descriptors'
def __init__(self):
self.descriptors = []
self.functions = []
for descriptor, function in Descriptors.descList:
self.descriptors.append(descriptor)
self.functions.append(function)
def _featurize(self, mol):
"""
Calculate RDKit descriptors.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
descriptors = []
for function in self.functions:
descriptors.append(function(mol))
return descriptors
Rename descriptors -> rval to avoid confusion
|
"""
Basic molecular features.
"""
__author__ = "Steven Kearnes"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "BSD 3-clause"
from rdkit.Chem import Descriptors
from pande_gas.features import Featurizer
class MolecularWeight(Featurizer):
"""
Molecular weight.
"""
name = ['mw', 'molecular_weight']
def _featurize(self, mol):
"""
Calculate molecular weight.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
wt = Descriptors.ExactMolWt(mol)
wt = [wt]
return wt
class SimpleDescriptors(Featurizer):
"""
RDKit descriptors.
See http://rdkit.org/docs/GettingStartedInPython.html
#list-of-available-descriptors.
"""
name = 'descriptors'
def __init__(self):
self.descriptors = []
self.functions = []
for descriptor, function in Descriptors.descList:
self.descriptors.append(descriptor)
self.functions.append(function)
def _featurize(self, mol):
"""
Calculate RDKit descriptors.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
rval = []
for function in self.functions:
rval.append(function(mol))
return rval
|
<commit_before>"""
Basic molecular features.
"""
__author__ = "Steven Kearnes"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "BSD 3-clause"
from rdkit.Chem import Descriptors
from pande_gas.features import Featurizer
class MolecularWeight(Featurizer):
"""
Molecular weight.
"""
name = ['mw', 'molecular_weight']
def _featurize(self, mol):
"""
Calculate molecular weight.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
wt = Descriptors.ExactMolWt(mol)
wt = [wt]
return wt
class SimpleDescriptors(Featurizer):
"""
RDKit descriptors.
See http://rdkit.org/docs/GettingStartedInPython.html
#list-of-available-descriptors.
"""
name = 'descriptors'
def __init__(self):
self.descriptors = []
self.functions = []
for descriptor, function in Descriptors.descList:
self.descriptors.append(descriptor)
self.functions.append(function)
def _featurize(self, mol):
"""
Calculate RDKit descriptors.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
descriptors = []
for function in self.functions:
descriptors.append(function(mol))
return descriptors
<commit_msg>Rename descriptors -> rval to avoid confusion<commit_after>
|
"""
Basic molecular features.
"""
__author__ = "Steven Kearnes"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "BSD 3-clause"
from rdkit.Chem import Descriptors
from pande_gas.features import Featurizer
class MolecularWeight(Featurizer):
"""
Molecular weight.
"""
name = ['mw', 'molecular_weight']
def _featurize(self, mol):
"""
Calculate molecular weight.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
wt = Descriptors.ExactMolWt(mol)
wt = [wt]
return wt
class SimpleDescriptors(Featurizer):
"""
RDKit descriptors.
See http://rdkit.org/docs/GettingStartedInPython.html
#list-of-available-descriptors.
"""
name = 'descriptors'
def __init__(self):
self.descriptors = []
self.functions = []
for descriptor, function in Descriptors.descList:
self.descriptors.append(descriptor)
self.functions.append(function)
def _featurize(self, mol):
"""
Calculate RDKit descriptors.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
rval = []
for function in self.functions:
rval.append(function(mol))
return rval
|
"""
Basic molecular features.
"""
__author__ = "Steven Kearnes"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "BSD 3-clause"
from rdkit.Chem import Descriptors
from pande_gas.features import Featurizer
class MolecularWeight(Featurizer):
"""
Molecular weight.
"""
name = ['mw', 'molecular_weight']
def _featurize(self, mol):
"""
Calculate molecular weight.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
wt = Descriptors.ExactMolWt(mol)
wt = [wt]
return wt
class SimpleDescriptors(Featurizer):
"""
RDKit descriptors.
See http://rdkit.org/docs/GettingStartedInPython.html
#list-of-available-descriptors.
"""
name = 'descriptors'
def __init__(self):
self.descriptors = []
self.functions = []
for descriptor, function in Descriptors.descList:
self.descriptors.append(descriptor)
self.functions.append(function)
def _featurize(self, mol):
"""
Calculate RDKit descriptors.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
descriptors = []
for function in self.functions:
descriptors.append(function(mol))
return descriptors
Rename descriptors -> rval to avoid confusion"""
Basic molecular features.
"""
__author__ = "Steven Kearnes"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "BSD 3-clause"
from rdkit.Chem import Descriptors
from pande_gas.features import Featurizer
class MolecularWeight(Featurizer):
"""
Molecular weight.
"""
name = ['mw', 'molecular_weight']
def _featurize(self, mol):
"""
Calculate molecular weight.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
wt = Descriptors.ExactMolWt(mol)
wt = [wt]
return wt
class SimpleDescriptors(Featurizer):
"""
RDKit descriptors.
See http://rdkit.org/docs/GettingStartedInPython.html
#list-of-available-descriptors.
"""
name = 'descriptors'
def __init__(self):
self.descriptors = []
self.functions = []
for descriptor, function in Descriptors.descList:
self.descriptors.append(descriptor)
self.functions.append(function)
def _featurize(self, mol):
"""
Calculate RDKit descriptors.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
rval = []
for function in self.functions:
rval.append(function(mol))
return rval
|
<commit_before>"""
Basic molecular features.
"""
__author__ = "Steven Kearnes"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "BSD 3-clause"
from rdkit.Chem import Descriptors
from pande_gas.features import Featurizer
class MolecularWeight(Featurizer):
"""
Molecular weight.
"""
name = ['mw', 'molecular_weight']
def _featurize(self, mol):
"""
Calculate molecular weight.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
wt = Descriptors.ExactMolWt(mol)
wt = [wt]
return wt
class SimpleDescriptors(Featurizer):
"""
RDKit descriptors.
See http://rdkit.org/docs/GettingStartedInPython.html
#list-of-available-descriptors.
"""
name = 'descriptors'
def __init__(self):
self.descriptors = []
self.functions = []
for descriptor, function in Descriptors.descList:
self.descriptors.append(descriptor)
self.functions.append(function)
def _featurize(self, mol):
"""
Calculate RDKit descriptors.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
descriptors = []
for function in self.functions:
descriptors.append(function(mol))
return descriptors
<commit_msg>Rename descriptors -> rval to avoid confusion<commit_after>"""
Basic molecular features.
"""
__author__ = "Steven Kearnes"
__copyright__ = "Copyright 2014, Stanford University"
__license__ = "BSD 3-clause"
from rdkit.Chem import Descriptors
from pande_gas.features import Featurizer
class MolecularWeight(Featurizer):
"""
Molecular weight.
"""
name = ['mw', 'molecular_weight']
def _featurize(self, mol):
"""
Calculate molecular weight.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
wt = Descriptors.ExactMolWt(mol)
wt = [wt]
return wt
class SimpleDescriptors(Featurizer):
"""
RDKit descriptors.
See http://rdkit.org/docs/GettingStartedInPython.html
#list-of-available-descriptors.
"""
name = 'descriptors'
def __init__(self):
self.descriptors = []
self.functions = []
for descriptor, function in Descriptors.descList:
self.descriptors.append(descriptor)
self.functions.append(function)
def _featurize(self, mol):
"""
Calculate RDKit descriptors.
Parameters
----------
mol : RDKit Mol
Molecule.
"""
rval = []
for function in self.functions:
rval.append(function(mol))
return rval
|
04efe96b9ee16b650970d7ddf0ce3a3dd82d55ea
|
forms.py
|
forms.py
|
from flask_wtf import Form
from wtforms.fields import StringField, HiddenField, BooleanField, DecimalField
from wtforms import validators
from flask import request
class DonateForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose a donation amount.")])
reason = StringField(u'Encouraged to contribute by')
installment_period = HiddenField(u'Installment Period')
installments = HiddenField(u'Installments')
openended_status = HiddenField(u'Openended Status')
description = HiddenField(u'Description')
pay_fees = BooleanField(u'Agree to pay fees')
pay_fees_value = HiddenField(u'Pay Fees Value')
class TexasWeeklyForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose an amount.")])
description = HiddenField(u'Description')
|
from flask_wtf import Form
from wtforms.fields import StringField, HiddenField, BooleanField, DecimalField
from wtforms import validators
from flask import request
class DonateForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose a donation amount."),
validators.NumberRange(min=1)])
reason = StringField(u'Encouraged to contribute by')
installment_period = HiddenField(u'Installment Period')
installments = HiddenField(u'Installments')
openended_status = HiddenField(u'Openended Status')
description = HiddenField(u'Description')
pay_fees = BooleanField(u'Agree to pay fees')
pay_fees_value = HiddenField(u'Pay Fees Value')
class TexasWeeklyForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose an amount."),
validators.NumberRange(min=1)])
description = HiddenField(u'Description')
|
Add minimal amount of 1 to form validation
|
Add minimal amount of 1 to form validation
|
Python
|
mit
|
MinnPost/salesforce-stripe,texastribune/salesforce-stripe,MinnPost/salesforce-stripe,texastribune/salesforce-stripe,MinnPost/salesforce-stripe,texastribune/salesforce-stripe
|
from flask_wtf import Form
from wtforms.fields import StringField, HiddenField, BooleanField, DecimalField
from wtforms import validators
from flask import request
class DonateForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose a donation amount.")])
reason = StringField(u'Encouraged to contribute by')
installment_period = HiddenField(u'Installment Period')
installments = HiddenField(u'Installments')
openended_status = HiddenField(u'Openended Status')
description = HiddenField(u'Description')
pay_fees = BooleanField(u'Agree to pay fees')
pay_fees_value = HiddenField(u'Pay Fees Value')
class TexasWeeklyForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose an amount.")])
description = HiddenField(u'Description')
Add minimal amount of 1 to form validation
|
from flask_wtf import Form
from wtforms.fields import StringField, HiddenField, BooleanField, DecimalField
from wtforms import validators
from flask import request
class DonateForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose a donation amount."),
validators.NumberRange(min=1)])
reason = StringField(u'Encouraged to contribute by')
installment_period = HiddenField(u'Installment Period')
installments = HiddenField(u'Installments')
openended_status = HiddenField(u'Openended Status')
description = HiddenField(u'Description')
pay_fees = BooleanField(u'Agree to pay fees')
pay_fees_value = HiddenField(u'Pay Fees Value')
class TexasWeeklyForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose an amount."),
validators.NumberRange(min=1)])
description = HiddenField(u'Description')
|
<commit_before>from flask_wtf import Form
from wtforms.fields import StringField, HiddenField, BooleanField, DecimalField
from wtforms import validators
from flask import request
class DonateForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose a donation amount.")])
reason = StringField(u'Encouraged to contribute by')
installment_period = HiddenField(u'Installment Period')
installments = HiddenField(u'Installments')
openended_status = HiddenField(u'Openended Status')
description = HiddenField(u'Description')
pay_fees = BooleanField(u'Agree to pay fees')
pay_fees_value = HiddenField(u'Pay Fees Value')
class TexasWeeklyForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose an amount.")])
description = HiddenField(u'Description')
<commit_msg>Add minimal amount of 1 to form validation<commit_after>
|
from flask_wtf import Form
from wtforms.fields import StringField, HiddenField, BooleanField, DecimalField
from wtforms import validators
from flask import request
class DonateForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose a donation amount."),
validators.NumberRange(min=1)])
reason = StringField(u'Encouraged to contribute by')
installment_period = HiddenField(u'Installment Period')
installments = HiddenField(u'Installments')
openended_status = HiddenField(u'Openended Status')
description = HiddenField(u'Description')
pay_fees = BooleanField(u'Agree to pay fees')
pay_fees_value = HiddenField(u'Pay Fees Value')
class TexasWeeklyForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose an amount."),
validators.NumberRange(min=1)])
description = HiddenField(u'Description')
|
from flask_wtf import Form
from wtforms.fields import StringField, HiddenField, BooleanField, DecimalField
from wtforms import validators
from flask import request
class DonateForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose a donation amount.")])
reason = StringField(u'Encouraged to contribute by')
installment_period = HiddenField(u'Installment Period')
installments = HiddenField(u'Installments')
openended_status = HiddenField(u'Openended Status')
description = HiddenField(u'Description')
pay_fees = BooleanField(u'Agree to pay fees')
pay_fees_value = HiddenField(u'Pay Fees Value')
class TexasWeeklyForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose an amount.")])
description = HiddenField(u'Description')
Add minimal amount of 1 to form validationfrom flask_wtf import Form
from wtforms.fields import StringField, HiddenField, BooleanField, DecimalField
from wtforms import validators
from flask import request
class DonateForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose a donation amount."),
validators.NumberRange(min=1)])
reason = StringField(u'Encouraged to contribute by')
installment_period = HiddenField(u'Installment Period')
installments = HiddenField(u'Installments')
openended_status = HiddenField(u'Openended Status')
description = HiddenField(u'Description')
pay_fees = BooleanField(u'Agree to pay fees')
pay_fees_value = HiddenField(u'Pay Fees Value')
class TexasWeeklyForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose an amount."),
validators.NumberRange(min=1)])
description = HiddenField(u'Description')
|
<commit_before>from flask_wtf import Form
from wtforms.fields import StringField, HiddenField, BooleanField, DecimalField
from wtforms import validators
from flask import request
class DonateForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose a donation amount.")])
reason = StringField(u'Encouraged to contribute by')
installment_period = HiddenField(u'Installment Period')
installments = HiddenField(u'Installments')
openended_status = HiddenField(u'Openended Status')
description = HiddenField(u'Description')
pay_fees = BooleanField(u'Agree to pay fees')
pay_fees_value = HiddenField(u'Pay Fees Value')
class TexasWeeklyForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose an amount.")])
description = HiddenField(u'Description')
<commit_msg>Add minimal amount of 1 to form validation<commit_after>from flask_wtf import Form
from wtforms.fields import StringField, HiddenField, BooleanField, DecimalField
from wtforms import validators
from flask import request
class DonateForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose a donation amount."),
validators.NumberRange(min=1)])
reason = StringField(u'Encouraged to contribute by')
installment_period = HiddenField(u'Installment Period')
installments = HiddenField(u'Installments')
openended_status = HiddenField(u'Openended Status')
description = HiddenField(u'Description')
pay_fees = BooleanField(u'Agree to pay fees')
pay_fees_value = HiddenField(u'Pay Fees Value')
class TexasWeeklyForm(Form):
first_name = StringField(u'First',
[validators.required(message="Your first name is required.")])
last_name = StringField(u'Last',
[validators.required(message="Your last name is required.")])
amount = DecimalField(u'Amount',
[validators.required(message="Please choose an amount."),
validators.NumberRange(min=1)])
description = HiddenField(u'Description')
|
4d0b5d54ecfde43dc898da03d5481f19943a65e1
|
renzongxian/0000/0000.py
|
renzongxian/0000/0000.py
|
# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
第 0000 题:将你的 QQ 头像(或者微博头像)右上角加上红色的数字,类似于微信未读信息数量那种提示效果
"""
from PIL import Image, ImageDraw, ImageFont
import sys
def add_num_to_img(file_path):
im = Image.open(file_path)
im_draw = ImageDraw.Draw(im)
font = ImageFont.truetype("arial.ttf", int(im.size[0]/5))
im_draw.text((int(im.size[0]-im.size[0]/10), 5), "4", (256, 0, 0), font=font)
del im_draw
im.save(file_path)
if __name__ == "__main__":
if len(sys.argv) <= 1:
print("Need at least 1 parameter. Try to execute 'python 0000.py $image_path'")
else:
for infile in sys.argv[1:]:
try:
add_num_to_img(infile)
print("Success!")
except IOError:
print("Can't open image!")
pass
|
# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
第 0000 题:将你的 QQ 头像(或者微博头像)右上角加上红色的数字,类似于微信未读信息数量那种提示效果
"""
from PIL import Image, ImageDraw, ImageFont
import sys
def add_num_to_img(file_path):
    """Overlay a red badge digit ("4") near the top-right corner of the image
    at *file_path* and write the annotated copy to ./result.png, leaving the
    original file untouched.

    Raises IOError/OSError if the image cannot be opened.
    """
    im = Image.open(file_path)
    im_draw = ImageDraw.Draw(im)
    # Scale the font to ~1/5 of the image width so the badge stays legible.
    font = ImageFont.truetype("arial.ttf", int(im.size[0]/5))
    # Fix: RGB components are 0-255; 256 is out of range, so use 255 for pure red.
    im_draw.text((int(im.size[0]-im.size[0]/10), 5), "4", (255, 0, 0), font=font)
    del im_draw
    # Save under a new name so the source image is not overwritten.
    im.save('./result.png')
if __name__ == "__main__":
    # Require at least one image path on the command line.
    if len(sys.argv) <= 1:
        print("Need at least 1 parameter. Try to execute 'python 0000.py $image_path'")
    else:
        # Process each supplied path independently; a failure on one file
        # does not stop the remaining files from being processed.
        for infile in sys.argv[1:]:
            try:
                add_num_to_img(infile)
                print("Success!")
            except IOError:
                print("Can't open image!")
                pass
|
Rename the result image to avoid overwriting the original image
|
Rename the result image to avoid overwriting the original image
|
Python
|
mit
|
starlightme/python,haiyangd/python-show-me-the-code-,EricSekyere/python,ZSeaPeng/python,wangjun/python,yangzilong1986/python,ZuoGuocai/python,luoxufeiyan/python,agogear/python-1,sravaniaitha/python,dominjune/python,ZuoGuocai/python,luoxufeiyan/python,xchaoinfo/python,Ph0enixxx/python,sravaniaitha/python,karnikamit/python,tzq668766/python,dominjune/python,keysona/python,Pritesh242/python,whix/python,Pritesh242/python,starlightme/python,DanielShangHai/python,Supersuuu/python,karnikamit/python,tzq668766/python,zhakui/python,whix/python,hooting/show-me-the-code-python,Ph0enixxx/python,Show-Me-the-Code/python,Supersuuu/python,ZuoGuocai/python,Friday21/python_show_me_the_code,luoxufeiyan/python,xiaoixa/python,Friday21/python_show_me_the_code,snailwalker/python,ionutcipriananescu/python,xchaoinfo/python,merfii/PythonExercises,DIYgod/python,ZSeaPeng/python,JiYouMCC/python,Yrthgze/prueba-sourcetree2,ZSeaPeng/python,DanielShangHai/python,snailwalker/python,karnikamit/python,Jaccorot/python,luoxufeiyan/python,hooting/show-me-the-code-python,12wang3/python,xchaoinfo/python,JiYouMCC/python,keysona/python,DanielShangHai/python,ZuoGuocai/python,JiYouMCC/python,snailwalker/python,Show-Me-the-Code/python,Show-Me-the-Code/python,hooting/show-me-the-code-python,Jaccorot/python,renzongxian/Show-Me-the-Code,zhenglaizhang/python,Supersuuu/python,ionutcipriananescu/python,dominjune/python,fairyzoro/python,xiaoixa/python,zhenglaizhang/python,JiYouMCC/python,ZuoGuocai/python,renzongxian/Show-Me-the-Code,Mark24Code/python,agogear/python-1,karnikamit/python,merfii/PythonExercises,Friday21/python_show_me_the_code,Yrthgze/prueba-sourcetree2,keysona/python,Friday21/python_show_me_the_code,Pritesh242/python,wangjun/python,fairyzoro/python,fairyzoro/python,snailwalker/python,ionutcipriananescu/python,JiYouMCC/python,agogear/python-1,Mark24Code/python,12wang3/python,xiaoixa/python,fairyzoro/python,EricSekyere/python,agogear/python-1,haiyangd/python-show-me-the-code-,renzongxian/Show-Me-the-Code,12w
ang3/python,merfii/PythonExercises,keysona/python,Yrthgze/prueba-sourcetree2,ionutcipriananescu/python,12wang3/python,tzq668766/python,zhakui/python,haiyangd/python-show-me-the-code-,EricSekyere/python,EricSekyere/python,YGIronMan/python,Mark24Code/python,zhenglaizhang/python,YGIronMan/python,llluiop/python-1,EricSekyere/python,Yrthgze/prueba-sourcetree2,tzq668766/python,zhakui/python,hooting/show-me-the-code-python,DIYgod/python,DIYgod/python,lz199144/python,lz199144/python,sravaniaitha/python,haiyangd/python-show-me-the-code-,Yrthgze/prueba-sourcetree2,llluiop/python-1,Mark24Code/python,starlightme/python,merfii/PythonExercises,xiaoixa/python,Show-Me-the-Code/python,whix/python,whix/python,Pritesh242/python,xchaoinfo/python,tzq668766/python,zhakui/python,Yrthgze/prueba-sourcetree2,Ph0enixxx/python,agogear/python-1,DanielShangHai/python,xiaoixa/python,luoxufeiyan/python,Ph0enixxx/python,dominjune/python,ZSeaPeng/python,DIYgod/python,yangzilong1986/python,Supersuuu/python,ionutcipriananescu/python,yangzilong1986/python,llluiop/python-1,wangjun/python,12wang3/python,merfii/PythonExercises,llluiop/python-1,starlightme/python,ZSeaPeng/python,Mark24Code/python,Jaccorot/python,llluiop/python-1,xchaoinfo/python,haiyangd/python-show-me-the-code-,yangzilong1986/python,yangzilong1986/python,fairyzoro/python,snailwalker/python,Show-Me-the-Code/python,keysona/python,sravaniaitha/python,wangjun/python,YGIronMan/python,renzongxian/Show-Me-the-Code,Jaccorot/python,zhenglaizhang/python,Friday21/python_show_me_the_code,zhakui/python,sravaniaitha/python,whix/python,dominjune/python,renzongxian/Show-Me-the-Code,zhenglaizhang/python,karnikamit/python,YGIronMan/python,DanielShangHai/python,Ph0enixxx/python,YGIronMan/python,starlightme/python,Pritesh242/python,lz199144/python,lz199144/python,Show-Me-the-Code/python,hooting/show-me-the-code-python,Jaccorot/python,lz199144/python,wangjun/python
|
# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
第 0000 题:将你的 QQ 头像(或者微博头像)右上角加上红色的数字,类似于微信未读信息数量那种提示效果
"""
from PIL import Image, ImageDraw, ImageFont
import sys
def add_num_to_img(file_path):
im = Image.open(file_path)
im_draw = ImageDraw.Draw(im)
font = ImageFont.truetype("arial.ttf", int(im.size[0]/5))
im_draw.text((int(im.size[0]-im.size[0]/10), 5), "4", (256, 0, 0), font=font)
del im_draw
im.save(file_path)
if __name__ == "__main__":
if len(sys.argv) <= 1:
print("Need at least 1 parameter. Try to execute 'python 0000.py $image_path'")
else:
for infile in sys.argv[1:]:
try:
add_num_to_img(infile)
print("Success!")
except IOError:
print("Can't open image!")
pass
Rename the result image to avoid overwriting the original image
|
# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
第 0000 题:将你的 QQ 头像(或者微博头像)右上角加上红色的数字,类似于微信未读信息数量那种提示效果
"""
from PIL import Image, ImageDraw, ImageFont
import sys
def add_num_to_img(file_path):
im = Image.open(file_path)
im_draw = ImageDraw.Draw(im)
font = ImageFont.truetype("arial.ttf", int(im.size[0]/5))
im_draw.text((int(im.size[0]-im.size[0]/10), 5), "4", (256, 0, 0), font=font)
del im_draw
im.save('./result.png')
if __name__ == "__main__":
if len(sys.argv) <= 1:
print("Need at least 1 parameter. Try to execute 'python 0000.py $image_path'")
else:
for infile in sys.argv[1:]:
try:
add_num_to_img(infile)
print("Success!")
except IOError:
print("Can't open image!")
pass
|
<commit_before># Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
第 0000 题:将你的 QQ 头像(或者微博头像)右上角加上红色的数字,类似于微信未读信息数量那种提示效果
"""
from PIL import Image, ImageDraw, ImageFont
import sys
def add_num_to_img(file_path):
im = Image.open(file_path)
im_draw = ImageDraw.Draw(im)
font = ImageFont.truetype("arial.ttf", int(im.size[0]/5))
im_draw.text((int(im.size[0]-im.size[0]/10), 5), "4", (256, 0, 0), font=font)
del im_draw
im.save(file_path)
if __name__ == "__main__":
if len(sys.argv) <= 1:
print("Need at least 1 parameter. Try to execute 'python 0000.py $image_path'")
else:
for infile in sys.argv[1:]:
try:
add_num_to_img(infile)
print("Success!")
except IOError:
print("Can't open image!")
pass
<commit_msg>Rename the result image to avoid overwriting the original image<commit_after>
|
# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
第 0000 题:将你的 QQ 头像(或者微博头像)右上角加上红色的数字,类似于微信未读信息数量那种提示效果
"""
from PIL import Image, ImageDraw, ImageFont
import sys
def add_num_to_img(file_path):
im = Image.open(file_path)
im_draw = ImageDraw.Draw(im)
font = ImageFont.truetype("arial.ttf", int(im.size[0]/5))
im_draw.text((int(im.size[0]-im.size[0]/10), 5), "4", (256, 0, 0), font=font)
del im_draw
im.save('./result.png')
if __name__ == "__main__":
if len(sys.argv) <= 1:
print("Need at least 1 parameter. Try to execute 'python 0000.py $image_path'")
else:
for infile in sys.argv[1:]:
try:
add_num_to_img(infile)
print("Success!")
except IOError:
print("Can't open image!")
pass
|
# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
第 0000 题:将你的 QQ 头像(或者微博头像)右上角加上红色的数字,类似于微信未读信息数量那种提示效果
"""
from PIL import Image, ImageDraw, ImageFont
import sys
def add_num_to_img(file_path):
im = Image.open(file_path)
im_draw = ImageDraw.Draw(im)
font = ImageFont.truetype("arial.ttf", int(im.size[0]/5))
im_draw.text((int(im.size[0]-im.size[0]/10), 5), "4", (256, 0, 0), font=font)
del im_draw
im.save(file_path)
if __name__ == "__main__":
if len(sys.argv) <= 1:
print("Need at least 1 parameter. Try to execute 'python 0000.py $image_path'")
else:
for infile in sys.argv[1:]:
try:
add_num_to_img(infile)
print("Success!")
except IOError:
print("Can't open image!")
pass
Rename the result image to avoid overwriting the original image# Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
第 0000 题:将你的 QQ 头像(或者微博头像)右上角加上红色的数字,类似于微信未读信息数量那种提示效果
"""
from PIL import Image, ImageDraw, ImageFont
import sys
def add_num_to_img(file_path):
im = Image.open(file_path)
im_draw = ImageDraw.Draw(im)
font = ImageFont.truetype("arial.ttf", int(im.size[0]/5))
im_draw.text((int(im.size[0]-im.size[0]/10), 5), "4", (256, 0, 0), font=font)
del im_draw
im.save('./result.png')
if __name__ == "__main__":
if len(sys.argv) <= 1:
print("Need at least 1 parameter. Try to execute 'python 0000.py $image_path'")
else:
for infile in sys.argv[1:]:
try:
add_num_to_img(infile)
print("Success!")
except IOError:
print("Can't open image!")
pass
|
<commit_before># Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
第 0000 题:将你的 QQ 头像(或者微博头像)右上角加上红色的数字,类似于微信未读信息数量那种提示效果
"""
from PIL import Image, ImageDraw, ImageFont
import sys
def add_num_to_img(file_path):
im = Image.open(file_path)
im_draw = ImageDraw.Draw(im)
font = ImageFont.truetype("arial.ttf", int(im.size[0]/5))
im_draw.text((int(im.size[0]-im.size[0]/10), 5), "4", (256, 0, 0), font=font)
del im_draw
im.save(file_path)
if __name__ == "__main__":
if len(sys.argv) <= 1:
print("Need at least 1 parameter. Try to execute 'python 0000.py $image_path'")
else:
for infile in sys.argv[1:]:
try:
add_num_to_img(infile)
print("Success!")
except IOError:
print("Can't open image!")
pass
<commit_msg>Rename the result image to avoid overwriting the original image<commit_after># Source:https://github.com/Show-Me-the-Code/show-me-the-code
# Author:renzongxian
# Date:2014-11-30
# Python 3.4
"""
第 0000 题:将你的 QQ 头像(或者微博头像)右上角加上红色的数字,类似于微信未读信息数量那种提示效果
"""
from PIL import Image, ImageDraw, ImageFont
import sys
def add_num_to_img(file_path):
im = Image.open(file_path)
im_draw = ImageDraw.Draw(im)
font = ImageFont.truetype("arial.ttf", int(im.size[0]/5))
im_draw.text((int(im.size[0]-im.size[0]/10), 5), "4", (256, 0, 0), font=font)
del im_draw
im.save('./result.png')
if __name__ == "__main__":
if len(sys.argv) <= 1:
print("Need at least 1 parameter. Try to execute 'python 0000.py $image_path'")
else:
for infile in sys.argv[1:]:
try:
add_num_to_img(infile)
print("Success!")
except IOError:
print("Can't open image!")
pass
|
dcea53c3ca7f64d7486283d717b92f2d09bea438
|
project/settings_prod.py
|
project/settings_prod.py
|
# Production settings: start from the shared defaults and override below.
from project.settings_common import *
# Debugging is disabled in production.
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
# Enable Django's site-wide caching middleware pair.
MIDDLEWARE_CLASSES += (
    'django.middleware.cache.UpdateCacheMiddleware',
    'django.middleware.cache.FetchFromCacheMiddleware',
)
# Static assets are collected locally and served from S3 via django-storages.
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
# AWS credentials are read from the environment, never hard-coded.
AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
# Database URL comes from the environment (Heroku-style), defaulting to local Postgres.
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
|
from project.settings_common import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
#import dj_database_url
#DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
|
Remove static file serve url
|
Remove static file serve url
|
Python
|
mit
|
AxisPhilly/lobbying.ph-django,AxisPhilly/lobbying.ph-django,AxisPhilly/lobbying.ph-django
|
from project.settings_common import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}Remove static file serve url
|
from project.settings_common import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
#import dj_database_url
#DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
|
<commit_before>from project.settings_common import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}<commit_msg>Remove static file serve url<commit_after>
|
from project.settings_common import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
#import dj_database_url
#DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
|
from project.settings_common import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}Remove static file serve urlfrom project.settings_common import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
#import dj_database_url
#DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
|
<commit_before>from project.settings_common import *
DEBUG = False
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
import dj_database_url
DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}<commit_msg>Remove static file serve url<commit_after>from project.settings_common import *
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# CACHE
from memcacheify import memcacheify
CACHES = memcacheify()
MIDDLEWARE_CLASSES += (
'django.middleware.cache.UpdateCacheMiddleware',
'django.middleware.cache.FetchFromCacheMiddleware',
)
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'static/')
STATICFILES_STORAGE = 'storages.backends.s3boto.S3BotoStorage'
STATIC_URL = 'https://s3.amazonaws.com/lobbyingph/'
ADMIN_MEDIA_PREFIX = STATIC_URL + 'admin/'
AWS_ACCESS_KEY_ID = os.environ['AWS_KEY_ID']
AWS_SECRET_ACCESS_KEY = os.environ['AWS_SECRET']
AWS_STORAGE_BUCKET_NAME = 'lobbyingph'
#import dj_database_url
#DATABASES = {'default': dj_database_url.config(default='postgres://localhost')}
|
b82d85114c13f945cc1976606d4d36d5b4b2885a
|
phonenumber_field/formfields.py
|
phonenumber_field/formfields.py
|
#-*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
    """Form field that validates its input as an international phone number."""
    default_error_messages = {
        'invalid': _(u'Enter a valid phone number.'),
    }
    default_validators = [validate_international_phonenumber]

    def to_python(self, value):
        """Convert *value* to a PhoneNumber, or '' for empty input.

        Fix: short-circuit empty values to '' so that non-null model fields
        with blank=True accept an empty form submission instead of failing
        phone-number validation.
        """
        if value in self.empty_values:
            return ''
        phone_number = to_python(value)
        if phone_number and not phone_number.is_valid():
            raise ValidationError(self.error_messages['invalid'])
        return phone_number
|
#-*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
if value in self.empty_values:
return ''
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
|
Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True.
|
Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True.
|
Python
|
mit
|
bramd/django-phonenumber-field,bramd/django-phonenumber-field
|
#-*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True.
|
#-*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
if value in self.empty_values:
return ''
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
|
<commit_before>#-*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
<commit_msg>Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True.<commit_after>
|
#-*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
if value in self.empty_values:
return ''
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
|
#-*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True.#-*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
if value in self.empty_values:
return ''
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
|
<commit_before>#-*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
<commit_msg>Fix formfield to return an empty string if an empty value is given. This allows empty input for not null model fields with blank=True.<commit_after>#-*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from django.forms.fields import CharField
from django.core.exceptions import ValidationError
from phonenumber_field.validators import validate_international_phonenumber
from phonenumber_field.phonenumber import to_python
class PhoneNumberField(CharField):
default_error_messages = {
'invalid': _(u'Enter a valid phone number.'),
}
default_validators = [validate_international_phonenumber]
def to_python(self, value):
if value in self.empty_values:
return ''
phone_number = to_python(value)
if phone_number and not phone_number.is_valid():
raise ValidationError(self.error_messages['invalid'])
return phone_number
|
2e6080f2d8c258700444129a9b989ca5db056a9d
|
elfi/examples/ma2.py
|
elfi/examples/ma2.py
|
import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
    """Simulate realizations of an MA(2) process.

    y_t = e_t + t1 * e_{t-1} + t2 * e_{t-2}, where the innovations e are
    either the supplied *latents* or fresh N(0, 1) draws of shape
    (n_sim, n_obs + 2). Returns an array with one realization per row.
    """
    if latents is None:
        rng = np.random.RandomState() if prng is None else prng
        latents = rng.randn(n_sim, n_obs + 2)  # i.i.d. innovations ~ N(0, 1)
    e = np.atleast_2d(latents)
    # Moving-average combination of the current and two previous innovations.
    return e[:, 2:] + t1 * e[:, 1:-1] + t2 * e[:, :-2]
def autocov(lag, x):
    """Autocorrelation (autocovariance normalized by the sample variance)
    at the given *lag*, assuming a weakly stationary univariate process
    with one realization per row of *x*.
    """
    mean = np.mean(x, axis=1, keepdims=True)
    # Raw autocovariance estimate at this lag.
    cov = np.mean(x[:, lag:] * x[:, :-lag], axis=1, keepdims=True) - mean ** 2
    # Normalize by the unbiased sample variance to obtain a correlation.
    return cov / np.var(x, axis=1, keepdims=True, ddof=1)
def distance(x, y):
    """Euclidean (L2) distance between *x* and *y* along the first axis."""
    diff = np.array(x) - np.array(y)
    return np.linalg.norm(diff, ord=2, axis=0)
|
import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Autocovariance assuming a (weak) univariate stationary process
with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
return C
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
|
Change autocorrelation to autocov. Variance information improves ABC results.
|
Change autocorrelation to autocov. Variance information improves ABC results.
|
Python
|
bsd-3-clause
|
lintusj1/elfi,HIIT/elfi,lintusj1/elfi,elfi-dev/elfi,elfi-dev/elfi
|
import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Normalized autocovariance (i.e. autocorrelation) assuming a (weak) stationary process.
Assuming univariate stochastic process with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
var = np.var(x, axis=1, keepdims=True, ddof=1)
# Autocovariance
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
# Normalize
tau = C / var
return tau
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
Change autocorrelation to autocov. Variance information improves ABC results.
|
import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Autocovariance assuming a (weak) univariate stationary process
with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
return C
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
|
<commit_before>import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Normalized autocovariance (i.e. autocorrelation) assuming a (weak) stationary process.
Assuming univariate stochastic process with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
var = np.var(x, axis=1, keepdims=True, ddof=1)
# Autocovariance
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
# Normalize
tau = C / var
return tau
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
<commit_msg>Change autocorrelation to autocov. Variance infromation improves ABC results.<commit_after>
|
import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Autocovariance assuming a (weak) univariate stationary process
with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
return C
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
|
import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Normalized autocovariance (i.e. autocorrelation) assuming a (weak) stationary process.
Assuming univariate stochastic process with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
var = np.var(x, axis=1, keepdims=True, ddof=1)
# Autocovariance
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
# Normalize
tau = C / var
return tau
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
Change autocorrelation to autocov. Variance infromation improves ABC results.import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Autocovariance assuming a (weak) univariate stationary process
with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
return C
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
|
<commit_before>import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Normalized autocovariance (i.e. autocorrelation) assuming a (weak) stationary process.
Assuming univariate stochastic process with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
var = np.var(x, axis=1, keepdims=True, ddof=1)
# Autocovariance
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
# Normalize
tau = C / var
return tau
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
<commit_msg>Change autocorrelation to autocov. Variance infromation improves ABC results.<commit_after>import numpy as np
"""Example implementation of the MA2 model
"""
# TODO: add tests
def MA2(n_obs, t1, t2, n_sim=1, prng=None, latents=None):
if latents is None:
if prng is None:
prng = np.random.RandomState()
latents = prng.randn(n_sim, n_obs+2) # i.i.d. sequence ~ N(0,1)
u = np.atleast_2d(latents)
y = u[:,2:] + t1 * u[:,1:-1] + t2 * u[:,:-2]
return y
def autocov(lag, x):
"""Autocovariance assuming a (weak) univariate stationary process
with realizations in rows
"""
mu = np.mean(x, axis=1, keepdims=True)
C = np.mean(x[:,lag:] * x[:,:-lag], axis=1, keepdims=True) - mu**2
return C
def distance(x, y):
d = np.linalg.norm( np.array(x) - np.array(y), ord=2, axis=0)
return d
|
3480330cb042b08ff85bfa988a130c7fa391a0ee
|
flask_restler/__init__.py
|
flask_restler/__init__.py
|
import logging
__license__ = "MIT"
__project__ = "Flask-Restler"
__version__ = "1.6.2"
logger = logging.getLogger('flask-restler')
logger.addHandler(logging.NullHandler())
class APIError(Exception):
"""Store API exception's information."""
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['error'] = self.message
rv['code'] = rv.get('code', self.status_code)
return rv
def route(rule=None, endpoint=None, **options):
"""Custom routes in resources."""
def decorator(f):
endpoint_ = f.__name__.lower()
f.route = (rule, endpoint_, options)
return f
if callable(rule):
rule, f = rule.__name__.lower(), rule
return decorator(f)
return decorator
from .api import Api, Resource # noqa
|
import logging
__license__ = "MIT"
__project__ = "Flask-Restler"
__version__ = "1.6.2"
logger = logging.getLogger('flask-restler')
logger.addHandler(logging.NullHandler())
class APIError(Exception):
"""Store API exception's information."""
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['error'] = self.message
rv['code'] = rv.get('code', self.status_code)
return rv
def route(rule=None, endpoint=None, **options):
"""Custom routes in resources."""
def decorator(f):
endpoint_ = endpoint or f.__name__.lower()
f.route = (rule, endpoint_, options)
return f
if callable(rule):
rule, f = rule.__name__.lower(), rule
return decorator(f)
return decorator
from .api import Api, Resource # noqa
|
Support endpoint name in route.
|
Support endpoint name in route.
|
Python
|
mit
|
klen/flask-restler,klen/flask-restler
|
import logging
__license__ = "MIT"
__project__ = "Flask-Restler"
__version__ = "1.6.2"
logger = logging.getLogger('flask-restler')
logger.addHandler(logging.NullHandler())
class APIError(Exception):
"""Store API exception's information."""
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['error'] = self.message
rv['code'] = rv.get('code', self.status_code)
return rv
def route(rule=None, endpoint=None, **options):
"""Custom routes in resources."""
def decorator(f):
endpoint_ = f.__name__.lower()
f.route = (rule, endpoint_, options)
return f
if callable(rule):
rule, f = rule.__name__.lower(), rule
return decorator(f)
return decorator
from .api import Api, Resource # noqa
Support endpoint name in route.
|
import logging
__license__ = "MIT"
__project__ = "Flask-Restler"
__version__ = "1.6.2"
logger = logging.getLogger('flask-restler')
logger.addHandler(logging.NullHandler())
class APIError(Exception):
"""Store API exception's information."""
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['error'] = self.message
rv['code'] = rv.get('code', self.status_code)
return rv
def route(rule=None, endpoint=None, **options):
"""Custom routes in resources."""
def decorator(f):
endpoint_ = endpoint or f.__name__.lower()
f.route = (rule, endpoint_, options)
return f
if callable(rule):
rule, f = rule.__name__.lower(), rule
return decorator(f)
return decorator
from .api import Api, Resource # noqa
|
<commit_before>import logging
__license__ = "MIT"
__project__ = "Flask-Restler"
__version__ = "1.6.2"
logger = logging.getLogger('flask-restler')
logger.addHandler(logging.NullHandler())
class APIError(Exception):
"""Store API exception's information."""
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['error'] = self.message
rv['code'] = rv.get('code', self.status_code)
return rv
def route(rule=None, endpoint=None, **options):
"""Custom routes in resources."""
def decorator(f):
endpoint_ = f.__name__.lower()
f.route = (rule, endpoint_, options)
return f
if callable(rule):
rule, f = rule.__name__.lower(), rule
return decorator(f)
return decorator
from .api import Api, Resource # noqa
<commit_msg>Support endpoint name in route.<commit_after>
|
import logging
__license__ = "MIT"
__project__ = "Flask-Restler"
__version__ = "1.6.2"
logger = logging.getLogger('flask-restler')
logger.addHandler(logging.NullHandler())
class APIError(Exception):
"""Store API exception's information."""
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['error'] = self.message
rv['code'] = rv.get('code', self.status_code)
return rv
def route(rule=None, endpoint=None, **options):
"""Custom routes in resources."""
def decorator(f):
endpoint_ = endpoint or f.__name__.lower()
f.route = (rule, endpoint_, options)
return f
if callable(rule):
rule, f = rule.__name__.lower(), rule
return decorator(f)
return decorator
from .api import Api, Resource # noqa
|
import logging
__license__ = "MIT"
__project__ = "Flask-Restler"
__version__ = "1.6.2"
logger = logging.getLogger('flask-restler')
logger.addHandler(logging.NullHandler())
class APIError(Exception):
"""Store API exception's information."""
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['error'] = self.message
rv['code'] = rv.get('code', self.status_code)
return rv
def route(rule=None, endpoint=None, **options):
"""Custom routes in resources."""
def decorator(f):
endpoint_ = f.__name__.lower()
f.route = (rule, endpoint_, options)
return f
if callable(rule):
rule, f = rule.__name__.lower(), rule
return decorator(f)
return decorator
from .api import Api, Resource # noqa
Support endpoint name in route.import logging
__license__ = "MIT"
__project__ = "Flask-Restler"
__version__ = "1.6.2"
logger = logging.getLogger('flask-restler')
logger.addHandler(logging.NullHandler())
class APIError(Exception):
"""Store API exception's information."""
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['error'] = self.message
rv['code'] = rv.get('code', self.status_code)
return rv
def route(rule=None, endpoint=None, **options):
"""Custom routes in resources."""
def decorator(f):
endpoint_ = endpoint or f.__name__.lower()
f.route = (rule, endpoint_, options)
return f
if callable(rule):
rule, f = rule.__name__.lower(), rule
return decorator(f)
return decorator
from .api import Api, Resource # noqa
|
<commit_before>import logging
__license__ = "MIT"
__project__ = "Flask-Restler"
__version__ = "1.6.2"
logger = logging.getLogger('flask-restler')
logger.addHandler(logging.NullHandler())
class APIError(Exception):
"""Store API exception's information."""
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['error'] = self.message
rv['code'] = rv.get('code', self.status_code)
return rv
def route(rule=None, endpoint=None, **options):
"""Custom routes in resources."""
def decorator(f):
endpoint_ = f.__name__.lower()
f.route = (rule, endpoint_, options)
return f
if callable(rule):
rule, f = rule.__name__.lower(), rule
return decorator(f)
return decorator
from .api import Api, Resource # noqa
<commit_msg>Support endpoint name in route.<commit_after>import logging
__license__ = "MIT"
__project__ = "Flask-Restler"
__version__ = "1.6.2"
logger = logging.getLogger('flask-restler')
logger.addHandler(logging.NullHandler())
class APIError(Exception):
"""Store API exception's information."""
status_code = 400
def __init__(self, message, status_code=None, payload=None):
Exception.__init__(self)
self.message = message
if status_code is not None:
self.status_code = status_code
self.payload = payload
def to_dict(self):
rv = dict(self.payload or ())
rv['error'] = self.message
rv['code'] = rv.get('code', self.status_code)
return rv
def route(rule=None, endpoint=None, **options):
"""Custom routes in resources."""
def decorator(f):
endpoint_ = endpoint or f.__name__.lower()
f.route = (rule, endpoint_, options)
return f
if callable(rule):
rule, f = rule.__name__.lower(), rule
return decorator(f)
return decorator
from .api import Api, Resource # noqa
|
373a172535db60e0b428500b1036decd97cf9504
|
bookstore_app/urls.py
|
bookstore_app/urls.py
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^register/', views.register, name='register'),
url(r'^login/', views.login, name='login')
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^register/$', views.register, name='register'),
url(r'^login/$', views.login, name='login'),
url(r'^books/([a-zA-Z0-9]+)/$', views.book, name='book')
]
|
Add book url route matcher
|
Add book url route matcher
|
Python
|
mit
|
siawyoung/bookstore,siawyoung/bookstore,siawyoung/bookstore
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^register/', views.register, name='register'),
url(r'^login/', views.login, name='login')
]Add book url route matcher
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^register/$', views.register, name='register'),
url(r'^login/$', views.login, name='login'),
url(r'^books/([a-zA-Z0-9]+)/$', views.book, name='book')
]
|
<commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^register/', views.register, name='register'),
url(r'^login/', views.login, name='login')
]<commit_msg>Add book url route matcher<commit_after>
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^register/$', views.register, name='register'),
url(r'^login/$', views.login, name='login'),
url(r'^books/([a-zA-Z0-9]+)/$', views.book, name='book')
]
|
from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^register/', views.register, name='register'),
url(r'^login/', views.login, name='login')
]Add book url route matcherfrom django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^register/$', views.register, name='register'),
url(r'^login/$', views.login, name='login'),
url(r'^books/([a-zA-Z0-9]+)/$', views.book, name='book')
]
|
<commit_before>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^register/', views.register, name='register'),
url(r'^login/', views.login, name='login')
]<commit_msg>Add book url route matcher<commit_after>from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^$', views.index, name='index'),
url(r'^register/$', views.register, name='register'),
url(r'^login/$', views.login, name='login'),
url(r'^books/([a-zA-Z0-9]+)/$', views.book, name='book')
]
|
5442d45689a567528753a0e705733f86eac37220
|
buckets/test/views.py
|
buckets/test/views.py
|
from django.core.files.storage import default_storage
from django.views.decorators.http import require_POST
from django.http import HttpResponse
@require_POST
def fake_s3_upload(request):
key = request.POST.get('key')
file = request.FILES.get('file')
default_storage.save(key, file.read())
return HttpResponse('', status=204)
|
from django.core.files.storage import default_storage
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.http import HttpResponse
@csrf_exempt
@require_POST
def fake_s3_upload(request):
key = request.POST.get('key')
file = request.FILES.get('file')
default_storage.save(key, file.read())
return HttpResponse('', status=204)
|
Set fake_s3_upload view to be CSRF exempt
|
Set fake_s3_upload view to be CSRF exempt
|
Python
|
agpl-3.0
|
Cadasta/django-buckets,Cadasta/django-buckets,Cadasta/django-buckets
|
from django.core.files.storage import default_storage
from django.views.decorators.http import require_POST
from django.http import HttpResponse
@require_POST
def fake_s3_upload(request):
key = request.POST.get('key')
file = request.FILES.get('file')
default_storage.save(key, file.read())
return HttpResponse('', status=204)
Set fake_s3_upload view to be CSRF exempt
|
from django.core.files.storage import default_storage
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.http import HttpResponse
@csrf_exempt
@require_POST
def fake_s3_upload(request):
key = request.POST.get('key')
file = request.FILES.get('file')
default_storage.save(key, file.read())
return HttpResponse('', status=204)
|
<commit_before>from django.core.files.storage import default_storage
from django.views.decorators.http import require_POST
from django.http import HttpResponse
@require_POST
def fake_s3_upload(request):
key = request.POST.get('key')
file = request.FILES.get('file')
default_storage.save(key, file.read())
return HttpResponse('', status=204)
<commit_msg>Set fake_s3_upload view to be CSRF exempt<commit_after>
|
from django.core.files.storage import default_storage
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.http import HttpResponse
@csrf_exempt
@require_POST
def fake_s3_upload(request):
key = request.POST.get('key')
file = request.FILES.get('file')
default_storage.save(key, file.read())
return HttpResponse('', status=204)
|
from django.core.files.storage import default_storage
from django.views.decorators.http import require_POST
from django.http import HttpResponse
@require_POST
def fake_s3_upload(request):
key = request.POST.get('key')
file = request.FILES.get('file')
default_storage.save(key, file.read())
return HttpResponse('', status=204)
Set fake_s3_upload view to be CSRF exemptfrom django.core.files.storage import default_storage
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.http import HttpResponse
@csrf_exempt
@require_POST
def fake_s3_upload(request):
key = request.POST.get('key')
file = request.FILES.get('file')
default_storage.save(key, file.read())
return HttpResponse('', status=204)
|
<commit_before>from django.core.files.storage import default_storage
from django.views.decorators.http import require_POST
from django.http import HttpResponse
@require_POST
def fake_s3_upload(request):
key = request.POST.get('key')
file = request.FILES.get('file')
default_storage.save(key, file.read())
return HttpResponse('', status=204)
<commit_msg>Set fake_s3_upload view to be CSRF exempt<commit_after>from django.core.files.storage import default_storage
from django.views.decorators.csrf import csrf_exempt
from django.views.decorators.http import require_POST
from django.http import HttpResponse
@csrf_exempt
@require_POST
def fake_s3_upload(request):
key = request.POST.get('key')
file = request.FILES.get('file')
default_storage.save(key, file.read())
return HttpResponse('', status=204)
|
7f05b622ab6cb1202d2d00ec1bcac2c5bbb326b7
|
dthm4kaiako/config/__init__.py
|
dthm4kaiako/config/__init__.py
|
"""Configuration for Django system."""
__version__ = "0.9.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.9.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
Increment version number to 0.9.3
|
Increment version number to 0.9.3
|
Python
|
mit
|
uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers,uccser/cs4teachers
|
"""Configuration for Django system."""
__version__ = "0.9.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
Increment version number to 0.9.3
|
"""Configuration for Django system."""
__version__ = "0.9.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
<commit_before>"""Configuration for Django system."""
__version__ = "0.9.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
<commit_msg>Increment version number to 0.9.3<commit_after>
|
"""Configuration for Django system."""
__version__ = "0.9.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
"""Configuration for Django system."""
__version__ = "0.9.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
Increment version number to 0.9.3"""Configuration for Django system."""
__version__ = "0.9.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
<commit_before>"""Configuration for Django system."""
__version__ = "0.9.2"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
<commit_msg>Increment version number to 0.9.3<commit_after>"""Configuration for Django system."""
__version__ = "0.9.3"
__version_info__ = tuple(
[
int(num) if num.isdigit() else num
for num in __version__.replace("-", ".", 1).split(".")
]
)
|
c904654c481678675aabb655c082a3986efbbf27
|
chrome/test/nacl_test_injection/buildbot_nacl_integration.py
|
chrome/test/nacl_test_injection/buildbot_nacl_integration.py
|
#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
def Main():
pwd = os.environ.get('PWD', '')
# TODO(ncbray): figure out why this is failing on windows and enable.
if (sys.platform in ['win32', 'cygwin'] and
'xp-nacl-chrome' not in pwd and 'win64-nacl-chrome' not in pwd): return
# TODO(ncbray): figure out why this is failing on mac and re-enable.
if (sys.platform == 'darwin' and
'mac-nacl-chrome' not in pwd): return
# TODO(ncbray): figure out why this is failing on some linux trybots.
if (sys.platform in ['linux', 'linux2'] and
'hardy64-nacl-chrome' not in pwd): return
script_dir = os.path.dirname(os.path.abspath(__file__))
test_dir = os.path.dirname(script_dir)
chrome_dir = os.path.dirname(test_dir)
src_dir = os.path.dirname(chrome_dir)
nacl_integration_script = os.path.join(
src_dir, 'native_client/build/buildbot_chrome_nacl_stage.py')
cmd = [sys.executable, nacl_integration_script] + sys.argv[1:]
print cmd
subprocess.check_call(cmd)
if __name__ == '__main__':
Main()
|
#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
def Main():
pwd = os.environ.get('PWD', '')
# TODO(ncbray): figure out why this is failing on windows and enable.
if sys.platform in ['win32', 'cygwin'] and 'nacl-chrome' not in pwd: return
# TODO(ncbray): figure out why this is failing on mac and re-enable.
if sys.platform == 'darwin' and 'nacl-chrome' not in pwd: return
# TODO(ncbray): figure out why this is failing on some linux trybots.
if sys.platform in ['linux', 'linux2'] and 'nacl-chrome' not in pwd: return
script_dir = os.path.dirname(os.path.abspath(__file__))
test_dir = os.path.dirname(script_dir)
chrome_dir = os.path.dirname(test_dir)
src_dir = os.path.dirname(chrome_dir)
nacl_integration_script = os.path.join(
src_dir, 'native_client/build/buildbot_chrome_nacl_stage.py')
cmd = [sys.executable, nacl_integration_script] + sys.argv[1:]
print cmd
subprocess.check_call(cmd)
if __name__ == '__main__':
Main()
|
Whitelist nacl_integration tests to run on new nacl integration bot.
|
Whitelist nacl_integration tests to run on new nacl integration bot.
BUG= none
TEST= none
Review URL: http://codereview.chromium.org/7050026
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@86021 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
adobe/chromium,yitian134/chromium,yitian134/chromium,gavinp/chromium,ropik/chromium,adobe/chromium,adobe/chromium,ropik/chromium,yitian134/chromium,ropik/chromium,yitian134/chromium,ropik/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,adobe/chromium,adobe/chromium,adobe/chromium,adobe/chromium,gavinp/chromium,ropik/chromium,adobe/chromium,adobe/chromium,gavinp/chromium,yitian134/chromium,adobe/chromium,ropik/chromium,yitian134/chromium,gavinp/chromium,gavinp/chromium,gavinp/chromium,yitian134/chromium,ropik/chromium,gavinp/chromium,yitian134/chromium,yitian134/chromium,gavinp/chromium,ropik/chromium,gavinp/chromium,ropik/chromium
|
#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
def Main():
pwd = os.environ.get('PWD', '')
# TODO(ncbray): figure out why this is failing on windows and enable.
if (sys.platform in ['win32', 'cygwin'] and
'xp-nacl-chrome' not in pwd and 'win64-nacl-chrome' not in pwd): return
# TODO(ncbray): figure out why this is failing on mac and re-enable.
if (sys.platform == 'darwin' and
'mac-nacl-chrome' not in pwd): return
# TODO(ncbray): figure out why this is failing on some linux trybots.
if (sys.platform in ['linux', 'linux2'] and
'hardy64-nacl-chrome' not in pwd): return
script_dir = os.path.dirname(os.path.abspath(__file__))
test_dir = os.path.dirname(script_dir)
chrome_dir = os.path.dirname(test_dir)
src_dir = os.path.dirname(chrome_dir)
nacl_integration_script = os.path.join(
src_dir, 'native_client/build/buildbot_chrome_nacl_stage.py')
cmd = [sys.executable, nacl_integration_script] + sys.argv[1:]
print cmd
subprocess.check_call(cmd)
if __name__ == '__main__':
Main()
Whitelist nacl_integration tests to run on new nacl integration bot.
BUG= none
TEST= none
Review URL: http://codereview.chromium.org/7050026
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@86021 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
def Main():
pwd = os.environ.get('PWD', '')
# TODO(ncbray): figure out why this is failing on windows and enable.
if sys.platform in ['win32', 'cygwin'] and 'nacl-chrome' not in pwd: return
# TODO(ncbray): figure out why this is failing on mac and re-enable.
if sys.platform == 'darwin' and 'nacl-chrome' not in pwd: return
# TODO(ncbray): figure out why this is failing on some linux trybots.
if sys.platform in ['linux', 'linux2'] and 'nacl-chrome' not in pwd: return
script_dir = os.path.dirname(os.path.abspath(__file__))
test_dir = os.path.dirname(script_dir)
chrome_dir = os.path.dirname(test_dir)
src_dir = os.path.dirname(chrome_dir)
nacl_integration_script = os.path.join(
src_dir, 'native_client/build/buildbot_chrome_nacl_stage.py')
cmd = [sys.executable, nacl_integration_script] + sys.argv[1:]
print cmd
subprocess.check_call(cmd)
if __name__ == '__main__':
Main()
|
<commit_before>#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
def Main():
pwd = os.environ.get('PWD', '')
# TODO(ncbray): figure out why this is failing on windows and enable.
if (sys.platform in ['win32', 'cygwin'] and
'xp-nacl-chrome' not in pwd and 'win64-nacl-chrome' not in pwd): return
# TODO(ncbray): figure out why this is failing on mac and re-enable.
if (sys.platform == 'darwin' and
'mac-nacl-chrome' not in pwd): return
# TODO(ncbray): figure out why this is failing on some linux trybots.
if (sys.platform in ['linux', 'linux2'] and
'hardy64-nacl-chrome' not in pwd): return
script_dir = os.path.dirname(os.path.abspath(__file__))
test_dir = os.path.dirname(script_dir)
chrome_dir = os.path.dirname(test_dir)
src_dir = os.path.dirname(chrome_dir)
nacl_integration_script = os.path.join(
src_dir, 'native_client/build/buildbot_chrome_nacl_stage.py')
cmd = [sys.executable, nacl_integration_script] + sys.argv[1:]
print cmd
subprocess.check_call(cmd)
if __name__ == '__main__':
Main()
<commit_msg>Whitelist nacl_integration tests to run on new nacl integration bot.
BUG= none
TEST= none
Review URL: http://codereview.chromium.org/7050026
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@86021 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
def Main():
pwd = os.environ.get('PWD', '')
# TODO(ncbray): figure out why this is failing on windows and enable.
if sys.platform in ['win32', 'cygwin'] and 'nacl-chrome' not in pwd: return
# TODO(ncbray): figure out why this is failing on mac and re-enable.
if sys.platform == 'darwin' and 'nacl-chrome' not in pwd: return
# TODO(ncbray): figure out why this is failing on some linux trybots.
if sys.platform in ['linux', 'linux2'] and 'nacl-chrome' not in pwd: return
script_dir = os.path.dirname(os.path.abspath(__file__))
test_dir = os.path.dirname(script_dir)
chrome_dir = os.path.dirname(test_dir)
src_dir = os.path.dirname(chrome_dir)
nacl_integration_script = os.path.join(
src_dir, 'native_client/build/buildbot_chrome_nacl_stage.py')
cmd = [sys.executable, nacl_integration_script] + sys.argv[1:]
print cmd
subprocess.check_call(cmd)
if __name__ == '__main__':
Main()
|
#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
def Main():
pwd = os.environ.get('PWD', '')
# TODO(ncbray): figure out why this is failing on windows and enable.
if (sys.platform in ['win32', 'cygwin'] and
'xp-nacl-chrome' not in pwd and 'win64-nacl-chrome' not in pwd): return
# TODO(ncbray): figure out why this is failing on mac and re-enable.
if (sys.platform == 'darwin' and
'mac-nacl-chrome' not in pwd): return
# TODO(ncbray): figure out why this is failing on some linux trybots.
if (sys.platform in ['linux', 'linux2'] and
'hardy64-nacl-chrome' not in pwd): return
script_dir = os.path.dirname(os.path.abspath(__file__))
test_dir = os.path.dirname(script_dir)
chrome_dir = os.path.dirname(test_dir)
src_dir = os.path.dirname(chrome_dir)
nacl_integration_script = os.path.join(
src_dir, 'native_client/build/buildbot_chrome_nacl_stage.py')
cmd = [sys.executable, nacl_integration_script] + sys.argv[1:]
print cmd
subprocess.check_call(cmd)
if __name__ == '__main__':
Main()
Whitelist nacl_integration tests to run on new nacl integration bot.
BUG= none
TEST= none
Review URL: http://codereview.chromium.org/7050026
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@86021 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
def Main():
pwd = os.environ.get('PWD', '')
# TODO(ncbray): figure out why this is failing on windows and enable.
if sys.platform in ['win32', 'cygwin'] and 'nacl-chrome' not in pwd: return
# TODO(ncbray): figure out why this is failing on mac and re-enable.
if sys.platform == 'darwin' and 'nacl-chrome' not in pwd: return
# TODO(ncbray): figure out why this is failing on some linux trybots.
if sys.platform in ['linux', 'linux2'] and 'nacl-chrome' not in pwd: return
script_dir = os.path.dirname(os.path.abspath(__file__))
test_dir = os.path.dirname(script_dir)
chrome_dir = os.path.dirname(test_dir)
src_dir = os.path.dirname(chrome_dir)
nacl_integration_script = os.path.join(
src_dir, 'native_client/build/buildbot_chrome_nacl_stage.py')
cmd = [sys.executable, nacl_integration_script] + sys.argv[1:]
print cmd
subprocess.check_call(cmd)
if __name__ == '__main__':
Main()
|
<commit_before>#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
def Main():
pwd = os.environ.get('PWD', '')
# TODO(ncbray): figure out why this is failing on windows and enable.
if (sys.platform in ['win32', 'cygwin'] and
'xp-nacl-chrome' not in pwd and 'win64-nacl-chrome' not in pwd): return
# TODO(ncbray): figure out why this is failing on mac and re-enable.
if (sys.platform == 'darwin' and
'mac-nacl-chrome' not in pwd): return
# TODO(ncbray): figure out why this is failing on some linux trybots.
if (sys.platform in ['linux', 'linux2'] and
'hardy64-nacl-chrome' not in pwd): return
script_dir = os.path.dirname(os.path.abspath(__file__))
test_dir = os.path.dirname(script_dir)
chrome_dir = os.path.dirname(test_dir)
src_dir = os.path.dirname(chrome_dir)
nacl_integration_script = os.path.join(
src_dir, 'native_client/build/buildbot_chrome_nacl_stage.py')
cmd = [sys.executable, nacl_integration_script] + sys.argv[1:]
print cmd
subprocess.check_call(cmd)
if __name__ == '__main__':
Main()
<commit_msg>Whitelist nacl_integration tests to run on new nacl integration bot.
BUG= none
TEST= none
Review URL: http://codereview.chromium.org/7050026
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@86021 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os
import subprocess
import sys
def Main():
pwd = os.environ.get('PWD', '')
# TODO(ncbray): figure out why this is failing on windows and enable.
if sys.platform in ['win32', 'cygwin'] and 'nacl-chrome' not in pwd: return
# TODO(ncbray): figure out why this is failing on mac and re-enable.
if sys.platform == 'darwin' and 'nacl-chrome' not in pwd: return
# TODO(ncbray): figure out why this is failing on some linux trybots.
if sys.platform in ['linux', 'linux2'] and 'nacl-chrome' not in pwd: return
script_dir = os.path.dirname(os.path.abspath(__file__))
test_dir = os.path.dirname(script_dir)
chrome_dir = os.path.dirname(test_dir)
src_dir = os.path.dirname(chrome_dir)
nacl_integration_script = os.path.join(
src_dir, 'native_client/build/buildbot_chrome_nacl_stage.py')
cmd = [sys.executable, nacl_integration_script] + sys.argv[1:]
print cmd
subprocess.check_call(cmd)
if __name__ == '__main__':
Main()
|
0069f3085167144e2d077f84c8061aff35305cec
|
docker_registry/wsgi.py
|
docker_registry/wsgi.py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# WSGI entry point for docker-registry: run directly for a debug server,
# or import this module (uwsgi/gunicorn) to get `application`.
import logging
import os
# NOTE(review): relative `from . import app` — the associated commit later
# changes this to `from .app import app` after a namespace move; confirm
# which layout this snapshot belongs to.
from . import app
if __name__ == '__main__':
    # Bind to PORT if defined, otherwise default to 5000.
    port = int(os.environ.get('PORT_WWW', 5000))
    app.debug = True
    app.run(host='0.0.0.0', port=port)
    # Or you can run:
    # gunicorn --access-logfile - --log-level debug --debug -b 0.0.0.0:5000 \
    # -w 1 wsgi:application
else:
    # For uwsgi
    # Mirror app logs to stderr at INFO with timestamps so the container
    # runner captures them.
    app.logger.setLevel(logging.INFO)
    stderr_logger = logging.StreamHandler()
    stderr_logger.setLevel(logging.INFO)
    stderr_logger.setFormatter(
        logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
    app.logger.addHandler(stderr_logger)
# Name expected by WSGI servers.
application = app
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
from .app import app
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT_WWW', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
# Or you can run:
# gunicorn --access-logfile - --log-level debug --debug -b 0.0.0.0:5000 \
# -w 1 wsgi:application
else:
# For uwsgi
app.logger.setLevel(logging.INFO)
stderr_logger = logging.StreamHandler()
stderr_logger.setLevel(logging.INFO)
stderr_logger.setFormatter(
logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
app.logger.addHandler(stderr_logger)
application = app
|
Fix import following namespace movement
|
Fix import following namespace movement
Docker-DCO-1.1-Signed-off-by: Mangled Deutz <olivier@webitup.fr> (github: dmp42)
|
Python
|
apache-2.0
|
Carrotzpc/docker-registry,dhiltgen/docker-registry,wakermahmud/docker-registry,mdshuai/docker-registry,ptisserand/docker-registry,dine1987/Docker,depay/docker-registry,liggitt/docker-registry,tangkun75/docker-registry,deis/docker-registry,ptisserand/docker-registry,liggitt/docker-registry,kireal/docker-registry,viljaste/docker-registry-1,stormltf/docker-registry,depay/docker-registry,HubSpot/docker-registry,andrew-plunk/docker-registry,hex108/docker-registry,docker/docker-registry,nunogt/docker-registry,ken-saka/docker-registry,kireal/docker-registry,csrwng/docker-registry,csrwng/docker-registry,mdshuai/docker-registry,dalvikchen/docker-registry,docker/docker-registry,deis/docker-registry,liggitt/docker-registry,nunogt/docker-registry,mboersma/docker-registry,yuriyf/docker-registry,fabianofranz/docker-registry,whuwxl/docker-registry,mboersma/docker-registry,fabianofranz/docker-registry,dalvikchen/docker-registry,ken-saka/docker-registry,whuwxl/docker-registry,atyenoria/docker-registry,deis/docker-registry,tangkun75/docker-registry,wakermahmud/docker-registry,andrew-plunk/docker-registry,whuwxl/docker-registry,nunogt/docker-registry,depay/docker-registry,mdshuai/docker-registry,Haitianisgood/docker-registry,Haitianisgood/docker-registry,dhiltgen/docker-registry,yuriyf/docker-registry,pombredanne/docker-registry,stormltf/docker-registry,csrwng/docker-registry,HubSpot/docker-registry,dine1987/Docker,cnh/docker-registry,hex108/docker-registry,fabianofranz/docker-registry,cnh/docker-registry,Carrotzpc/docker-registry,atyenoria/docker-registry,Haitianisgood/docker-registry,OnePaaS/docker-registry,mboersma/docker-registry,Carrotzpc/docker-registry,dalvikchen/docker-registry,kireal/docker-registry,OnePaaS/docker-registry,ptisserand/docker-registry,wakermahmud/docker-registry,HubSpot/docker-registry,dine1987/Docker,docker/docker-registry,dedalusdev/docker-registry,OnePaaS/docker-registry,viljaste/docker-registry-1,viljaste/docker-registry-1,atyenoria/docker-registry,dedalusd
ev/docker-registry,ken-saka/docker-registry,hex108/docker-registry,dhiltgen/docker-registry,stormltf/docker-registry,yuriyf/docker-registry,dedalusdev/docker-registry,pombredanne/docker-registry,pombredanne/docker-registry,cnh/docker-registry,andrew-plunk/docker-registry,tangkun75/docker-registry
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
from . import app
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT_WWW', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
# Or you can run:
# gunicorn --access-logfile - --log-level debug --debug -b 0.0.0.0:5000 \
# -w 1 wsgi:application
else:
# For uwsgi
app.logger.setLevel(logging.INFO)
stderr_logger = logging.StreamHandler()
stderr_logger.setLevel(logging.INFO)
stderr_logger.setFormatter(
logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
app.logger.addHandler(stderr_logger)
application = app
Fix import following namespace movement
Docker-DCO-1.1-Signed-off-by: Mangled Deutz <olivier@webitup.fr> (github: dmp42)
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
from .app import app
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT_WWW', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
# Or you can run:
# gunicorn --access-logfile - --log-level debug --debug -b 0.0.0.0:5000 \
# -w 1 wsgi:application
else:
# For uwsgi
app.logger.setLevel(logging.INFO)
stderr_logger = logging.StreamHandler()
stderr_logger.setLevel(logging.INFO)
stderr_logger.setFormatter(
logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
app.logger.addHandler(stderr_logger)
application = app
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
from . import app
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT_WWW', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
# Or you can run:
# gunicorn --access-logfile - --log-level debug --debug -b 0.0.0.0:5000 \
# -w 1 wsgi:application
else:
# For uwsgi
app.logger.setLevel(logging.INFO)
stderr_logger = logging.StreamHandler()
stderr_logger.setLevel(logging.INFO)
stderr_logger.setFormatter(
logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
app.logger.addHandler(stderr_logger)
application = app
<commit_msg>Fix import following namespace movement
Docker-DCO-1.1-Signed-off-by: Mangled Deutz <olivier@webitup.fr> (github: dmp42)<commit_after>
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
from .app import app
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT_WWW', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
# Or you can run:
# gunicorn --access-logfile - --log-level debug --debug -b 0.0.0.0:5000 \
# -w 1 wsgi:application
else:
# For uwsgi
app.logger.setLevel(logging.INFO)
stderr_logger = logging.StreamHandler()
stderr_logger.setLevel(logging.INFO)
stderr_logger.setFormatter(
logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
app.logger.addHandler(stderr_logger)
application = app
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
from . import app
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT_WWW', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
# Or you can run:
# gunicorn --access-logfile - --log-level debug --debug -b 0.0.0.0:5000 \
# -w 1 wsgi:application
else:
# For uwsgi
app.logger.setLevel(logging.INFO)
stderr_logger = logging.StreamHandler()
stderr_logger.setLevel(logging.INFO)
stderr_logger.setFormatter(
logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
app.logger.addHandler(stderr_logger)
application = app
Fix import following namespace movement
Docker-DCO-1.1-Signed-off-by: Mangled Deutz <olivier@webitup.fr> (github: dmp42)#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
from .app import app
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT_WWW', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
# Or you can run:
# gunicorn --access-logfile - --log-level debug --debug -b 0.0.0.0:5000 \
# -w 1 wsgi:application
else:
# For uwsgi
app.logger.setLevel(logging.INFO)
stderr_logger = logging.StreamHandler()
stderr_logger.setLevel(logging.INFO)
stderr_logger.setFormatter(
logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
app.logger.addHandler(stderr_logger)
application = app
|
<commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
from . import app
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT_WWW', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
# Or you can run:
# gunicorn --access-logfile - --log-level debug --debug -b 0.0.0.0:5000 \
# -w 1 wsgi:application
else:
# For uwsgi
app.logger.setLevel(logging.INFO)
stderr_logger = logging.StreamHandler()
stderr_logger.setLevel(logging.INFO)
stderr_logger.setFormatter(
logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
app.logger.addHandler(stderr_logger)
application = app
<commit_msg>Fix import following namespace movement
Docker-DCO-1.1-Signed-off-by: Mangled Deutz <olivier@webitup.fr> (github: dmp42)<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import logging
import os
from .app import app
if __name__ == '__main__':
# Bind to PORT if defined, otherwise default to 5000.
port = int(os.environ.get('PORT_WWW', 5000))
app.debug = True
app.run(host='0.0.0.0', port=port)
# Or you can run:
# gunicorn --access-logfile - --log-level debug --debug -b 0.0.0.0:5000 \
# -w 1 wsgi:application
else:
# For uwsgi
app.logger.setLevel(logging.INFO)
stderr_logger = logging.StreamHandler()
stderr_logger.setLevel(logging.INFO)
stderr_logger.setFormatter(
logging.Formatter('%(asctime)s %(levelname)s: %(message)s'))
app.logger.addHandler(stderr_logger)
application = app
|
bcc44a366ab7afbdc448e038e7804cd6719590cc
|
NeuralNet/activations.py
|
NeuralNet/activations.py
|
import numpy as np
class Activator:
    """Static activation functions (and their derivatives) for neural nets.

    Each method maps a numpy array to its activation when ``deriv=False``
    or to the derivative when ``deriv=True``.
    """

    @staticmethod
    def sigmoid(signal, deriv=False):
        """Logistic sigmoid.

        NOTE: with ``deriv=True``, *signal* is assumed to already hold
        sigmoid outputs (the usual backprop convention), so the derivative
        is simply s * (1 - s).
        """
        if deriv:
            return np.multiply(signal, 1 - signal)
        activation = 1 / (1 + np.exp(-signal))
        return activation

    @staticmethod
    def tanh(signal, deriv=False):
        """Hyperbolic tangent; derivative is 1 - tanh(x)**2."""
        if deriv:
            return 1 - np.power(np.tanh(signal), 2)
        activation = np.tanh(signal)
        return activation

    @staticmethod
    def elu(signal, deriv=False, alpha=1.0):
        """Exponential linear unit: x for x >= 0, alpha*(exp(x)-1) otherwise."""
        activation = (signal >= 0).astype(int) * signal + \
            (signal < 0).astype(int) * (alpha * (np.exp(signal) - 1))
        if deriv:
            # d/dx ELU = 1 for x >= 0; alpha*exp(x) = ELU(x) + alpha for x < 0.
            # Reuse the activation computed above instead of recursing
            # (the previous version called Activator.elu(signal) again).
            derivation = (signal >= 0).astype(int) + \
                (signal < 0) * (activation + alpha)
            return derivation
        return activation

    @staticmethod
    def softmax(signal, deriv=False):
        """Row-wise softmax over a 2-D array (each row is one sample).

        With ``deriv=True`` returns the column sums of the per-sample
        softmax Jacobian J[i, j] = s_i * (delta_ij - s_j), replacing the
        previous incorrect exp(s)*(1-exp(s)) formula.
        """
        # Shift by the max for numerical stability; softmax is shift-invariant.
        signal = signal - np.max(signal)
        activation = np.exp(signal) / np.array([np.sum(np.exp(signal), axis=1)]).T
        if deriv:
            # Off-diagonal entries: -s_i * s_j (per sample, via broadcasting).
            jacobian = - activation[..., None] * activation[:, None, :]
            # Diagonal entries: s_i * (1 - s_i).
            iy, ix = np.diag_indices_from(jacobian[0])
            jacobian[:, iy, ix] = activation * (1 - activation)
            return jacobian.sum(axis=1)
        return activation
|
import numpy as np
class Activator:
    """Activation functions for neural networks, exposed as static methods.

    Pass ``deriv=True`` to obtain the derivative instead of the activation.
    """

    @staticmethod
    def sigmoid(signal, deriv=False):
        # deriv=True assumes *signal* already contains sigmoid outputs.
        if deriv:
            return np.multiply(signal, 1 - signal)
        return 1 / (1 + np.exp(-signal))

    @staticmethod
    def tanh(signal, deriv=False):
        # Derivative of tanh(x) is 1 - tanh(x)^2.
        if deriv:
            return 1 - np.power(np.tanh(signal), 2)
        return np.tanh(signal)

    @staticmethod
    def elu(signal, deriv=False, alpha=1.0):
        # ELU: identity on the non-negative part, alpha*(exp(x)-1) below zero.
        nonneg = (signal >= 0).astype(int)
        value = nonneg * signal + \
            (signal < 0).astype(int) * (alpha * (np.exp(signal) - 1))
        if deriv:
            # Slope is 1 for x >= 0 and ELU(x) + alpha for x < 0.
            return nonneg + (signal < 0) * (value + alpha)
        return value

    @staticmethod
    def softmax(signal, deriv=False):
        # Subtract the global max first so exp() cannot overflow.
        shifted = signal - np.max(signal)
        probs = np.exp(shifted) / np.array([np.sum(np.exp(shifted), axis=1)]).T
        if not deriv:
            return probs
        # Per-sample Jacobian: -s_i*s_j off the diagonal, s_i*(1-s_i) on it.
        jac = - probs[..., None] * probs[:, None, :]
        rows, cols = np.diag_indices_from(jac[0])
        jac[:, rows, cols] = probs * (1 - probs)
        return jac.sum(axis=1)
|
Implement correct derivation of SoftMax
|
Implement correct derivation of SoftMax
|
Python
|
mit
|
ZahidDev/NeuralNet
|
import numpy as np
class Activator:
@staticmethod
def sigmoid(signal, deriv=False):
if deriv:
return np.multiply(signal, 1 - signal)
activation = 1 / (1 + np.exp(-signal))
return activation
@staticmethod
def tanh(signal, deriv=False):
if deriv:
return 1 - np.power(np.tanh(signal), 2)
activation = np.tanh(signal)
return activation
@staticmethod
def elu(signal, deriv=False, alpha=1.0):
activation = (signal >= 0).astype(int) * signal + \
(signal < 0).astype(int) * (alpha * (np.exp(signal) - 1))
if deriv:
activation = (signal >= 0).astype(int) + \
(signal < 0) * (Activator.elu(signal) + alpha)
return activation
@staticmethod
def softmax(signal, deriv=False):
signal = signal - np.max(signal)
# Implement correct derivation for the softmax normalization
if deriv:
return np.exp(signal) * (1 - np.exp(signal))
activation = np.exp(signal) / np.array([np.sum(np.exp(signal), axis=1)]).T
return activation
Implement correct derivation of SoftMax
|
import numpy as np
class Activator:
@staticmethod
def sigmoid(signal, deriv=False):
if deriv:
return np.multiply(signal, 1 - signal)
activation = 1 / (1 + np.exp(-signal))
return activation
@staticmethod
def tanh(signal, deriv=False):
if deriv:
return 1 - np.power(np.tanh(signal), 2)
activation = np.tanh(signal)
return activation
@staticmethod
def elu(signal, deriv=False, alpha=1.0):
activation = (signal >= 0).astype(int) * signal + \
(signal < 0).astype(int) * (alpha * (np.exp(signal) - 1))
if deriv:
derivation = (signal >= 0).astype(int) + \
(signal < 0) * (activation + alpha)
return derivation
return activation
@staticmethod
def softmax(signal, deriv=False):
signal = signal - np.max(signal)
activation = np.exp(signal) / np.array([np.sum(np.exp(signal), axis=1)]).T
if deriv:
jacobian = - activation[..., None] * activation[:, None, :]
iy, ix = np.diag_indices_from(jacobian[0])
jacobian[:, iy, ix] = activation * (1 - activation)
return jacobian.sum(axis=1)
return activation
|
<commit_before>import numpy as np
class Activator:
@staticmethod
def sigmoid(signal, deriv=False):
if deriv:
return np.multiply(signal, 1 - signal)
activation = 1 / (1 + np.exp(-signal))
return activation
@staticmethod
def tanh(signal, deriv=False):
if deriv:
return 1 - np.power(np.tanh(signal), 2)
activation = np.tanh(signal)
return activation
@staticmethod
def elu(signal, deriv=False, alpha=1.0):
activation = (signal >= 0).astype(int) * signal + \
(signal < 0).astype(int) * (alpha * (np.exp(signal) - 1))
if deriv:
activation = (signal >= 0).astype(int) + \
(signal < 0) * (Activator.elu(signal) + alpha)
return activation
@staticmethod
def softmax(signal, deriv=False):
signal = signal - np.max(signal)
# Implement correct derivation for the softmax normalization
if deriv:
return np.exp(signal) * (1 - np.exp(signal))
activation = np.exp(signal) / np.array([np.sum(np.exp(signal), axis=1)]).T
return activation
<commit_msg>Implement correct derivation of SoftMax<commit_after>
|
import numpy as np
class Activator:
@staticmethod
def sigmoid(signal, deriv=False):
if deriv:
return np.multiply(signal, 1 - signal)
activation = 1 / (1 + np.exp(-signal))
return activation
@staticmethod
def tanh(signal, deriv=False):
if deriv:
return 1 - np.power(np.tanh(signal), 2)
activation = np.tanh(signal)
return activation
@staticmethod
def elu(signal, deriv=False, alpha=1.0):
activation = (signal >= 0).astype(int) * signal + \
(signal < 0).astype(int) * (alpha * (np.exp(signal) - 1))
if deriv:
derivation = (signal >= 0).astype(int) + \
(signal < 0) * (activation + alpha)
return derivation
return activation
@staticmethod
def softmax(signal, deriv=False):
signal = signal - np.max(signal)
activation = np.exp(signal) / np.array([np.sum(np.exp(signal), axis=1)]).T
if deriv:
jacobian = - activation[..., None] * activation[:, None, :]
iy, ix = np.diag_indices_from(jacobian[0])
jacobian[:, iy, ix] = activation * (1 - activation)
return jacobian.sum(axis=1)
return activation
|
import numpy as np
class Activator:
@staticmethod
def sigmoid(signal, deriv=False):
if deriv:
return np.multiply(signal, 1 - signal)
activation = 1 / (1 + np.exp(-signal))
return activation
@staticmethod
def tanh(signal, deriv=False):
if deriv:
return 1 - np.power(np.tanh(signal), 2)
activation = np.tanh(signal)
return activation
@staticmethod
def elu(signal, deriv=False, alpha=1.0):
activation = (signal >= 0).astype(int) * signal + \
(signal < 0).astype(int) * (alpha * (np.exp(signal) - 1))
if deriv:
activation = (signal >= 0).astype(int) + \
(signal < 0) * (Activator.elu(signal) + alpha)
return activation
@staticmethod
def softmax(signal, deriv=False):
signal = signal - np.max(signal)
# Implement correct derivation for the softmax normalization
if deriv:
return np.exp(signal) * (1 - np.exp(signal))
activation = np.exp(signal) / np.array([np.sum(np.exp(signal), axis=1)]).T
return activation
Implement correct derivation of SoftMaximport numpy as np
class Activator:
@staticmethod
def sigmoid(signal, deriv=False):
if deriv:
return np.multiply(signal, 1 - signal)
activation = 1 / (1 + np.exp(-signal))
return activation
@staticmethod
def tanh(signal, deriv=False):
if deriv:
return 1 - np.power(np.tanh(signal), 2)
activation = np.tanh(signal)
return activation
@staticmethod
def elu(signal, deriv=False, alpha=1.0):
activation = (signal >= 0).astype(int) * signal + \
(signal < 0).astype(int) * (alpha * (np.exp(signal) - 1))
if deriv:
derivation = (signal >= 0).astype(int) + \
(signal < 0) * (activation + alpha)
return derivation
return activation
@staticmethod
def softmax(signal, deriv=False):
signal = signal - np.max(signal)
activation = np.exp(signal) / np.array([np.sum(np.exp(signal), axis=1)]).T
if deriv:
jacobian = - activation[..., None] * activation[:, None, :]
iy, ix = np.diag_indices_from(jacobian[0])
jacobian[:, iy, ix] = activation * (1 - activation)
return jacobian.sum(axis=1)
return activation
|
<commit_before>import numpy as np
class Activator:
@staticmethod
def sigmoid(signal, deriv=False):
if deriv:
return np.multiply(signal, 1 - signal)
activation = 1 / (1 + np.exp(-signal))
return activation
@staticmethod
def tanh(signal, deriv=False):
if deriv:
return 1 - np.power(np.tanh(signal), 2)
activation = np.tanh(signal)
return activation
@staticmethod
def elu(signal, deriv=False, alpha=1.0):
activation = (signal >= 0).astype(int) * signal + \
(signal < 0).astype(int) * (alpha * (np.exp(signal) - 1))
if deriv:
activation = (signal >= 0).astype(int) + \
(signal < 0) * (Activator.elu(signal) + alpha)
return activation
@staticmethod
def softmax(signal, deriv=False):
signal = signal - np.max(signal)
# Implement correct derivation for the softmax normalization
if deriv:
return np.exp(signal) * (1 - np.exp(signal))
activation = np.exp(signal) / np.array([np.sum(np.exp(signal), axis=1)]).T
return activation
<commit_msg>Implement correct derivation of SoftMax<commit_after>import numpy as np
class Activator:
@staticmethod
def sigmoid(signal, deriv=False):
if deriv:
return np.multiply(signal, 1 - signal)
activation = 1 / (1 + np.exp(-signal))
return activation
@staticmethod
def tanh(signal, deriv=False):
if deriv:
return 1 - np.power(np.tanh(signal), 2)
activation = np.tanh(signal)
return activation
@staticmethod
def elu(signal, deriv=False, alpha=1.0):
activation = (signal >= 0).astype(int) * signal + \
(signal < 0).astype(int) * (alpha * (np.exp(signal) - 1))
if deriv:
derivation = (signal >= 0).astype(int) + \
(signal < 0) * (activation + alpha)
return derivation
return activation
@staticmethod
def softmax(signal, deriv=False):
signal = signal - np.max(signal)
activation = np.exp(signal) / np.array([np.sum(np.exp(signal), axis=1)]).T
if deriv:
jacobian = - activation[..., None] * activation[:, None, :]
iy, ix = np.diag_indices_from(jacobian[0])
jacobian[:, iy, ix] = activation * (1 - activation)
return jacobian.sum(axis=1)
return activation
|
572d924de7025eb6a41734e7f8df039210c930c1
|
eventlet/__init__.py
|
eventlet/__init__.py
|
# eventlet package façade: re-export the public API from the submodules.
# NOTE(review): Python 2 source (`except ImportError, e` below).
__version__ = '1.0.2'
try:
    from eventlet import greenthread
    from eventlet import greenpool
    from eventlet import queue
    from eventlet import timeout
    from eventlet import patcher
    from eventlet import convenience
    import greenlet
    # Flat aliases so callers can write e.g. eventlet.spawn(...)
    # instead of eventlet.greenthread.spawn(...).
    sleep = greenthread.sleep
    spawn = greenthread.spawn
    spawn_n = greenthread.spawn_n
    spawn_after = greenthread.spawn_after
    kill = greenthread.kill
    Timeout = timeout.Timeout
    with_timeout = timeout.with_timeout
    GreenPool = greenpool.GreenPool
    GreenPile = greenpool.GreenPile
    Queue = queue.Queue
    import_patched = patcher.import_patched
    monkey_patch = patcher.monkey_patch
    connect = convenience.connect
    listen = convenience.listen
    serve = convenience.serve
    StopServe = convenience.StopServe
    wrap_ssl = convenience.wrap_ssl
    getcurrent = greenlet.greenlet.getcurrent
    # deprecated
    TimeoutError = timeout.Timeout
    exc_after = greenthread.exc_after
    call_after_global = greenthread.call_after_global
except ImportError, e:
    # This is to make Debian packaging easier, it ignores import
    # errors of greenlet so that the packager can still at least
    # access the version. Also this makes easy_install a little quieter
    if 'greenlet' not in str(e):
        # any other exception should be printed
        import traceback
        traceback.print_exc()
|
version_info = (1, 0, 3)
__version__ = ".".join(map(str, version_info))
try:
from eventlet import greenthread
from eventlet import greenpool
from eventlet import queue
from eventlet import timeout
from eventlet import patcher
from eventlet import convenience
import greenlet
sleep = greenthread.sleep
spawn = greenthread.spawn
spawn_n = greenthread.spawn_n
spawn_after = greenthread.spawn_after
kill = greenthread.kill
Timeout = timeout.Timeout
with_timeout = timeout.with_timeout
GreenPool = greenpool.GreenPool
GreenPile = greenpool.GreenPile
Queue = queue.Queue
import_patched = patcher.import_patched
monkey_patch = patcher.monkey_patch
connect = convenience.connect
listen = convenience.listen
serve = convenience.serve
StopServe = convenience.StopServe
wrap_ssl = convenience.wrap_ssl
getcurrent = greenlet.greenlet.getcurrent
# deprecated
TimeoutError = timeout.Timeout
exc_after = greenthread.exc_after
call_after_global = greenthread.call_after_global
except ImportError, e:
# This is to make Debian packaging easier, it ignores import
# errors of greenlet so that the packager can still at least
# access the version. Also this makes easy_install a little quieter
if 'greenlet' not in str(e):
# any other exception should be printed
import traceback
traceback.print_exc()
|
Fix version info, and bump to 1.0.3
|
Fix version info, and bump to 1.0.3
|
Python
|
mit
|
Cue/eventlet,Cue/eventlet
|
__version__ = '1.0.2'
try:
from eventlet import greenthread
from eventlet import greenpool
from eventlet import queue
from eventlet import timeout
from eventlet import patcher
from eventlet import convenience
import greenlet
sleep = greenthread.sleep
spawn = greenthread.spawn
spawn_n = greenthread.spawn_n
spawn_after = greenthread.spawn_after
kill = greenthread.kill
Timeout = timeout.Timeout
with_timeout = timeout.with_timeout
GreenPool = greenpool.GreenPool
GreenPile = greenpool.GreenPile
Queue = queue.Queue
import_patched = patcher.import_patched
monkey_patch = patcher.monkey_patch
connect = convenience.connect
listen = convenience.listen
serve = convenience.serve
StopServe = convenience.StopServe
wrap_ssl = convenience.wrap_ssl
getcurrent = greenlet.greenlet.getcurrent
# deprecated
TimeoutError = timeout.Timeout
exc_after = greenthread.exc_after
call_after_global = greenthread.call_after_global
except ImportError, e:
# This is to make Debian packaging easier, it ignores import
# errors of greenlet so that the packager can still at least
# access the version. Also this makes easy_install a little quieter
if 'greenlet' not in str(e):
# any other exception should be printed
import traceback
traceback.print_exc()
Fix version info, and bump to 1.0.3
|
version_info = (1, 0, 3)
__version__ = ".".join(map(str, version_info))
try:
from eventlet import greenthread
from eventlet import greenpool
from eventlet import queue
from eventlet import timeout
from eventlet import patcher
from eventlet import convenience
import greenlet
sleep = greenthread.sleep
spawn = greenthread.spawn
spawn_n = greenthread.spawn_n
spawn_after = greenthread.spawn_after
kill = greenthread.kill
Timeout = timeout.Timeout
with_timeout = timeout.with_timeout
GreenPool = greenpool.GreenPool
GreenPile = greenpool.GreenPile
Queue = queue.Queue
import_patched = patcher.import_patched
monkey_patch = patcher.monkey_patch
connect = convenience.connect
listen = convenience.listen
serve = convenience.serve
StopServe = convenience.StopServe
wrap_ssl = convenience.wrap_ssl
getcurrent = greenlet.greenlet.getcurrent
# deprecated
TimeoutError = timeout.Timeout
exc_after = greenthread.exc_after
call_after_global = greenthread.call_after_global
except ImportError, e:
# This is to make Debian packaging easier, it ignores import
# errors of greenlet so that the packager can still at least
# access the version. Also this makes easy_install a little quieter
if 'greenlet' not in str(e):
# any other exception should be printed
import traceback
traceback.print_exc()
|
<commit_before>__version__ = '1.0.2'
try:
from eventlet import greenthread
from eventlet import greenpool
from eventlet import queue
from eventlet import timeout
from eventlet import patcher
from eventlet import convenience
import greenlet
sleep = greenthread.sleep
spawn = greenthread.spawn
spawn_n = greenthread.spawn_n
spawn_after = greenthread.spawn_after
kill = greenthread.kill
Timeout = timeout.Timeout
with_timeout = timeout.with_timeout
GreenPool = greenpool.GreenPool
GreenPile = greenpool.GreenPile
Queue = queue.Queue
import_patched = patcher.import_patched
monkey_patch = patcher.monkey_patch
connect = convenience.connect
listen = convenience.listen
serve = convenience.serve
StopServe = convenience.StopServe
wrap_ssl = convenience.wrap_ssl
getcurrent = greenlet.greenlet.getcurrent
# deprecated
TimeoutError = timeout.Timeout
exc_after = greenthread.exc_after
call_after_global = greenthread.call_after_global
except ImportError, e:
# This is to make Debian packaging easier, it ignores import
# errors of greenlet so that the packager can still at least
# access the version. Also this makes easy_install a little quieter
if 'greenlet' not in str(e):
# any other exception should be printed
import traceback
traceback.print_exc()
<commit_msg>Fix version info, and bump to 1.0.3<commit_after>
|
version_info = (1, 0, 3)
__version__ = ".".join(map(str, version_info))
try:
from eventlet import greenthread
from eventlet import greenpool
from eventlet import queue
from eventlet import timeout
from eventlet import patcher
from eventlet import convenience
import greenlet
sleep = greenthread.sleep
spawn = greenthread.spawn
spawn_n = greenthread.spawn_n
spawn_after = greenthread.spawn_after
kill = greenthread.kill
Timeout = timeout.Timeout
with_timeout = timeout.with_timeout
GreenPool = greenpool.GreenPool
GreenPile = greenpool.GreenPile
Queue = queue.Queue
import_patched = patcher.import_patched
monkey_patch = patcher.monkey_patch
connect = convenience.connect
listen = convenience.listen
serve = convenience.serve
StopServe = convenience.StopServe
wrap_ssl = convenience.wrap_ssl
getcurrent = greenlet.greenlet.getcurrent
# deprecated
TimeoutError = timeout.Timeout
exc_after = greenthread.exc_after
call_after_global = greenthread.call_after_global
except ImportError, e:
# This is to make Debian packaging easier, it ignores import
# errors of greenlet so that the packager can still at least
# access the version. Also this makes easy_install a little quieter
if 'greenlet' not in str(e):
# any other exception should be printed
import traceback
traceback.print_exc()
|
__version__ = '1.0.2'
try:
from eventlet import greenthread
from eventlet import greenpool
from eventlet import queue
from eventlet import timeout
from eventlet import patcher
from eventlet import convenience
import greenlet
sleep = greenthread.sleep
spawn = greenthread.spawn
spawn_n = greenthread.spawn_n
spawn_after = greenthread.spawn_after
kill = greenthread.kill
Timeout = timeout.Timeout
with_timeout = timeout.with_timeout
GreenPool = greenpool.GreenPool
GreenPile = greenpool.GreenPile
Queue = queue.Queue
import_patched = patcher.import_patched
monkey_patch = patcher.monkey_patch
connect = convenience.connect
listen = convenience.listen
serve = convenience.serve
StopServe = convenience.StopServe
wrap_ssl = convenience.wrap_ssl
getcurrent = greenlet.greenlet.getcurrent
# deprecated
TimeoutError = timeout.Timeout
exc_after = greenthread.exc_after
call_after_global = greenthread.call_after_global
except ImportError, e:
# This is to make Debian packaging easier, it ignores import
# errors of greenlet so that the packager can still at least
# access the version. Also this makes easy_install a little quieter
if 'greenlet' not in str(e):
# any other exception should be printed
import traceback
traceback.print_exc()
Fix version info, and bump to 1.0.3version_info = (1, 0, 3)
__version__ = ".".join(map(str, version_info))
try:
from eventlet import greenthread
from eventlet import greenpool
from eventlet import queue
from eventlet import timeout
from eventlet import patcher
from eventlet import convenience
import greenlet
sleep = greenthread.sleep
spawn = greenthread.spawn
spawn_n = greenthread.spawn_n
spawn_after = greenthread.spawn_after
kill = greenthread.kill
Timeout = timeout.Timeout
with_timeout = timeout.with_timeout
GreenPool = greenpool.GreenPool
GreenPile = greenpool.GreenPile
Queue = queue.Queue
import_patched = patcher.import_patched
monkey_patch = patcher.monkey_patch
connect = convenience.connect
listen = convenience.listen
serve = convenience.serve
StopServe = convenience.StopServe
wrap_ssl = convenience.wrap_ssl
getcurrent = greenlet.greenlet.getcurrent
# deprecated
TimeoutError = timeout.Timeout
exc_after = greenthread.exc_after
call_after_global = greenthread.call_after_global
except ImportError, e:
# This is to make Debian packaging easier, it ignores import
# errors of greenlet so that the packager can still at least
# access the version. Also this makes easy_install a little quieter
if 'greenlet' not in str(e):
# any other exception should be printed
import traceback
traceback.print_exc()
|
<commit_before>__version__ = '1.0.2'
try:
from eventlet import greenthread
from eventlet import greenpool
from eventlet import queue
from eventlet import timeout
from eventlet import patcher
from eventlet import convenience
import greenlet
sleep = greenthread.sleep
spawn = greenthread.spawn
spawn_n = greenthread.spawn_n
spawn_after = greenthread.spawn_after
kill = greenthread.kill
Timeout = timeout.Timeout
with_timeout = timeout.with_timeout
GreenPool = greenpool.GreenPool
GreenPile = greenpool.GreenPile
Queue = queue.Queue
import_patched = patcher.import_patched
monkey_patch = patcher.monkey_patch
connect = convenience.connect
listen = convenience.listen
serve = convenience.serve
StopServe = convenience.StopServe
wrap_ssl = convenience.wrap_ssl
getcurrent = greenlet.greenlet.getcurrent
# deprecated
TimeoutError = timeout.Timeout
exc_after = greenthread.exc_after
call_after_global = greenthread.call_after_global
except ImportError, e:
# This is to make Debian packaging easier, it ignores import
# errors of greenlet so that the packager can still at least
# access the version. Also this makes easy_install a little quieter
if 'greenlet' not in str(e):
# any other exception should be printed
import traceback
traceback.print_exc()
<commit_msg>Fix version info, and bump to 1.0.3<commit_after>version_info = (1, 0, 3)
__version__ = ".".join(map(str, version_info))
try:
from eventlet import greenthread
from eventlet import greenpool
from eventlet import queue
from eventlet import timeout
from eventlet import patcher
from eventlet import convenience
import greenlet
sleep = greenthread.sleep
spawn = greenthread.spawn
spawn_n = greenthread.spawn_n
spawn_after = greenthread.spawn_after
kill = greenthread.kill
Timeout = timeout.Timeout
with_timeout = timeout.with_timeout
GreenPool = greenpool.GreenPool
GreenPile = greenpool.GreenPile
Queue = queue.Queue
import_patched = patcher.import_patched
monkey_patch = patcher.monkey_patch
connect = convenience.connect
listen = convenience.listen
serve = convenience.serve
StopServe = convenience.StopServe
wrap_ssl = convenience.wrap_ssl
getcurrent = greenlet.greenlet.getcurrent
# deprecated
TimeoutError = timeout.Timeout
exc_after = greenthread.exc_after
call_after_global = greenthread.call_after_global
except ImportError, e:
# This is to make Debian packaging easier, it ignores import
# errors of greenlet so that the packager can still at least
# access the version. Also this makes easy_install a little quieter
if 'greenlet' not in str(e):
# any other exception should be printed
import traceback
traceback.print_exc()
|
146832fe1eba0bc22125ade183f34621de5625fa
|
apps/bluebottle_utils/fields.py
|
apps/bluebottle_utils/fields.py
|
from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_digits', 9)
kwargs.setdefault('decimal_places', 2)
super(MoneyField, self).__init__(*args, default=Decimal('0.00'), **kwargs)
|
from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_digits', 9)
kwargs.setdefault('decimal_places', 2)
kwargs.setdefault('default', Decimal('0.00'))
super(MoneyField, self).__init__(*args, **kwargs)
# If south is installed, ensure that MoneyField will be introspected just like a normal DecimalField
try:
from south.modelsinspector import add_introspection_rules
except ImportError:
pass
else:
add_introspection_rules([], ["^apps\.bluebottle_utils\.fields\.MoneyField",])
|
Add south introspection rule for MoneyField.
|
Add south introspection rule for MoneyField.
|
Python
|
bsd-3-clause
|
onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site
|
from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_digits', 9)
kwargs.setdefault('decimal_places', 2)
super(MoneyField, self).__init__(*args, default=Decimal('0.00'), **kwargs)
Add south introspection rule for MoneyField.
|
from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_digits', 9)
kwargs.setdefault('decimal_places', 2)
kwargs.setdefault('default', Decimal('0.00'))
super(MoneyField, self).__init__(*args, **kwargs)
# If south is installed, ensure that MoneyField will be introspected just like a normal DecimalField
try:
from south.modelsinspector import add_introspection_rules
except ImportError:
pass
else:
add_introspection_rules([], ["^apps\.bluebottle_utils\.fields\.MoneyField",])
|
<commit_before>from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_digits', 9)
kwargs.setdefault('decimal_places', 2)
super(MoneyField, self).__init__(*args, default=Decimal('0.00'), **kwargs)
<commit_msg>Add south introspection rule for MoneyField.<commit_after>
|
from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_digits', 9)
kwargs.setdefault('decimal_places', 2)
kwargs.setdefault('default', Decimal('0.00'))
super(MoneyField, self).__init__(*args, **kwargs)
# If south is installed, ensure that MoneyField will be introspected just like a normal DecimalField
try:
from south.modelsinspector import add_introspection_rules
except ImportError:
pass
else:
add_introspection_rules([], ["^apps\.bluebottle_utils\.fields\.MoneyField",])
|
from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_digits', 9)
kwargs.setdefault('decimal_places', 2)
super(MoneyField, self).__init__(*args, default=Decimal('0.00'), **kwargs)
Add south introspection rule for MoneyField.from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_digits', 9)
kwargs.setdefault('decimal_places', 2)
kwargs.setdefault('default', Decimal('0.00'))
super(MoneyField, self).__init__(*args, **kwargs)
# If south is installed, ensure that MoneyField will be introspected just like a normal DecimalField
try:
from south.modelsinspector import add_introspection_rules
except ImportError:
pass
else:
add_introspection_rules([], ["^apps\.bluebottle_utils\.fields\.MoneyField",])
|
<commit_before>from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_digits', 9)
kwargs.setdefault('decimal_places', 2)
super(MoneyField, self).__init__(*args, default=Decimal('0.00'), **kwargs)
<commit_msg>Add south introspection rule for MoneyField.<commit_after>from decimal import Decimal
from django.db import models
class MoneyField(models.DecimalField):
def __init__(self, *args, **kwargs):
kwargs.setdefault('max_digits', 9)
kwargs.setdefault('decimal_places', 2)
kwargs.setdefault('default', Decimal('0.00'))
super(MoneyField, self).__init__(*args, **kwargs)
# If south is installed, ensure that MoneyField will be introspected just like a normal DecimalField
try:
from south.modelsinspector import add_introspection_rules
except ImportError:
pass
else:
add_introspection_rules([], ["^apps\.bluebottle_utils\.fields\.MoneyField",])
|
1ef71bd1b1eabcbe3d2148d8eb5e3f5a890450d7
|
idiokit/dns/_hostlookup.py
|
idiokit/dns/_hostlookup.py
|
from .. import idiokit
from ._iputils import parse_ip
from ._conf import hosts
from ._dns import DNSError, a, aaaa
def _filter_ips(potential_ips):
results = []
for ip in potential_ips:
try:
family, ip = parse_ip(ip)
except ValueError:
continue
else:
results.append((family, ip))
return results
class HostLookup(object):
_hosts = hosts()
@idiokit.stream
def host_lookup(self, host, resolver=None):
results = _filter_ips([host])
if not results:
results = _filter_ips(self._hosts.load().name_to_ips(host))
if not results:
results = []
error = None
try:
records = yield a(host, resolver)
except DNSError as error:
results = []
else:
results = _filter_ips(records)
try:
records = yield aaaa(host, resolver)
except DNSError:
if error is not None:
raise error
else:
results.extend(_filter_ips(records))
idiokit.stop(results)
host_lookup = HostLookup().host_lookup
|
from .. import idiokit
from ._iputils import parse_ip
from ._conf import hosts
from ._dns import DNSError, a, aaaa
def _filter_ips(potential_ips):
results = []
for ip in potential_ips:
try:
family, ip = parse_ip(ip)
except ValueError:
continue
else:
results.append((family, ip))
return results
class HostLookup(object):
_hosts = None
def __init__(self, hosts_file=None):
if hosts_file:
self._hosts = hosts(path=hosts_file)
else:
self._hosts = hosts()
@idiokit.stream
def host_lookup(self, host, resolver=None):
results = _filter_ips([host])
if not results:
results = _filter_ips(self._hosts.load().name_to_ips(host))
if not results:
results = []
error = None
try:
records = yield a(host, resolver)
except DNSError as error:
results = []
else:
results = _filter_ips(records)
try:
records = yield aaaa(host, resolver)
except DNSError:
if error is not None:
raise error
else:
results.extend(_filter_ips(records))
idiokit.stop(results)
host_lookup = HostLookup().host_lookup
|
Add configurable path to hosts file for HostLookup()
|
Add configurable path to hosts file for HostLookup()
Required for unit testing so that tests don't have to rely on
/etc/hosts file.
|
Python
|
mit
|
abusesa/idiokit
|
from .. import idiokit
from ._iputils import parse_ip
from ._conf import hosts
from ._dns import DNSError, a, aaaa
def _filter_ips(potential_ips):
results = []
for ip in potential_ips:
try:
family, ip = parse_ip(ip)
except ValueError:
continue
else:
results.append((family, ip))
return results
class HostLookup(object):
_hosts = hosts()
@idiokit.stream
def host_lookup(self, host, resolver=None):
results = _filter_ips([host])
if not results:
results = _filter_ips(self._hosts.load().name_to_ips(host))
if not results:
results = []
error = None
try:
records = yield a(host, resolver)
except DNSError as error:
results = []
else:
results = _filter_ips(records)
try:
records = yield aaaa(host, resolver)
except DNSError:
if error is not None:
raise error
else:
results.extend(_filter_ips(records))
idiokit.stop(results)
host_lookup = HostLookup().host_lookup
Add configurable path to hosts file for HostLookup()
Required for unit testing so that tests don't have to rely on
/etc/hosts file.
|
from .. import idiokit
from ._iputils import parse_ip
from ._conf import hosts
from ._dns import DNSError, a, aaaa
def _filter_ips(potential_ips):
results = []
for ip in potential_ips:
try:
family, ip = parse_ip(ip)
except ValueError:
continue
else:
results.append((family, ip))
return results
class HostLookup(object):
_hosts = None
def __init__(self, hosts_file=None):
if hosts_file:
self._hosts = hosts(path=hosts_file)
else:
self._hosts = hosts()
@idiokit.stream
def host_lookup(self, host, resolver=None):
results = _filter_ips([host])
if not results:
results = _filter_ips(self._hosts.load().name_to_ips(host))
if not results:
results = []
error = None
try:
records = yield a(host, resolver)
except DNSError as error:
results = []
else:
results = _filter_ips(records)
try:
records = yield aaaa(host, resolver)
except DNSError:
if error is not None:
raise error
else:
results.extend(_filter_ips(records))
idiokit.stop(results)
host_lookup = HostLookup().host_lookup
|
<commit_before>from .. import idiokit
from ._iputils import parse_ip
from ._conf import hosts
from ._dns import DNSError, a, aaaa
def _filter_ips(potential_ips):
results = []
for ip in potential_ips:
try:
family, ip = parse_ip(ip)
except ValueError:
continue
else:
results.append((family, ip))
return results
class HostLookup(object):
_hosts = hosts()
@idiokit.stream
def host_lookup(self, host, resolver=None):
results = _filter_ips([host])
if not results:
results = _filter_ips(self._hosts.load().name_to_ips(host))
if not results:
results = []
error = None
try:
records = yield a(host, resolver)
except DNSError as error:
results = []
else:
results = _filter_ips(records)
try:
records = yield aaaa(host, resolver)
except DNSError:
if error is not None:
raise error
else:
results.extend(_filter_ips(records))
idiokit.stop(results)
host_lookup = HostLookup().host_lookup
<commit_msg>Add configurable path to hosts file for HostLookup()
Required for unit testing so that tests don't have to rely on
/etc/hosts file.<commit_after>
|
from .. import idiokit
from ._iputils import parse_ip
from ._conf import hosts
from ._dns import DNSError, a, aaaa
def _filter_ips(potential_ips):
results = []
for ip in potential_ips:
try:
family, ip = parse_ip(ip)
except ValueError:
continue
else:
results.append((family, ip))
return results
class HostLookup(object):
_hosts = None
def __init__(self, hosts_file=None):
if hosts_file:
self._hosts = hosts(path=hosts_file)
else:
self._hosts = hosts()
@idiokit.stream
def host_lookup(self, host, resolver=None):
results = _filter_ips([host])
if not results:
results = _filter_ips(self._hosts.load().name_to_ips(host))
if not results:
results = []
error = None
try:
records = yield a(host, resolver)
except DNSError as error:
results = []
else:
results = _filter_ips(records)
try:
records = yield aaaa(host, resolver)
except DNSError:
if error is not None:
raise error
else:
results.extend(_filter_ips(records))
idiokit.stop(results)
host_lookup = HostLookup().host_lookup
|
from .. import idiokit
from ._iputils import parse_ip
from ._conf import hosts
from ._dns import DNSError, a, aaaa
def _filter_ips(potential_ips):
results = []
for ip in potential_ips:
try:
family, ip = parse_ip(ip)
except ValueError:
continue
else:
results.append((family, ip))
return results
class HostLookup(object):
_hosts = hosts()
@idiokit.stream
def host_lookup(self, host, resolver=None):
results = _filter_ips([host])
if not results:
results = _filter_ips(self._hosts.load().name_to_ips(host))
if not results:
results = []
error = None
try:
records = yield a(host, resolver)
except DNSError as error:
results = []
else:
results = _filter_ips(records)
try:
records = yield aaaa(host, resolver)
except DNSError:
if error is not None:
raise error
else:
results.extend(_filter_ips(records))
idiokit.stop(results)
host_lookup = HostLookup().host_lookup
Add configurable path to hosts file for HostLookup()
Required for unit testing so that tests don't have to rely on
/etc/hosts file.from .. import idiokit
from ._iputils import parse_ip
from ._conf import hosts
from ._dns import DNSError, a, aaaa
def _filter_ips(potential_ips):
results = []
for ip in potential_ips:
try:
family, ip = parse_ip(ip)
except ValueError:
continue
else:
results.append((family, ip))
return results
class HostLookup(object):
_hosts = None
def __init__(self, hosts_file=None):
if hosts_file:
self._hosts = hosts(path=hosts_file)
else:
self._hosts = hosts()
@idiokit.stream
def host_lookup(self, host, resolver=None):
results = _filter_ips([host])
if not results:
results = _filter_ips(self._hosts.load().name_to_ips(host))
if not results:
results = []
error = None
try:
records = yield a(host, resolver)
except DNSError as error:
results = []
else:
results = _filter_ips(records)
try:
records = yield aaaa(host, resolver)
except DNSError:
if error is not None:
raise error
else:
results.extend(_filter_ips(records))
idiokit.stop(results)
host_lookup = HostLookup().host_lookup
|
<commit_before>from .. import idiokit
from ._iputils import parse_ip
from ._conf import hosts
from ._dns import DNSError, a, aaaa
def _filter_ips(potential_ips):
results = []
for ip in potential_ips:
try:
family, ip = parse_ip(ip)
except ValueError:
continue
else:
results.append((family, ip))
return results
class HostLookup(object):
_hosts = hosts()
@idiokit.stream
def host_lookup(self, host, resolver=None):
results = _filter_ips([host])
if not results:
results = _filter_ips(self._hosts.load().name_to_ips(host))
if not results:
results = []
error = None
try:
records = yield a(host, resolver)
except DNSError as error:
results = []
else:
results = _filter_ips(records)
try:
records = yield aaaa(host, resolver)
except DNSError:
if error is not None:
raise error
else:
results.extend(_filter_ips(records))
idiokit.stop(results)
host_lookup = HostLookup().host_lookup
<commit_msg>Add configurable path to hosts file for HostLookup()
Required for unit testing so that tests don't have to rely on
/etc/hosts file.<commit_after>from .. import idiokit
from ._iputils import parse_ip
from ._conf import hosts
from ._dns import DNSError, a, aaaa
def _filter_ips(potential_ips):
results = []
for ip in potential_ips:
try:
family, ip = parse_ip(ip)
except ValueError:
continue
else:
results.append((family, ip))
return results
class HostLookup(object):
_hosts = None
def __init__(self, hosts_file=None):
if hosts_file:
self._hosts = hosts(path=hosts_file)
else:
self._hosts = hosts()
@idiokit.stream
def host_lookup(self, host, resolver=None):
results = _filter_ips([host])
if not results:
results = _filter_ips(self._hosts.load().name_to_ips(host))
if not results:
results = []
error = None
try:
records = yield a(host, resolver)
except DNSError as error:
results = []
else:
results = _filter_ips(records)
try:
records = yield aaaa(host, resolver)
except DNSError:
if error is not None:
raise error
else:
results.extend(_filter_ips(records))
idiokit.stop(results)
host_lookup = HostLookup().host_lookup
|
476b66510cd7b84233ad02ccfcde3ecd33604c57
|
simple_es/event/domain_event.py
|
simple_es/event/domain_event.py
|
from simple_es.identifier.identifies import Identifies
class DomainEvent():
"""
Base class for all domain driven events
"""
identifier = None
def __init__(self, identifier=None):
if not isinstance(identifier, Identifies):
raise TypeError('Event identifier must be an instance of the Identifies class', identifier)
self.identifier = identifier
|
from simple_es.identifier.identifies import Identifies
class DomainEvent():
"""
Base class for all domain driven events
"""
identifier = None
_recorded = False
def __init__(self, identifier=None):
if not isinstance(identifier, Identifies):
raise TypeError('Event identifier must be an instance of the Identifies class', identifier)
self.identifier = identifier
|
Add a _recorded bool to events
|
Add a _recorded bool to events
|
Python
|
apache-2.0
|
OnShift/simple-es
|
from simple_es.identifier.identifies import Identifies
class DomainEvent():
"""
Base class for all domain driven events
"""
identifier = None
def __init__(self, identifier=None):
if not isinstance(identifier, Identifies):
raise TypeError('Event identifier must be an instance of the Identifies class', identifier)
self.identifier = identifier
Add a _recorded bool to events
|
from simple_es.identifier.identifies import Identifies
class DomainEvent():
"""
Base class for all domain driven events
"""
identifier = None
_recorded = False
def __init__(self, identifier=None):
if not isinstance(identifier, Identifies):
raise TypeError('Event identifier must be an instance of the Identifies class', identifier)
self.identifier = identifier
|
<commit_before>from simple_es.identifier.identifies import Identifies
class DomainEvent():
"""
Base class for all domain driven events
"""
identifier = None
def __init__(self, identifier=None):
if not isinstance(identifier, Identifies):
raise TypeError('Event identifier must be an instance of the Identifies class', identifier)
self.identifier = identifier
<commit_msg>Add a _recorded bool to events<commit_after>
|
from simple_es.identifier.identifies import Identifies
class DomainEvent():
"""
Base class for all domain driven events
"""
identifier = None
_recorded = False
def __init__(self, identifier=None):
if not isinstance(identifier, Identifies):
raise TypeError('Event identifier must be an instance of the Identifies class', identifier)
self.identifier = identifier
|
from simple_es.identifier.identifies import Identifies
class DomainEvent():
"""
Base class for all domain driven events
"""
identifier = None
def __init__(self, identifier=None):
if not isinstance(identifier, Identifies):
raise TypeError('Event identifier must be an instance of the Identifies class', identifier)
self.identifier = identifier
Add a _recorded bool to eventsfrom simple_es.identifier.identifies import Identifies
class DomainEvent():
"""
Base class for all domain driven events
"""
identifier = None
_recorded = False
def __init__(self, identifier=None):
if not isinstance(identifier, Identifies):
raise TypeError('Event identifier must be an instance of the Identifies class', identifier)
self.identifier = identifier
|
<commit_before>from simple_es.identifier.identifies import Identifies
class DomainEvent():
"""
Base class for all domain driven events
"""
identifier = None
def __init__(self, identifier=None):
if not isinstance(identifier, Identifies):
raise TypeError('Event identifier must be an instance of the Identifies class', identifier)
self.identifier = identifier
<commit_msg>Add a _recorded bool to events<commit_after>from simple_es.identifier.identifies import Identifies
class DomainEvent():
"""
Base class for all domain driven events
"""
identifier = None
_recorded = False
def __init__(self, identifier=None):
if not isinstance(identifier, Identifies):
raise TypeError('Event identifier must be an instance of the Identifies class', identifier)
self.identifier = identifier
|
70049aa7b2f8dcede7d562def03b262f4c39816a
|
indra/sources/lincs/api.py
|
indra/sources/lincs/api.py
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
__all__ = []
import requests
DATASET_URL = 'http://lincs.hms.harvard.edu/db/datasets/20000/results'
def _get_lincs_drug_target_data():
resp = requests.get(DATASET_URL, params={'output_type': '.csv'})
assert resp.status_code == 200, resp.text
csv_str = resp.content.decode('utf-8')
return [tuple(line.split(',')) for line in csv_str.splitlines()]
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
__all__ = []
import csv
import requests
from indra.sources.lincs.processor import LincsProcessor
DATASET_URL = 'http://lincs.hms.harvard.edu/db/datasets/20000/results'
def process_from_web():
lincs_data = _get_lincs_drug_target_data()
return LincsProcessor(lincs_data)
def _get_lincs_drug_target_data():
resp = requests.get(DATASET_URL, params={'output_type': '.csv'})
assert resp.status_code == 200, resp.text
csv_str = resp.content.decode('utf-8')
csv_lines = csv_str.splitlines()
headers = csv_lines[0].split(',')
return [{headers[i]: val for i, val in enumerate(line_elements)}
for line_elements in csv.reader(csv_lines[1:])]
|
Return a list of dicts for data.
|
Return a list of dicts for data.
|
Python
|
bsd-2-clause
|
pvtodorov/indra,johnbachman/indra,sorgerlab/belpy,sorgerlab/belpy,sorgerlab/indra,bgyori/indra,sorgerlab/indra,johnbachman/belpy,johnbachman/belpy,pvtodorov/indra,sorgerlab/belpy,johnbachman/belpy,pvtodorov/indra,bgyori/indra,johnbachman/indra,bgyori/indra,sorgerlab/indra,pvtodorov/indra,johnbachman/indra
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
__all__ = []
import requests
DATASET_URL = 'http://lincs.hms.harvard.edu/db/datasets/20000/results'
def _get_lincs_drug_target_data():
resp = requests.get(DATASET_URL, params={'output_type': '.csv'})
assert resp.status_code == 200, resp.text
csv_str = resp.content.decode('utf-8')
return [tuple(line.split(',')) for line in csv_str.splitlines()]
Return a list of dicts for data.
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
__all__ = []
import csv
import requests
from indra.sources.lincs.processor import LincsProcessor
DATASET_URL = 'http://lincs.hms.harvard.edu/db/datasets/20000/results'
def process_from_web():
lincs_data = _get_lincs_drug_target_data()
return LincsProcessor(lincs_data)
def _get_lincs_drug_target_data():
resp = requests.get(DATASET_URL, params={'output_type': '.csv'})
assert resp.status_code == 200, resp.text
csv_str = resp.content.decode('utf-8')
csv_lines = csv_str.splitlines()
headers = csv_lines[0].split(',')
return [{headers[i]: val for i, val in enumerate(line_elements)}
for line_elements in csv.reader(csv_lines[1:])]
|
<commit_before>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
__all__ = []
import requests
DATASET_URL = 'http://lincs.hms.harvard.edu/db/datasets/20000/results'
def _get_lincs_drug_target_data():
resp = requests.get(DATASET_URL, params={'output_type': '.csv'})
assert resp.status_code == 200, resp.text
csv_str = resp.content.decode('utf-8')
return [tuple(line.split(',')) for line in csv_str.splitlines()]
<commit_msg>Return a list of dicts for data.<commit_after>
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
__all__ = []
import csv
import requests
from indra.sources.lincs.processor import LincsProcessor
DATASET_URL = 'http://lincs.hms.harvard.edu/db/datasets/20000/results'
def process_from_web():
lincs_data = _get_lincs_drug_target_data()
return LincsProcessor(lincs_data)
def _get_lincs_drug_target_data():
resp = requests.get(DATASET_URL, params={'output_type': '.csv'})
assert resp.status_code == 200, resp.text
csv_str = resp.content.decode('utf-8')
csv_lines = csv_str.splitlines()
headers = csv_lines[0].split(',')
return [{headers[i]: val for i, val in enumerate(line_elements)}
for line_elements in csv.reader(csv_lines[1:])]
|
from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
__all__ = []
import requests
DATASET_URL = 'http://lincs.hms.harvard.edu/db/datasets/20000/results'
def _get_lincs_drug_target_data():
resp = requests.get(DATASET_URL, params={'output_type': '.csv'})
assert resp.status_code == 200, resp.text
csv_str = resp.content.decode('utf-8')
return [tuple(line.split(',')) for line in csv_str.splitlines()]
Return a list of dicts for data.from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
__all__ = []
import csv
import requests
from indra.sources.lincs.processor import LincsProcessor
DATASET_URL = 'http://lincs.hms.harvard.edu/db/datasets/20000/results'
def process_from_web():
lincs_data = _get_lincs_drug_target_data()
return LincsProcessor(lincs_data)
def _get_lincs_drug_target_data():
resp = requests.get(DATASET_URL, params={'output_type': '.csv'})
assert resp.status_code == 200, resp.text
csv_str = resp.content.decode('utf-8')
csv_lines = csv_str.splitlines()
headers = csv_lines[0].split(',')
return [{headers[i]: val for i, val in enumerate(line_elements)}
for line_elements in csv.reader(csv_lines[1:])]
|
<commit_before>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
__all__ = []
import requests
DATASET_URL = 'http://lincs.hms.harvard.edu/db/datasets/20000/results'
def _get_lincs_drug_target_data():
resp = requests.get(DATASET_URL, params={'output_type': '.csv'})
assert resp.status_code == 200, resp.text
csv_str = resp.content.decode('utf-8')
return [tuple(line.split(',')) for line in csv_str.splitlines()]
<commit_msg>Return a list of dicts for data.<commit_after>from __future__ import absolute_import, print_function, unicode_literals
from builtins import dict, str
__all__ = []
import csv
import requests
from indra.sources.lincs.processor import LincsProcessor
DATASET_URL = 'http://lincs.hms.harvard.edu/db/datasets/20000/results'
def process_from_web():
lincs_data = _get_lincs_drug_target_data()
return LincsProcessor(lincs_data)
def _get_lincs_drug_target_data():
resp = requests.get(DATASET_URL, params={'output_type': '.csv'})
assert resp.status_code == 200, resp.text
csv_str = resp.content.decode('utf-8')
csv_lines = csv_str.splitlines()
headers = csv_lines[0].split(',')
return [{headers[i]: val for i, val in enumerate(line_elements)}
for line_elements in csv.reader(csv_lines[1:])]
|
581bc613ed00b99fc252e52953a9757ff580a510
|
generateConfig.py
|
generateConfig.py
|
#!/bin/python3.5
import os
config = """
.VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community'
.WindowsSDKBasePath10 = 'C:/Program Files (x86)/Windows Kits/10'
.WindowsSDKSubVersion = '10.0.15063.0'
#if __WINDOWS__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = 'C:/temp/fazecache'
.VulkanSDKBasePath = 'C:/VulkanSDK/1.0.54.0'
#endif
#if __LINUX__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = '/tmp/.fbuild.fazecache'
.VulkanSDKBasePath = '/usr/lib'
#endif"""
curDir = os.getcwd().replace("\\", "/")
print("current directory: " + curDir)
config = config.replace("CURRENT_DIRECTORY", curDir)
with open('config.bff', 'w') as out:
out.write(config)
|
#!/bin/python3.5
import os
config = """
.WindowsSDKBasePath10 = 'C:/Program Files (x86)/Windows Kits/10'
.WindowsSDKSubVersion = '10.0.15063.0'
#if __WINDOWS__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = 'C:/temp/fazecache'
.VulkanSDKBasePath = 'C:/VulkanSDK/1.0.54.0'
#endif
#if __LINUX__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = '/tmp/.fbuild.fazecache'
.VulkanSDKBasePath = '/usr/lib'
#endif"""
curDir = os.getcwd().replace("\\", "/")
print("current directory: " + curDir)
config = config.replace("CURRENT_DIRECTORY", curDir)
with open('config.bff', 'w') as out:
if os.path.isdir('C:/Program Files (x86)/Microsoft Visual Studio/2017/Professional'):
out.write(""".VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Professional'""")
else:
out.write(""".VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community'""")
out.write("\n")
out.write(config)
|
Use professional vs instead of community if found
|
Use professional vs instead of community if found
|
Python
|
mit
|
jgavert/Faze,jgavert/Faze,jgavert/Faze,jgavert/Faze
|
#!/bin/python3.5
import os
config = """
.VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community'
.WindowsSDKBasePath10 = 'C:/Program Files (x86)/Windows Kits/10'
.WindowsSDKSubVersion = '10.0.15063.0'
#if __WINDOWS__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = 'C:/temp/fazecache'
.VulkanSDKBasePath = 'C:/VulkanSDK/1.0.54.0'
#endif
#if __LINUX__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = '/tmp/.fbuild.fazecache'
.VulkanSDKBasePath = '/usr/lib'
#endif"""
curDir = os.getcwd().replace("\\", "/")
print("current directory: " + curDir)
config = config.replace("CURRENT_DIRECTORY", curDir)
with open('config.bff', 'w') as out:
out.write(config)
Use professional vs instead of community if found
|
#!/bin/python3.5
import os
config = """
.WindowsSDKBasePath10 = 'C:/Program Files (x86)/Windows Kits/10'
.WindowsSDKSubVersion = '10.0.15063.0'
#if __WINDOWS__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = 'C:/temp/fazecache'
.VulkanSDKBasePath = 'C:/VulkanSDK/1.0.54.0'
#endif
#if __LINUX__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = '/tmp/.fbuild.fazecache'
.VulkanSDKBasePath = '/usr/lib'
#endif"""
curDir = os.getcwd().replace("\\", "/")
print("current directory: " + curDir)
config = config.replace("CURRENT_DIRECTORY", curDir)
with open('config.bff', 'w') as out:
if os.path.isdir('C:/Program Files (x86)/Microsoft Visual Studio/2017/Professional'):
out.write(""".VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Professional'""")
else:
out.write(""".VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community'""")
out.write("\n")
out.write(config)
|
<commit_before>#!/bin/python3.5
import os
config = """
.VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community'
.WindowsSDKBasePath10 = 'C:/Program Files (x86)/Windows Kits/10'
.WindowsSDKSubVersion = '10.0.15063.0'
#if __WINDOWS__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = 'C:/temp/fazecache'
.VulkanSDKBasePath = 'C:/VulkanSDK/1.0.54.0'
#endif
#if __LINUX__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = '/tmp/.fbuild.fazecache'
.VulkanSDKBasePath = '/usr/lib'
#endif"""
curDir = os.getcwd().replace("\\", "/")
print("current directory: " + curDir)
config = config.replace("CURRENT_DIRECTORY", curDir)
with open('config.bff', 'w') as out:
out.write(config)
<commit_msg>Use professional vs instead of community if found<commit_after>
|
#!/bin/python3.5
import os
config = """
.WindowsSDKBasePath10 = 'C:/Program Files (x86)/Windows Kits/10'
.WindowsSDKSubVersion = '10.0.15063.0'
#if __WINDOWS__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = 'C:/temp/fazecache'
.VulkanSDKBasePath = 'C:/VulkanSDK/1.0.54.0'
#endif
#if __LINUX__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = '/tmp/.fbuild.fazecache'
.VulkanSDKBasePath = '/usr/lib'
#endif"""
curDir = os.getcwd().replace("\\", "/")
print("current directory: " + curDir)
config = config.replace("CURRENT_DIRECTORY", curDir)
with open('config.bff', 'w') as out:
if os.path.isdir('C:/Program Files (x86)/Microsoft Visual Studio/2017/Professional'):
out.write(""".VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Professional'""")
else:
out.write(""".VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community'""")
out.write("\n")
out.write(config)
|
#!/bin/python3.5
import os
config = """
.VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community'
.WindowsSDKBasePath10 = 'C:/Program Files (x86)/Windows Kits/10'
.WindowsSDKSubVersion = '10.0.15063.0'
#if __WINDOWS__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = 'C:/temp/fazecache'
.VulkanSDKBasePath = 'C:/VulkanSDK/1.0.54.0'
#endif
#if __LINUX__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = '/tmp/.fbuild.fazecache'
.VulkanSDKBasePath = '/usr/lib'
#endif"""
curDir = os.getcwd().replace("\\", "/")
print("current directory: " + curDir)
config = config.replace("CURRENT_DIRECTORY", curDir)
with open('config.bff', 'w') as out:
out.write(config)
Use professional vs instead of community if found#!/bin/python3.5
import os
config = """
.WindowsSDKBasePath10 = 'C:/Program Files (x86)/Windows Kits/10'
.WindowsSDKSubVersion = '10.0.15063.0'
#if __WINDOWS__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = 'C:/temp/fazecache'
.VulkanSDKBasePath = 'C:/VulkanSDK/1.0.54.0'
#endif
#if __LINUX__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = '/tmp/.fbuild.fazecache'
.VulkanSDKBasePath = '/usr/lib'
#endif"""
curDir = os.getcwd().replace("\\", "/")
print("current directory: " + curDir)
config = config.replace("CURRENT_DIRECTORY", curDir)
with open('config.bff', 'w') as out:
if os.path.isdir('C:/Program Files (x86)/Microsoft Visual Studio/2017/Professional'):
out.write(""".VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Professional'""")
else:
out.write(""".VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community'""")
out.write("\n")
out.write(config)
|
<commit_before>#!/bin/python3.5
import os
config = """
.VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community'
.WindowsSDKBasePath10 = 'C:/Program Files (x86)/Windows Kits/10'
.WindowsSDKSubVersion = '10.0.15063.0'
#if __WINDOWS__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = 'C:/temp/fazecache'
.VulkanSDKBasePath = 'C:/VulkanSDK/1.0.54.0'
#endif
#if __LINUX__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = '/tmp/.fbuild.fazecache'
.VulkanSDKBasePath = '/usr/lib'
#endif"""
curDir = os.getcwd().replace("\\", "/")
print("current directory: " + curDir)
config = config.replace("CURRENT_DIRECTORY", curDir)
with open('config.bff', 'w') as out:
out.write(config)
<commit_msg>Use professional vs instead of community if found<commit_after>#!/bin/python3.5
import os
config = """
.WindowsSDKBasePath10 = 'C:/Program Files (x86)/Windows Kits/10'
.WindowsSDKSubVersion = '10.0.15063.0'
#if __WINDOWS__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = 'C:/temp/fazecache'
.VulkanSDKBasePath = 'C:/VulkanSDK/1.0.54.0'
#endif
#if __LINUX__
.FazEPath = 'CURRENT_DIRECTORY'
.FBuildCache = '/tmp/.fbuild.fazecache'
.VulkanSDKBasePath = '/usr/lib'
#endif"""
curDir = os.getcwd().replace("\\", "/")
print("current directory: " + curDir)
config = config.replace("CURRENT_DIRECTORY", curDir)
with open('config.bff', 'w') as out:
if os.path.isdir('C:/Program Files (x86)/Microsoft Visual Studio/2017/Professional'):
out.write(""".VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Professional'""")
else:
out.write(""".VSBasePath = 'C:/Program Files (x86)/Microsoft Visual Studio/2017/Community'""")
out.write("\n")
out.write(config)
|
da510e3156b1a92bc9139263f9e27e793dd6316c
|
importlib_metadata/abc.py
|
importlib_metadata/abc.py
|
from __future__ import absolute_import
import abc
import sys
if sys.version_info >= (3,): # pragma: nocover
from importlib.abc import MetaPathFinder
else: # pragma: nocover
from abc import ABCMeta as MetaPathFinder
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
"""
@abc.abstractmethod
def find_distributions(self, name=None, path=None):
"""
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching the name
(or all names if not supplied) along the paths in the list
of directories ``path`` (defaults to sys.path).
"""
|
from __future__ import absolute_import
import abc
import sys
if sys.version_info >= (3,): # pragma: nocover
from importlib.abc import MetaPathFinder
else: # pragma: nocover
class MetaPathFinder(object):
__metaclass__ = abc.ABCMeta
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
"""
@abc.abstractmethod
def find_distributions(self, name=None, path=None):
"""
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching the name
(or all names if not supplied) along the paths in the list
of directories ``path`` (defaults to sys.path).
"""
|
Fix MetaPathFinder compatibility stub on Python 2.7
|
Fix MetaPathFinder compatibility stub on Python 2.7
|
Python
|
apache-2.0
|
python/importlib_metadata
|
from __future__ import absolute_import
import abc
import sys
if sys.version_info >= (3,): # pragma: nocover
from importlib.abc import MetaPathFinder
else: # pragma: nocover
from abc import ABCMeta as MetaPathFinder
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
"""
@abc.abstractmethod
def find_distributions(self, name=None, path=None):
"""
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching the name
(or all names if not supplied) along the paths in the list
of directories ``path`` (defaults to sys.path).
"""
Fix MetaPathFinder compatibility stub on Python 2.7
|
from __future__ import absolute_import
import abc
import sys
if sys.version_info >= (3,): # pragma: nocover
from importlib.abc import MetaPathFinder
else: # pragma: nocover
class MetaPathFinder(object):
__metaclass__ = abc.ABCMeta
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
"""
@abc.abstractmethod
def find_distributions(self, name=None, path=None):
"""
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching the name
(or all names if not supplied) along the paths in the list
of directories ``path`` (defaults to sys.path).
"""
|
<commit_before>from __future__ import absolute_import
import abc
import sys
if sys.version_info >= (3,): # pragma: nocover
from importlib.abc import MetaPathFinder
else: # pragma: nocover
from abc import ABCMeta as MetaPathFinder
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
"""
@abc.abstractmethod
def find_distributions(self, name=None, path=None):
"""
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching the name
(or all names if not supplied) along the paths in the list
of directories ``path`` (defaults to sys.path).
"""
<commit_msg>Fix MetaPathFinder compatibility stub on Python 2.7<commit_after>
|
from __future__ import absolute_import
import abc
import sys
if sys.version_info >= (3,): # pragma: nocover
from importlib.abc import MetaPathFinder
else: # pragma: nocover
class MetaPathFinder(object):
__metaclass__ = abc.ABCMeta
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
"""
@abc.abstractmethod
def find_distributions(self, name=None, path=None):
"""
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching the name
(or all names if not supplied) along the paths in the list
of directories ``path`` (defaults to sys.path).
"""
|
from __future__ import absolute_import
import abc
import sys
if sys.version_info >= (3,): # pragma: nocover
from importlib.abc import MetaPathFinder
else: # pragma: nocover
from abc import ABCMeta as MetaPathFinder
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
"""
@abc.abstractmethod
def find_distributions(self, name=None, path=None):
"""
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching the name
(or all names if not supplied) along the paths in the list
of directories ``path`` (defaults to sys.path).
"""
Fix MetaPathFinder compatibility stub on Python 2.7from __future__ import absolute_import
import abc
import sys
if sys.version_info >= (3,): # pragma: nocover
from importlib.abc import MetaPathFinder
else: # pragma: nocover
class MetaPathFinder(object):
__metaclass__ = abc.ABCMeta
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
"""
@abc.abstractmethod
def find_distributions(self, name=None, path=None):
"""
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching the name
(or all names if not supplied) along the paths in the list
of directories ``path`` (defaults to sys.path).
"""
|
<commit_before>from __future__ import absolute_import
import abc
import sys
if sys.version_info >= (3,): # pragma: nocover
from importlib.abc import MetaPathFinder
else: # pragma: nocover
from abc import ABCMeta as MetaPathFinder
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
"""
@abc.abstractmethod
def find_distributions(self, name=None, path=None):
"""
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching the name
(or all names if not supplied) along the paths in the list
of directories ``path`` (defaults to sys.path).
"""
<commit_msg>Fix MetaPathFinder compatibility stub on Python 2.7<commit_after>from __future__ import absolute_import
import abc
import sys
if sys.version_info >= (3,): # pragma: nocover
from importlib.abc import MetaPathFinder
else: # pragma: nocover
class MetaPathFinder(object):
__metaclass__ = abc.ABCMeta
class DistributionFinder(MetaPathFinder):
"""
A MetaPathFinder capable of discovering installed distributions.
"""
@abc.abstractmethod
def find_distributions(self, name=None, path=None):
"""
Return an iterable of all Distribution instances capable of
loading the metadata for packages matching the name
(or all names if not supplied) along the paths in the list
of directories ``path`` (defaults to sys.path).
"""
|
5753ea19d7d83413cf64ecce6360a2f29ef920bf
|
docs/source/conf.py
|
docs/source/conf.py
|
import os
import sys
import sphinx_rtd_theme
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.insert(0, os.path.abspath("../.."))
import django
django.setup()
project = "django-postgres-extra"
copyright = "2019, Sector Labs"
author = "Sector Labs"
extensions = [
"sphinx_rtd_theme",
"sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
]
templates_path = ["_templates"]
exclude_patterns = []
html_theme = "sphinx_rtd_theme"
intersphinx_mapping = {
"django": ("https://docs.djangoproject.com/en/2.2/", "https://docs.djangoproject.com/en/2.2/_objects/"),
}
|
import os
import sys
import sphinx_rtd_theme
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.insert(0, os.path.abspath("../.."))
import django
django.setup()
project = "django-postgres-extra"
copyright = "2019, Sector Labs"
author = "Sector Labs"
extensions = [
"sphinx_rtd_theme",
"sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
]
templates_path = ["_templates"]
exclude_patterns = []
html_theme = "sphinx_rtd_theme"
intersphinx_mapping = {
"django": ("https://docs.djangoproject.com/en/stable/", "https://docs.djangoproject.com/en/stable/_objects/"),
}
|
Use 'stable' Django version for intersphinx
|
Use 'stable' Django version for intersphinx
This will ensure documentation references always point at the latest version.
|
Python
|
mit
|
SectorLabs/django-postgres-extra
|
import os
import sys
import sphinx_rtd_theme
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.insert(0, os.path.abspath("../.."))
import django
django.setup()
project = "django-postgres-extra"
copyright = "2019, Sector Labs"
author = "Sector Labs"
extensions = [
"sphinx_rtd_theme",
"sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
]
templates_path = ["_templates"]
exclude_patterns = []
html_theme = "sphinx_rtd_theme"
intersphinx_mapping = {
"django": ("https://docs.djangoproject.com/en/2.2/", "https://docs.djangoproject.com/en/2.2/_objects/"),
}
Use 'stable' Django version for intersphinx
This will ensure documentation references always point at the latest version.
|
import os
import sys
import sphinx_rtd_theme
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.insert(0, os.path.abspath("../.."))
import django
django.setup()
project = "django-postgres-extra"
copyright = "2019, Sector Labs"
author = "Sector Labs"
extensions = [
"sphinx_rtd_theme",
"sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
]
templates_path = ["_templates"]
exclude_patterns = []
html_theme = "sphinx_rtd_theme"
intersphinx_mapping = {
"django": ("https://docs.djangoproject.com/en/stable/", "https://docs.djangoproject.com/en/stable/_objects/"),
}
|
<commit_before>import os
import sys
import sphinx_rtd_theme
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.insert(0, os.path.abspath("../.."))
import django
django.setup()
project = "django-postgres-extra"
copyright = "2019, Sector Labs"
author = "Sector Labs"
extensions = [
"sphinx_rtd_theme",
"sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
]
templates_path = ["_templates"]
exclude_patterns = []
html_theme = "sphinx_rtd_theme"
intersphinx_mapping = {
"django": ("https://docs.djangoproject.com/en/2.2/", "https://docs.djangoproject.com/en/2.2/_objects/"),
}
<commit_msg>Use 'stable' Django version for intersphinx
This will ensure documentation references always point at the latest version.<commit_after>
|
import os
import sys
import sphinx_rtd_theme
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.insert(0, os.path.abspath("../.."))
import django
django.setup()
project = "django-postgres-extra"
copyright = "2019, Sector Labs"
author = "Sector Labs"
extensions = [
"sphinx_rtd_theme",
"sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
]
templates_path = ["_templates"]
exclude_patterns = []
html_theme = "sphinx_rtd_theme"
intersphinx_mapping = {
"django": ("https://docs.djangoproject.com/en/stable/", "https://docs.djangoproject.com/en/stable/_objects/"),
}
|
import os
import sys
import sphinx_rtd_theme
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.insert(0, os.path.abspath("../.."))
import django
django.setup()
project = "django-postgres-extra"
copyright = "2019, Sector Labs"
author = "Sector Labs"
extensions = [
"sphinx_rtd_theme",
"sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
]
templates_path = ["_templates"]
exclude_patterns = []
html_theme = "sphinx_rtd_theme"
intersphinx_mapping = {
"django": ("https://docs.djangoproject.com/en/2.2/", "https://docs.djangoproject.com/en/2.2/_objects/"),
}
Use 'stable' Django version for intersphinx
This will ensure documentation references always point at the latest version.import os
import sys
import sphinx_rtd_theme
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.insert(0, os.path.abspath("../.."))
import django
django.setup()
project = "django-postgres-extra"
copyright = "2019, Sector Labs"
author = "Sector Labs"
extensions = [
"sphinx_rtd_theme",
"sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
]
templates_path = ["_templates"]
exclude_patterns = []
html_theme = "sphinx_rtd_theme"
intersphinx_mapping = {
"django": ("https://docs.djangoproject.com/en/stable/", "https://docs.djangoproject.com/en/stable/_objects/"),
}
|
<commit_before>import os
import sys
import sphinx_rtd_theme
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.insert(0, os.path.abspath("../.."))
import django
django.setup()
project = "django-postgres-extra"
copyright = "2019, Sector Labs"
author = "Sector Labs"
extensions = [
"sphinx_rtd_theme",
"sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
]
templates_path = ["_templates"]
exclude_patterns = []
html_theme = "sphinx_rtd_theme"
intersphinx_mapping = {
"django": ("https://docs.djangoproject.com/en/2.2/", "https://docs.djangoproject.com/en/2.2/_objects/"),
}
<commit_msg>Use 'stable' Django version for intersphinx
This will ensure documentation references always point at the latest version.<commit_after>import os
import sys
import sphinx_rtd_theme
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "settings")
sys.path.insert(0, os.path.abspath("../.."))
import django
django.setup()
project = "django-postgres-extra"
copyright = "2019, Sector Labs"
author = "Sector Labs"
extensions = [
"sphinx_rtd_theme",
"sphinx.ext.intersphinx",
"sphinx.ext.autodoc",
"sphinx.ext.coverage",
"sphinx.ext.napoleon",
]
templates_path = ["_templates"]
exclude_patterns = []
html_theme = "sphinx_rtd_theme"
intersphinx_mapping = {
"django": ("https://docs.djangoproject.com/en/stable/", "https://docs.djangoproject.com/en/stable/_objects/"),
}
|
c3cb6a294fe83557d86d9415f8cdf8efb4f7e59f
|
elevator/message.py
|
elevator/message.py
|
import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return [unicode(d) for d in datas]
|
import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return datas
|
Fix : response datas list should not be unicoded
|
Fix : response datas list should not be unicoded
|
Python
|
mit
|
oleiade/Elevator
|
import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return [unicode(d) for d in datas]
Fix : response datas list should not be unicoded
|
import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return datas
|
<commit_before>import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return [unicode(d) for d in datas]
<commit_msg>Fix : response datas list should not be unicoded<commit_after>
|
import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return datas
|
import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return [unicode(d) for d in datas]
Fix : response datas list should not be unicodedimport msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return datas
|
<commit_before>import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return [unicode(d) for d in datas]
<commit_msg>Fix : response datas list should not be unicoded<commit_after>import msgpack
import logging
class MessageFormatError(Exception):
def __init__(self, value):
self.value = value
def __str__(self):
return repr(self.value)
class Request(object):
"""Handler objects for frontend->backend objects messages"""
def __init__(self, raw_message, compressed=False):
errors_logger = logging.getLogger("errors_logger")
message = msgpack.unpackb(raw_message)
try:
self.db_uid = message.pop('DB_UID')
self.command = message.pop('COMMAND')
self.data = message.pop('ARGS')
except KeyError:
errors_logger.exception("Invalid request message : %s" %
message)
raise MessageFormatError("Invalid request message")
class Response(tuple):
"""Handler objects for frontend->backend objects messages"""
def __new__(cls, id, *args, **kwargs):
response = {
'STATUS': kwargs.pop('status', 0),
'DATAS': kwargs.pop('datas', [])
}
response['DATAS'] = cls._format_datas(response['DATAS'])
msg = [id, msgpack.packb(response)]
return tuple.__new__(cls, msg)
@classmethod
def _format_datas(cls, datas):
if datas and not isinstance(datas, (tuple, list)):
datas = [datas]
return datas
|
51a9fe51f170132ab9da09fbf3aa73c59678aa03
|
start.py
|
start.py
|
#!/usr/bin/env python2.7
"""
Run a local instance of Boulder for testing purposes.
This runs in non-monolithic mode and requires RabbitMQ on localhost.
Keeps servers alive until ^C or 100K seconds elapse. Exits non-zero if
any servers fail to start, or die before timer/^C.
"""
import os
import signal
import sys
import time
sys.path.append('./test')
import startservers
MAX_RUNTIME = 100000
class Alarm(Exception):
pass
if not startservers.start():
sys.exit(1)
try:
time.sleep(1)
print("All servers are running. To stop, hit ^C or wait %d seconds." % MAX_RUNTIME)
def handler(*args):
raise Alarm
signal.signal(signal.SIGALRM, handler)
signal.alarm(MAX_RUNTIME)
os.wait()
# If we reach here, a child died early. Log what died:
startservers.check()
sys.exit(1)
except KeyboardInterrupt, Alarm:
signal.alarm(0)
print "\nstopping servers."
|
#!/usr/bin/env python2.7
"""
Run a local instance of Boulder for testing purposes.
This runs in non-monolithic mode and requires RabbitMQ on localhost.
Keeps servers alive until ^C. Exit non-zero if any servers fail to
start, or die before ^C.
"""
import os
import sys
import time
sys.path.append('./test')
import startservers
if not startservers.start():
sys.exit(1)
try:
time.sleep(1)
print("All servers are running. To stop, hit ^C.")
os.wait()
# If we reach here, a child died early. Log what died:
startservers.check()
sys.exit(1)
except KeyboardInterrupt:
print "\nstopping servers."
|
Remove 100K-second max runtime, just run until ^C or server crash.
|
Remove 100K-second max runtime, just run until ^C or server crash.
|
Python
|
mpl-2.0
|
letsencrypt/boulder,patf/boulder,lmcro/boulder,deserted/boulder,jgillula/boulder,mozmark/boulder,ZCloud-Firstserver/boulder,postfix/boulder,KyleChamberlin/boulder,kuba/boulder,modulexcite/boulder,ibukanov/boulder,hlandauf/boulder,mehmooda/boulder,julienschmidt/boulder,tomclegg/boulder,mommel/boulder,mozmark/boulder,jcjones/boulder,postfix/boulder,kuba/boulder,jcjones/boulder,hlandauf/boulder,jsha/boulder,patf/boulder,devpaul/boulder,modulexcite/boulder,jsha/boulder,mehmooda/boulder,jfrazelle/boulder,mommel/boulder,mommel/boulder,mozmark/boulder,jgillula/boulder,hlandauf/boulder,devpaul/boulder,deserted/boulder,bmw/boulder,jcjones/boulder,josephyzhou/boulder,jfrazelle/boulder,jmhodges/boulder,jgillula/boulder,ibukanov/boulder,sophie-h/boulder,tomclegg/boulder,patf/boulder,r0ro/boulder,lucasallan/boulder,jmhodges/boulder,lmcro/boulder,KyleChamberlin/boulder,josephyzhou/boulder,josephyzhou/boulder,jmhodges/boulder,mehmooda/boulder,mommel/boulder,julienschmidt/boulder,ZCloud-Firstserver/boulder,josephyzhou/boulder,mehmooda/boulder,postfix/boulder,modulexcite/boulder,benileo/boulder,lucasallan/boulder,jsha/boulder,tomclegg/boulder,ibukanov/boulder,julienschmidt/boulder,sophie-h/boulder,deserted/boulder,r0ro/boulder,devpaul/boulder,ZCloud-Firstserver/boulder,kuba/boulder,mozmark/boulder,modulexcite/boulder,benileo/boulder,deserted/boulder,letsencrypt/boulder,sophie-h/boulder,jfrazelle/boulder,tomclegg/boulder,julienschmidt/boulder,postfix/boulder,jmhodges/boulder,KyleChamberlin/boulder,bmw/boulder,lmcro/boulder,patf/boulder,jcjones/boulder,KyleChamberlin/boulder,ZCloud-Firstserver/boulder,devpaul/boulder,hlandauf/boulder,jfrazelle/boulder,kuba/boulder,benileo/boulder,jgillula/boulder,benileo/boulder,lmcro/boulder,r0ro/boulder,bmw/boulder,r0ro/boulder,lucasallan/boulder,letsencrypt/boulder
|
#!/usr/bin/env python2.7
"""
Run a local instance of Boulder for testing purposes.
This runs in non-monolithic mode and requires RabbitMQ on localhost.
Keeps servers alive until ^C or 100K seconds elapse. Exits non-zero if
any servers fail to start, or die before timer/^C.
"""
import os
import signal
import sys
import time
sys.path.append('./test')
import startservers
MAX_RUNTIME = 100000
class Alarm(Exception):
pass
if not startservers.start():
sys.exit(1)
try:
time.sleep(1)
print("All servers are running. To stop, hit ^C or wait %d seconds." % MAX_RUNTIME)
def handler(*args):
raise Alarm
signal.signal(signal.SIGALRM, handler)
signal.alarm(MAX_RUNTIME)
os.wait()
# If we reach here, a child died early. Log what died:
startservers.check()
sys.exit(1)
except KeyboardInterrupt, Alarm:
signal.alarm(0)
print "\nstopping servers."
Remove 100K-second max runtime, just run until ^C or server crash.
|
#!/usr/bin/env python2.7
"""
Run a local instance of Boulder for testing purposes.
This runs in non-monolithic mode and requires RabbitMQ on localhost.
Keeps servers alive until ^C. Exit non-zero if any servers fail to
start, or die before ^C.
"""
import os
import sys
import time
sys.path.append('./test')
import startservers
if not startservers.start():
sys.exit(1)
try:
time.sleep(1)
print("All servers are running. To stop, hit ^C.")
os.wait()
# If we reach here, a child died early. Log what died:
startservers.check()
sys.exit(1)
except KeyboardInterrupt:
print "\nstopping servers."
|
<commit_before>#!/usr/bin/env python2.7
"""
Run a local instance of Boulder for testing purposes.
This runs in non-monolithic mode and requires RabbitMQ on localhost.
Keeps servers alive until ^C or 100K seconds elapse. Exits non-zero if
any servers fail to start, or die before timer/^C.
"""
import os
import signal
import sys
import time
sys.path.append('./test')
import startservers
MAX_RUNTIME = 100000
class Alarm(Exception):
pass
if not startservers.start():
sys.exit(1)
try:
time.sleep(1)
print("All servers are running. To stop, hit ^C or wait %d seconds." % MAX_RUNTIME)
def handler(*args):
raise Alarm
signal.signal(signal.SIGALRM, handler)
signal.alarm(MAX_RUNTIME)
os.wait()
# If we reach here, a child died early. Log what died:
startservers.check()
sys.exit(1)
except KeyboardInterrupt, Alarm:
signal.alarm(0)
print "\nstopping servers."
<commit_msg>Remove 100K-second max runtime, just run until ^C or server crash.<commit_after>
|
#!/usr/bin/env python2.7
"""
Run a local instance of Boulder for testing purposes.
This runs in non-monolithic mode and requires RabbitMQ on localhost.
Keeps servers alive until ^C. Exit non-zero if any servers fail to
start, or die before ^C.
"""
import os
import sys
import time
sys.path.append('./test')
import startservers
if not startservers.start():
sys.exit(1)
try:
time.sleep(1)
print("All servers are running. To stop, hit ^C.")
os.wait()
# If we reach here, a child died early. Log what died:
startservers.check()
sys.exit(1)
except KeyboardInterrupt:
print "\nstopping servers."
|
#!/usr/bin/env python2.7
"""
Run a local instance of Boulder for testing purposes.
This runs in non-monolithic mode and requires RabbitMQ on localhost.
Keeps servers alive until ^C or 100K seconds elapse. Exits non-zero if
any servers fail to start, or die before timer/^C.
"""
import os
import signal
import sys
import time
sys.path.append('./test')
import startservers
MAX_RUNTIME = 100000
class Alarm(Exception):
pass
if not startservers.start():
sys.exit(1)
try:
time.sleep(1)
print("All servers are running. To stop, hit ^C or wait %d seconds." % MAX_RUNTIME)
def handler(*args):
raise Alarm
signal.signal(signal.SIGALRM, handler)
signal.alarm(MAX_RUNTIME)
os.wait()
# If we reach here, a child died early. Log what died:
startservers.check()
sys.exit(1)
except KeyboardInterrupt, Alarm:
signal.alarm(0)
print "\nstopping servers."
Remove 100K-second max runtime, just run until ^C or server crash.#!/usr/bin/env python2.7
"""
Run a local instance of Boulder for testing purposes.
This runs in non-monolithic mode and requires RabbitMQ on localhost.
Keeps servers alive until ^C. Exit non-zero if any servers fail to
start, or die before ^C.
"""
import os
import sys
import time
sys.path.append('./test')
import startservers
if not startservers.start():
sys.exit(1)
try:
time.sleep(1)
print("All servers are running. To stop, hit ^C.")
os.wait()
# If we reach here, a child died early. Log what died:
startservers.check()
sys.exit(1)
except KeyboardInterrupt:
print "\nstopping servers."
|
<commit_before>#!/usr/bin/env python2.7
"""
Run a local instance of Boulder for testing purposes.
This runs in non-monolithic mode and requires RabbitMQ on localhost.
Keeps servers alive until ^C or 100K seconds elapse. Exits non-zero if
any servers fail to start, or die before timer/^C.
"""
import os
import signal
import sys
import time
sys.path.append('./test')
import startservers
MAX_RUNTIME = 100000
class Alarm(Exception):
pass
if not startservers.start():
sys.exit(1)
try:
time.sleep(1)
print("All servers are running. To stop, hit ^C or wait %d seconds." % MAX_RUNTIME)
def handler(*args):
raise Alarm
signal.signal(signal.SIGALRM, handler)
signal.alarm(MAX_RUNTIME)
os.wait()
# If we reach here, a child died early. Log what died:
startservers.check()
sys.exit(1)
except KeyboardInterrupt, Alarm:
signal.alarm(0)
print "\nstopping servers."
<commit_msg>Remove 100K-second max runtime, just run until ^C or server crash.<commit_after>#!/usr/bin/env python2.7
"""
Run a local instance of Boulder for testing purposes.
This runs in non-monolithic mode and requires RabbitMQ on localhost.
Keeps servers alive until ^C. Exit non-zero if any servers fail to
start, or die before ^C.
"""
import os
import sys
import time
sys.path.append('./test')
import startservers
if not startservers.start():
sys.exit(1)
try:
time.sleep(1)
print("All servers are running. To stop, hit ^C.")
os.wait()
# If we reach here, a child died early. Log what died:
startservers.check()
sys.exit(1)
except KeyboardInterrupt:
print "\nstopping servers."
|
3a0d81e62d8a6cd6807d0447b72cc35206e2c8fd
|
ecs-cleaner.py
|
ecs-cleaner.py
|
import boto3
def main(event, context):
client = boto3.client(u'ecs')
inspect_clusters = [u'staging1']
for cluster in inspect_clusters:
resp = client.list_container_instances(
cluster=cluster
)
instances = resp[u'containerInstanceArns']
try:
nxt_tok = resp[u'nextToken']
while True:
resp = client.list_container_instances(
cluster=cluster,
nextToken=nxt_tok
)
instances += resp[u'containerInstanceArns']
nxt_tok = resp[u'nextToken']
except KeyError:
pass
resp = client.describe_container_instances(
cluster=cluster,
containerInstances=instances
)
ec2 = boto3.resource('ec2')
client = boto3.client('autoscaling')
for inst in resp[u'containerInstances']:
if not inst['agentConnected']:
I = ec2.Instance(id=inst[u'ec2InstanceId'])
autoscalegroup = filter(lambda k: k['Key'] == u'aws:autoscaling:groupName', I.tags)[0]['Value']
print I.id, u': ', autoscalegroup
|
import boto3
def main(event, context):
ecs_client = boto3.client(u'ecs')
inspect_clusters = [u'staging1']
for cluster in inspect_clusters:
resp = ecs_client.list_container_instances(
cluster=cluster
)
instances = resp[u'containerInstanceArns']
try:
nxt_tok = resp[u'nextToken']
while True:
resp = ecs_client.list_container_instances(
cluster=cluster,
nextToken=nxt_tok
)
instances += resp[u'containerInstanceArns']
nxt_tok = resp[u'nextToken']
except KeyError:
pass
resp = ecs_client.describe_container_instances(
cluster=cluster,
containerInstances=instances
)
ec2 = boto3.resource('ec2')
autoscale_client = boto3.client('autoscaling')
for inst in resp[u'containerInstances']:
if not inst['agentConnected']:
I = ec2.Instance(id=inst[u'ec2InstanceId'])
autoscalegroup = filter(lambda k: k['Key'] == u'aws:autoscaling:groupName', I.tags)[0]['Value']
# Danger! Detaching Instance from autoscaling group
autoscale_client.detach_instances(
InstanceIds=[I.id],
AutoScalingGroupName=autoscalegroup,
ShouldDecrementDesiredCapacity=False
)
# Danger! Terminating Instance
I.terminate()
print u'Detaching and Terminating: ', I.id, u' in autoscale group ', autoscalegroup
|
Add instance detach and terminate
|
Add instance detach and terminate
|
Python
|
mit
|
silinternational/ecs-agent-monitor
|
import boto3
def main(event, context):
client = boto3.client(u'ecs')
inspect_clusters = [u'staging1']
for cluster in inspect_clusters:
resp = client.list_container_instances(
cluster=cluster
)
instances = resp[u'containerInstanceArns']
try:
nxt_tok = resp[u'nextToken']
while True:
resp = client.list_container_instances(
cluster=cluster,
nextToken=nxt_tok
)
instances += resp[u'containerInstanceArns']
nxt_tok = resp[u'nextToken']
except KeyError:
pass
resp = client.describe_container_instances(
cluster=cluster,
containerInstances=instances
)
ec2 = boto3.resource('ec2')
client = boto3.client('autoscaling')
for inst in resp[u'containerInstances']:
if not inst['agentConnected']:
I = ec2.Instance(id=inst[u'ec2InstanceId'])
autoscalegroup = filter(lambda k: k['Key'] == u'aws:autoscaling:groupName', I.tags)[0]['Value']
print I.id, u': ', autoscalegroup
Add instance detach and terminate
|
import boto3
def main(event, context):
ecs_client = boto3.client(u'ecs')
inspect_clusters = [u'staging1']
for cluster in inspect_clusters:
resp = ecs_client.list_container_instances(
cluster=cluster
)
instances = resp[u'containerInstanceArns']
try:
nxt_tok = resp[u'nextToken']
while True:
resp = ecs_client.list_container_instances(
cluster=cluster,
nextToken=nxt_tok
)
instances += resp[u'containerInstanceArns']
nxt_tok = resp[u'nextToken']
except KeyError:
pass
resp = ecs_client.describe_container_instances(
cluster=cluster,
containerInstances=instances
)
ec2 = boto3.resource('ec2')
autoscale_client = boto3.client('autoscaling')
for inst in resp[u'containerInstances']:
if not inst['agentConnected']:
I = ec2.Instance(id=inst[u'ec2InstanceId'])
autoscalegroup = filter(lambda k: k['Key'] == u'aws:autoscaling:groupName', I.tags)[0]['Value']
# Danger! Detaching Instance from autoscaling group
autoscale_client.detach_instances(
InstanceIds=[I.id],
AutoScalingGroupName=autoscalegroup,
ShouldDecrementDesiredCapacity=False
)
# Danger! Terminating Instance
I.terminate()
print u'Detaching and Terminating: ', I.id, u' in autoscale group ', autoscalegroup
|
<commit_before>import boto3
def main(event, context):
client = boto3.client(u'ecs')
inspect_clusters = [u'staging1']
for cluster in inspect_clusters:
resp = client.list_container_instances(
cluster=cluster
)
instances = resp[u'containerInstanceArns']
try:
nxt_tok = resp[u'nextToken']
while True:
resp = client.list_container_instances(
cluster=cluster,
nextToken=nxt_tok
)
instances += resp[u'containerInstanceArns']
nxt_tok = resp[u'nextToken']
except KeyError:
pass
resp = client.describe_container_instances(
cluster=cluster,
containerInstances=instances
)
ec2 = boto3.resource('ec2')
client = boto3.client('autoscaling')
for inst in resp[u'containerInstances']:
if not inst['agentConnected']:
I = ec2.Instance(id=inst[u'ec2InstanceId'])
autoscalegroup = filter(lambda k: k['Key'] == u'aws:autoscaling:groupName', I.tags)[0]['Value']
print I.id, u': ', autoscalegroup
<commit_msg>Add instance detach and terminate<commit_after>
|
import boto3
def main(event, context):
ecs_client = boto3.client(u'ecs')
inspect_clusters = [u'staging1']
for cluster in inspect_clusters:
resp = ecs_client.list_container_instances(
cluster=cluster
)
instances = resp[u'containerInstanceArns']
try:
nxt_tok = resp[u'nextToken']
while True:
resp = ecs_client.list_container_instances(
cluster=cluster,
nextToken=nxt_tok
)
instances += resp[u'containerInstanceArns']
nxt_tok = resp[u'nextToken']
except KeyError:
pass
resp = ecs_client.describe_container_instances(
cluster=cluster,
containerInstances=instances
)
ec2 = boto3.resource('ec2')
autoscale_client = boto3.client('autoscaling')
for inst in resp[u'containerInstances']:
if not inst['agentConnected']:
I = ec2.Instance(id=inst[u'ec2InstanceId'])
autoscalegroup = filter(lambda k: k['Key'] == u'aws:autoscaling:groupName', I.tags)[0]['Value']
# Danger! Detaching Instance from autoscaling group
autoscale_client.detach_instances(
InstanceIds=[I.id],
AutoScalingGroupName=autoscalegroup,
ShouldDecrementDesiredCapacity=False
)
# Danger! Terminating Instance
I.terminate()
print u'Detaching and Terminating: ', I.id, u' in autoscale group ', autoscalegroup
|
import boto3
def main(event, context):
client = boto3.client(u'ecs')
inspect_clusters = [u'staging1']
for cluster in inspect_clusters:
resp = client.list_container_instances(
cluster=cluster
)
instances = resp[u'containerInstanceArns']
try:
nxt_tok = resp[u'nextToken']
while True:
resp = client.list_container_instances(
cluster=cluster,
nextToken=nxt_tok
)
instances += resp[u'containerInstanceArns']
nxt_tok = resp[u'nextToken']
except KeyError:
pass
resp = client.describe_container_instances(
cluster=cluster,
containerInstances=instances
)
ec2 = boto3.resource('ec2')
client = boto3.client('autoscaling')
for inst in resp[u'containerInstances']:
if not inst['agentConnected']:
I = ec2.Instance(id=inst[u'ec2InstanceId'])
autoscalegroup = filter(lambda k: k['Key'] == u'aws:autoscaling:groupName', I.tags)[0]['Value']
print I.id, u': ', autoscalegroup
Add instance detach and terminateimport boto3
def main(event, context):
ecs_client = boto3.client(u'ecs')
inspect_clusters = [u'staging1']
for cluster in inspect_clusters:
resp = ecs_client.list_container_instances(
cluster=cluster
)
instances = resp[u'containerInstanceArns']
try:
nxt_tok = resp[u'nextToken']
while True:
resp = ecs_client.list_container_instances(
cluster=cluster,
nextToken=nxt_tok
)
instances += resp[u'containerInstanceArns']
nxt_tok = resp[u'nextToken']
except KeyError:
pass
resp = ecs_client.describe_container_instances(
cluster=cluster,
containerInstances=instances
)
ec2 = boto3.resource('ec2')
autoscale_client = boto3.client('autoscaling')
for inst in resp[u'containerInstances']:
if not inst['agentConnected']:
I = ec2.Instance(id=inst[u'ec2InstanceId'])
autoscalegroup = filter(lambda k: k['Key'] == u'aws:autoscaling:groupName', I.tags)[0]['Value']
# Danger! Detaching Instance from autoscaling group
autoscale_client.detach_instances(
InstanceIds=[I.id],
AutoScalingGroupName=autoscalegroup,
ShouldDecrementDesiredCapacity=False
)
# Danger! Terminating Instance
I.terminate()
print u'Detaching and Terminating: ', I.id, u' in autoscale group ', autoscalegroup
|
<commit_before>import boto3
def main(event, context):
client = boto3.client(u'ecs')
inspect_clusters = [u'staging1']
for cluster in inspect_clusters:
resp = client.list_container_instances(
cluster=cluster
)
instances = resp[u'containerInstanceArns']
try:
nxt_tok = resp[u'nextToken']
while True:
resp = client.list_container_instances(
cluster=cluster,
nextToken=nxt_tok
)
instances += resp[u'containerInstanceArns']
nxt_tok = resp[u'nextToken']
except KeyError:
pass
resp = client.describe_container_instances(
cluster=cluster,
containerInstances=instances
)
ec2 = boto3.resource('ec2')
client = boto3.client('autoscaling')
for inst in resp[u'containerInstances']:
if not inst['agentConnected']:
I = ec2.Instance(id=inst[u'ec2InstanceId'])
autoscalegroup = filter(lambda k: k['Key'] == u'aws:autoscaling:groupName', I.tags)[0]['Value']
print I.id, u': ', autoscalegroup
<commit_msg>Add instance detach and terminate<commit_after>import boto3
def main(event, context):
ecs_client = boto3.client(u'ecs')
inspect_clusters = [u'staging1']
for cluster in inspect_clusters:
resp = ecs_client.list_container_instances(
cluster=cluster
)
instances = resp[u'containerInstanceArns']
try:
nxt_tok = resp[u'nextToken']
while True:
resp = ecs_client.list_container_instances(
cluster=cluster,
nextToken=nxt_tok
)
instances += resp[u'containerInstanceArns']
nxt_tok = resp[u'nextToken']
except KeyError:
pass
resp = ecs_client.describe_container_instances(
cluster=cluster,
containerInstances=instances
)
ec2 = boto3.resource('ec2')
autoscale_client = boto3.client('autoscaling')
for inst in resp[u'containerInstances']:
if not inst['agentConnected']:
I = ec2.Instance(id=inst[u'ec2InstanceId'])
autoscalegroup = filter(lambda k: k['Key'] == u'aws:autoscaling:groupName', I.tags)[0]['Value']
# Danger! Detaching Instance from autoscaling group
autoscale_client.detach_instances(
InstanceIds=[I.id],
AutoScalingGroupName=autoscalegroup,
ShouldDecrementDesiredCapacity=False
)
# Danger! Terminating Instance
I.terminate()
print u'Detaching and Terminating: ', I.id, u' in autoscale group ', autoscalegroup
|
f5b9b755eaf7c5935a6b5c6b1014cc3df90323bc
|
scripts/get_bump_version.py
|
scripts/get_bump_version.py
|
from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--long", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, since, gsha = version.split("-")
status = ""
except ValueError:
vers, status, since, gsha = version.split("-")
return vers, status, since, gsha
vers, status, since, gsha = get_version_from_git()
if status == "":
print("No X.X.X-devel[rc] tag.")
else:
print(vers + "." + status + "."+ gsha[1:])
|
from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--long", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, since, gsha = version.split("-")
status = ""
except ValueError:
vers, status, since, gsha = version.split("-")
return vers, status, since, gsha
vers, status, since, gsha = get_version_from_git()
if status == "":
print("No X.X.X-devel[rc] tag.")
else:
print(vers + "-" + status + "-"+ gsha[1:])
|
Use dash to be compliant with XXX-devel pattern.
|
Use dash to be compliant with XXX-devel pattern.
|
Python
|
bsd-3-clause
|
abele/bokeh,timothydmorton/bokeh,Karel-van-de-Plassche/bokeh,carlvlewis/bokeh,philippjfr/bokeh,bokeh/bokeh,stonebig/bokeh,khkaminska/bokeh,timothydmorton/bokeh,justacec/bokeh,khkaminska/bokeh,ahmadia/bokeh,laurent-george/bokeh,ericdill/bokeh,ericmjl/bokeh,paultcochrane/bokeh,maxalbert/bokeh,aiguofer/bokeh,CrazyGuo/bokeh,xguse/bokeh,bokeh/bokeh,quasiben/bokeh,mutirri/bokeh,bokeh/bokeh,saifrahmed/bokeh,msarahan/bokeh,lukebarnard1/bokeh,PythonCharmers/bokeh,srinathv/bokeh,DuCorey/bokeh,ahmadia/bokeh,carlvlewis/bokeh,percyfal/bokeh,xguse/bokeh,daodaoliang/bokeh,alan-unravel/bokeh,satishgoda/bokeh,ericdill/bokeh,roxyboy/bokeh,gpfreitas/bokeh,roxyboy/bokeh,Karel-van-de-Plassche/bokeh,roxyboy/bokeh,percyfal/bokeh,jplourenco/bokeh,canavandl/bokeh,rhiever/bokeh,aavanian/bokeh,rs2/bokeh,CrazyGuo/bokeh,lukebarnard1/bokeh,mindriot101/bokeh,deeplook/bokeh,abele/bokeh,ptitjano/bokeh,schoolie/bokeh,bsipocz/bokeh,akloster/bokeh,timsnyder/bokeh,phobson/bokeh,stonebig/bokeh,tacaswell/bokeh,stonebig/bokeh,carlvlewis/bokeh,laurent-george/bokeh,akloster/bokeh,caseyclements/bokeh,draperjames/bokeh,timsnyder/bokeh,dennisobrien/bokeh,matbra/bokeh,muku42/bokeh,eteq/bokeh,PythonCharmers/bokeh,eteq/bokeh,mutirri/bokeh,PythonCharmers/bokeh,schoolie/bokeh,rs2/bokeh,stonebig/bokeh,birdsarah/bokeh,birdsarah/bokeh,azjps/bokeh,evidation-health/bokeh,dennisobrien/bokeh,msarahan/bokeh,DuCorey/bokeh,daodaoliang/bokeh,ericdill/bokeh,draperjames/bokeh,stuart-knock/bokeh,josherick/bokeh,htygithub/bokeh,azjps/bokeh,abele/bokeh,mutirri/bokeh,rhiever/bokeh,draperjames/bokeh,alan-unravel/bokeh,phobson/bokeh,KasperPRasmussen/bokeh,jakirkham/bokeh,ChristosChristofidis/bokeh,jplourenco/bokeh,tacaswell/bokeh,percyfal/bokeh,rothnic/bokeh,ChristosChristofidis/bokeh,quasiben/bokeh,srinathv/bokeh,mutirri/bokeh,jakirkham/bokeh,maxalbert/bokeh,clairetang6/bokeh,mindriot101/bokeh,maxalbert/bokeh,carlvlewis/bokeh,deeplook/bokeh,deeplook/bokeh,daodaoliang/bokeh,ChinaQuants/bokeh,justacec/bokeh,stuart-knock/bokeh,claireta
ng6/bokeh,bokeh/bokeh,DuCorey/bokeh,bokeh/bokeh,CrazyGuo/bokeh,gpfreitas/bokeh,msarahan/bokeh,jplourenco/bokeh,DuCorey/bokeh,justacec/bokeh,evidation-health/bokeh,maxalbert/bokeh,stuart-knock/bokeh,ahmadia/bokeh,abele/bokeh,bsipocz/bokeh,stuart-knock/bokeh,timothydmorton/bokeh,quasiben/bokeh,satishgoda/bokeh,josherick/bokeh,alan-unravel/bokeh,rothnic/bokeh,lukebarnard1/bokeh,srinathv/bokeh,Karel-van-de-Plassche/bokeh,timothydmorton/bokeh,dennisobrien/bokeh,clairetang6/bokeh,paultcochrane/bokeh,DuCorey/bokeh,caseyclements/bokeh,aavanian/bokeh,eteq/bokeh,paultcochrane/bokeh,ChinaQuants/bokeh,birdsarah/bokeh,josherick/bokeh,deeplook/bokeh,KasperPRasmussen/bokeh,philippjfr/bokeh,srinathv/bokeh,bsipocz/bokeh,dennisobrien/bokeh,caseyclements/bokeh,schoolie/bokeh,khkaminska/bokeh,aavanian/bokeh,lukebarnard1/bokeh,jakirkham/bokeh,rhiever/bokeh,mindriot101/bokeh,justacec/bokeh,canavandl/bokeh,bsipocz/bokeh,ptitjano/bokeh,jakirkham/bokeh,xguse/bokeh,almarklein/bokeh,canavandl/bokeh,aiguofer/bokeh,ptitjano/bokeh,rothnic/bokeh,ChristosChristofidis/bokeh,philippjfr/bokeh,timsnyder/bokeh,josherick/bokeh,azjps/bokeh,ericmjl/bokeh,draperjames/bokeh,CrazyGuo/bokeh,ahmadia/bokeh,phobson/bokeh,schoolie/bokeh,gpfreitas/bokeh,akloster/bokeh,satishgoda/bokeh,daodaoliang/bokeh,KasperPRasmussen/bokeh,tacaswell/bokeh,roxyboy/bokeh,dennisobrien/bokeh,birdsarah/bokeh,rs2/bokeh,paultcochrane/bokeh,matbra/bokeh,evidation-health/bokeh,aavanian/bokeh,matbra/bokeh,khkaminska/bokeh,timsnyder/bokeh,percyfal/bokeh,aiguofer/bokeh,clairetang6/bokeh,philippjfr/bokeh,jakirkham/bokeh,percyfal/bokeh,mindriot101/bokeh,ChinaQuants/bokeh,aiguofer/bokeh,KasperPRasmussen/bokeh,xguse/bokeh,muku42/bokeh,rothnic/bokeh,ChristosChristofidis/bokeh,Karel-van-de-Plassche/bokeh,htygithub/bokeh,ericmjl/bokeh,jplourenco/bokeh,satishgoda/bokeh,awanke/bokeh,ericmjl/bokeh,htygithub/bokeh,philippjfr/bokeh,muku42/bokeh,htygithub/bokeh,rhiever/bokeh,matbra/bokeh,rs2/bokeh,eteq/bokeh,timsnyder/bokeh,caseyclements/bokeh,saifrahme
d/bokeh,awanke/bokeh,tacaswell/bokeh,ericmjl/bokeh,azjps/bokeh,evidation-health/bokeh,ericdill/bokeh,almarklein/bokeh,draperjames/bokeh,msarahan/bokeh,saifrahmed/bokeh,ptitjano/bokeh,phobson/bokeh,awanke/bokeh,ChinaQuants/bokeh,rs2/bokeh,aavanian/bokeh,PythonCharmers/bokeh,schoolie/bokeh,Karel-van-de-Plassche/bokeh,saifrahmed/bokeh,laurent-george/bokeh,canavandl/bokeh,KasperPRasmussen/bokeh,awanke/bokeh,almarklein/bokeh,laurent-george/bokeh,gpfreitas/bokeh,phobson/bokeh,aiguofer/bokeh,azjps/bokeh,alan-unravel/bokeh,muku42/bokeh,ptitjano/bokeh,akloster/bokeh
|
from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--long", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, since, gsha = version.split("-")
status = ""
except ValueError:
vers, status, since, gsha = version.split("-")
return vers, status, since, gsha
vers, status, since, gsha = get_version_from_git()
if status == "":
print("No X.X.X-devel[rc] tag.")
else:
print(vers + "." + status + "."+ gsha[1:])
Use dash to be compliant with XXX-devel pattern.
|
from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--long", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, since, gsha = version.split("-")
status = ""
except ValueError:
vers, status, since, gsha = version.split("-")
return vers, status, since, gsha
vers, status, since, gsha = get_version_from_git()
if status == "":
print("No X.X.X-devel[rc] tag.")
else:
print(vers + "-" + status + "-"+ gsha[1:])
|
<commit_before>from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--long", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, since, gsha = version.split("-")
status = ""
except ValueError:
vers, status, since, gsha = version.split("-")
return vers, status, since, gsha
vers, status, since, gsha = get_version_from_git()
if status == "":
print("No X.X.X-devel[rc] tag.")
else:
print(vers + "." + status + "."+ gsha[1:])
<commit_msg>Use dash to be compliant with XXX-devel pattern.<commit_after>
|
from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--long", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, since, gsha = version.split("-")
status = ""
except ValueError:
vers, status, since, gsha = version.split("-")
return vers, status, since, gsha
vers, status, since, gsha = get_version_from_git()
if status == "":
print("No X.X.X-devel[rc] tag.")
else:
print(vers + "-" + status + "-"+ gsha[1:])
|
from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--long", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, since, gsha = version.split("-")
status = ""
except ValueError:
vers, status, since, gsha = version.split("-")
return vers, status, since, gsha
vers, status, since, gsha = get_version_from_git()
if status == "":
print("No X.X.X-devel[rc] tag.")
else:
print(vers + "." + status + "."+ gsha[1:])
Use dash to be compliant with XXX-devel pattern.from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--long", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, since, gsha = version.split("-")
status = ""
except ValueError:
vers, status, since, gsha = version.split("-")
return vers, status, since, gsha
vers, status, since, gsha = get_version_from_git()
if status == "":
print("No X.X.X-devel[rc] tag.")
else:
print(vers + "-" + status + "-"+ gsha[1:])
|
<commit_before>from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--long", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, since, gsha = version.split("-")
status = ""
except ValueError:
vers, status, since, gsha = version.split("-")
return vers, status, since, gsha
vers, status, since, gsha = get_version_from_git()
if status == "":
print("No X.X.X-devel[rc] tag.")
else:
print(vers + "." + status + "."+ gsha[1:])
<commit_msg>Use dash to be compliant with XXX-devel pattern.<commit_after>from __future__ import print_function
import subprocess
def get_version_from_git():
cmd = ["git", "describe", "--tags", "--long", "--always"]
proc = subprocess.Popen(cmd, stdout=subprocess.PIPE)
code = proc.wait()
if code != 0:
print("Failed to run: %s" % " ".join(cmd))
sys.exit(1)
version = proc.stdout.read().decode('utf-8').strip()
try:
vers, since, gsha = version.split("-")
status = ""
except ValueError:
vers, status, since, gsha = version.split("-")
return vers, status, since, gsha
vers, status, since, gsha = get_version_from_git()
if status == "":
print("No X.X.X-devel[rc] tag.")
else:
print(vers + "-" + status + "-"+ gsha[1:])
|
4b30b6dd4eb24c36cd32d37bf6555be79cdc80a8
|
scripts/maf_split_by_src.py
|
scripts/maf_split_by_src.py
|
#!/usr/bin/env python2.3
"""
Read a MAF from stdin and break into a set of mafs containing
no more than a certain number of columns
"""
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
writer_key = string.join( [ c.src for c in m.components ], '_' )
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
|
#!/usr/bin/env python2.3
"""
Read a MAF from stdin and break into a set of mafs containing
no more than a certain number of columns
"""
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
parser.add_option( "-c", "--component", action="store", default=None )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
comp = options.component
if comp is not None:
comp = int( comp )
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
if comp is None:
writer_key = string.join( [ c.src for c in m.components ], '_' )
else:
writer_key = m.components[ comp ].src
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
|
Allow splitting by a particular component (by index)
|
Allow splitting by a particular component (by index)
|
Python
|
mit
|
uhjish/bx-python,uhjish/bx-python,uhjish/bx-python
|
#!/usr/bin/env python2.3
"""
Read a MAF from stdin and break into a set of mafs containing
no more than a certain number of columns
"""
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
writer_key = string.join( [ c.src for c in m.components ], '_' )
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
Allow splitting by a particular component (by index)
|
#!/usr/bin/env python2.3
"""
Read a MAF from stdin and break into a set of mafs containing
no more than a certain number of columns
"""
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
parser.add_option( "-c", "--component", action="store", default=None )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
comp = options.component
if comp is not None:
comp = int( comp )
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
if comp is None:
writer_key = string.join( [ c.src for c in m.components ], '_' )
else:
writer_key = m.components[ comp ].src
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
|
<commit_before>#!/usr/bin/env python2.3
"""
Read a MAF from stdin and break into a set of mafs containing
no more than a certain number of columns
"""
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
writer_key = string.join( [ c.src for c in m.components ], '_' )
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
<commit_msg>Allow splitting by a particular component (by index)<commit_after>
|
#!/usr/bin/env python2.3
"""
Read a MAF from stdin and break into a set of mafs containing
no more than a certain number of columns
"""
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
parser.add_option( "-c", "--component", action="store", default=None )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
comp = options.component
if comp is not None:
comp = int( comp )
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
if comp is None:
writer_key = string.join( [ c.src for c in m.components ], '_' )
else:
writer_key = m.components[ comp ].src
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
|
#!/usr/bin/env python2.3
"""
Read a MAF from stdin and break into a set of mafs containing
no more than a certain number of columns
"""
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
writer_key = string.join( [ c.src for c in m.components ], '_' )
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
Allow splitting by a particular component (by index)#!/usr/bin/env python2.3
"""
Read a MAF from stdin and break into a set of mafs containing
no more than a certain number of columns
"""
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
parser.add_option( "-c", "--component", action="store", default=None )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
comp = options.component
if comp is not None:
comp = int( comp )
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
if comp is None:
writer_key = string.join( [ c.src for c in m.components ], '_' )
else:
writer_key = m.components[ comp ].src
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
|
<commit_before>#!/usr/bin/env python2.3
"""
Read a MAF from stdin and break into a set of mafs containing
no more than a certain number of columns
"""
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
writer_key = string.join( [ c.src for c in m.components ], '_' )
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
<commit_msg>Allow splitting by a particular component (by index)<commit_after>#!/usr/bin/env python2.3
"""
Read a MAF from stdin and break into a set of mafs containing
no more than a certain number of columns
"""
usage = "usage: %prog"
import sys, string
import bx.align.maf
from optparse import OptionParser
import psyco_full
INF="inf"
def __main__():
# Parse command line arguments
parser = OptionParser( usage=usage )
parser.add_option( "-o", "--outprefix", action="store", default="" )
parser.add_option( "-c", "--component", action="store", default=None )
( options, args ) = parser.parse_args()
out_prefix = options.outprefix
comp = options.component
if comp is not None:
comp = int( comp )
maf_reader = bx.align.maf.Reader( sys.stdin )
writers = {}
for m in maf_reader:
if comp is None:
writer_key = string.join( [ c.src for c in m.components ], '_' )
else:
writer_key = m.components[ comp ].src
if not writers.has_key( writer_key ):
writer = bx.align.maf.Writer( file( "%s%s.maf" % ( out_prefix, writer_key ), "w" ) )
writers[ writer_key ] = writer
else:
writer = writers[ writer_key ]
writer.write( m )
for key in writers:
writers[ key ].close()
if __name__ == "__main__": __main__()
|
c02900e7fb8657316fa647f92c4f9ddbcedb2b7c
|
rma/helpers/formating.py
|
rma/helpers/formating.py
|
from math import floor
from collections import Counter
def floored_percentage(val, digits):
"""
Return string of floored value with given digits after period
:param val:
:param digits:
:return:
"""
val *= 10 ** (digits + 2)
return '{1:.{0}f}%'.format(digits, floor(val) / 10 ** digits)
def pref_encoding(data):
"""
Return string with unique words in list with percentage of they frequency
:param data:
:return str:
"""
encoding_counted = Counter(data)
total = sum(encoding_counted.values())
sorted_encodings = sorted(encoding_counted.items(), key=lambda t: t[1], reverse=True)
return ' / '.join(
["{:<1} [{:<4}]".format(k, floored_percentage(v * 1.0 / total, 1)) for k, v in sorted_encodings])
def make_total_row(source, agg):
"""
Execute agg column based function for source columns. For example if you need `total` in table data:
Examples:
src = [[1,1],[1,2],[1,3]]
print(make_total_row(src, [sum, min]))
>>> [3, 1]
:param source:
:param agg:
:return:
"""
return [agg[index](value) if callable(agg[index]) else agg[index] for index, value in enumerate(zip(*source))]
|
from math import floor
from collections import Counter
def floored_percentage(val, digits):
"""
Return string of floored value with given digits after period
:param val:
:param digits:
:return:
"""
val *= 10 ** (digits + 2)
return '{1:.{0}f}%'.format(digits, floor(val) / 10 ** digits)
def pref_encoding(data, encoding_transform=None):
"""
Return string with unique words in list with percentage of they frequency
:param data:
:param encoding_transform:
:return str:
"""
encoding_counted = Counter(data)
total = sum(encoding_counted.values())
sorted_encodings = sorted(encoding_counted.items(), key=lambda t: t[1], reverse=True)
return ' / '.join(
["{:<1} [{:<4}]".format(encoding_transform(k) if encoding_transform else k, floored_percentage(v * 1.0 / total, 1)) for k, v in sorted_encodings])
def make_total_row(source, agg):
"""
Execute agg column based function for source columns. For example if you need `total` in table data:
Examples:
src = [[1,1],[1,2],[1,3]]
print(make_total_row(src, [sum, min]))
>>> [3, 1]
:param source:
:param agg:
:return:
"""
return [agg[index](value) if callable(agg[index]) else agg[index] for index, value in enumerate(zip(*source))]
|
Add transforming function to pref_encodings
|
Add transforming function to pref_encodings
|
Python
|
mit
|
gamenet/redis-memory-analyzer
|
from math import floor
from collections import Counter
def floored_percentage(val, digits):
"""
Return string of floored value with given digits after period
:param val:
:param digits:
:return:
"""
val *= 10 ** (digits + 2)
return '{1:.{0}f}%'.format(digits, floor(val) / 10 ** digits)
def pref_encoding(data):
"""
Return string with unique words in list with percentage of they frequency
:param data:
:return str:
"""
encoding_counted = Counter(data)
total = sum(encoding_counted.values())
sorted_encodings = sorted(encoding_counted.items(), key=lambda t: t[1], reverse=True)
return ' / '.join(
["{:<1} [{:<4}]".format(k, floored_percentage(v * 1.0 / total, 1)) for k, v in sorted_encodings])
def make_total_row(source, agg):
"""
Execute agg column based function for source columns. For example if you need `total` in table data:
Examples:
src = [[1,1],[1,2],[1,3]]
print(make_total_row(src, [sum, min]))
>>> [3, 1]
:param source:
:param agg:
:return:
"""
return [agg[index](value) if callable(agg[index]) else agg[index] for index, value in enumerate(zip(*source))]
Add transforming function to pref_encodings
|
from math import floor
from collections import Counter
def floored_percentage(val, digits):
"""
Return string of floored value with given digits after period
:param val:
:param digits:
:return:
"""
val *= 10 ** (digits + 2)
return '{1:.{0}f}%'.format(digits, floor(val) / 10 ** digits)
def pref_encoding(data, encoding_transform=None):
"""
Return string with unique words in list with percentage of they frequency
:param data:
:param encoding_transform:
:return str:
"""
encoding_counted = Counter(data)
total = sum(encoding_counted.values())
sorted_encodings = sorted(encoding_counted.items(), key=lambda t: t[1], reverse=True)
return ' / '.join(
["{:<1} [{:<4}]".format(encoding_transform(k) if encoding_transform else k, floored_percentage(v * 1.0 / total, 1)) for k, v in sorted_encodings])
def make_total_row(source, agg):
"""
Execute agg column based function for source columns. For example if you need `total` in table data:
Examples:
src = [[1,1],[1,2],[1,3]]
print(make_total_row(src, [sum, min]))
>>> [3, 1]
:param source:
:param agg:
:return:
"""
return [agg[index](value) if callable(agg[index]) else agg[index] for index, value in enumerate(zip(*source))]
|
<commit_before>from math import floor
from collections import Counter
def floored_percentage(val, digits):
"""
Return string of floored value with given digits after period
:param val:
:param digits:
:return:
"""
val *= 10 ** (digits + 2)
return '{1:.{0}f}%'.format(digits, floor(val) / 10 ** digits)
def pref_encoding(data):
"""
Return string with unique words in list with percentage of they frequency
:param data:
:return str:
"""
encoding_counted = Counter(data)
total = sum(encoding_counted.values())
sorted_encodings = sorted(encoding_counted.items(), key=lambda t: t[1], reverse=True)
return ' / '.join(
["{:<1} [{:<4}]".format(k, floored_percentage(v * 1.0 / total, 1)) for k, v in sorted_encodings])
def make_total_row(source, agg):
"""
Execute agg column based function for source columns. For example if you need `total` in table data:
Examples:
src = [[1,1],[1,2],[1,3]]
print(make_total_row(src, [sum, min]))
>>> [3, 1]
:param source:
:param agg:
:return:
"""
return [agg[index](value) if callable(agg[index]) else agg[index] for index, value in enumerate(zip(*source))]
<commit_msg>Add transforming function to pref_encodings<commit_after>
|
from math import floor
from collections import Counter
def floored_percentage(val, digits):
"""
Return string of floored value with given digits after period
:param val:
:param digits:
:return:
"""
val *= 10 ** (digits + 2)
return '{1:.{0}f}%'.format(digits, floor(val) / 10 ** digits)
def pref_encoding(data, encoding_transform=None):
"""
Return string with unique words in list with percentage of they frequency
:param data:
:param encoding_transform:
:return str:
"""
encoding_counted = Counter(data)
total = sum(encoding_counted.values())
sorted_encodings = sorted(encoding_counted.items(), key=lambda t: t[1], reverse=True)
return ' / '.join(
["{:<1} [{:<4}]".format(encoding_transform(k) if encoding_transform else k, floored_percentage(v * 1.0 / total, 1)) for k, v in sorted_encodings])
def make_total_row(source, agg):
"""
Execute agg column based function for source columns. For example if you need `total` in table data:
Examples:
src = [[1,1],[1,2],[1,3]]
print(make_total_row(src, [sum, min]))
>>> [3, 1]
:param source:
:param agg:
:return:
"""
return [agg[index](value) if callable(agg[index]) else agg[index] for index, value in enumerate(zip(*source))]
|
from math import floor
from collections import Counter
def floored_percentage(val, digits):
"""
Return string of floored value with given digits after period
:param val:
:param digits:
:return:
"""
val *= 10 ** (digits + 2)
return '{1:.{0}f}%'.format(digits, floor(val) / 10 ** digits)
def pref_encoding(data):
"""
Return string with unique words in list with percentage of they frequency
:param data:
:return str:
"""
encoding_counted = Counter(data)
total = sum(encoding_counted.values())
sorted_encodings = sorted(encoding_counted.items(), key=lambda t: t[1], reverse=True)
return ' / '.join(
["{:<1} [{:<4}]".format(k, floored_percentage(v * 1.0 / total, 1)) for k, v in sorted_encodings])
def make_total_row(source, agg):
"""
Execute agg column based function for source columns. For example if you need `total` in table data:
Examples:
src = [[1,1],[1,2],[1,3]]
print(make_total_row(src, [sum, min]))
>>> [3, 1]
:param source:
:param agg:
:return:
"""
return [agg[index](value) if callable(agg[index]) else agg[index] for index, value in enumerate(zip(*source))]
Add transforming function to pref_encodingsfrom math import floor
from collections import Counter
def floored_percentage(val, digits):
"""
Return string of floored value with given digits after period
:param val:
:param digits:
:return:
"""
val *= 10 ** (digits + 2)
return '{1:.{0}f}%'.format(digits, floor(val) / 10 ** digits)
def pref_encoding(data, encoding_transform=None):
"""
Return string with unique words in list with percentage of they frequency
:param data:
:param encoding_transform:
:return str:
"""
encoding_counted = Counter(data)
total = sum(encoding_counted.values())
sorted_encodings = sorted(encoding_counted.items(), key=lambda t: t[1], reverse=True)
return ' / '.join(
["{:<1} [{:<4}]".format(encoding_transform(k) if encoding_transform else k, floored_percentage(v * 1.0 / total, 1)) for k, v in sorted_encodings])
def make_total_row(source, agg):
"""
Execute agg column based function for source columns. For example if you need `total` in table data:
Examples:
src = [[1,1],[1,2],[1,3]]
print(make_total_row(src, [sum, min]))
>>> [3, 1]
:param source:
:param agg:
:return:
"""
return [agg[index](value) if callable(agg[index]) else agg[index] for index, value in enumerate(zip(*source))]
|
<commit_before>from math import floor
from collections import Counter
def floored_percentage(val, digits):
"""
Return string of floored value with given digits after period
:param val:
:param digits:
:return:
"""
val *= 10 ** (digits + 2)
return '{1:.{0}f}%'.format(digits, floor(val) / 10 ** digits)
def pref_encoding(data):
"""
Return string with unique words in list with percentage of they frequency
:param data:
:return str:
"""
encoding_counted = Counter(data)
total = sum(encoding_counted.values())
sorted_encodings = sorted(encoding_counted.items(), key=lambda t: t[1], reverse=True)
return ' / '.join(
["{:<1} [{:<4}]".format(k, floored_percentage(v * 1.0 / total, 1)) for k, v in sorted_encodings])
def make_total_row(source, agg):
"""
Execute agg column based function for source columns. For example if you need `total` in table data:
Examples:
src = [[1,1],[1,2],[1,3]]
print(make_total_row(src, [sum, min]))
>>> [3, 1]
:param source:
:param agg:
:return:
"""
return [agg[index](value) if callable(agg[index]) else agg[index] for index, value in enumerate(zip(*source))]
<commit_msg>Add transforming function to pref_encodings<commit_after>from math import floor
from collections import Counter
def floored_percentage(val, digits):
"""
Return string of floored value with given digits after period
:param val:
:param digits:
:return:
"""
val *= 10 ** (digits + 2)
return '{1:.{0}f}%'.format(digits, floor(val) / 10 ** digits)
def pref_encoding(data, encoding_transform=None):
"""
Return string with unique words in list with percentage of they frequency
:param data:
:param encoding_transform:
:return str:
"""
encoding_counted = Counter(data)
total = sum(encoding_counted.values())
sorted_encodings = sorted(encoding_counted.items(), key=lambda t: t[1], reverse=True)
return ' / '.join(
["{:<1} [{:<4}]".format(encoding_transform(k) if encoding_transform else k, floored_percentage(v * 1.0 / total, 1)) for k, v in sorted_encodings])
def make_total_row(source, agg):
"""
Execute agg column based function for source columns. For example if you need `total` in table data:
Examples:
src = [[1,1],[1,2],[1,3]]
print(make_total_row(src, [sum, min]))
>>> [3, 1]
:param source:
:param agg:
:return:
"""
return [agg[index](value) if callable(agg[index]) else agg[index] for index, value in enumerate(zip(*source))]
|
c0882e8096d1fecd5785a85e43a472d2e6d184db
|
error_proxy.py
|
error_proxy.py
|
#!/usr/bin/env python
import sys
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(420)
if sys.argv[1:]:
config_file = sys.argv[1:]
else:
config_file = "Proxyfile"
with open(config_file) as c:
config = json.load(c)
httpd = HTTPServer(("localhost", config['port']), ErrorHTTPRequestHandler)
httpd.serve_forever()
|
#!/usr/bin/env python
import sys
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(self.config['get_response'])
if sys.argv[1:]:
config_file = sys.argv[1:]
else:
config_file = "Proxyfile"
with open(config_file) as c:
config = json.load(c)
ErrorHTTPRequestHandler.config = config
httpd = HTTPServer(("localhost", config['port']), ErrorHTTPRequestHandler)
httpd.serve_forever()
|
Configure GET response via Proxyfile
|
Configure GET response via Proxyfile
|
Python
|
mit
|
pozorvlak/error_proxy
|
#!/usr/bin/env python
import sys
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(420)
if sys.argv[1:]:
config_file = sys.argv[1:]
else:
config_file = "Proxyfile"
with open(config_file) as c:
config = json.load(c)
httpd = HTTPServer(("localhost", config['port']), ErrorHTTPRequestHandler)
httpd.serve_forever()
Configure GET response via Proxyfile
|
#!/usr/bin/env python
import sys
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(self.config['get_response'])
if sys.argv[1:]:
config_file = sys.argv[1:]
else:
config_file = "Proxyfile"
with open(config_file) as c:
config = json.load(c)
ErrorHTTPRequestHandler.config = config
httpd = HTTPServer(("localhost", config['port']), ErrorHTTPRequestHandler)
httpd.serve_forever()
|
<commit_before>#!/usr/bin/env python
import sys
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(420)
if sys.argv[1:]:
config_file = sys.argv[1:]
else:
config_file = "Proxyfile"
with open(config_file) as c:
config = json.load(c)
httpd = HTTPServer(("localhost", config['port']), ErrorHTTPRequestHandler)
httpd.serve_forever()
<commit_msg>Configure GET response via Proxyfile<commit_after>
|
#!/usr/bin/env python
import sys
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(self.config['get_response'])
if sys.argv[1:]:
config_file = sys.argv[1:]
else:
config_file = "Proxyfile"
with open(config_file) as c:
config = json.load(c)
ErrorHTTPRequestHandler.config = config
httpd = HTTPServer(("localhost", config['port']), ErrorHTTPRequestHandler)
httpd.serve_forever()
|
#!/usr/bin/env python
import sys
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(420)
if sys.argv[1:]:
config_file = sys.argv[1:]
else:
config_file = "Proxyfile"
with open(config_file) as c:
config = json.load(c)
httpd = HTTPServer(("localhost", config['port']), ErrorHTTPRequestHandler)
httpd.serve_forever()
Configure GET response via Proxyfile#!/usr/bin/env python
import sys
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(self.config['get_response'])
if sys.argv[1:]:
config_file = sys.argv[1:]
else:
config_file = "Proxyfile"
with open(config_file) as c:
config = json.load(c)
ErrorHTTPRequestHandler.config = config
httpd = HTTPServer(("localhost", config['port']), ErrorHTTPRequestHandler)
httpd.serve_forever()
|
<commit_before>#!/usr/bin/env python
import sys
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(420)
if sys.argv[1:]:
config_file = sys.argv[1:]
else:
config_file = "Proxyfile"
with open(config_file) as c:
config = json.load(c)
httpd = HTTPServer(("localhost", config['port']), ErrorHTTPRequestHandler)
httpd.serve_forever()
<commit_msg>Configure GET response via Proxyfile<commit_after>#!/usr/bin/env python
import sys
import json
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class ErrorHTTPRequestHandler(BaseHTTPRequestHandler):
def do_GET(self):
self.send_response(self.config['get_response'])
if sys.argv[1:]:
config_file = sys.argv[1:]
else:
config_file = "Proxyfile"
with open(config_file) as c:
config = json.load(c)
ErrorHTTPRequestHandler.config = config
httpd = HTTPServer(("localhost", config['port']), ErrorHTTPRequestHandler)
httpd.serve_forever()
|
cfe4148feac51a9be6ff74e978a22f1493adff8b
|
doajtest/unit/test_tasks_sitemap.py
|
doajtest/unit/test_tasks_sitemap.py
|
from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import os, shutil, time
from portality.lib import paths
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(1.5)
assert len(self.mainStore.list(self.container_id)) == 1
|
from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import time
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(2)
assert len(self.mainStore.list(self.container_id)) == 1
|
Increase timeout for slow test
|
Increase timeout for slow test
|
Python
|
apache-2.0
|
DOAJ/doaj,DOAJ/doaj,DOAJ/doaj,DOAJ/doaj
|
from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import os, shutil, time
from portality.lib import paths
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(1.5)
assert len(self.mainStore.list(self.container_id)) == 1Increase timeout for slow test
|
from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import time
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(2)
assert len(self.mainStore.list(self.container_id)) == 1
|
<commit_before>from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import os, shutil, time
from portality.lib import paths
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(1.5)
assert len(self.mainStore.list(self.container_id)) == 1<commit_msg>Increase timeout for slow test<commit_after>
|
from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import time
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(2)
assert len(self.mainStore.list(self.container_id)) == 1
|
from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import os, shutil, time
from portality.lib import paths
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(1.5)
assert len(self.mainStore.list(self.container_id)) == 1Increase timeout for slow testfrom doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import time
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(2)
assert len(self.mainStore.list(self.container_id)) == 1
|
<commit_before>from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import os, shutil, time
from portality.lib import paths
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(1.5)
assert len(self.mainStore.list(self.container_id)) == 1<commit_msg>Increase timeout for slow test<commit_after>from doajtest.helpers import DoajTestCase
from portality.core import app
from portality.tasks import sitemap
from portality.background import BackgroundApi
import time
from portality.store import StoreFactory
class TestSitemap(DoajTestCase):
store_impl = None
@classmethod
def setUpClass(cls) -> None:
super(TestSitemap, cls).setUpClass()
cls.store_impl = app.config["STORE_IMPL"]
app.config["STORE_IMPL"] = "portality.store.StoreLocal"
@classmethod
def tearDownClass(cls) -> None:
super(TestSitemap, cls).tearDownClass()
app.config["STORE_IMPL"] = cls.store_impl
def setUp(self):
super(TestSitemap, self).setUp()
self.container_id = app.config.get("STORE_CACHE_CONTAINER")
self.mainStore = StoreFactory.get("cache")
def tearDown(self):
super(TestSitemap, self).tearDown()
self.mainStore.delete_container(self.container_id)
def test_01_sitemap(self):
user = app.config.get("SYSTEM_USERNAME")
job = sitemap.SitemapBackgroundTask.prepare(user)
task = sitemap.SitemapBackgroundTask(job)
BackgroundApi.execute(task)
time.sleep(2)
assert len(self.mainStore.list(self.container_id)) == 1
|
611a00496834b610c2663f408c94fb73b8785980
|
rpc_client/rpc_client.py
|
rpc_client/rpc_client.py
|
#!/usr/bin/python
import xmlrpclib, logging
from ConfigParser import SafeConfigParser
# Configure logging.
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m/%d %H:%M:%S',
filename='/tmp/kts46_rpc_client.log',
filemode='w')
# Define a handler for console message with mode simple format.
console = logging.StreamHandler()
console.setFormatter( logging.Formatter('L:%(levelname)-6s %(message)s') )
logger = logging.getLogger('kts46.rpc_client')
logger.addHandler(console)
# Create configuration.
logger.debug('Reading configuration.')
cfg = SafeConfigParser()
cfg.read(('rpc_client.ini',))
# Create proxy.
host = cfg.get('connection', 'server')
port = cfg.getint('connection', 'port')
connString = 'http://%s:%i' % (host, port)
logger.info('Connecting to server %s' % connString)
sp = xmlrpclib.ServerProxy(connString)
# Say hello and print available functions.
print sp.hello('Hello Mr. Server!')
|
#!/usr/bin/python
import xmlrpclib, logging
from ConfigParser import SafeConfigParser
def init():
# Configure logging.
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m/%d %H:%M:%S',
filename='/tmp/kts46_rpc_client.log',
filemode='w')
# Define a handler for console message with mode simple format.
console = logging.StreamHandler()
console.setFormatter( logging.Formatter('L:%(levelname)-6s %(message)s') )
logger = logging.getLogger('kts46.rpc_client')
logger.addHandler(console)
# Create configuration.
logger.debug('Reading configuration.')
cfg = SafeConfigParser()
cfg.read(('rpc_client.ini',))
return (cfg, logger)
cfg, logger = init()
# Create proxy.
host = cfg.get('connection', 'server')
port = cfg.getint('connection', 'port')
connString = 'http://%s:%i' % (host, port)
logger.info('Connecting to server %s' % connString)
sp = xmlrpclib.ServerProxy(connString)
# Say hello and print available functions.
print sp.hello('Hello Mr. Server!')
|
Move initialization of logger and cfg parser to separate function.
|
Move initialization of logger and cfg parser to separate function.
|
Python
|
apache-2.0
|
anthony-kolesov/kts46,anthony-kolesov/kts46,anthony-kolesov/kts46,anthony-kolesov/kts46
|
#!/usr/bin/python
import xmlrpclib, logging
from ConfigParser import SafeConfigParser
# Configure logging.
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m/%d %H:%M:%S',
filename='/tmp/kts46_rpc_client.log',
filemode='w')
# Define a handler for console message with mode simple format.
console = logging.StreamHandler()
console.setFormatter( logging.Formatter('L:%(levelname)-6s %(message)s') )
logger = logging.getLogger('kts46.rpc_client')
logger.addHandler(console)
# Create configuration.
logger.debug('Reading configuration.')
cfg = SafeConfigParser()
cfg.read(('rpc_client.ini',))
# Create proxy.
host = cfg.get('connection', 'server')
port = cfg.getint('connection', 'port')
connString = 'http://%s:%i' % (host, port)
logger.info('Connecting to server %s' % connString)
sp = xmlrpclib.ServerProxy(connString)
# Say hello and print available functions.
print sp.hello('Hello Mr. Server!')
Move initialization of logger and cfg parser to separate function.
|
#!/usr/bin/python
import xmlrpclib, logging
from ConfigParser import SafeConfigParser
def init():
# Configure logging.
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m/%d %H:%M:%S',
filename='/tmp/kts46_rpc_client.log',
filemode='w')
# Define a handler for console message with mode simple format.
console = logging.StreamHandler()
console.setFormatter( logging.Formatter('L:%(levelname)-6s %(message)s') )
logger = logging.getLogger('kts46.rpc_client')
logger.addHandler(console)
# Create configuration.
logger.debug('Reading configuration.')
cfg = SafeConfigParser()
cfg.read(('rpc_client.ini',))
return (cfg, logger)
cfg, logger = init()
# Create proxy.
host = cfg.get('connection', 'server')
port = cfg.getint('connection', 'port')
connString = 'http://%s:%i' % (host, port)
logger.info('Connecting to server %s' % connString)
sp = xmlrpclib.ServerProxy(connString)
# Say hello and print available functions.
print sp.hello('Hello Mr. Server!')
|
<commit_before>#!/usr/bin/python
import xmlrpclib, logging
from ConfigParser import SafeConfigParser
# Configure logging.
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m/%d %H:%M:%S',
filename='/tmp/kts46_rpc_client.log',
filemode='w')
# Define a handler for console message with mode simple format.
console = logging.StreamHandler()
console.setFormatter( logging.Formatter('L:%(levelname)-6s %(message)s') )
logger = logging.getLogger('kts46.rpc_client')
logger.addHandler(console)
# Create configuration.
logger.debug('Reading configuration.')
cfg = SafeConfigParser()
cfg.read(('rpc_client.ini',))
# Create proxy.
host = cfg.get('connection', 'server')
port = cfg.getint('connection', 'port')
connString = 'http://%s:%i' % (host, port)
logger.info('Connecting to server %s' % connString)
sp = xmlrpclib.ServerProxy(connString)
# Say hello and print available functions.
print sp.hello('Hello Mr. Server!')
<commit_msg>Move initialization of logger and cfg parser to separate function.<commit_after>
|
#!/usr/bin/python
import xmlrpclib, logging
from ConfigParser import SafeConfigParser
def init():
# Configure logging.
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m/%d %H:%M:%S',
filename='/tmp/kts46_rpc_client.log',
filemode='w')
# Define a handler for console message with mode simple format.
console = logging.StreamHandler()
console.setFormatter( logging.Formatter('L:%(levelname)-6s %(message)s') )
logger = logging.getLogger('kts46.rpc_client')
logger.addHandler(console)
# Create configuration.
logger.debug('Reading configuration.')
cfg = SafeConfigParser()
cfg.read(('rpc_client.ini',))
return (cfg, logger)
cfg, logger = init()
# Create proxy.
host = cfg.get('connection', 'server')
port = cfg.getint('connection', 'port')
connString = 'http://%s:%i' % (host, port)
logger.info('Connecting to server %s' % connString)
sp = xmlrpclib.ServerProxy(connString)
# Say hello and print available functions.
print sp.hello('Hello Mr. Server!')
|
#!/usr/bin/python
import xmlrpclib, logging
from ConfigParser import SafeConfigParser
# Configure logging.
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m/%d %H:%M:%S',
filename='/tmp/kts46_rpc_client.log',
filemode='w')
# Define a handler for console message with mode simple format.
console = logging.StreamHandler()
console.setFormatter( logging.Formatter('L:%(levelname)-6s %(message)s') )
logger = logging.getLogger('kts46.rpc_client')
logger.addHandler(console)
# Create configuration.
logger.debug('Reading configuration.')
cfg = SafeConfigParser()
cfg.read(('rpc_client.ini',))
# Create proxy.
host = cfg.get('connection', 'server')
port = cfg.getint('connection', 'port')
connString = 'http://%s:%i' % (host, port)
logger.info('Connecting to server %s' % connString)
sp = xmlrpclib.ServerProxy(connString)
# Say hello and print available functions.
print sp.hello('Hello Mr. Server!')
Move initialization of logger and cfg parser to separate function.#!/usr/bin/python
import xmlrpclib, logging
from ConfigParser import SafeConfigParser
def init():
# Configure logging.
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m/%d %H:%M:%S',
filename='/tmp/kts46_rpc_client.log',
filemode='w')
# Define a handler for console message with mode simple format.
console = logging.StreamHandler()
console.setFormatter( logging.Formatter('L:%(levelname)-6s %(message)s') )
logger = logging.getLogger('kts46.rpc_client')
logger.addHandler(console)
# Create configuration.
logger.debug('Reading configuration.')
cfg = SafeConfigParser()
cfg.read(('rpc_client.ini',))
return (cfg, logger)
cfg, logger = init()
# Create proxy.
host = cfg.get('connection', 'server')
port = cfg.getint('connection', 'port')
connString = 'http://%s:%i' % (host, port)
logger.info('Connecting to server %s' % connString)
sp = xmlrpclib.ServerProxy(connString)
# Say hello and print available functions.
print sp.hello('Hello Mr. Server!')
|
<commit_before>#!/usr/bin/python
import xmlrpclib, logging
from ConfigParser import SafeConfigParser
# Configure logging.
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m/%d %H:%M:%S',
filename='/tmp/kts46_rpc_client.log',
filemode='w')
# Define a handler for console message with mode simple format.
console = logging.StreamHandler()
console.setFormatter( logging.Formatter('L:%(levelname)-6s %(message)s') )
logger = logging.getLogger('kts46.rpc_client')
logger.addHandler(console)
# Create configuration.
logger.debug('Reading configuration.')
cfg = SafeConfigParser()
cfg.read(('rpc_client.ini',))
# Create proxy.
host = cfg.get('connection', 'server')
port = cfg.getint('connection', 'port')
connString = 'http://%s:%i' % (host, port)
logger.info('Connecting to server %s' % connString)
sp = xmlrpclib.ServerProxy(connString)
# Say hello and print available functions.
print sp.hello('Hello Mr. Server!')
<commit_msg>Move initialization of logger and cfg parser to separate function.<commit_after>#!/usr/bin/python
import xmlrpclib, logging
from ConfigParser import SafeConfigParser
def init():
# Configure logging.
logging.basicConfig(level=logging.INFO,
format='%(asctime)s %(name)-12s %(levelname)-8s %(message)s',
datefmt='%m/%d %H:%M:%S',
filename='/tmp/kts46_rpc_client.log',
filemode='w')
# Define a handler for console message with mode simple format.
console = logging.StreamHandler()
console.setFormatter( logging.Formatter('L:%(levelname)-6s %(message)s') )
logger = logging.getLogger('kts46.rpc_client')
logger.addHandler(console)
# Create configuration.
logger.debug('Reading configuration.')
cfg = SafeConfigParser()
cfg.read(('rpc_client.ini',))
return (cfg, logger)
cfg, logger = init()
# Create proxy.
host = cfg.get('connection', 'server')
port = cfg.getint('connection', 'port')
connString = 'http://%s:%i' % (host, port)
logger.info('Connecting to server %s' % connString)
sp = xmlrpclib.ServerProxy(connString)
# Say hello and print available functions.
print sp.hello('Hello Mr. Server!')
|
fb07eabac3847a1d454bbe6d663deef6ec47fc9b
|
seo/escaped_fragment/app.py
|
seo/escaped_fragment/app.py
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 3:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 5:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
if 'class="ng-scope"' not in response:
raise CalledProcessError()
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
|
Fix broken content rendered by PhJS
|
Fix broken content rendered by PhJS
|
Python
|
apache-2.0
|
platformio/platformio-web,orgkhnargh/platformio-web,orgkhnargh/platformio-web,platformio/platformio-web,orgkhnargh/platformio-web
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 3:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
Fix broken content rendered by PhJS
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 5:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
if 'class="ng-scope"' not in response:
raise CalledProcessError()
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
|
<commit_before># Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 3:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
<commit_msg>Fix broken content rendered by PhJS<commit_after>
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 5:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
if 'class="ng-scope"' not in response:
raise CalledProcessError()
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
|
# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 3:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
Fix broken content rendered by PhJS# Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 5:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
if 'class="ng-scope"' not in response:
raise CalledProcessError()
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
|
<commit_before># Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 3:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
<commit_msg>Fix broken content rendered by PhJS<commit_after># Copyright (C) Ivan Kravets <me@ikravets.com>
# See LICENSE for details.
from subprocess import check_output, CalledProcessError
from urllib import unquote
def application(env, start_response):
status = "200 OK"
response = ""
qs = env.get("QUERY_STRING", None)
if not qs or not qs.startswith("_escaped_fragment_="):
status = "500 Internal Server Error"
else:
url = "http://platformio.org/#!" + unquote(qs[19:])
try:
response = get_webcontent(url)
if "404 Not Found" in response:
status = "404 Not Found"
except Exception:
status = "500 Internal Server Error"
start_response(status, [("Content-Type", "text/html"),
("Content-Length", str(len(response)))])
return response
def get_webcontent(url):
retrynums = 0
while retrynums < 5:
try:
response = check_output([
"phantomjs", "--disk-cache=true", "--load-images=false",
"crawler.js", url
])
if 'class="ng-scope"' not in response:
raise CalledProcessError()
return response
except CalledProcessError:
retrynums += 1
raise Exception("Could not retrieve content from %s" % url)
|
632a655f8f1f5867069f1c4d381417fa567b79a6
|
controlled_vocabularies/urls.py
|
controlled_vocabularies/urls.py
|
from django.urls import re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
re_path(r'^$', vocabulary_list, name="vocabulary_list"),
re_path(r'^all-verbose/?$', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
re_path(r'^about/', about, name="about"),
re_path(r'^all/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^(?P<vocabulary_name>[\w-]+)/$', term_list, name="term_list"),
re_path(r'^(?P<list_name>[\w-]+)/(?P<file_format>\w+)/$', vocabulary_file,
name="vocabulary_file"),
]
|
from django.urls import path, re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
path('', vocabulary_list, name="vocabulary_list"),
path('all-verbose/', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
path('about/', about, name="about"),
path('all/', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
path('<slug:list_name>/<slug:file_format>/', vocabulary_file,
name="vocabulary_file"),
path('<slug:vocabulary_name>/', term_list, name="term_list"),
]
|
Replace re_path with path wherever possible
|
Replace re_path with path wherever possible
|
Python
|
bsd-3-clause
|
unt-libraries/django-controlled-vocabularies,unt-libraries/django-controlled-vocabularies
|
from django.urls import re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
re_path(r'^$', vocabulary_list, name="vocabulary_list"),
re_path(r'^all-verbose/?$', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
re_path(r'^about/', about, name="about"),
re_path(r'^all/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^(?P<vocabulary_name>[\w-]+)/$', term_list, name="term_list"),
re_path(r'^(?P<list_name>[\w-]+)/(?P<file_format>\w+)/$', vocabulary_file,
name="vocabulary_file"),
]
Replace re_path with path wherever possible
|
from django.urls import path, re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
path('', vocabulary_list, name="vocabulary_list"),
path('all-verbose/', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
path('about/', about, name="about"),
path('all/', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
path('<slug:list_name>/<slug:file_format>/', vocabulary_file,
name="vocabulary_file"),
path('<slug:vocabulary_name>/', term_list, name="term_list"),
]
|
<commit_before>from django.urls import re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
re_path(r'^$', vocabulary_list, name="vocabulary_list"),
re_path(r'^all-verbose/?$', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
re_path(r'^about/', about, name="about"),
re_path(r'^all/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^(?P<vocabulary_name>[\w-]+)/$', term_list, name="term_list"),
re_path(r'^(?P<list_name>[\w-]+)/(?P<file_format>\w+)/$', vocabulary_file,
name="vocabulary_file"),
]
<commit_msg>Replace re_path with path wherever possible<commit_after>
|
from django.urls import path, re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
path('', vocabulary_list, name="vocabulary_list"),
path('all-verbose/', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
path('about/', about, name="about"),
path('all/', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
path('<slug:list_name>/<slug:file_format>/', vocabulary_file,
name="vocabulary_file"),
path('<slug:vocabulary_name>/', term_list, name="term_list"),
]
|
from django.urls import re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
re_path(r'^$', vocabulary_list, name="vocabulary_list"),
re_path(r'^all-verbose/?$', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
re_path(r'^about/', about, name="about"),
re_path(r'^all/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^(?P<vocabulary_name>[\w-]+)/$', term_list, name="term_list"),
re_path(r'^(?P<list_name>[\w-]+)/(?P<file_format>\w+)/$', vocabulary_file,
name="vocabulary_file"),
]
Replace re_path with path wherever possiblefrom django.urls import path, re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
path('', vocabulary_list, name="vocabulary_list"),
path('all-verbose/', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
path('about/', about, name="about"),
path('all/', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
path('<slug:list_name>/<slug:file_format>/', vocabulary_file,
name="vocabulary_file"),
path('<slug:vocabulary_name>/', term_list, name="term_list"),
]
|
<commit_before>from django.urls import re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
re_path(r'^$', vocabulary_list, name="vocabulary_list"),
re_path(r'^all-verbose/?$', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
re_path(r'^about/', about, name="about"),
re_path(r'^all/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
re_path(r'^(?P<vocabulary_name>[\w-]+)/$', term_list, name="term_list"),
re_path(r'^(?P<list_name>[\w-]+)/(?P<file_format>\w+)/$', vocabulary_file,
name="vocabulary_file"),
]
<commit_msg>Replace re_path with path wherever possible<commit_after>from django.urls import path, re_path
from controlled_vocabularies.views import (
vocabulary_list, verbose_vocabularies, about,
all_vocabularies, term_list, vocabulary_file
)
urlpatterns = [
# Search View
path('', vocabulary_list, name="vocabulary_list"),
path('all-verbose/', verbose_vocabularies, name="verbose_vocabularies"),
re_path(r'^all-verbose\.(?P<file_format>py|json)/?$', verbose_vocabularies,
name="verbose_vocabularies"),
path('about/', about, name="about"),
path('all/', all_vocabularies, name="all_vocabularies"),
re_path(r'^all\.(?P<file_format>py|json)/?$', all_vocabularies, name="all_vocabularies"),
path('<slug:list_name>/<slug:file_format>/', vocabulary_file,
name="vocabulary_file"),
path('<slug:vocabulary_name>/', term_list, name="term_list"),
]
|
ebf9628a55a82daa489f2bd5e2d83f2218369f01
|
controllers/accounts_manager.py
|
controllers/accounts_manager.py
|
from flask_restful import Resource
class AccountsManager(Resource):
"""docstring for AccountsManager."""
def get(self):
return {"route": "login"}
def post(self):
return {"route": "register"}
|
from flask import jsonify, make_response
from flask_restful import Resource, reqparse
from app.models import User
from app.db_instance import save
from validator import validate
class AccountsManager(Resource):
"""docstring for AccountsManager."""
def __init__(self):
self.parser = reqparse.RequestParser()
self.parser.add_argument('username',
type=str,
required=True,
help="username is required")
self.parser.add_argument('password',
type=str,
required=True,
help="password is required")
def post(self):
args = self.parser.parse_args(strict=True)
username = args.get("username")
password = args.get("password")
if any(arg == "" for arg in [username, password]):
message = "username and password is required"
status = 400
elif not username.isalpha():
message = "username should not contain special characters"
status = 400
elif len(password) < 6:
message = "password should be more than 6 characters"
status = 400
elif User.query.filter_by(username=username).first():
message = "username already exists"
status = 409
else:
user = User(username, password)
save(user)
message = "user registered successfully"
status = 201
return make_response(jsonify({
"message": message
}), status)
|
Add Register resource to handle user registration and save user data to the database
|
Add Register resource to handle user registration and save user data to the database
|
Python
|
mit
|
brayoh/bucket-list-api
|
from flask_restful import Resource
class AccountsManager(Resource):
"""docstring for AccountsManager."""
def get(self):
return {"route": "login"}
def post(self):
return {"route": "register"}
Add Register resource to handle user registration and save user data to the database
|
from flask import jsonify, make_response
from flask_restful import Resource, reqparse
from app.models import User
from app.db_instance import save
from validator import validate
class AccountsManager(Resource):
"""docstring for AccountsManager."""
def __init__(self):
self.parser = reqparse.RequestParser()
self.parser.add_argument('username',
type=str,
required=True,
help="username is required")
self.parser.add_argument('password',
type=str,
required=True,
help="password is required")
def post(self):
args = self.parser.parse_args(strict=True)
username = args.get("username")
password = args.get("password")
if any(arg == "" for arg in [username, password]):
message = "username and password is required"
status = 400
elif not username.isalpha():
message = "username should not contain special characters"
status = 400
elif len(password) < 6:
message = "password should be more than 6 characters"
status = 400
elif User.query.filter_by(username=username).first():
message = "username already exists"
status = 409
else:
user = User(username, password)
save(user)
message = "user registered successfully"
status = 201
return make_response(jsonify({
"message": message
}), status)
|
<commit_before>from flask_restful import Resource
class AccountsManager(Resource):
"""docstring for AccountsManager."""
def get(self):
return {"route": "login"}
def post(self):
return {"route": "register"}
<commit_msg>Add Register resource to handle user registration and save user data to the database<commit_after>
|
from flask import jsonify, make_response
from flask_restful import Resource, reqparse
from app.models import User
from app.db_instance import save
from validator import validate
class AccountsManager(Resource):
"""docstring for AccountsManager."""
def __init__(self):
self.parser = reqparse.RequestParser()
self.parser.add_argument('username',
type=str,
required=True,
help="username is required")
self.parser.add_argument('password',
type=str,
required=True,
help="password is required")
def post(self):
args = self.parser.parse_args(strict=True)
username = args.get("username")
password = args.get("password")
if any(arg == "" for arg in [username, password]):
message = "username and password is required"
status = 400
elif not username.isalpha():
message = "username should not contain special characters"
status = 400
elif len(password) < 6:
message = "password should be more than 6 characters"
status = 400
elif User.query.filter_by(username=username).first():
message = "username already exists"
status = 409
else:
user = User(username, password)
save(user)
message = "user registered successfully"
status = 201
return make_response(jsonify({
"message": message
}), status)
|
from flask_restful import Resource
class AccountsManager(Resource):
"""docstring for AccountsManager."""
def get(self):
return {"route": "login"}
def post(self):
return {"route": "register"}
Add Register resource to handle user registration and save user data to the databasefrom flask import jsonify, make_response
from flask_restful import Resource, reqparse
from app.models import User
from app.db_instance import save
from validator import validate
class AccountsManager(Resource):
"""docstring for AccountsManager."""
def __init__(self):
self.parser = reqparse.RequestParser()
self.parser.add_argument('username',
type=str,
required=True,
help="username is required")
self.parser.add_argument('password',
type=str,
required=True,
help="password is required")
def post(self):
args = self.parser.parse_args(strict=True)
username = args.get("username")
password = args.get("password")
if any(arg == "" for arg in [username, password]):
message = "username and password is required"
status = 400
elif not username.isalpha():
message = "username should not contain special characters"
status = 400
elif len(password) < 6:
message = "password should be more than 6 characters"
status = 400
elif User.query.filter_by(username=username).first():
message = "username already exists"
status = 409
else:
user = User(username, password)
save(user)
message = "user registered successfully"
status = 201
return make_response(jsonify({
"message": message
}), status)
|
<commit_before>from flask_restful import Resource
class AccountsManager(Resource):
"""docstring for AccountsManager."""
def get(self):
return {"route": "login"}
def post(self):
return {"route": "register"}
<commit_msg>Add Register resource to handle user registration and save user data to the database<commit_after>from flask import jsonify, make_response
from flask_restful import Resource, reqparse
from app.models import User
from app.db_instance import save
from validator import validate
class AccountsManager(Resource):
"""docstring for AccountsManager."""
def __init__(self):
self.parser = reqparse.RequestParser()
self.parser.add_argument('username',
type=str,
required=True,
help="username is required")
self.parser.add_argument('password',
type=str,
required=True,
help="password is required")
def post(self):
args = self.parser.parse_args(strict=True)
username = args.get("username")
password = args.get("password")
if any(arg == "" for arg in [username, password]):
message = "username and password is required"
status = 400
elif not username.isalpha():
message = "username should not contain special characters"
status = 400
elif len(password) < 6:
message = "password should be more than 6 characters"
status = 400
elif User.query.filter_by(username=username).first():
message = "username already exists"
status = 409
else:
user = User(username, password)
save(user)
message = "user registered successfully"
status = 201
return make_response(jsonify({
"message": message
}), status)
|
45e180a6769584cad372399f9383dbb965e8ece8
|
live_studio/build/views.py
|
live_studio/build/views.py
|
from django.http import HttpResponseRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
@require_POST
def enqueue(request, config_id):
config = get_object_or_404(request.user.configs, pk=config_id)
config.builds.create()
messages.add_message(request, messages.INFO,
"Build enqueued.")
return HttpResponseRedirect(config.get_absolute_url())
|
from django.http import HttpResponseRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
@require_POST
def enqueue(request, config_id):
config = get_object_or_404(request.user.configs, pk=config_id)
config.builds.create()
messages.add_message(request, messages.INFO,
"Build enqueued. You will receive an email when the build completes.")
return HttpResponseRedirect(config.get_absolute_url())
|
Make it clearer that you get an email when the build completes
|
Make it clearer that you get an email when the build completes
|
Python
|
agpl-3.0
|
debian-live/live-studio,debian-live/live-studio,debian-live/live-studio
|
from django.http import HttpResponseRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
@require_POST
def enqueue(request, config_id):
config = get_object_or_404(request.user.configs, pk=config_id)
config.builds.create()
messages.add_message(request, messages.INFO,
"Build enqueued.")
return HttpResponseRedirect(config.get_absolute_url())
Make it clearer that you get an email when the build completes
|
from django.http import HttpResponseRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
@require_POST
def enqueue(request, config_id):
config = get_object_or_404(request.user.configs, pk=config_id)
config.builds.create()
messages.add_message(request, messages.INFO,
"Build enqueued. You will receive an email when the build completes.")
return HttpResponseRedirect(config.get_absolute_url())
|
<commit_before>from django.http import HttpResponseRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
@require_POST
def enqueue(request, config_id):
config = get_object_or_404(request.user.configs, pk=config_id)
config.builds.create()
messages.add_message(request, messages.INFO,
"Build enqueued.")
return HttpResponseRedirect(config.get_absolute_url())
<commit_msg>Make it clearer that you get an email when the build completes<commit_after>
|
from django.http import HttpResponseRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
@require_POST
def enqueue(request, config_id):
config = get_object_or_404(request.user.configs, pk=config_id)
config.builds.create()
messages.add_message(request, messages.INFO,
"Build enqueued. You will receive an email when the build completes.")
return HttpResponseRedirect(config.get_absolute_url())
|
from django.http import HttpResponseRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
@require_POST
def enqueue(request, config_id):
config = get_object_or_404(request.user.configs, pk=config_id)
config.builds.create()
messages.add_message(request, messages.INFO,
"Build enqueued.")
return HttpResponseRedirect(config.get_absolute_url())
Make it clearer that you get an email when the build completesfrom django.http import HttpResponseRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
@require_POST
def enqueue(request, config_id):
config = get_object_or_404(request.user.configs, pk=config_id)
config.builds.create()
messages.add_message(request, messages.INFO,
"Build enqueued. You will receive an email when the build completes.")
return HttpResponseRedirect(config.get_absolute_url())
|
<commit_before>from django.http import HttpResponseRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
@require_POST
def enqueue(request, config_id):
config = get_object_or_404(request.user.configs, pk=config_id)
config.builds.create()
messages.add_message(request, messages.INFO,
"Build enqueued.")
return HttpResponseRedirect(config.get_absolute_url())
<commit_msg>Make it clearer that you get an email when the build completes<commit_after>from django.http import HttpResponseRedirect
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.views.decorators.http import require_POST
@require_POST
def enqueue(request, config_id):
config = get_object_or_404(request.user.configs, pk=config_id)
config.builds.create()
messages.add_message(request, messages.INFO,
"Build enqueued. You will receive an email when the build completes.")
return HttpResponseRedirect(config.get_absolute_url())
|
bff55b65cd08259c64171e0ad5fd836875ce3008
|
example/search/views.py
|
example/search/views.py
|
from __future__ import absolute_import, unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
import wagtail
if wagtail.VERSION >= (2, 0):
from wagtail.core.models import Page
from wagtail.search.models import Query
else:
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
|
from __future__ import absolute_import, unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
from wagtail.core.models import Page
from wagtail.search.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
|
Drop wagtail 1.13 support from example
|
Drop wagtail 1.13 support from example
|
Python
|
mit
|
Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget,Frojd/wagtail-geo-widget
|
from __future__ import absolute_import, unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
import wagtail
if wagtail.VERSION >= (2, 0):
from wagtail.core.models import Page
from wagtail.search.models import Query
else:
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
Drop wagtail 1.13 support from example
|
from __future__ import absolute_import, unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
from wagtail.core.models import Page
from wagtail.search.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
|
<commit_before>from __future__ import absolute_import, unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
import wagtail
if wagtail.VERSION >= (2, 0):
from wagtail.core.models import Page
from wagtail.search.models import Query
else:
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
<commit_msg>Drop wagtail 1.13 support from example<commit_after>
|
from __future__ import absolute_import, unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
from wagtail.core.models import Page
from wagtail.search.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
|
from __future__ import absolute_import, unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
import wagtail
if wagtail.VERSION >= (2, 0):
from wagtail.core.models import Page
from wagtail.search.models import Query
else:
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
Drop wagtail 1.13 support from examplefrom __future__ import absolute_import, unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
from wagtail.core.models import Page
from wagtail.search.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
|
<commit_before>from __future__ import absolute_import, unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
import wagtail
if wagtail.VERSION >= (2, 0):
from wagtail.core.models import Page
from wagtail.search.models import Query
else:
from wagtail.wagtailcore.models import Page
from wagtail.wagtailsearch.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
<commit_msg>Drop wagtail 1.13 support from example<commit_after>from __future__ import absolute_import, unicode_literals
from django.core.paginator import EmptyPage, PageNotAnInteger, Paginator
from django.shortcuts import render
from wagtail.core.models import Page
from wagtail.search.models import Query
def search(request):
search_query = request.GET.get('query', None)
page = request.GET.get('page', 1)
# Search
if search_query:
search_results = Page.objects.live().search(search_query)
query = Query.get(search_query)
# Record hit
query.add_hit()
else:
search_results = Page.objects.none()
# Pagination
paginator = Paginator(search_results, 10)
try:
search_results = paginator.page(page)
except PageNotAnInteger:
search_results = paginator.page(1)
except EmptyPage:
search_results = paginator.page(paginator.num_pages)
return render(request, 'search/search.html', {
'search_query': search_query,
'search_results': search_results,
})
|
9080e9c967fa5e2af43c42a48de8b8ec3231a866
|
src/tests/behave/agent/features/steps/common.py
|
src/tests/behave/agent/features/steps/common.py
|
from collections import defaultdict
from behave import given, then, when, step, use_step_matcher
use_step_matcher("parse")
@given('something with the agent')
def step_something_with_the_agent(context):
"""
:type context: behave.runner.Context
"""
pass
|
from collections import defaultdict
from behave import given, then, when, step, use_step_matcher
use_step_matcher("parse")
@given('an agent is running on {ip}')
def step_an_agent_is_running_on_ip(context, ip):
"""
:type context: behave.runner.Context
:type ip: str
"""
pass
|
Add ip argument to agent running step
|
Add ip argument to agent running step
|
Python
|
apache-2.0
|
jr0d/mercury,jr0d/mercury
|
from collections import defaultdict
from behave import given, then, when, step, use_step_matcher
use_step_matcher("parse")
@given('something with the agent')
def step_something_with_the_agent(context):
"""
:type context: behave.runner.Context
"""
pass
Add ip argument to agent running step
|
from collections import defaultdict
from behave import given, then, when, step, use_step_matcher
use_step_matcher("parse")
@given('an agent is running on {ip}')
def step_an_agent_is_running_on_ip(context, ip):
"""
:type context: behave.runner.Context
:type ip: str
"""
pass
|
<commit_before>from collections import defaultdict
from behave import given, then, when, step, use_step_matcher
use_step_matcher("parse")
@given('something with the agent')
def step_something_with_the_agent(context):
"""
:type context: behave.runner.Context
"""
pass
<commit_msg>Add ip argument to agent running step<commit_after>
|
from collections import defaultdict
from behave import given, then, when, step, use_step_matcher
use_step_matcher("parse")
@given('an agent is running on {ip}')
def step_an_agent_is_running_on_ip(context, ip):
"""
:type context: behave.runner.Context
:type ip: str
"""
pass
|
from collections import defaultdict
from behave import given, then, when, step, use_step_matcher
use_step_matcher("parse")
@given('something with the agent')
def step_something_with_the_agent(context):
"""
:type context: behave.runner.Context
"""
pass
Add ip argument to agent running stepfrom collections import defaultdict
from behave import given, then, when, step, use_step_matcher
use_step_matcher("parse")
@given('an agent is running on {ip}')
def step_an_agent_is_running_on_ip(context, ip):
"""
:type context: behave.runner.Context
:type ip: str
"""
pass
|
<commit_before>from collections import defaultdict
from behave import given, then, when, step, use_step_matcher
use_step_matcher("parse")
@given('something with the agent')
def step_something_with_the_agent(context):
"""
:type context: behave.runner.Context
"""
pass
<commit_msg>Add ip argument to agent running step<commit_after>from collections import defaultdict
from behave import given, then, when, step, use_step_matcher
use_step_matcher("parse")
@given('an agent is running on {ip}')
def step_an_agent_is_running_on_ip(context, ip):
"""
:type context: behave.runner.Context
:type ip: str
"""
pass
|
a23061a7efb241186ddf59911d6f1513cdec61a7
|
geotrek/core/urls.py
|
geotrek/core/urls.py
|
from django.conf import settings
from django.conf.urls import patterns, url
from mapentity import registry
from geotrek.altimetry.urls import AltimetryEntityOptions
from geotrek.core.models import Path, Trail
from geotrek.core.views import get_graph_json
if settings.TREKKING_TOPOLOGY_ENABLED:
urlpatterns = patterns('',
url(r'^api/graph.json$', get_graph_json, name="path_json_graph"),
)
class PathEntityOptions(AltimetryEntityOptions):
# Profiles for paths
pass
urlpatterns += registry.register(Path, PathEntityOptions)
urlpatterns += registry.register(Trail, menu=settings.TRAIL_MODEL_ENABLED)
|
from django.conf import settings
from django.conf.urls import patterns, url
from mapentity import registry
from geotrek.altimetry.urls import AltimetryEntityOptions
from geotrek.core.models import Path, Trail
from geotrek.core.views import get_graph_json
urlpatterns = patterns('',
url(r'^api/graph.json$', get_graph_json, name="path_json_graph"),
)
if settings.TREKKING_TOPOLOGY_ENABLED:
class PathEntityOptions(AltimetryEntityOptions):
# Profiles for paths
pass
urlpatterns += registry.register(Path, PathEntityOptions)
urlpatterns += registry.register(Trail, menu=settings.TRAIL_MODEL_ENABLED)
|
Fix URL error with Geotrek light
|
Fix URL error with Geotrek light
|
Python
|
bsd-2-clause
|
johan--/Geotrek,GeotrekCE/Geotrek-admin,Anaethelion/Geotrek,mabhub/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,makinacorpus/Geotrek,Anaethelion/Geotrek,Anaethelion/Geotrek,johan--/Geotrek,mabhub/Geotrek,makinacorpus/Geotrek,GeotrekCE/Geotrek-admin,Anaethelion/Geotrek,GeotrekCE/Geotrek-admin,johan--/Geotrek,makinacorpus/Geotrek,makinacorpus/Geotrek,mabhub/Geotrek,mabhub/Geotrek
|
from django.conf import settings
from django.conf.urls import patterns, url
from mapentity import registry
from geotrek.altimetry.urls import AltimetryEntityOptions
from geotrek.core.models import Path, Trail
from geotrek.core.views import get_graph_json
if settings.TREKKING_TOPOLOGY_ENABLED:
urlpatterns = patterns('',
url(r'^api/graph.json$', get_graph_json, name="path_json_graph"),
)
class PathEntityOptions(AltimetryEntityOptions):
# Profiles for paths
pass
urlpatterns += registry.register(Path, PathEntityOptions)
urlpatterns += registry.register(Trail, menu=settings.TRAIL_MODEL_ENABLED)
Fix URL error with Geotrek light
|
from django.conf import settings
from django.conf.urls import patterns, url
from mapentity import registry
from geotrek.altimetry.urls import AltimetryEntityOptions
from geotrek.core.models import Path, Trail
from geotrek.core.views import get_graph_json
urlpatterns = patterns('',
url(r'^api/graph.json$', get_graph_json, name="path_json_graph"),
)
if settings.TREKKING_TOPOLOGY_ENABLED:
class PathEntityOptions(AltimetryEntityOptions):
# Profiles for paths
pass
urlpatterns += registry.register(Path, PathEntityOptions)
urlpatterns += registry.register(Trail, menu=settings.TRAIL_MODEL_ENABLED)
|
<commit_before>from django.conf import settings
from django.conf.urls import patterns, url
from mapentity import registry
from geotrek.altimetry.urls import AltimetryEntityOptions
from geotrek.core.models import Path, Trail
from geotrek.core.views import get_graph_json
if settings.TREKKING_TOPOLOGY_ENABLED:
urlpatterns = patterns('',
url(r'^api/graph.json$', get_graph_json, name="path_json_graph"),
)
class PathEntityOptions(AltimetryEntityOptions):
# Profiles for paths
pass
urlpatterns += registry.register(Path, PathEntityOptions)
urlpatterns += registry.register(Trail, menu=settings.TRAIL_MODEL_ENABLED)
<commit_msg>Fix URL error with Geotrek light<commit_after>
|
from django.conf import settings
from django.conf.urls import patterns, url
from mapentity import registry
from geotrek.altimetry.urls import AltimetryEntityOptions
from geotrek.core.models import Path, Trail
from geotrek.core.views import get_graph_json
urlpatterns = patterns('',
url(r'^api/graph.json$', get_graph_json, name="path_json_graph"),
)
if settings.TREKKING_TOPOLOGY_ENABLED:
class PathEntityOptions(AltimetryEntityOptions):
# Profiles for paths
pass
urlpatterns += registry.register(Path, PathEntityOptions)
urlpatterns += registry.register(Trail, menu=settings.TRAIL_MODEL_ENABLED)
|
from django.conf import settings
from django.conf.urls import patterns, url
from mapentity import registry
from geotrek.altimetry.urls import AltimetryEntityOptions
from geotrek.core.models import Path, Trail
from geotrek.core.views import get_graph_json
if settings.TREKKING_TOPOLOGY_ENABLED:
urlpatterns = patterns('',
url(r'^api/graph.json$', get_graph_json, name="path_json_graph"),
)
class PathEntityOptions(AltimetryEntityOptions):
# Profiles for paths
pass
urlpatterns += registry.register(Path, PathEntityOptions)
urlpatterns += registry.register(Trail, menu=settings.TRAIL_MODEL_ENABLED)
Fix URL error with Geotrek lightfrom django.conf import settings
from django.conf.urls import patterns, url
from mapentity import registry
from geotrek.altimetry.urls import AltimetryEntityOptions
from geotrek.core.models import Path, Trail
from geotrek.core.views import get_graph_json
urlpatterns = patterns('',
url(r'^api/graph.json$', get_graph_json, name="path_json_graph"),
)
if settings.TREKKING_TOPOLOGY_ENABLED:
class PathEntityOptions(AltimetryEntityOptions):
# Profiles for paths
pass
urlpatterns += registry.register(Path, PathEntityOptions)
urlpatterns += registry.register(Trail, menu=settings.TRAIL_MODEL_ENABLED)
|
<commit_before>from django.conf import settings
from django.conf.urls import patterns, url
from mapentity import registry
from geotrek.altimetry.urls import AltimetryEntityOptions
from geotrek.core.models import Path, Trail
from geotrek.core.views import get_graph_json
if settings.TREKKING_TOPOLOGY_ENABLED:
urlpatterns = patterns('',
url(r'^api/graph.json$', get_graph_json, name="path_json_graph"),
)
class PathEntityOptions(AltimetryEntityOptions):
# Profiles for paths
pass
urlpatterns += registry.register(Path, PathEntityOptions)
urlpatterns += registry.register(Trail, menu=settings.TRAIL_MODEL_ENABLED)
<commit_msg>Fix URL error with Geotrek light<commit_after>from django.conf import settings
from django.conf.urls import patterns, url
from mapentity import registry
from geotrek.altimetry.urls import AltimetryEntityOptions
from geotrek.core.models import Path, Trail
from geotrek.core.views import get_graph_json
urlpatterns = patterns('',
url(r'^api/graph.json$', get_graph_json, name="path_json_graph"),
)
if settings.TREKKING_TOPOLOGY_ENABLED:
class PathEntityOptions(AltimetryEntityOptions):
# Profiles for paths
pass
urlpatterns += registry.register(Path, PathEntityOptions)
urlpatterns += registry.register(Trail, menu=settings.TRAIL_MODEL_ENABLED)
|
355b70412f8b725dcf6771967387cf4ba999c98b
|
fetch_configs/syzygy.py
|
fetch_configs/syzygy.py
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://github.com/google/syzygy.git',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://chromium.googlesource.com/syzygy',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
Update fetch config with new Syzygy location.
|
Update fetch config with new Syzygy location.
Change-Id: Iacc2efd6974f1a161da6e33a0d25d6329fcaaf4f
Reviewed-on: https://chromium-review.googlesource.com/692697
Commit-Queue: Sébastien Marchand <b98658856fe44d267ccfa37efbb15fc831b08ae9@chromium.org>
Reviewed-by: Aaron Gable <bbed39beedae4cdb499af742d2fcd6c63934d93b@chromium.org>
Reviewed-by: Sébastien Marchand <b98658856fe44d267ccfa37efbb15fc831b08ae9@chromium.org>
|
Python
|
bsd-3-clause
|
CoherentLabs/depot_tools,CoherentLabs/depot_tools
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://github.com/google/syzygy.git',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
Update fetch config with new Syzygy location.
Change-Id: Iacc2efd6974f1a161da6e33a0d25d6329fcaaf4f
Reviewed-on: https://chromium-review.googlesource.com/692697
Commit-Queue: Sébastien Marchand <b98658856fe44d267ccfa37efbb15fc831b08ae9@chromium.org>
Reviewed-by: Aaron Gable <bbed39beedae4cdb499af742d2fcd6c63934d93b@chromium.org>
Reviewed-by: Sébastien Marchand <b98658856fe44d267ccfa37efbb15fc831b08ae9@chromium.org>
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://chromium.googlesource.com/syzygy',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://github.com/google/syzygy.git',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
<commit_msg>Update fetch config with new Syzygy location.
Change-Id: Iacc2efd6974f1a161da6e33a0d25d6329fcaaf4f
Reviewed-on: https://chromium-review.googlesource.com/692697
Commit-Queue: Sébastien Marchand <b98658856fe44d267ccfa37efbb15fc831b08ae9@chromium.org>
Reviewed-by: Aaron Gable <bbed39beedae4cdb499af742d2fcd6c63934d93b@chromium.org>
Reviewed-by: Sébastien Marchand <b98658856fe44d267ccfa37efbb15fc831b08ae9@chromium.org><commit_after>
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://chromium.googlesource.com/syzygy',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://github.com/google/syzygy.git',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
Update fetch config with new Syzygy location.
Change-Id: Iacc2efd6974f1a161da6e33a0d25d6329fcaaf4f
Reviewed-on: https://chromium-review.googlesource.com/692697
Commit-Queue: Sébastien Marchand <b98658856fe44d267ccfa37efbb15fc831b08ae9@chromium.org>
Reviewed-by: Aaron Gable <bbed39beedae4cdb499af742d2fcd6c63934d93b@chromium.org>
Reviewed-by: Sébastien Marchand <b98658856fe44d267ccfa37efbb15fc831b08ae9@chromium.org># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://chromium.googlesource.com/syzygy',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
<commit_before># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://github.com/google/syzygy.git',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
<commit_msg>Update fetch config with new Syzygy location.
Change-Id: Iacc2efd6974f1a161da6e33a0d25d6329fcaaf4f
Reviewed-on: https://chromium-review.googlesource.com/692697
Commit-Queue: Sébastien Marchand <b98658856fe44d267ccfa37efbb15fc831b08ae9@chromium.org>
Reviewed-by: Aaron Gable <bbed39beedae4cdb499af742d2fcd6c63934d93b@chromium.org>
Reviewed-by: Sébastien Marchand <b98658856fe44d267ccfa37efbb15fc831b08ae9@chromium.org><commit_after># Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import sys
import config_util # pylint: disable=import-error
# This class doesn't need an __init__ method, so we disable the warning
# pylint: disable=no-init
class Syzygy(config_util.Config):
"""Basic Config class for Syzygy."""
@staticmethod
def fetch_spec(_props):
return {
'type': 'gclient_git',
'gclient_git_spec': {
'solutions': [
{
'name' : 'src',
'url' : 'https://chromium.googlesource.com/syzygy',
'deps_file': 'DEPS',
'managed' : False,
}
],
},
}
@staticmethod
def expected_root(_props):
return 'src'
def main(argv=None):
return Syzygy().handle_args(argv)
if __name__ == '__main__':
sys.exit(main(sys.argv))
|
d115c8902d51c37dacda96a10a7bd17a1e741f4e
|
ooni/tests/test_errors.py
|
ooni/tests/test_errors.py
|
from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after it's parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
Fix typo in docstring spotted by @armadev
|
Fix typo in docstring spotted by @armadev
|
Python
|
bsd-2-clause
|
0xPoly/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe,0xPoly/ooni-probe
|
from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after it's parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
Fix typo in docstring spotted by @armadev
|
from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
<commit_before>from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after it's parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
<commit_msg>Fix typo in docstring spotted by @armadev<commit_after>
|
from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after it's parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
Fix typo in docstring spotted by @armadevfrom twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
<commit_before>from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after it's parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
<commit_msg>Fix typo in docstring spotted by @armadev<commit_after>from twisted.trial import unittest
import ooni.errors
class TestErrors(unittest.TestCase):
def test_catch_child_failures_before_parent_failures(self):
"""
Verify that more specific Failures are caught first by
handleAllFailures() and failureToString().
Fails if a subclass is listed after its parent Failure.
"""
# Check each Failure against all subsequent failures
for index, (failure, _) in enumerate(ooni.errors.known_failures):
for sub_failure, _ in ooni.errors.known_failures[index+1:]:
# Fail if subsequent Failure inherits from the current Failure
self.assertNotIsInstance(sub_failure(None), failure)
|
e0d8289c8ea3240f1c7cceb4e42470c814a81e61
|
members/serializers.py
|
members/serializers.py
|
from rest_framework import serializers
from members.models import Band
from members.models import BandMember
class BandMemberSerializer(serializers.ModelSerializer):
class Meta:
model = BandMember
fields = ('id',
'account',
'section',
'instrument_number',
'band',
'created_at',
'updated_at',)
read_only_fields = ('account', 'created_at', 'updated_at',)
class BandSerializer(serializers.ModelSerializer):
class Meta:
model = Band
fields = ('id',
'identifier',
'created_at',
'updated_at',)
read_only_fields = ('created_at', 'updated_at',)
|
from rest_framework import serializers
from members.models import Band
from members.models import BandMember
class BandMemberSerializer(serializers.ModelSerializer):
class Meta:
model = BandMember
fields = ('id',
'account',
'section',
'instrument_number',
'band',
'created_at',
'updated_at',)
read_only_fields = ('account', 'created_at', 'updated_at',)
class BandSerializer(serializers.ModelSerializer):
class Meta:
model = Band
fields = ('id',
'identifier',
'created_at',
'updated_at',)
read_only_fields = ('created_at', 'updated_at',)
def create(self, validated_data):
band = Band.objects.create(**validated_data)
band.unassigned_members = BandMember.objects.all()
band.save()
return band
|
Add band members as unassigned to new bands
|
Add band members as unassigned to new bands
|
Python
|
mit
|
KonichiwaKen/band-dashboard,KonichiwaKen/band-dashboard,KonichiwaKen/band-dashboard
|
from rest_framework import serializers
from members.models import Band
from members.models import BandMember
class BandMemberSerializer(serializers.ModelSerializer):
class Meta:
model = BandMember
fields = ('id',
'account',
'section',
'instrument_number',
'band',
'created_at',
'updated_at',)
read_only_fields = ('account', 'created_at', 'updated_at',)
class BandSerializer(serializers.ModelSerializer):
class Meta:
model = Band
fields = ('id',
'identifier',
'created_at',
'updated_at',)
read_only_fields = ('created_at', 'updated_at',)
Add band members as unassigned to new bands
|
from rest_framework import serializers
from members.models import Band
from members.models import BandMember
class BandMemberSerializer(serializers.ModelSerializer):
class Meta:
model = BandMember
fields = ('id',
'account',
'section',
'instrument_number',
'band',
'created_at',
'updated_at',)
read_only_fields = ('account', 'created_at', 'updated_at',)
class BandSerializer(serializers.ModelSerializer):
class Meta:
model = Band
fields = ('id',
'identifier',
'created_at',
'updated_at',)
read_only_fields = ('created_at', 'updated_at',)
def create(self, validated_data):
band = Band.objects.create(**validated_data)
band.unassigned_members = BandMember.objects.all()
band.save()
return band
|
<commit_before>from rest_framework import serializers
from members.models import Band
from members.models import BandMember
class BandMemberSerializer(serializers.ModelSerializer):
class Meta:
model = BandMember
fields = ('id',
'account',
'section',
'instrument_number',
'band',
'created_at',
'updated_at',)
read_only_fields = ('account', 'created_at', 'updated_at',)
class BandSerializer(serializers.ModelSerializer):
class Meta:
model = Band
fields = ('id',
'identifier',
'created_at',
'updated_at',)
read_only_fields = ('created_at', 'updated_at',)
<commit_msg>Add band members as unassigned to new bands<commit_after>
|
from rest_framework import serializers
from members.models import Band
from members.models import BandMember
class BandMemberSerializer(serializers.ModelSerializer):
class Meta:
model = BandMember
fields = ('id',
'account',
'section',
'instrument_number',
'band',
'created_at',
'updated_at',)
read_only_fields = ('account', 'created_at', 'updated_at',)
class BandSerializer(serializers.ModelSerializer):
class Meta:
model = Band
fields = ('id',
'identifier',
'created_at',
'updated_at',)
read_only_fields = ('created_at', 'updated_at',)
def create(self, validated_data):
band = Band.objects.create(**validated_data)
band.unassigned_members = BandMember.objects.all()
band.save()
return band
|
from rest_framework import serializers
from members.models import Band
from members.models import BandMember
class BandMemberSerializer(serializers.ModelSerializer):
class Meta:
model = BandMember
fields = ('id',
'account',
'section',
'instrument_number',
'band',
'created_at',
'updated_at',)
read_only_fields = ('account', 'created_at', 'updated_at',)
class BandSerializer(serializers.ModelSerializer):
class Meta:
model = Band
fields = ('id',
'identifier',
'created_at',
'updated_at',)
read_only_fields = ('created_at', 'updated_at',)
Add band members as unassigned to new bandsfrom rest_framework import serializers
from members.models import Band
from members.models import BandMember
class BandMemberSerializer(serializers.ModelSerializer):
class Meta:
model = BandMember
fields = ('id',
'account',
'section',
'instrument_number',
'band',
'created_at',
'updated_at',)
read_only_fields = ('account', 'created_at', 'updated_at',)
class BandSerializer(serializers.ModelSerializer):
class Meta:
model = Band
fields = ('id',
'identifier',
'created_at',
'updated_at',)
read_only_fields = ('created_at', 'updated_at',)
def create(self, validated_data):
band = Band.objects.create(**validated_data)
band.unassigned_members = BandMember.objects.all()
band.save()
return band
|
<commit_before>from rest_framework import serializers
from members.models import Band
from members.models import BandMember
class BandMemberSerializer(serializers.ModelSerializer):
class Meta:
model = BandMember
fields = ('id',
'account',
'section',
'instrument_number',
'band',
'created_at',
'updated_at',)
read_only_fields = ('account', 'created_at', 'updated_at',)
class BandSerializer(serializers.ModelSerializer):
class Meta:
model = Band
fields = ('id',
'identifier',
'created_at',
'updated_at',)
read_only_fields = ('created_at', 'updated_at',)
<commit_msg>Add band members as unassigned to new bands<commit_after>from rest_framework import serializers
from members.models import Band
from members.models import BandMember
class BandMemberSerializer(serializers.ModelSerializer):
class Meta:
model = BandMember
fields = ('id',
'account',
'section',
'instrument_number',
'band',
'created_at',
'updated_at',)
read_only_fields = ('account', 'created_at', 'updated_at',)
class BandSerializer(serializers.ModelSerializer):
class Meta:
model = Band
fields = ('id',
'identifier',
'created_at',
'updated_at',)
read_only_fields = ('created_at', 'updated_at',)
def create(self, validated_data):
band = Band.objects.create(**validated_data)
band.unassigned_members = BandMember.objects.all()
band.save()
return band
|
1cf14753174b6fdbd5999ac857ce0e55852194b6
|
dmoj/executors/ruby_executor.py
|
dmoj/executors/ruby_executor.py
|
import os
from .base_executor import ScriptExecutor
class RubyExecutor(ScriptExecutor):
ext = '.rb'
name = 'RUBY'
address_grace = 65536
test_program = 'puts gets'
def get_cmdline(self):
return [self.get_command(), '--disable-gems', self._code]
@classmethod
def get_version_flags(cls, command):
return ['-v']
@classmethod
def get_command(cls):
name = cls.get_executor_name().lower()
if name in cls.runtime_dict:
return cls.runtime_dict[name]
if '%s_home' % name in cls.runtime_dict:
return os.path.join(cls.runtime_dict['%s_home' % name], 'bin', 'ruby')
@classmethod
def get_versionable_commands(cls):
return ('ruby', cls.get_command()),
@classmethod
def get_find_first_mapping(cls):
return {cls.name.lower(): cls.command_paths}
|
import os
import re
from .base_executor import ScriptExecutor
class RubyExecutor(ScriptExecutor):
ext = '.rb'
name = 'RUBY'
address_grace = 65536
test_program = 'puts gets'
def get_fs(self):
fs = super(RubyExecutor, self).get_fs()
home = self.runtime_dict.get('%s_home' % self.get_executor_name().lower())
if home is not None:
fs.append(re.escape(home))
components = home.split('/')
components.pop()
while components and components[-1]:
fs.append(re.escape('/'.join(components)) + '$')
components.pop()
return fs
def get_cmdline(self):
return [self.get_command(), '--disable-gems', self._code]
@classmethod
def get_version_flags(cls, command):
return ['-v']
@classmethod
def get_command(cls):
name = cls.get_executor_name().lower()
if name in cls.runtime_dict:
return cls.runtime_dict[name]
if '%s_home' % name in cls.runtime_dict:
return os.path.join(cls.runtime_dict['%s_home' % name], 'bin', 'ruby')
@classmethod
def get_versionable_commands(cls):
return ('ruby', cls.get_command()),
@classmethod
def get_find_first_mapping(cls):
return {cls.name.lower(): cls.command_paths}
|
Make Ruby work on Travis
|
Make Ruby work on Travis
|
Python
|
agpl-3.0
|
DMOJ/judge,DMOJ/judge,DMOJ/judge
|
import os
from .base_executor import ScriptExecutor
class RubyExecutor(ScriptExecutor):
ext = '.rb'
name = 'RUBY'
address_grace = 65536
test_program = 'puts gets'
def get_cmdline(self):
return [self.get_command(), '--disable-gems', self._code]
@classmethod
def get_version_flags(cls, command):
return ['-v']
@classmethod
def get_command(cls):
name = cls.get_executor_name().lower()
if name in cls.runtime_dict:
return cls.runtime_dict[name]
if '%s_home' % name in cls.runtime_dict:
return os.path.join(cls.runtime_dict['%s_home' % name], 'bin', 'ruby')
@classmethod
def get_versionable_commands(cls):
return ('ruby', cls.get_command()),
@classmethod
def get_find_first_mapping(cls):
return {cls.name.lower(): cls.command_paths}
Make Ruby work on Travis
|
import os
import re
from .base_executor import ScriptExecutor
class RubyExecutor(ScriptExecutor):
ext = '.rb'
name = 'RUBY'
address_grace = 65536
test_program = 'puts gets'
def get_fs(self):
fs = super(RubyExecutor, self).get_fs()
home = self.runtime_dict.get('%s_home' % self.get_executor_name().lower())
if home is not None:
fs.append(re.escape(home))
components = home.split('/')
components.pop()
while components and components[-1]:
fs.append(re.escape('/'.join(components)) + '$')
components.pop()
return fs
def get_cmdline(self):
return [self.get_command(), '--disable-gems', self._code]
@classmethod
def get_version_flags(cls, command):
return ['-v']
@classmethod
def get_command(cls):
name = cls.get_executor_name().lower()
if name in cls.runtime_dict:
return cls.runtime_dict[name]
if '%s_home' % name in cls.runtime_dict:
return os.path.join(cls.runtime_dict['%s_home' % name], 'bin', 'ruby')
@classmethod
def get_versionable_commands(cls):
return ('ruby', cls.get_command()),
@classmethod
def get_find_first_mapping(cls):
return {cls.name.lower(): cls.command_paths}
|
<commit_before>import os
from .base_executor import ScriptExecutor
class RubyExecutor(ScriptExecutor):
ext = '.rb'
name = 'RUBY'
address_grace = 65536
test_program = 'puts gets'
def get_cmdline(self):
return [self.get_command(), '--disable-gems', self._code]
@classmethod
def get_version_flags(cls, command):
return ['-v']
@classmethod
def get_command(cls):
name = cls.get_executor_name().lower()
if name in cls.runtime_dict:
return cls.runtime_dict[name]
if '%s_home' % name in cls.runtime_dict:
return os.path.join(cls.runtime_dict['%s_home' % name], 'bin', 'ruby')
@classmethod
def get_versionable_commands(cls):
return ('ruby', cls.get_command()),
@classmethod
def get_find_first_mapping(cls):
return {cls.name.lower(): cls.command_paths}
<commit_msg>Make Ruby work on Travis<commit_after>
|
import os
import re
from .base_executor import ScriptExecutor
class RubyExecutor(ScriptExecutor):
ext = '.rb'
name = 'RUBY'
address_grace = 65536
test_program = 'puts gets'
def get_fs(self):
fs = super(RubyExecutor, self).get_fs()
home = self.runtime_dict.get('%s_home' % self.get_executor_name().lower())
if home is not None:
fs.append(re.escape(home))
components = home.split('/')
components.pop()
while components and components[-1]:
fs.append(re.escape('/'.join(components)) + '$')
components.pop()
return fs
def get_cmdline(self):
return [self.get_command(), '--disable-gems', self._code]
@classmethod
def get_version_flags(cls, command):
return ['-v']
@classmethod
def get_command(cls):
name = cls.get_executor_name().lower()
if name in cls.runtime_dict:
return cls.runtime_dict[name]
if '%s_home' % name in cls.runtime_dict:
return os.path.join(cls.runtime_dict['%s_home' % name], 'bin', 'ruby')
@classmethod
def get_versionable_commands(cls):
return ('ruby', cls.get_command()),
@classmethod
def get_find_first_mapping(cls):
return {cls.name.lower(): cls.command_paths}
|
import os
from .base_executor import ScriptExecutor
class RubyExecutor(ScriptExecutor):
ext = '.rb'
name = 'RUBY'
address_grace = 65536
test_program = 'puts gets'
def get_cmdline(self):
return [self.get_command(), '--disable-gems', self._code]
@classmethod
def get_version_flags(cls, command):
return ['-v']
@classmethod
def get_command(cls):
name = cls.get_executor_name().lower()
if name in cls.runtime_dict:
return cls.runtime_dict[name]
if '%s_home' % name in cls.runtime_dict:
return os.path.join(cls.runtime_dict['%s_home' % name], 'bin', 'ruby')
@classmethod
def get_versionable_commands(cls):
return ('ruby', cls.get_command()),
@classmethod
def get_find_first_mapping(cls):
return {cls.name.lower(): cls.command_paths}
Make Ruby work on Travisimport os
import re
from .base_executor import ScriptExecutor
class RubyExecutor(ScriptExecutor):
ext = '.rb'
name = 'RUBY'
address_grace = 65536
test_program = 'puts gets'
def get_fs(self):
fs = super(RubyExecutor, self).get_fs()
home = self.runtime_dict.get('%s_home' % self.get_executor_name().lower())
if home is not None:
fs.append(re.escape(home))
components = home.split('/')
components.pop()
while components and components[-1]:
fs.append(re.escape('/'.join(components)) + '$')
components.pop()
return fs
def get_cmdline(self):
return [self.get_command(), '--disable-gems', self._code]
@classmethod
def get_version_flags(cls, command):
return ['-v']
@classmethod
def get_command(cls):
name = cls.get_executor_name().lower()
if name in cls.runtime_dict:
return cls.runtime_dict[name]
if '%s_home' % name in cls.runtime_dict:
return os.path.join(cls.runtime_dict['%s_home' % name], 'bin', 'ruby')
@classmethod
def get_versionable_commands(cls):
return ('ruby', cls.get_command()),
@classmethod
def get_find_first_mapping(cls):
return {cls.name.lower(): cls.command_paths}
|
<commit_before>import os
from .base_executor import ScriptExecutor
class RubyExecutor(ScriptExecutor):
ext = '.rb'
name = 'RUBY'
address_grace = 65536
test_program = 'puts gets'
def get_cmdline(self):
return [self.get_command(), '--disable-gems', self._code]
@classmethod
def get_version_flags(cls, command):
return ['-v']
@classmethod
def get_command(cls):
name = cls.get_executor_name().lower()
if name in cls.runtime_dict:
return cls.runtime_dict[name]
if '%s_home' % name in cls.runtime_dict:
return os.path.join(cls.runtime_dict['%s_home' % name], 'bin', 'ruby')
@classmethod
def get_versionable_commands(cls):
return ('ruby', cls.get_command()),
@classmethod
def get_find_first_mapping(cls):
return {cls.name.lower(): cls.command_paths}
<commit_msg>Make Ruby work on Travis<commit_after>import os
import re
from .base_executor import ScriptExecutor
class RubyExecutor(ScriptExecutor):
ext = '.rb'
name = 'RUBY'
address_grace = 65536
test_program = 'puts gets'
def get_fs(self):
fs = super(RubyExecutor, self).get_fs()
home = self.runtime_dict.get('%s_home' % self.get_executor_name().lower())
if home is not None:
fs.append(re.escape(home))
components = home.split('/')
components.pop()
while components and components[-1]:
fs.append(re.escape('/'.join(components)) + '$')
components.pop()
return fs
def get_cmdline(self):
return [self.get_command(), '--disable-gems', self._code]
@classmethod
def get_version_flags(cls, command):
return ['-v']
@classmethod
def get_command(cls):
name = cls.get_executor_name().lower()
if name in cls.runtime_dict:
return cls.runtime_dict[name]
if '%s_home' % name in cls.runtime_dict:
return os.path.join(cls.runtime_dict['%s_home' % name], 'bin', 'ruby')
@classmethod
def get_versionable_commands(cls):
return ('ruby', cls.get_command()),
@classmethod
def get_find_first_mapping(cls):
return {cls.name.lower(): cls.command_paths}
|
a735ad28d1d8996ece647dd95f68d19e629a4d53
|
frigg/worker/fetcher.py
|
frigg/worker/fetcher.py
|
# -*- coding: utf8 -*-
import json
import threading
import time
import logging
from frigg.worker import config
from frigg.worker.jobs import Build
logger = logging.getLogger(__name__)
def fetcher():
redis = config.redis_client()
while redis:
task = redis.rpop('frigg:queue')
if task:
__start_task(task)
time.sleep(2)
def __start_task(json_string):
task = json.loads(json_string)
thread = threading.Thread(name='build-%s' % task['id'], target=__start_build, args=[task])
thread.daemon = True
thread.start()
logger.info('Started %s' % task)
return thread
def __start_build(task):
build = Build(task['id'], task)
build.run_tests()
for result in build.results:
del result
del build
|
# -*- coding: utf8 -*-
import sys
import json
import threading
import time
import logging
from frigg.worker import config
from frigg.worker.jobs import Build
logger = logging.getLogger(__name__)
def fetcher():
redis = config.redis_client()
while redis:
task = redis.rpop('frigg:queue')
if task:
__start_task(task)
time.sleep(2)
def __start_task(json_string):
task = json.loads(json_string)
thread = threading.Thread(name='build-%s' % task['id'], target=__start_build, args=[task])
thread.daemon = True
thread.start()
logger.info('Started %s' % task)
return thread
def __start_build(task):
build = Build(task['id'], task)
build.run_tests()
sys.exit(0)
|
Make worker exit after a build
|
Make worker exit after a build
|
Python
|
mit
|
frigg/frigg-worker
|
# -*- coding: utf8 -*-
import json
import threading
import time
import logging
from frigg.worker import config
from frigg.worker.jobs import Build
logger = logging.getLogger(__name__)
def fetcher():
redis = config.redis_client()
while redis:
task = redis.rpop('frigg:queue')
if task:
__start_task(task)
time.sleep(2)
def __start_task(json_string):
task = json.loads(json_string)
thread = threading.Thread(name='build-%s' % task['id'], target=__start_build, args=[task])
thread.daemon = True
thread.start()
logger.info('Started %s' % task)
return thread
def __start_build(task):
build = Build(task['id'], task)
build.run_tests()
for result in build.results:
del result
del build
Make worker exit after a build
|
# -*- coding: utf8 -*-
import sys
import json
import threading
import time
import logging
from frigg.worker import config
from frigg.worker.jobs import Build
logger = logging.getLogger(__name__)
def fetcher():
redis = config.redis_client()
while redis:
task = redis.rpop('frigg:queue')
if task:
__start_task(task)
time.sleep(2)
def __start_task(json_string):
task = json.loads(json_string)
thread = threading.Thread(name='build-%s' % task['id'], target=__start_build, args=[task])
thread.daemon = True
thread.start()
logger.info('Started %s' % task)
return thread
def __start_build(task):
build = Build(task['id'], task)
build.run_tests()
sys.exit(0)
|
<commit_before># -*- coding: utf8 -*-
import json
import threading
import time
import logging
from frigg.worker import config
from frigg.worker.jobs import Build
logger = logging.getLogger(__name__)
def fetcher():
redis = config.redis_client()
while redis:
task = redis.rpop('frigg:queue')
if task:
__start_task(task)
time.sleep(2)
def __start_task(json_string):
task = json.loads(json_string)
thread = threading.Thread(name='build-%s' % task['id'], target=__start_build, args=[task])
thread.daemon = True
thread.start()
logger.info('Started %s' % task)
return thread
def __start_build(task):
build = Build(task['id'], task)
build.run_tests()
for result in build.results:
del result
del build
<commit_msg>Make worker exit after a build<commit_after>
|
# -*- coding: utf8 -*-
import sys
import json
import threading
import time
import logging
from frigg.worker import config
from frigg.worker.jobs import Build
logger = logging.getLogger(__name__)
def fetcher():
redis = config.redis_client()
while redis:
task = redis.rpop('frigg:queue')
if task:
__start_task(task)
time.sleep(2)
def __start_task(json_string):
task = json.loads(json_string)
thread = threading.Thread(name='build-%s' % task['id'], target=__start_build, args=[task])
thread.daemon = True
thread.start()
logger.info('Started %s' % task)
return thread
def __start_build(task):
build = Build(task['id'], task)
build.run_tests()
sys.exit(0)
|
# -*- coding: utf8 -*-
import json
import threading
import time
import logging
from frigg.worker import config
from frigg.worker.jobs import Build
logger = logging.getLogger(__name__)
def fetcher():
redis = config.redis_client()
while redis:
task = redis.rpop('frigg:queue')
if task:
__start_task(task)
time.sleep(2)
def __start_task(json_string):
task = json.loads(json_string)
thread = threading.Thread(name='build-%s' % task['id'], target=__start_build, args=[task])
thread.daemon = True
thread.start()
logger.info('Started %s' % task)
return thread
def __start_build(task):
build = Build(task['id'], task)
build.run_tests()
for result in build.results:
del result
del build
Make worker exit after a build# -*- coding: utf8 -*-
import sys
import json
import threading
import time
import logging
from frigg.worker import config
from frigg.worker.jobs import Build
logger = logging.getLogger(__name__)
def fetcher():
redis = config.redis_client()
while redis:
task = redis.rpop('frigg:queue')
if task:
__start_task(task)
time.sleep(2)
def __start_task(json_string):
task = json.loads(json_string)
thread = threading.Thread(name='build-%s' % task['id'], target=__start_build, args=[task])
thread.daemon = True
thread.start()
logger.info('Started %s' % task)
return thread
def __start_build(task):
build = Build(task['id'], task)
build.run_tests()
sys.exit(0)
|
<commit_before># -*- coding: utf8 -*-
import json
import threading
import time
import logging
from frigg.worker import config
from frigg.worker.jobs import Build
logger = logging.getLogger(__name__)
def fetcher():
redis = config.redis_client()
while redis:
task = redis.rpop('frigg:queue')
if task:
__start_task(task)
time.sleep(2)
def __start_task(json_string):
task = json.loads(json_string)
thread = threading.Thread(name='build-%s' % task['id'], target=__start_build, args=[task])
thread.daemon = True
thread.start()
logger.info('Started %s' % task)
return thread
def __start_build(task):
build = Build(task['id'], task)
build.run_tests()
for result in build.results:
del result
del build
<commit_msg>Make worker exit after a build<commit_after># -*- coding: utf8 -*-
import sys
import json
import threading
import time
import logging
from frigg.worker import config
from frigg.worker.jobs import Build
logger = logging.getLogger(__name__)
def fetcher():
redis = config.redis_client()
while redis:
task = redis.rpop('frigg:queue')
if task:
__start_task(task)
time.sleep(2)
def __start_task(json_string):
task = json.loads(json_string)
thread = threading.Thread(name='build-%s' % task['id'], target=__start_build, args=[task])
thread.daemon = True
thread.start()
logger.info('Started %s' % task)
return thread
def __start_build(task):
build = Build(task['id'], task)
build.run_tests()
sys.exit(0)
|
c02cad5047ff397229e1139109df80208e7dd5b6
|
fireant/__init__.py
|
fireant/__init__.py
|
# coding: utf-8
__version__ = '{major}.{minor}.{patch}'.format(major=0, minor=12, patch=0)
|
# coding: utf-8
__version__ = '{major}.{minor}.{patch}'.format(major=0, minor=13, patch=0)
|
Bump fireant version to 0.13.0
|
Bump fireant version to 0.13.0
|
Python
|
apache-2.0
|
kayak/fireant,mikeengland/fireant
|
# coding: utf-8
__version__ = '{major}.{minor}.{patch}'.format(major=0, minor=12, patch=0)
Bump fireant version to 0.13.0
|
# coding: utf-8
__version__ = '{major}.{minor}.{patch}'.format(major=0, minor=13, patch=0)
|
<commit_before># coding: utf-8
__version__ = '{major}.{minor}.{patch}'.format(major=0, minor=12, patch=0)
<commit_msg>Bump fireant version to 0.13.0<commit_after>
|
# coding: utf-8
__version__ = '{major}.{minor}.{patch}'.format(major=0, minor=13, patch=0)
|
# coding: utf-8
__version__ = '{major}.{minor}.{patch}'.format(major=0, minor=12, patch=0)
Bump fireant version to 0.13.0# coding: utf-8
__version__ = '{major}.{minor}.{patch}'.format(major=0, minor=13, patch=0)
|
<commit_before># coding: utf-8
__version__ = '{major}.{minor}.{patch}'.format(major=0, minor=12, patch=0)
<commit_msg>Bump fireant version to 0.13.0<commit_after># coding: utf-8
__version__ = '{major}.{minor}.{patch}'.format(major=0, minor=13, patch=0)
|
f9b4f5857d3266a2c5661920144f33aad9ef8a3f
|
amy/autoemails/tests/base.py
|
amy/autoemails/tests/base.py
|
import django_rq
from fakeredis import FakeStrictRedis
from redis import Redis
from rq import Queue
class FakeRedisTestCaseMixin:
"""TestCase mixin that provides easy setup of FakeRedis connection to both
Django-RQ and RQ-Scheduler, as well as test-teardown with scheduled jobs
purging."""
def setUp(self):
super().setUp()
# self.connection = FakeStrictRedis()
self.connection = Redis()
self.queue = Queue(is_async=False, connection=self.connection)
self.scheduler = django_rq.get_scheduler('testing', queue=self.queue)
def tearDown(self):
# clear job queue
for job in self.scheduler.get_jobs():
self.scheduler.cancel(job)
assert not bool(list(self.scheduler.get_jobs()))
assert self.scheduler.count() == 0
super().tearDown()
|
import django_rq
from fakeredis import FakeStrictRedis
from redis import Redis
from rq import Queue
connection = FakeStrictRedis()
class FakeRedisTestCaseMixin:
"""TestCase mixin that provides easy setup of FakeRedis connection to both
Django-RQ and RQ-Scheduler, as well as test-teardown with scheduled jobs
purging."""
def setUp(self):
super().setUp()
self.connection = connection
# self.connection = Redis()
self.queue = Queue(is_async=False, connection=self.connection)
self.scheduler = django_rq.get_scheduler('testing', queue=self.queue)
def tearDown(self):
# clear job queue
for job in self.scheduler.get_jobs():
self.scheduler.cancel(job)
assert not bool(list(self.scheduler.get_jobs()))
assert self.scheduler.count() == 0
super().tearDown()
|
Switch tests to fake redis implementation
|
Switch tests to fake redis implementation
|
Python
|
mit
|
swcarpentry/amy,swcarpentry/amy,swcarpentry/amy,pbanaszkiewicz/amy,pbanaszkiewicz/amy,pbanaszkiewicz/amy
|
import django_rq
from fakeredis import FakeStrictRedis
from redis import Redis
from rq import Queue
class FakeRedisTestCaseMixin:
"""TestCase mixin that provides easy setup of FakeRedis connection to both
Django-RQ and RQ-Scheduler, as well as test-teardown with scheduled jobs
purging."""
def setUp(self):
super().setUp()
# self.connection = FakeStrictRedis()
self.connection = Redis()
self.queue = Queue(is_async=False, connection=self.connection)
self.scheduler = django_rq.get_scheduler('testing', queue=self.queue)
def tearDown(self):
# clear job queue
for job in self.scheduler.get_jobs():
self.scheduler.cancel(job)
assert not bool(list(self.scheduler.get_jobs()))
assert self.scheduler.count() == 0
super().tearDown()
Switch tests to fake redis implementation
|
import django_rq
from fakeredis import FakeStrictRedis
from redis import Redis
from rq import Queue
connection = FakeStrictRedis()
class FakeRedisTestCaseMixin:
"""TestCase mixin that provides easy setup of FakeRedis connection to both
Django-RQ and RQ-Scheduler, as well as test-teardown with scheduled jobs
purging."""
def setUp(self):
super().setUp()
self.connection = connection
# self.connection = Redis()
self.queue = Queue(is_async=False, connection=self.connection)
self.scheduler = django_rq.get_scheduler('testing', queue=self.queue)
def tearDown(self):
# clear job queue
for job in self.scheduler.get_jobs():
self.scheduler.cancel(job)
assert not bool(list(self.scheduler.get_jobs()))
assert self.scheduler.count() == 0
super().tearDown()
|
<commit_before>import django_rq
from fakeredis import FakeStrictRedis
from redis import Redis
from rq import Queue
class FakeRedisTestCaseMixin:
"""TestCase mixin that provides easy setup of FakeRedis connection to both
Django-RQ and RQ-Scheduler, as well as test-teardown with scheduled jobs
purging."""
def setUp(self):
super().setUp()
# self.connection = FakeStrictRedis()
self.connection = Redis()
self.queue = Queue(is_async=False, connection=self.connection)
self.scheduler = django_rq.get_scheduler('testing', queue=self.queue)
def tearDown(self):
# clear job queue
for job in self.scheduler.get_jobs():
self.scheduler.cancel(job)
assert not bool(list(self.scheduler.get_jobs()))
assert self.scheduler.count() == 0
super().tearDown()
<commit_msg>Switch tests to fake redis implementation<commit_after>
|
import django_rq
from fakeredis import FakeStrictRedis
from redis import Redis
from rq import Queue
connection = FakeStrictRedis()
class FakeRedisTestCaseMixin:
"""TestCase mixin that provides easy setup of FakeRedis connection to both
Django-RQ and RQ-Scheduler, as well as test-teardown with scheduled jobs
purging."""
def setUp(self):
super().setUp()
self.connection = connection
# self.connection = Redis()
self.queue = Queue(is_async=False, connection=self.connection)
self.scheduler = django_rq.get_scheduler('testing', queue=self.queue)
def tearDown(self):
# clear job queue
for job in self.scheduler.get_jobs():
self.scheduler.cancel(job)
assert not bool(list(self.scheduler.get_jobs()))
assert self.scheduler.count() == 0
super().tearDown()
|
import django_rq
from fakeredis import FakeStrictRedis
from redis import Redis
from rq import Queue
class FakeRedisTestCaseMixin:
"""TestCase mixin that provides easy setup of FakeRedis connection to both
Django-RQ and RQ-Scheduler, as well as test-teardown with scheduled jobs
purging."""
def setUp(self):
super().setUp()
# self.connection = FakeStrictRedis()
self.connection = Redis()
self.queue = Queue(is_async=False, connection=self.connection)
self.scheduler = django_rq.get_scheduler('testing', queue=self.queue)
def tearDown(self):
# clear job queue
for job in self.scheduler.get_jobs():
self.scheduler.cancel(job)
assert not bool(list(self.scheduler.get_jobs()))
assert self.scheduler.count() == 0
super().tearDown()
Switch tests to fake redis implementationimport django_rq
from fakeredis import FakeStrictRedis
from redis import Redis
from rq import Queue
connection = FakeStrictRedis()
class FakeRedisTestCaseMixin:
"""TestCase mixin that provides easy setup of FakeRedis connection to both
Django-RQ and RQ-Scheduler, as well as test-teardown with scheduled jobs
purging."""
def setUp(self):
super().setUp()
self.connection = connection
# self.connection = Redis()
self.queue = Queue(is_async=False, connection=self.connection)
self.scheduler = django_rq.get_scheduler('testing', queue=self.queue)
def tearDown(self):
# clear job queue
for job in self.scheduler.get_jobs():
self.scheduler.cancel(job)
assert not bool(list(self.scheduler.get_jobs()))
assert self.scheduler.count() == 0
super().tearDown()
|
<commit_before>import django_rq
from fakeredis import FakeStrictRedis
from redis import Redis
from rq import Queue
class FakeRedisTestCaseMixin:
"""TestCase mixin that provides easy setup of FakeRedis connection to both
Django-RQ and RQ-Scheduler, as well as test-teardown with scheduled jobs
purging."""
def setUp(self):
super().setUp()
# self.connection = FakeStrictRedis()
self.connection = Redis()
self.queue = Queue(is_async=False, connection=self.connection)
self.scheduler = django_rq.get_scheduler('testing', queue=self.queue)
def tearDown(self):
# clear job queue
for job in self.scheduler.get_jobs():
self.scheduler.cancel(job)
assert not bool(list(self.scheduler.get_jobs()))
assert self.scheduler.count() == 0
super().tearDown()
<commit_msg>Switch tests to fake redis implementation<commit_after>import django_rq
from fakeredis import FakeStrictRedis
from redis import Redis
from rq import Queue
connection = FakeStrictRedis()
class FakeRedisTestCaseMixin:
"""TestCase mixin that provides easy setup of FakeRedis connection to both
Django-RQ and RQ-Scheduler, as well as test-teardown with scheduled jobs
purging."""
def setUp(self):
super().setUp()
self.connection = connection
# self.connection = Redis()
self.queue = Queue(is_async=False, connection=self.connection)
self.scheduler = django_rq.get_scheduler('testing', queue=self.queue)
def tearDown(self):
# clear job queue
for job in self.scheduler.get_jobs():
self.scheduler.cancel(job)
assert not bool(list(self.scheduler.get_jobs()))
assert self.scheduler.count() == 0
super().tearDown()
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.