Dataset schema. Every column is a string feature; the viewer statistics give the observed minimum and maximum string lengths (or, for class-like columns, the number of distinct values):

| column | type | min length | max length |
|---|---|---|---|
| commit | string | 40 | 40 |
| old_file | string | 4 | 118 |
| new_file | string | 4 | 118 |
| old_contents | string | 0 | 2.94k |
| new_contents | string | 1 | 4.43k |
| subject | string | 15 | 444 |
| message | string | 16 | 3.45k |
| lang | string (1 class) | n/a | n/a |
| license | string (13 classes) | n/a | n/a |
| repos | string | 5 | 43.2k |
| prompt | string | 17 | 4.58k |
| response | string | 1 | 4.43k |
| prompt_tagged | string | 58 | 4.62k |
| response_tagged | string | 1 | 4.43k |
| text | string | 132 | 7.29k |
| text_tagged | string | 173 | 7.33k |

The last six columns are derived from the base fields: prompt is old_contents followed by subject; response equals new_contents; prompt_tagged and response_tagged wrap the same strings in <commit_before>, <commit_msg>, and <commit_after> markers; text and text_tagged concatenate the untagged and tagged prompt/response pairs. The sample rows below list the base fields in schema order, one field per block, separated by | lines.
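A hedged loading sketch for a dataset with this schema; the path "user/commit-dataset" is a placeholder, not the real dataset identifier:

```python
from datasets import load_dataset

# "user/commit-dataset" is a hypothetical path; substitute the real dataset id.
ds = load_dataset("user/commit-dataset", split="train")

row = ds[0]
print(row["commit"], row["old_file"])
print(row["subject"])
```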
5545bd1df34e6d3bb600b78b92d757ea12e3861b
|
printer/PlatformPhysicsOperation.py
|
printer/PlatformPhysicsOperation.py
|
from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation

## A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
    def __init__(self, node, translation):
        super().__init__()
        self._node = node
        self._translation = translation

    def undo(self):
        pass

    def redo(self):
        pass

    def mergeWith(self, other):
        if type(other) is AddSceneNodeOperation:
            other._node.translate(self._translation)
            return other
        elif type(other) is TranslateOperation:
            other._translation += self._translation
            return other
        else:
            return False
|
from UM.Operations.Operation import Operation
from UM.Operations.AddSceneNodeOperation import AddSceneNodeOperation
from UM.Operations.TranslateOperation import TranslateOperation
from UM.Operations.GroupedOperation import GroupedOperation

## A specialised operation designed specifically to modify the previous operation.
class PlatformPhysicsOperation(Operation):
    def __init__(self, node, translation):
        super().__init__()
        self._node = node
        self._transform = node.getLocalTransformation()
        self._position = node.getPosition() + translation
        self._always_merge = True

    def undo(self):
        self._node.setLocalTransformation(self._transform)

    def redo(self):
        self._node.setPosition(self._position)

    def mergeWith(self, other):
        group = GroupedOperation()
        group.addOperation(self)
        group.addOperation(other)
        return group

    def __repr__(self):
        return 'PlatformPhysicsOperation(t = {0})'.format(self._position)
|
Use GroupedOperation for merging PlatformPhyisicsOperation
|
Use GroupedOperation for merging PlatformPhyisicsOperation
|
Python
|
agpl-3.0
|
onitake/Uranium,onitake/Uranium
|
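The rewritten mergeWith no longer mutates the other operation; it bundles both into a GroupedOperation so they undo and redo as one step. A minimal sketch of what such a container might look like, assuming Uranium's API only in outline (this is not its actual implementation):

```python
class GroupedOperation:
    """Treats a list of child operations as a single undo/redo step (sketch)."""

    def __init__(self):
        self._children = []

    def addOperation(self, op):
        self._children.append(op)

    def redo(self):
        for op in self._children:
            op.redo()

    def undo(self):
        # Roll back in reverse order so later operations are undone first.
        for op in reversed(self._children):
            op.undo()
```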
f733300f622a4ffc1f0179c90590d543dc37113e
|
weber_utils/pagination.py
|
weber_utils/pagination.py
|
import functools
from flask import jsonify, request
from flask.ext.sqlalchemy import Pagination
from .request_utils import dictify_model, error_abort

def paginate_query(query, default_page_size=100, renderer=dictify_model):
    try:
        page_size = int(request.args.get("page_size", default_page_size))
        page = int(request.args.get("page", 1))
    except ValueError:
        error_abort(httplib.BAD_REQUEST, "Invalid integer value")
    num_objects = query.count()
    return {
        "metadata": {
            "total_num_objects": num_objects,
            "total_num_pages": _ceil_div(num_objects, page_size) or 1,
            "page": page,
        },
        "result": [renderer(obj) for obj in query.offset((page-1)*page_size).limit(page_size)],
    }

def _ceil_div(value, divisor):
    returned = float(value) / divisor
    if int(returned) != returned:
        return int(returned) + 1
    return int(returned)

def paginated_view(func):
    @functools.wraps(func)
    def new_func(*args, **kwargs):
        returned = func(*args, **kwargs)
        return jsonify(paginate_query(returned))
    return new_func
|
import functools
from flask import jsonify, request
from flask.ext.sqlalchemy import Pagination
from .request_utils import dictify_model, error_abort

def paginate_query(query, default_page_size=100, renderer=dictify_model):
    try:
        page_size = int(request.args.get("page_size", default_page_size))
        page = int(request.args.get("page", 1))
    except ValueError:
        error_abort(httplib.BAD_REQUEST, "Invalid integer value")
    num_objects = query.count()
    return {
        "metadata": {
            "total_num_objects": num_objects,
            "total_num_pages": _ceil_div(num_objects, page_size) or 1,
            "page": page,
        },
        "result": [renderer(obj) for obj in query.offset((page-1)*page_size).limit(page_size)],
    }

def _ceil_div(value, divisor):
    returned = float(value) / divisor
    if int(returned) != returned:
        return int(returned) + 1
    return int(returned)

def paginated_view(func=None, renderer=dictify_model):
    if func is None:
        return functools.partial(paginated_view, renderer=renderer)
    @functools.wraps(func)
    def new_func(*args, **kwargs):
        returned = func(*args, **kwargs)
        return jsonify(paginate_query(returned, renderer=renderer))
    return new_func
|
Allow renderer argument to paginated_view decorator
|
Allow renderer argument to paginated_view decorator
|
Python
|
bsd-3-clause
|
vmalloc/weber-utils
|
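The func=None guard is the standard trick for a decorator that works both bare and with keyword arguments; a self-contained sketch of the idiom (log_calls and its names are illustrative, not part of weber_utils):

```python
import functools

def log_calls(func=None, prefix="call"):
    if func is None:
        # Used as @log_calls(prefix=...): return a decorator awaiting func.
        return functools.partial(log_calls, prefix=prefix)

    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        print(prefix, func.__name__)
        return func(*args, **kwargs)
    return wrapper

@log_calls                     # bare form: func is passed directly
def ping():
    return "pong"

@log_calls(prefix="invoking")  # parameterized form: partial first, then func
def pong():
    return "ping"

print(ping(), pong())
```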
574b4d95a48f4df676ed5f23f0c83a9df2bc241d
|
pydux/log_middleware.py
|
pydux/log_middleware.py
|
"""
logging middleware example
"""
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
|
from __future__ import print_function
"""
logging middleware example
"""

def log_middleware(store):
    """log all actions to console as they are dispatched"""
    def wrapper(next_):
        def log_dispatch(action):
            print('Dispatch Action:', action)
            return next_(action)
        return log_dispatch
    return wrapper
|
Use from __future__ import for print function
|
Use from __future__ import for print function
|
Python
|
mit
|
usrlocalben/pydux
|
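The future import only changes behavior under Python 2, where the print statement would otherwise treat the parenthesized arguments as a tuple; a quick illustration:

```python
from __future__ import print_function

# Python 2 without the import: print('Dispatch Action:', action) emits a tuple
# like ('Dispatch Action:', {...}); with it, both 2 and 3 print the two values.
print('Dispatch Action:', {'type': 'INIT'})
```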
"""
logging middleware example
"""
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
Use from __future__ import for print function
|
from __future__ import print_function
"""
logging middleware example
"""
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
|
<commit_before>"""
logging middleware example
"""
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
<commit_msg>Use from __future__ import for print function<commit_after>
|
from __future__ import print_function
"""
logging middleware example
"""
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
|
"""
logging middleware example
"""
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
Use from __future__ import for print functionfrom __future__ import print_function
"""
logging middleware example
"""
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
|
<commit_before>"""
logging middleware example
"""
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
<commit_msg>Use from __future__ import for print function<commit_after>from __future__ import print_function
"""
logging middleware example
"""
def log_middleware(store):
"""log all actions to console as they are dispatched"""
def wrapper(next_):
def log_dispatch(action):
print('Dispatch Action:', action)
return next_(action)
return log_dispatch
return wrapper
|
d80ee56ea6259265a534231a52146f9fd04c9689
|
taskflow/engines/__init__.py
|
taskflow/engines/__init__.py
|
# -*- coding: utf-8 -*-
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# promote helpers to this module namespace
from taskflow.engines.helpers import flow_from_detail # noqa
from taskflow.engines.helpers import load # noqa
from taskflow.engines.helpers import load_from_detail # noqa
from taskflow.engines.helpers import load_from_factory # noqa
from taskflow.engines.helpers import run # noqa
from taskflow.engines.helpers import save_factory_details # noqa
|
# -*- coding: utf-8 -*-
# Copyright (C) 2012 Yahoo! Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_utils import eventletutils as _eventletutils
# Give a nice warning that if eventlet is being used these modules
# are highly recommended to be patched (or otherwise bad things could
# happen).
_eventletutils.warn_eventlet_not_patched(
    expected_patched_modules=['time', 'thread'])
# Promote helpers to this module namespace (for easy access).
from taskflow.engines.helpers import flow_from_detail # noqa
from taskflow.engines.helpers import load # noqa
from taskflow.engines.helpers import load_from_detail # noqa
from taskflow.engines.helpers import load_from_factory # noqa
from taskflow.engines.helpers import run # noqa
from taskflow.engines.helpers import save_factory_details # noqa
|
Use oslo_utils eventletutils to warn about eventlet patching
|
Use oslo_utils eventletutils to warn about eventlet patching
Change-Id: I86ba0de51b5c5789efae187ebc1c46ae32ff8b8b
|
Python
|
apache-2.0
|
jimbobhickville/taskflow,openstack/taskflow,jimbobhickville/taskflow,openstack/taskflow,junneyang/taskflow,pombredanne/taskflow-1,junneyang/taskflow,pombredanne/taskflow-1
|
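Callers that do run under eventlet are expected to monkey-patch before importing the engines; a hedged usage sketch (assumes eventlet is installed, and that patching time and thread satisfies the check):

```python
import eventlet
eventlet.monkey_patch()  # patch time, thread, etc. before taskflow is imported

import taskflow.engines  # the warn_eventlet_not_patched check now passes quietly
```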
20ad5bf3b814b57035ed92358e7a8cad25e5a7ee
|
gcm/api.py
|
gcm/api.py
|
import urllib2
import json

def send_gcm_message(api_key, regs_id, data, collapse_key=None):
    """
    Send a GCM message for one or more devices, using json data
    api_key: The API_KEY from your console (https://code.google.com/apis/console, locate Key for Server Apps in
    Google Cloud Messaging for Android)
    regs_id: A list with the devices which will be receiving a message
    data: The dict data which will be send
    collapse_key: A string to group messages, look at the documentation about it:
    http://developer.android.com/google/gcm/gcm.html#request
    """
    values = {
        'registration_ids': regs_id,
        'collapse_key': collapse_key,
        'data': data
    }
    values = json.dumps(values)
    headers = {
        'UserAgent': "GCM-Server",
        'Content-Type': 'application/json',
        'Authorization': 'key=' + api_key,
    }
    request = urllib2.Request("https://android.googleapis.com/gcm/send", data=values, headers=headers)
    response = urllib2.urlopen(request)
    result = response.read()
    return result
|
import requests
import json

def send_gcm_message(api_key, regs_id, data, collapse_key=None):
    """
    Send a GCM message for one or more devices, using json data
    api_key: The API_KEY from your console (https://code.google.com/apis/console, locate Key for Server Apps in
    Google Cloud Messaging for Android)
    regs_id: A list with the devices which will be receiving a message
    data: The dict data which will be send
    collapse_key: A string to group messages, look at the documentation about it:
    http://developer.android.com/google/gcm/gcm.html#request
    """
    values = {
        'registration_ids': regs_id,
        'collapse_key': collapse_key,
        'data': data
    }
    values = json.dumps(values)
    headers = {
        'UserAgent': "GCM-Server",
        'Content-Type': 'application/json',
        'Authorization': 'key=' + api_key,
    }
    response = requests.post(url="https://android.googleapis.com/gcm/send",
                             data=values,
                             headers=headers)
    return response.content
|
Use requests package instead of urllib2
|
Use requests package instead of urllib2
|
Python
|
bsd-2-clause
|
johnofkorea/django-gcm,johnofkorea/django-gcm,bogdal/django-gcm,bogdal/django-gcm
|
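requests can also take over the JSON serialization and the Content-Type header via its json= parameter; a hedged alternative sketch of the same call (behavior-equivalent under that assumption):

```python
import requests

def send_gcm_message(api_key, regs_id, data, collapse_key=None):
    # requests serializes `json=` and sets Content-Type: application/json itself.
    response = requests.post(
        "https://android.googleapis.com/gcm/send",
        json={"registration_ids": regs_id, "collapse_key": collapse_key, "data": data},
        headers={"UserAgent": "GCM-Server", "Authorization": "key=" + api_key},
    )
    return response.content
```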
dd9f5980ded9b10210ea524169ef769a6eff3993
|
utils/paginate.py
|
utils/paginate.py
|
import discord
import asyncio
from typing import List, Tuple
from discord.ext.commands import Context

EMOJI_MAP = {"back": "⬅️", "forward": "➡️"}

async def paginate(ctx: Context, embeds: List[discord.Embed], timeout=30.0) -> None:
    msg = ctx.message
    emojis = EMOJI_MAP.values()
    for reaction in emojis:
        await msg.add_react(reaction)
    current_index = 0
    while True:
        try:
            reaction, _ = await ctx.bot.wait_for(
                "reaction_add",
                timeout=timeout,
                check=lambda reaction, user: (
                    user == ctx.author and reaction.emoji in emojis and reaction.message.id == msg.id
                ),
            )
        except asyncio.TimeoutError:
            return await msg.clear_reactions()
        if (reaction.emoji == EMOJI_MAP["back"]):
            current_index -= 1
        if (reaction.emoji == EMOJI_MAP["forward"]):
            current_index += 1
        await msg.edit(embed=embeds[current_index])
        await msg.remove_reaction(reaction.emoji, ctx.author)
|
import discord
import asyncio
from typing import List
from discord.ext.commands import Context

EMOJI_MAP = {"back": "⬅️", "forward": "➡️"}

async def paginate(ctx: Context, embeds: List[discord.Embed], timeout=30.0) -> None:
    msg = ctx.message
    emojis = EMOJI_MAP.values()
    for emoji in emojis:
        await msg.add_reaction(emoji)
    current_index = 0
    while True:
        try:
            reaction, _ = await ctx.bot.wait_for(
                "reaction_add",
                timeout=timeout,
                check=lambda reaction, user: (
                    user == ctx.author and reaction.emoji in emojis and reaction.message.id == msg.id
                ),
            )
        except asyncio.TimeoutError:
            return await msg.clear_reactions()
        if reaction.emoji == EMOJI_MAP["back"]:
            current_index = current_index - 1 if current_index > 0 else 0
        if reaction.emoji == EMOJI_MAP["forward"]:
            current_index = current_index + 1 if current_index < len(embeds) - 1 else len(embeds) - 1
        await msg.edit(embed=embeds[current_index])
        await msg.remove_reaction(reaction.emoji, ctx.author)
|
Fix pagination logic & typo
|
Fix pagination logic & typo
|
Python
|
mit
|
Naught0/qtbot
|
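The fixed handlers clamp the page index to [0, len(embeds) - 1] instead of letting it run off either end; the same bound can be expressed with min/max, as in this small equivalent sketch:

```python
def step(index, direction, page_count):
    """Move one page back (-1) or forward (+1), clamped to valid pages."""
    return max(0, min(index + direction, page_count - 1))

assert step(0, -1, 5) == 0  # cannot move before the first page
assert step(4, +1, 5) == 4  # cannot move past the last page
assert step(2, +1, 5) == 3
```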
7fd7e2e8c9472a9dadf7d33991d11de6a68a2736
|
refmanage/refmanage.py
|
refmanage/refmanage.py
|
# -*- coding: utf-8 -*-
import os
import argparse
import fs_utils
from pybtex.database.input import bibtex

def main():
    """
    Command-line interface
    """
    parser = argparse.ArgumentParser(description="Manage BibTeX files")
    parser.add_argument("-t", "--test",
                        action="store_true",
                        help="Test parseability of BibTeX file(s)",)
    parser.add_argument("-v", "--verbose",
                        action="store_true",
                        help="Verbose output",)
    parser.add_argument("paths_args",
                        nargs="*",
                        default="*.bib",
                        help="File(s) to test parseability",
                        metavar="files")
    args = parser.parse_args()
    test(args)

def test(args):
    """
    Implement "test" command-line functionality
    """
    paths = fs_utils.handle_files_args(*args.paths_args)
    bibs_paths_dict = fs_utils.import_bib_files(*paths)
    parseables = []
    unparseables = []
    for key in bibs_paths_dict.keys():
        if bibs_paths_dict[key] is None:
            unparseables.append(key)
        else:
            parseables.append(key)
    print("The following files are unparseable:")
    for unparseable in unparseables:
        print("\t" + str(unparseable.resolve()))

if __name__ == '__main__':
    main()
|
# -*- coding: utf-8 -*-
import os
import argparse
import fs_utils
from pybtex.database.input import bibtex

def main():
    """
    Command-line interface
    """
    parser = argparse.ArgumentParser(description="Manage BibTeX files")
    parser.add_argument("-t", "--test",
                        action="store_true",
                        help="Test parseability of BibTeX file(s)",)
    parser.add_argument("-v", "--verbose",
                        action="store_true",
                        help="Verbose output",)
    parser.add_argument("paths_args",
                        nargs="*",
                        default="*.bib",
                        help="File(s) to test parseability",
                        metavar="files")
    args = parser.parse_args()
    test(args)

def test(args):
    """
    Implement "test" command-line functionality
    """
    paths = fs_utils.handle_files_args(*args.paths_args)
    bibs_paths_dict = fs_utils.import_bib_files(*paths)
    parseables = []
    parseables_msg = "The following files are parseable:"
    unparseables = []
    unparseables_msg = "The following files are unparseable:"
    for key in bibs_paths_dict.keys():
        if bibs_paths_dict[key] is None:
            unparseables.append(key)
            unparseables_msg += "\n\t" + str(key.resolve())
        else:
            parseables.append(key)
            parseables_msg += "\n\t" + str(key.resolve())
    if args.verbose:
        print(parseables_msg)
        print("\r")
    print(unparseables_msg)

if __name__ == '__main__':
    main()
|
Add functionality to print list of parseable files
|
Add functionality to print list of parseable files
|
Python
|
mit
|
jrsmith3/refmanage
|
# -*- coding: utf-8 -*-
import os
import argparse
import fs_utils
from pybtex.database.input import bibtex
def main():
"""
Command-line interface
"""
parser = argparse.ArgumentParser(description="Manage BibTeX files")
parser.add_argument("-t", "--test",
action="store_true",
help="Test parseability of BibTeX file(s)",)
parser.add_argument("-v", "--verbose",
action="store_true",
help="Verbose output",)
parser.add_argument("paths_args",
nargs="*",
default="*.bib",
help="File(s) to test parseability",
metavar="files")
args = parser.parse_args()
test(args)
def test(args):
"""
Implement "test" command-line functionality
"""
paths = fs_utils.handle_files_args(*args.paths_args)
bibs_paths_dict = fs_utils.import_bib_files(*paths)
parseables = []
unparseables = []
for key in bibs_paths_dict.keys():
if bibs_paths_dict[key] is None:
unparseables.append(key)
else:
parseables.append(key)
print("The following files are unparseable:")
for unparseable in unparseables:
print("\t" + str(unparseable.resolve()))
if __name__ == '__main__':
main()
Add functionality to print list of parseable files
|
# -*- coding: utf-8 -*-
import os
import argparse
import fs_utils
from pybtex.database.input import bibtex
def main():
"""
Command-line interface
"""
parser = argparse.ArgumentParser(description="Manage BibTeX files")
parser.add_argument("-t", "--test",
action="store_true",
help="Test parseability of BibTeX file(s)",)
parser.add_argument("-v", "--verbose",
action="store_true",
help="Verbose output",)
parser.add_argument("paths_args",
nargs="*",
default="*.bib",
help="File(s) to test parseability",
metavar="files")
args = parser.parse_args()
test(args)
def test(args):
"""
Implement "test" command-line functionality
"""
paths = fs_utils.handle_files_args(*args.paths_args)
bibs_paths_dict = fs_utils.import_bib_files(*paths)
parseables = []
parseables_msg = "The following files are parseable:"
unparseables = []
unparseables_msg = "The following files are unparseable:"
for key in bibs_paths_dict.keys():
if bibs_paths_dict[key] is None:
unparseables.append(key)
unparseables_msg += "\n\t" + str(key.resolve())
else:
parseables.append(key)
parseables_msg += "\n\t" + str(key.resolve())
if args.verbose:
print(parseables_msg)
print("\r")
print(unparseables_msg)
if __name__ == '__main__':
main()
|
<commit_before># -*- coding: utf-8 -*-
import os
import argparse
import fs_utils
from pybtex.database.input import bibtex
def main():
"""
Command-line interface
"""
parser = argparse.ArgumentParser(description="Manage BibTeX files")
parser.add_argument("-t", "--test",
action="store_true",
help="Test parseability of BibTeX file(s)",)
parser.add_argument("-v", "--verbose",
action="store_true",
help="Verbose output",)
parser.add_argument("paths_args",
nargs="*",
default="*.bib",
help="File(s) to test parseability",
metavar="files")
args = parser.parse_args()
test(args)
def test(args):
"""
Implement "test" command-line functionality
"""
paths = fs_utils.handle_files_args(*args.paths_args)
bibs_paths_dict = fs_utils.import_bib_files(*paths)
parseables = []
unparseables = []
for key in bibs_paths_dict.keys():
if bibs_paths_dict[key] is None:
unparseables.append(key)
else:
parseables.append(key)
print("The following files are unparseable:")
for unparseable in unparseables:
print("\t" + str(unparseable.resolve()))
if __name__ == '__main__':
main()
<commit_msg>Add functionality to print list of parseable files<commit_after>
|
# -*- coding: utf-8 -*-
import os
import argparse
import fs_utils
from pybtex.database.input import bibtex
def main():
"""
Command-line interface
"""
parser = argparse.ArgumentParser(description="Manage BibTeX files")
parser.add_argument("-t", "--test",
action="store_true",
help="Test parseability of BibTeX file(s)",)
parser.add_argument("-v", "--verbose",
action="store_true",
help="Verbose output",)
parser.add_argument("paths_args",
nargs="*",
default="*.bib",
help="File(s) to test parseability",
metavar="files")
args = parser.parse_args()
test(args)
def test(args):
"""
Implement "test" command-line functionality
"""
paths = fs_utils.handle_files_args(*args.paths_args)
bibs_paths_dict = fs_utils.import_bib_files(*paths)
parseables = []
parseables_msg = "The following files are parseable:"
unparseables = []
unparseables_msg = "The following files are unparseable:"
for key in bibs_paths_dict.keys():
if bibs_paths_dict[key] is None:
unparseables.append(key)
unparseables_msg += "\n\t" + str(key.resolve())
else:
parseables.append(key)
parseables_msg += "\n\t" + str(key.resolve())
if args.verbose:
print(parseables_msg)
print("\r")
print(unparseables_msg)
if __name__ == '__main__':
main()
|
# -*- coding: utf-8 -*-
import os
import argparse
import fs_utils
from pybtex.database.input import bibtex
def main():
"""
Command-line interface
"""
parser = argparse.ArgumentParser(description="Manage BibTeX files")
parser.add_argument("-t", "--test",
action="store_true",
help="Test parseability of BibTeX file(s)",)
parser.add_argument("-v", "--verbose",
action="store_true",
help="Verbose output",)
parser.add_argument("paths_args",
nargs="*",
default="*.bib",
help="File(s) to test parseability",
metavar="files")
args = parser.parse_args()
test(args)
def test(args):
"""
Implement "test" command-line functionality
"""
paths = fs_utils.handle_files_args(*args.paths_args)
bibs_paths_dict = fs_utils.import_bib_files(*paths)
parseables = []
unparseables = []
for key in bibs_paths_dict.keys():
if bibs_paths_dict[key] is None:
unparseables.append(key)
else:
parseables.append(key)
print("The following files are unparseable:")
for unparseable in unparseables:
print("\t" + str(unparseable.resolve()))
if __name__ == '__main__':
main()
Add functionality to print list of parseable files# -*- coding: utf-8 -*-
import os
import argparse
import fs_utils
from pybtex.database.input import bibtex
def main():
"""
Command-line interface
"""
parser = argparse.ArgumentParser(description="Manage BibTeX files")
parser.add_argument("-t", "--test",
action="store_true",
help="Test parseability of BibTeX file(s)",)
parser.add_argument("-v", "--verbose",
action="store_true",
help="Verbose output",)
parser.add_argument("paths_args",
nargs="*",
default="*.bib",
help="File(s) to test parseability",
metavar="files")
args = parser.parse_args()
test(args)
def test(args):
"""
Implement "test" command-line functionality
"""
paths = fs_utils.handle_files_args(*args.paths_args)
bibs_paths_dict = fs_utils.import_bib_files(*paths)
parseables = []
parseables_msg = "The following files are parseable:"
unparseables = []
unparseables_msg = "The following files are unparseable:"
for key in bibs_paths_dict.keys():
if bibs_paths_dict[key] is None:
unparseables.append(key)
unparseables_msg += "\n\t" + str(key.resolve())
else:
parseables.append(key)
parseables_msg += "\n\t" + str(key.resolve())
if args.verbose:
print(parseables_msg)
print("\r")
print(unparseables_msg)
if __name__ == '__main__':
main()
|
<commit_before># -*- coding: utf-8 -*-
import os
import argparse
import fs_utils
from pybtex.database.input import bibtex
def main():
"""
Command-line interface
"""
parser = argparse.ArgumentParser(description="Manage BibTeX files")
parser.add_argument("-t", "--test",
action="store_true",
help="Test parseability of BibTeX file(s)",)
parser.add_argument("-v", "--verbose",
action="store_true",
help="Verbose output",)
parser.add_argument("paths_args",
nargs="*",
default="*.bib",
help="File(s) to test parseability",
metavar="files")
args = parser.parse_args()
test(args)
def test(args):
"""
Implement "test" command-line functionality
"""
paths = fs_utils.handle_files_args(*args.paths_args)
bibs_paths_dict = fs_utils.import_bib_files(*paths)
parseables = []
unparseables = []
for key in bibs_paths_dict.keys():
if bibs_paths_dict[key] is None:
unparseables.append(key)
else:
parseables.append(key)
print("The following files are unparseable:")
for unparseable in unparseables:
print("\t" + str(unparseable.resolve()))
if __name__ == '__main__':
main()
<commit_msg>Add functionality to print list of parseable files<commit_after># -*- coding: utf-8 -*-
import os
import argparse
import fs_utils
from pybtex.database.input import bibtex
def main():
"""
Command-line interface
"""
parser = argparse.ArgumentParser(description="Manage BibTeX files")
parser.add_argument("-t", "--test",
action="store_true",
help="Test parseability of BibTeX file(s)",)
parser.add_argument("-v", "--verbose",
action="store_true",
help="Verbose output",)
parser.add_argument("paths_args",
nargs="*",
default="*.bib",
help="File(s) to test parseability",
metavar="files")
args = parser.parse_args()
test(args)
def test(args):
"""
Implement "test" command-line functionality
"""
paths = fs_utils.handle_files_args(*args.paths_args)
bibs_paths_dict = fs_utils.import_bib_files(*paths)
parseables = []
parseables_msg = "The following files are parseable:"
unparseables = []
unparseables_msg = "The following files are unparseable:"
for key in bibs_paths_dict.keys():
if bibs_paths_dict[key] is None:
unparseables.append(key)
unparseables_msg += "\n\t" + str(key.resolve())
else:
parseables.append(key)
parseables_msg += "\n\t" + str(key.resolve())
if args.verbose:
print(parseables_msg)
print("\r")
print(unparseables_msg)
if __name__ == '__main__':
main()
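Editor's note: the core pattern in this commit is accumulating two report strings while partitioning the dict in a single pass. A self-contained sketch under invented sample data (plain strings stand in for the path objects returned by fs_utils):

def build_report(bibs_paths_dict):
    parseables_msg = "The following files are parseable:"
    unparseables_msg = "The following files are unparseable:"
    for path, parsed in bibs_paths_dict.items():
        if parsed is None:
            unparseables_msg += "\n\t" + path   # parse failed
        else:
            parseables_msg += "\n\t" + path     # parse succeeded
    return parseables_msg, unparseables_msg

good, bad = build_report({"refs.bib": object(), "broken.bib": None})
print(good)  # lists refs.bib
print(bad)   # lists broken.bib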
|
22ab67a2c5a3bf3f7d1696a35b5fe029b848d63e
|
virtool/models.py
|
virtool/models.py
|
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(String, primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
|
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String, Sequence, Integer
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(Integer, Sequence('labels_id_seq'), primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
|
Use serial integer IDs for SQL records
|
Use serial integer IDs for SQL records
|
Python
|
mit
|
virtool/virtool,igboyes/virtool,virtool/virtool,igboyes/virtool
|
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(String, primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
Use serial integer IDs for SQL records
|
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String, Sequence, Integer
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(Integer, Sequence('labels_id_seq'), primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
|
<commit_before>from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(String, primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
<commit_msg>Use serial integer IDs for SQL records<commit_after>
|
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String, Sequence, Integer
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(Integer, Sequence('labels_id_seq'), primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
|
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(String, primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
Use serial integer IDs for SQL recordsfrom sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String, Sequence, Integer
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(Integer, Sequence('labels_id_seq'), primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
|
<commit_before>from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(String, primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
<commit_msg>Use serial integer IDs for SQL records<commit_after>from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy import Column, String, Sequence, Integer
Base = declarative_base()
class Label(Base):
__tablename__ = 'labels'
id = Column(Integer, Sequence('labels_id_seq'), primary_key=True)
name = Column(String, unique=True)
color = Column(String)
description = Column(String)
def __repr__(self):
return "<Label(name='%s', color='%s', description='%s')>" % (
self.name, self.color, self.description)
async def create_tables(engine):
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
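Editor's note: with an Integer primary key the database assigns ids serially on insert; the Sequence above is the spelling used by sequence-backed databases such as PostgreSQL. A hedged sketch of the effect, using in-memory SQLite for brevity and a table trimmed to two columns:

from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

Base = declarative_base()

class Label(Base):
    __tablename__ = "labels"
    id = Column(Integer, primary_key=True)  # serial id, assigned by the DB
    name = Column(String, unique=True)

engine = create_engine("sqlite://")
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add_all([Label(name="bug"), Label(name="qc")])
session.commit()
print([label.id for label in session.query(Label)])  # -> [1, 2]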
|
8a63a1c2464a63f1a52c32b5179b9dacfe5d4332
|
framework/sessions/model.py
|
framework/sessions/model.py
|
# -*- coding: utf-8 -*-
from bson import ObjectId
from modularodm import fields
from framework.mongo import StoredObject
class Session(StoredObject):
_id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
date_created = fields.DateTimeField(auto_now_add=True)
date_modified = fields.DateTimeField(auto_now=True)
data = fields.DictionaryField()
|
# -*- coding: utf-8 -*-
from bson import ObjectId
from modularodm import fields
from framework.mongo import StoredObject
class Session(StoredObject):
_id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
date_created = fields.DateTimeField(auto_now_add=True)
date_modified = fields.DateTimeField(auto_now=True)
data = fields.DictionaryField()
|
Add missing newline for flake8
|
Add missing newline for flake8
|
Python
|
apache-2.0
|
zachjanicki/osf.io,lyndsysimon/osf.io,aaxelb/osf.io,caneruguz/osf.io,leb2dg/osf.io,MerlinZhang/osf.io,Johnetordoff/osf.io,chrisseto/osf.io,sbt9uc/osf.io,arpitar/osf.io,jmcarp/osf.io,petermalcolm/osf.io,Nesiehr/osf.io,acshi/osf.io,DanielSBrown/osf.io,crcresearch/osf.io,MerlinZhang/osf.io,samanehsan/osf.io,mattclark/osf.io,mfraezz/osf.io,TomBaxter/osf.io,sloria/osf.io,KAsante95/osf.io,doublebits/osf.io,dplorimer/osf,hmoco/osf.io,SSJohns/osf.io,pattisdr/osf.io,emetsger/osf.io,aaxelb/osf.io,petermalcolm/osf.io,CenterForOpenScience/osf.io,jnayak1/osf.io,arpitar/osf.io,mfraezz/osf.io,binoculars/osf.io,reinaH/osf.io,samchrisinger/osf.io,zachjanicki/osf.io,GageGaskins/osf.io,cosenal/osf.io,mluo613/osf.io,caneruguz/osf.io,dplorimer/osf,caseyrollins/osf.io,jnayak1/osf.io,monikagrabowska/osf.io,crcresearch/osf.io,doublebits/osf.io,mluke93/osf.io,zachjanicki/osf.io,samanehsan/osf.io,jmcarp/osf.io,caseyrollins/osf.io,HalcyonChimera/osf.io,Ghalko/osf.io,lyndsysimon/osf.io,cslzchen/osf.io,kwierman/osf.io,kch8qx/osf.io,Johnetordoff/osf.io,njantrania/osf.io,DanielSBrown/osf.io,jinluyuan/osf.io,brianjgeiger/osf.io,lyndsysimon/osf.io,petermalcolm/osf.io,leb2dg/osf.io,asanfilippo7/osf.io,pattisdr/osf.io,amyshi188/osf.io,kch8qx/osf.io,billyhunt/osf.io,samchrisinger/osf.io,Nesiehr/osf.io,TomHeatwole/osf.io,GageGaskins/osf.io,samchrisinger/osf.io,haoyuchen1992/osf.io,rdhyee/osf.io,cldershem/osf.io,crcresearch/osf.io,alexschiller/osf.io,KAsante95/osf.io,KAsante95/osf.io,adlius/osf.io,jeffreyliu3230/osf.io,MerlinZhang/osf.io,samanehsan/osf.io,jolene-esposito/osf.io,saradbowman/osf.io,wearpants/osf.io,ticklemepierce/osf.io,abought/osf.io,brianjgeiger/osf.io,Ghalko/osf.io,billyhunt/osf.io,CenterForOpenScience/osf.io,brianjgeiger/osf.io,adlius/osf.io,zachjanicki/osf.io,binoculars/osf.io,sbt9uc/osf.io,RomanZWang/osf.io,HarryRybacki/osf.io,caseyrygt/osf.io,KAsante95/osf.io,zamattiac/osf.io,GageGaskins/osf.io,mluke93/osf.io,erinspace/osf.io,chrisseto/osf.io,icereval/osf.io,Johnetordoff/osf.io,kch8qx/osf.io,danielneis/osf.io,hmoco/osf.io,mluo613/osf.io,reinaH/osf.io,jolene-esposito/osf.io,caseyrygt/osf.io,sloria/osf.io,haoyuchen1992/osf.io,bdyetton/prettychart,petermalcolm/osf.io,cwisecarver/osf.io,brianjgeiger/osf.io,dplorimer/osf,zamattiac/osf.io,asanfilippo7/osf.io,ckc6cz/osf.io,cslzchen/osf.io,kwierman/osf.io,jeffreyliu3230/osf.io,asanfilippo7/osf.io,monikagrabowska/osf.io,fabianvf/osf.io,felliott/osf.io,arpitar/osf.io,fabianvf/osf.io,felliott/osf.io,jinluyuan/osf.io,rdhyee/osf.io,Nesiehr/osf.io,rdhyee/osf.io,ZobairAlijan/osf.io,monikagrabowska/osf.io,Nesiehr/osf.io,mluo613/osf.io,reinaH/osf.io,mluke93/osf.io,doublebits/osf.io,DanielSBrown/osf.io,mattclark/osf.io,mluo613/osf.io,icereval/osf.io,amyshi188/osf.io,brandonPurvis/osf.io,abought/osf.io,acshi/osf.io,HalcyonChimera/osf.io,TomHeatwole/osf.io,jeffreyliu3230/osf.io,cosenal/osf.io,chrisseto/osf.io,hmoco/osf.io,kch8qx/osf.io,alexschiller/osf.io,laurenrevere/osf.io,ticklemepierce/osf.io,mfraezz/osf.io,CenterForOpenScience/osf.io,HalcyonChimera/osf.io,kwierman/osf.io,laurenrevere/osf.io,brandonPurvis/osf.io,ticklemepierce/osf.io,TomBaxter/osf.io,HarryRybacki/osf.io,GageGaskins/osf.io,acshi/osf.io,dplorimer/osf,RomanZWang/osf.io,binoculars/osf.io,arpitar/osf.io,caneruguz/osf.io,RomanZWang/osf.io,jmcarp/osf.io,jeffreyliu3230/osf.io,ZobairAlijan/osf.io,danielneis/osf.io,ckc6cz/osf.io,wearpants/osf.io,wearpants/osf.io,KAsante95/osf.io,caseyrygt/osf.io,kwierman/osf.io,erinspace/osf.io,erinspace/osf.io,jinluyuan/osf.io,fabianvf/osf.io,amyshi188/osf.io,HarryRybacki/osf.io,SSJohns/osf.io,sbt9uc/osf.io,cldershem/osf.io,icereval/osf.io,saradbowman/osf.io,samanehsan/osf.io,aaxelb/osf.io,cldershem/osf.io,lyndsysimon/osf.io,ticklemepierce/osf.io,doublebits/osf.io,zamattiac/osf.io,jnayak1/osf.io,SSJohns/osf.io,leb2dg/osf.io,acshi/osf.io,billyhunt/osf.io,haoyuchen1992/osf.io,ckc6cz/osf.io,monikagrabowska/osf.io,asanfilippo7/osf.io,cslzchen/osf.io,njantrania/osf.io,HarryRybacki/osf.io,felliott/osf.io,wearpants/osf.io,cwisecarver/osf.io,alexschiller/osf.io,rdhyee/osf.io,adlius/osf.io,emetsger/osf.io,jinluyuan/osf.io,aaxelb/osf.io,ZobairAlijan/osf.io,cldershem/osf.io,ZobairAlijan/osf.io,mluke93/osf.io,chennan47/osf.io,Ghalko/osf.io,emetsger/osf.io,bdyetton/prettychart,emetsger/osf.io,chennan47/osf.io,mfraezz/osf.io,DanielSBrown/osf.io,baylee-d/osf.io,CenterForOpenScience/osf.io,cosenal/osf.io,mattclark/osf.io,leb2dg/osf.io,RomanZWang/osf.io,njantrania/osf.io,amyshi188/osf.io,fabianvf/osf.io,abought/osf.io,chrisseto/osf.io,kch8qx/osf.io,pattisdr/osf.io,jmcarp/osf.io,caneruguz/osf.io,jolene-esposito/osf.io,laurenrevere/osf.io,cwisecarver/osf.io,bdyetton/prettychart,TomHeatwole/osf.io,acshi/osf.io,MerlinZhang/osf.io,RomanZWang/osf.io,danielneis/osf.io,brandonPurvis/osf.io,GageGaskins/osf.io,alexschiller/osf.io,cslzchen/osf.io,felliott/osf.io,njantrania/osf.io,alexschiller/osf.io,cosenal/osf.io,brandonPurvis/osf.io,bdyetton/prettychart,baylee-d/osf.io,sloria/osf.io,cwisecarver/osf.io,baylee-d/osf.io,danielneis/osf.io,sbt9uc/osf.io,doublebits/osf.io,ckc6cz/osf.io,reinaH/osf.io,Ghalko/osf.io,chennan47/osf.io,haoyuchen1992/osf.io,zamattiac/osf.io,hmoco/osf.io,abought/osf.io,jolene-esposito/osf.io,TomHeatwole/osf.io,monikagrabowska/osf.io,mluo613/osf.io,HalcyonChimera/osf.io,billyhunt/osf.io,Johnetordoff/osf.io,jnayak1/osf.io,billyhunt/osf.io,adlius/osf.io,TomBaxter/osf.io,samchrisinger/osf.io,caseyrygt/osf.io,brandonPurvis/osf.io,SSJohns/osf.io
|
# -*- coding: utf-8 -*-
from bson import ObjectId
from modularodm import fields
from framework.mongo import StoredObject
class Session(StoredObject):
_id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
date_created = fields.DateTimeField(auto_now_add=True)
date_modified = fields.DateTimeField(auto_now=True)
data = fields.DictionaryField()Add missing newline for flake8
|
# -*- coding: utf-8 -*-
from bson import ObjectId
from modularodm import fields
from framework.mongo import StoredObject
class Session(StoredObject):
_id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
date_created = fields.DateTimeField(auto_now_add=True)
date_modified = fields.DateTimeField(auto_now=True)
data = fields.DictionaryField()
|
<commit_before># -*- coding: utf-8 -*-
from bson import ObjectId
from modularodm import fields
from framework.mongo import StoredObject
class Session(StoredObject):
_id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
date_created = fields.DateTimeField(auto_now_add=True)
date_modified = fields.DateTimeField(auto_now=True)
data = fields.DictionaryField()<commit_msg>Add missing newline for flake8<commit_after>
|
# -*- coding: utf-8 -*-
from bson import ObjectId
from modularodm import fields
from framework.mongo import StoredObject
class Session(StoredObject):
_id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
date_created = fields.DateTimeField(auto_now_add=True)
date_modified = fields.DateTimeField(auto_now=True)
data = fields.DictionaryField()
|
# -*- coding: utf-8 -*-
from bson import ObjectId
from modularodm import fields
from framework.mongo import StoredObject
class Session(StoredObject):
_id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
date_created = fields.DateTimeField(auto_now_add=True)
date_modified = fields.DateTimeField(auto_now=True)
data = fields.DictionaryField()Add missing newline for flake8# -*- coding: utf-8 -*-
from bson import ObjectId
from modularodm import fields
from framework.mongo import StoredObject
class Session(StoredObject):
_id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
date_created = fields.DateTimeField(auto_now_add=True)
date_modified = fields.DateTimeField(auto_now=True)
data = fields.DictionaryField()
|
<commit_before># -*- coding: utf-8 -*-
from bson import ObjectId
from modularodm import fields
from framework.mongo import StoredObject
class Session(StoredObject):
_id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
date_created = fields.DateTimeField(auto_now_add=True)
date_modified = fields.DateTimeField(auto_now=True)
data = fields.DictionaryField()<commit_msg>Add missing newline for flake8<commit_after># -*- coding: utf-8 -*-
from bson import ObjectId
from modularodm import fields
from framework.mongo import StoredObject
class Session(StoredObject):
_id = fields.StringField(primary=True, default=lambda: str(ObjectId()))
date_created = fields.DateTimeField(auto_now_add=True)
date_modified = fields.DateTimeField(auto_now=True)
data = fields.DictionaryField()
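Editor's note: the only change in this commit is the file's final newline, which flake8 reports as W292 when missing. A tiny stand-alone check for the same condition (the sample strings are invented):

def ends_with_newline(source: str) -> bool:
    # flake8 W292: "no newline at end of file"
    return source.endswith("\n")

assert not ends_with_newline("data = fields.DictionaryField()")
assert ends_with_newline("data = fields.DictionaryField()\n")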
|
64d599d6f7ca0aae6d95bf753a8421c7978276a2
|
subliminal/__init__.py
|
subliminal/__init__.py
|
# -*- coding: utf-8 -*-
__title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
# -*- coding: utf-8 -*-
__title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle, compute_score
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
Add compute_score to subliminal namespace
|
Add compute_score to subliminal namespace
|
Python
|
mit
|
juanmhidalgo/subliminal,h3llrais3r/subliminal,getzze/subliminal,hpsbranco/subliminal,kbkailashbagaria/subliminal,oxan/subliminal,ratoaq2/subliminal,ofir123/subliminal,SickRage/subliminal,pums974/subliminal,Elettronik/subliminal,goll/subliminal,bogdal/subliminal,fernandog/subliminal,Diaoul/subliminal,neo1691/subliminal,t4lwh/subliminal
|
# -*- coding: utf-8 -*-
__title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
Add compute_score to subliminal namespace
|
# -*- coding: utf-8 -*-
__title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle, compute_score
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
<commit_before># -*- coding: utf-8 -*-
__title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
<commit_msg>Add compute_score to subliminal namespace<commit_after>
|
# -*- coding: utf-8 -*-
__title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle, compute_score
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
# -*- coding: utf-8 -*-
__title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
Add compute_score to subliminal namespace# -*- coding: utf-8 -*-
__title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle, compute_score
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
|
<commit_before># -*- coding: utf-8 -*-
__title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
<commit_msg>Add compute_score to subliminal namespace<commit_after># -*- coding: utf-8 -*-
__title__ = 'subliminal'
__version__ = '1.0.dev0'
__author__ = 'Antoine Bertin'
__license__ = 'MIT'
__copyright__ = 'Copyright 2015, Antoine Bertin'
import logging
from .api import (ProviderPool, check_video, provider_manager, download_best_subtitles, download_subtitles,
list_subtitles, save_subtitles)
from .cache import region
from .exceptions import Error, ProviderError
from .providers import Provider
from .subtitle import Subtitle, compute_score
from .video import SUBTITLE_EXTENSIONS, VIDEO_EXTENSIONS, Episode, Movie, Video, scan_video, scan_videos
logging.getLogger(__name__).addHandler(logging.NullHandler())
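Editor's note: re-exporting compute_score at the package root shortens the import path for callers, so `from subliminal import compute_score` works alongside the original `from subliminal.subtitle import compute_score`. A guarded check of the new public name (a sketch that skips cleanly where subliminal 1.0 is not installed):

try:
    import subliminal
    assert hasattr(subliminal, "compute_score")  # name exported by this commit
except ImportError:
    print("subliminal not installed; skipping the re-export check")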
|
7f8a2e8e3b2721111c2de506d2d3bdea415e9b2d
|
markups/common.py
|
markups/common.py
|
# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2015
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2015
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
Use %APPDATA% for CONFIGURATION_DIR on Windows
|
Use %APPDATA% for CONFIGURATION_DIR on Windows
References retext-project/retext#156.
|
Python
|
bsd-3-clause
|
retext-project/pymarkups,mitya57/pymarkups
|
# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2015
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
Use %APPDATA% for CONFIGURATION_DIR on Windows
References retext-project/retext#156.
|
# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2015
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
<commit_before># This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2015
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
<commit_msg>Use %APPDATA% for CONFIGURATION_DIR on Windows
References retext-project/retext#156.<commit_after>
|
# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2015
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2015
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
Use %APPDATA% for CONFIGURATION_DIR on Windows
References retext-project/retext#156.# This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2015
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
|
<commit_before># This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2015
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.environ.get('XDG_CONFIG_HOME') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
<commit_msg>Use %APPDATA% for CONFIGURATION_DIR on Windows
References retext-project/retext#156.<commit_after># This file is part of python-markups module
# License: BSD
# Copyright: (C) Dmitry Shachnev, 2012-2015
import os.path
# Some common constants and functions
(LANGUAGE_HOME_PAGE, MODULE_HOME_PAGE, SYNTAX_DOCUMENTATION) = range(3)
CONFIGURATION_DIR = (os.getenv('XDG_CONFIG_HOME') or os.getenv('APPDATA') or
os.path.expanduser('~/.config'))
MATHJAX_LOCAL_URL = 'file:///usr/share/javascript/mathjax/MathJax.js'
MATHJAX_WEB_URL = 'https://cdn.mathjax.org/mathjax/latest/MathJax.js'
PYGMENTS_STYLE = 'default'
def get_pygments_stylesheet(selector, style=None):
if style is None:
style = PYGMENTS_STYLE
if style == '':
return ''
try:
from pygments.formatters import HtmlFormatter
except ImportError:
return ''
else:
return HtmlFormatter(style=style).get_style_defs(selector) + '\n'
def get_mathjax_url(webenv):
if os.path.exists(MATHJAX_LOCAL_URL[7:]) and not webenv:
return MATHJAX_LOCAL_URL
else:
return MATHJAX_WEB_URL
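Editor's note: the `or` chain above falls through in order — XDG_CONFIG_HOME, then APPDATA (set on Windows), then ~/.config. A self-contained illustration of the same fallback with throwaway environment dicts (the paths are invented):

import os.path

def config_dir(env):
    # Same fallback order as CONFIGURATION_DIR, with env injected for testing.
    return (env.get("XDG_CONFIG_HOME") or env.get("APPDATA")
            or os.path.expanduser("~/.config"))

print(config_dir({"XDG_CONFIG_HOME": "/home/u/.config"}))      # Linux-style
print(config_dir({"APPDATA": r"C:\Users\u\AppData\Roaming"}))  # Windows-style
print(config_dir({}))                                          # falls back to ~/.config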
|
7a179eefb73c5a1aebb4417f1e1adba0c6615f2b
|
csunplugged/general/views.py
|
csunplugged/general/views.py
|
"""Views for the general application."""
from django.views.generic import TemplateView
from django.http import HttpResponse
class GeneralIndexView(TemplateView):
"""View for the homepage that renders from a template."""
template_name = 'general/index.html'
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
template_name = 'general/about.html'
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template."""
template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
"""View for the people page that renders from a template."""
template_name = 'general/people.html'
class GeneralPrinciplesView(TemplateView):
"""View for the princples page that renders from a template."""
template_name = 'general/principles.html'
def health_check(request):
"""Return heath check response for Google App Engine.
Returns a 200 HTTP response for Google App Engine to detect the system
is running.
"""
return HttpResponse(status=200)
|
"""Views for the general application."""
from django.views.generic import TemplateView
from django.http import HttpResponse
class GeneralIndexView(TemplateView):
"""View for the homepage that renders from a template."""
template_name = 'general/index.html'
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
template_name = 'general/about.html'
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template"""
template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
"""View for the people page that renders from a template."""
template_name = 'general/people.html'
class GeneralPrinciplesView(TemplateView):
"""View for the princples page that renders from a template."""
template_name = 'general/principles.html'
def health_check(request):
"""Return heath check response for Google App Engine.
Returns a 200 HTTP response for Google App Engine to detect the system
is running.
"""
return HttpResponse(status=200)
|
Break PEP8 and Pydocstring to check Travis
|
Break PEP8 and Pydocstring to check Travis
|
Python
|
mit
|
uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged,uccser/cs-unplugged
|
"""Views for the general application."""
from django.views.generic import TemplateView
from django.http import HttpResponse
class GeneralIndexView(TemplateView):
"""View for the homepage that renders from a template."""
template_name = 'general/index.html'
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
template_name = 'general/about.html'
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template."""
template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
"""View for the people page that renders from a template."""
template_name = 'general/people.html'
class GeneralPrinciplesView(TemplateView):
"""View for the princples page that renders from a template."""
template_name = 'general/principles.html'
def health_check(request):
"""Return heath check response for Google App Engine.
Returns a 200 HTTP response for Google App Engine to detect the system
is running.
"""
return HttpResponse(status=200)
Break PEP8 and Pydocstring to check Travis
|
"""Views for the general application."""
from django.views.generic import TemplateView
from django.http import HttpResponse
class GeneralIndexView(TemplateView):
"""View for the homepage that renders from a template."""
template_name = 'general/index.html'
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
template_name = 'general/about.html'
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template"""
template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
"""View for the people page that renders from a template."""
template_name = 'general/people.html'
class GeneralPrinciplesView(TemplateView):
"""View for the princples page that renders from a template."""
template_name = 'general/principles.html'
def health_check(request):
"""Return heath check response for Google App Engine.
Returns a 200 HTTP response for Google App Engine to detect the system
is running.
"""
return HttpResponse(status=200)
|
<commit_before>"""Views for the general application."""
from django.views.generic import TemplateView
from django.http import HttpResponse
class GeneralIndexView(TemplateView):
"""View for the homepage that renders from a template."""
template_name = 'general/index.html'
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
template_name = 'general/about.html'
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template."""
template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
"""View for the people page that renders from a template."""
template_name = 'general/people.html'
class GeneralPrinciplesView(TemplateView):
"""View for the princples page that renders from a template."""
template_name = 'general/principles.html'
def health_check(request):
"""Return heath check response for Google App Engine.
Returns a 200 HTTP response for Google App Engine to detect the system
is running.
"""
return HttpResponse(status=200)
<commit_msg>Break PEP8 and Pydocstring to check Travis<commit_after>
|
"""Views for the general application."""
from django.views.generic import TemplateView
from django.http import HttpResponse
class GeneralIndexView(TemplateView):
"""View for the homepage that renders from a template."""
template_name = 'general/index.html'
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
template_name = 'general/about.html'
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template"""
template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
"""View for the people page that renders from a template."""
template_name = 'general/people.html'
class GeneralPrinciplesView(TemplateView):
"""View for the princples page that renders from a template."""
template_name = 'general/principles.html'
def health_check(request):
"""Return heath check response for Google App Engine.
Returns a 200 HTTP response for Google App Engine to detect the system
is running.
"""
return HttpResponse(status=200)
|
"""Views for the general application."""
from django.views.generic import TemplateView
from django.http import HttpResponse
class GeneralIndexView(TemplateView):
"""View for the homepage that renders from a template."""
template_name = 'general/index.html'
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
template_name = 'general/about.html'
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template."""
template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
"""View for the people page that renders from a template."""
template_name = 'general/people.html'
class GeneralPrinciplesView(TemplateView):
"""View for the princples page that renders from a template."""
template_name = 'general/principles.html'
def health_check(request):
"""Return heath check response for Google App Engine.
Returns a 200 HTTP response for Google App Engine to detect the system
is running.
"""
return HttpResponse(status=200)
Break PEP8 and Pydocstring to check Travis"""Views for the general application."""
from django.views.generic import TemplateView
from django.http import HttpResponse
class GeneralIndexView(TemplateView):
"""View for the homepage that renders from a template."""
template_name = 'general/index.html'
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
template_name = 'general/about.html'
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template"""
template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
"""View for the people page that renders from a template."""
template_name = 'general/people.html'
class GeneralPrinciplesView(TemplateView):
"""View for the princples page that renders from a template."""
template_name = 'general/principles.html'
def health_check(request):
"""Return heath check response for Google App Engine.
Returns a 200 HTTP response for Google App Engine to detect the system
is running.
"""
return HttpResponse(status=200)
|
<commit_before>"""Views for the general application."""
from django.views.generic import TemplateView
from django.http import HttpResponse
class GeneralIndexView(TemplateView):
"""View for the homepage that renders from a template."""
template_name = 'general/index.html'
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
template_name = 'general/about.html'
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template."""
template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
"""View for the people page that renders from a template."""
template_name = 'general/people.html'
class GeneralPrinciplesView(TemplateView):
"""View for the princples page that renders from a template."""
template_name = 'general/principles.html'
def health_check(request):
"""Return heath check response for Google App Engine.
Returns a 200 HTTP response for Google App Engine to detect the system
is running.
"""
return HttpResponse(status=200)
<commit_msg>Break PEP8 and Pydocstring to check Travis<commit_after>"""Views for the general application."""
from django.views.generic import TemplateView
from django.http import HttpResponse
class GeneralIndexView(TemplateView):
"""View for the homepage that renders from a template."""
template_name = 'general/index.html'
class GeneralAboutView(TemplateView):
"""View for the about page that renders from a template."""
template_name = 'general/about.html'
class GeneralContactView(TemplateView):
"""View for the contact page that renders from a template"""
template_name = 'general/contact.html'
class GeneralPeopleView(TemplateView):
"""View for the people page that renders from a template."""
template_name = 'general/people.html'
class GeneralPrinciplesView(TemplateView):
"""View for the princples page that renders from a template."""
template_name = 'general/principles.html'
def health_check(request):
"""Return heath check response for Google App Engine.
Returns a 200 HTTP response for Google App Engine to detect the system
is running.
"""
return HttpResponse(status=200)
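Editor's note: the deliberately dropped docstring period should trip pydocstyle's D400 ("First line should end with a period") in CI, which is the point of this commit. A minimal stand-in for that rule (the sample classes are invented):

def first_line_ends_with_period(obj) -> bool:
    # Mirrors pydocstyle D400 on the first docstring line only.
    doc = (obj.__doc__ or "").strip().splitlines()
    return bool(doc) and doc[0].endswith(".")

class GoodView:
    """View that renders from a template."""

class BadView:
    """View that renders from a template"""  # missing period, as in the commit

assert first_line_ends_with_period(GoodView)
assert not first_line_ends_with_period(BadView)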
|
90abb9f68ed32fd5affe8200dfd3bb4836f1c69e
|
test/os_win7.py
|
test/os_win7.py
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
# Since we don't have mock, let's monkey-patch
def get_mbed_devices_new(self):
return [
('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
def test_get_mbeds(self):
m = MbedLsToolsWin7()
func_type = type(MbedLsToolsWin7.get_mbed_devices)
m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
mbeds = m.get_mbeds()
self.assertIsNotNone(mbeds)
self.assertEqual(1, len(mbeds))
mbed = mbeds[0]
self.assertEqual("D:", mbed[0])
self.assertEqual("ATML2127031800007973", mbed[1])
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
if __name__ == '__main__':
unittest.main()
|
Revert "Add test for mbed parsing"
|
Revert "Add test for mbed parsing"
This reverts commit d37dc009f1c4f6e8855657dd6dbf17df9332f765.
|
Python
|
apache-2.0
|
mtmtech/mbed-ls,mtmtech/mbed-ls,mazimkhan/mbed-ls,jupe/mbed-ls,mazimkhan/mbed-ls,jupe/mbed-ls
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
# Since we don't have mock, let's monkey-patch
def get_mbed_devices_new(self):
return [
('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
def test_get_mbeds(self):
m = MbedLsToolsWin7()
func_type = type(MbedLsToolsWin7.get_mbed_devices)
m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
mbeds = m.get_mbeds()
self.assertIsNotNone(mbeds)
self.assertEqual(1, len(mbeds))
mbed = mbeds[0]
self.assertEqual("D:", mbed[0])
self.assertEqual("ATML2127031800007973", mbed[1])
if __name__ == '__main__':
unittest.main()
Revert "Add test for mbed parsing"
This reverts commit d37dc009f1c4f6e8855657dd6dbf17df9332f765.
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
# Since we don't have mock, let's monkey-patch
def get_mbed_devices_new(self):
return [
('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
def test_get_mbeds(self):
m = MbedLsToolsWin7()
func_type = type(MbedLsToolsWin7.get_mbed_devices)
m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
mbeds = m.get_mbeds()
self.assertIsNotNone(mbeds)
self.assertEqual(1, len(mbeds))
mbed = mbeds[0]
self.assertEqual("D:", mbed[0])
self.assertEqual("ATML2127031800007973", mbed[1])
if __name__ == '__main__':
unittest.main()
<commit_msg>Revert "Add test for mbed parsing"
This reverts commit d37dc009f1c4f6e8855657dd6dbf17df9332f765.<commit_after>
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
if __name__ == '__main__':
unittest.main()
|
#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
# Since we don't have mock, let's monkey-patch
def get_mbed_devices_new(self):
return [
('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
def test_get_mbeds(self):
m = MbedLsToolsWin7()
func_type = type(MbedLsToolsWin7.get_mbed_devices)
m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
mbeds = m.get_mbeds()
self.assertIsNotNone(mbeds)
self.assertEqual(1, len(mbeds))
mbed = mbeds[0]
self.assertEqual("D:", mbed[0])
self.assertEqual("ATML2127031800007973", mbed[1])
if __name__ == '__main__':
unittest.main()
Revert "Add test for mbed parsing"
This reverts commit d37dc009f1c4f6e8855657dd6dbf17df9332f765.#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
if __name__ == '__main__':
unittest.main()
|
<commit_before>#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
# Since we don't have mock, let's monkey-patch
def get_mbed_devices_new(self):
return [
('\\DosDevices\\D:', '_??_USBSTOR#Disk&Ven_MBED&Prod_XPRO&Rev_1.00#9&35913356&0&ATML2127031800007973&0#{53f56307-b6bf-11d0-94f2-00a0c91efb8b}'),
]
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
def test_get_mbeds(self):
m = MbedLsToolsWin7()
func_type = type(MbedLsToolsWin7.get_mbed_devices)
m.get_mbed_devices = func_type(get_mbed_devices_new, m, MbedLsToolsWin7)
mbeds = m.get_mbeds()
self.assertIsNotNone(mbeds)
self.assertEqual(1, len(mbeds))
mbed = mbeds[0]
self.assertEqual("D:", mbed[0])
self.assertEqual("ATML2127031800007973", mbed[1])
if __name__ == '__main__':
unittest.main()
<commit_msg>Revert "Add test for mbed parsing"
This reverts commit d37dc009f1c4f6e8855657dd6dbf17df9332f765.<commit_after>#!/usr/bin/env python
"""
mbed SDK
Copyright (c) 2011-2015 ARM Limited
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from mbed_lstools.lstools_win7 import MbedLsToolsWin7
class Win7TestCase(unittest.TestCase):
""" Basic test cases checking trivial asserts
"""
def setUp(self):
pass
def test_os_supported(self):
pass
if __name__ == '__main__':
unittest.main()
|
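The mbed-ls record above works around a missing mock dependency by rebinding get_mbed_devices on a single instance; the func_type(function, instance, class) construction is the Python 2 three-argument form of types.MethodType. A minimal self-contained sketch of that monkey-patching pattern in isolation (Device, probe and fake_probe are hypothetical stand-ins for illustration, not part of mbed-ls):

import types

class Device:
    def probe(self):
        return "real hardware answer"

def fake_probe(self):
    # Bound onto one instance at runtime; the class itself is untouched.
    return "canned test answer"

d = Device()
d.probe = types.MethodType(fake_probe, d)
assert d.probe() == "canned test answer"
assert Device().probe() == "real hardware answer"

On Python 3 this two-argument form is all that is needed; unittest.mock.patch.object achieves the same effect with automatic cleanup.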
175c72d97d073a64714cebef05bd37f0221f94fa
|
test_octave_kernel.py
|
test_octave_kernel.py
|
"""Example use of jupyter_kernel_test, with tests for IPython."""
import sys
import unittest
import jupyter_kernel_test as jkt
class OctaveKernelTests(jkt.KernelTests):
kernel_name = "octave"
language_name = "octave"
code_hello_world = "disp('hello, world')"
code_display_data = [
{'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'},
{'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'}
] if sys.platform == 'darwin' else []
completion_samples = [
{
'text': 'one',
'matches': {'ones', 'onenormest'},
},
]
code_page_something = "ones?"
if __name__ == '__main__':
unittest.main()
|
"""Example use of jupyter_kernel_test, with tests for IPython."""
import sys
import unittest
import jupyter_kernel_test as jkt
class OctaveKernelTests(jkt.KernelTests):
kernel_name = "octave"
language_name = "octave"
code_hello_world = "disp('hello, world')"
code_display_data = [
{'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'},
{'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'}
] if sys.platform == 'darwin' else []
completion_samples = [
{
'text': 'acos',
'matches': {'acos', 'acosd', 'acosh'},
},
]
code_page_something = "ones?"
if __name__ == '__main__':
unittest.main()
|
Fix tests with Octave 5.
|
Fix tests with Octave 5.
|
Python
|
bsd-3-clause
|
Calysto/octave_kernel,Calysto/octave_kernel
|
"""Example use of jupyter_kernel_test, with tests for IPython."""
import sys
import unittest
import jupyter_kernel_test as jkt
class OctaveKernelTests(jkt.KernelTests):
kernel_name = "octave"
language_name = "octave"
code_hello_world = "disp('hello, world')"
code_display_data = [
{'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'},
{'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'}
] if sys.platform == 'darwin' else []
completion_samples = [
{
'text': 'one',
'matches': {'ones', 'onenormest'},
},
]
code_page_something = "ones?"
if __name__ == '__main__':
unittest.main()
Fix tests with Octave 5.
|
"""Example use of jupyter_kernel_test, with tests for IPython."""
import sys
import unittest
import jupyter_kernel_test as jkt
class OctaveKernelTests(jkt.KernelTests):
kernel_name = "octave"
language_name = "octave"
code_hello_world = "disp('hello, world')"
code_display_data = [
{'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'},
{'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'}
] if sys.platform == 'darwin' else []
completion_samples = [
{
'text': 'acos',
'matches': {'acos', 'acosd', 'acosh'},
},
]
code_page_something = "ones?"
if __name__ == '__main__':
unittest.main()
|
<commit_before>"""Example use of jupyter_kernel_test, with tests for IPython."""
import sys
import unittest
import jupyter_kernel_test as jkt
class OctaveKernelTests(jkt.KernelTests):
kernel_name = "octave"
language_name = "octave"
code_hello_world = "disp('hello, world')"
code_display_data = [
{'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'},
{'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'}
] if sys.platform == 'darwin' else []
completion_samples = [
{
'text': 'one',
'matches': {'ones', 'onenormest'},
},
]
code_page_something = "ones?"
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix tests with Octave 5.<commit_after>
|
"""Example use of jupyter_kernel_test, with tests for IPython."""
import sys
import unittest
import jupyter_kernel_test as jkt
class OctaveKernelTests(jkt.KernelTests):
kernel_name = "octave"
language_name = "octave"
code_hello_world = "disp('hello, world')"
code_display_data = [
{'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'},
{'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'}
] if sys.platform == 'darwin' else []
completion_samples = [
{
'text': 'acos',
'matches': {'acos', 'acosd', 'acosh'},
},
]
code_page_something = "ones?"
if __name__ == '__main__':
unittest.main()
|
"""Example use of jupyter_kernel_test, with tests for IPython."""
import sys
import unittest
import jupyter_kernel_test as jkt
class OctaveKernelTests(jkt.KernelTests):
kernel_name = "octave"
language_name = "octave"
code_hello_world = "disp('hello, world')"
code_display_data = [
{'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'},
{'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'}
] if sys.platform == 'darwin' else []
completion_samples = [
{
'text': 'one',
'matches': {'ones', 'onenormest'},
},
]
code_page_something = "ones?"
if __name__ == '__main__':
unittest.main()
Fix tests with Octave 5."""Example use of jupyter_kernel_test, with tests for IPython."""
import sys
import unittest
import jupyter_kernel_test as jkt
class OctaveKernelTests(jkt.KernelTests):
kernel_name = "octave"
language_name = "octave"
code_hello_world = "disp('hello, world')"
code_display_data = [
{'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'},
{'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'}
] if sys.platform == 'darwin' else []
completion_samples = [
{
'text': 'acos',
'matches': {'acos', 'acosd', 'acosh'},
},
]
code_page_something = "ones?"
if __name__ == '__main__':
unittest.main()
|
<commit_before>"""Example use of jupyter_kernel_test, with tests for IPython."""
import sys
import unittest
import jupyter_kernel_test as jkt
class OctaveKernelTests(jkt.KernelTests):
kernel_name = "octave"
language_name = "octave"
code_hello_world = "disp('hello, world')"
code_display_data = [
{'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'},
{'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'}
] if sys.platform == 'darwin' else []
completion_samples = [
{
'text': 'one',
'matches': {'ones', 'onenormest'},
},
]
code_page_something = "ones?"
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix tests with Octave 5.<commit_after>"""Example use of jupyter_kernel_test, with tests for IPython."""
import sys
import unittest
import jupyter_kernel_test as jkt
class OctaveKernelTests(jkt.KernelTests):
kernel_name = "octave"
language_name = "octave"
code_hello_world = "disp('hello, world')"
code_display_data = [
{'code': '%plot -f png\nplot([1,2,3])', 'mime': 'image/png'},
{'code': '%plot -f svg\nplot([1,2,3])', 'mime': 'image/svg+xml'}
] if sys.platform == 'darwin' else []
completion_samples = [
{
'text': 'acos',
'matches': {'acos', 'acosd', 'acosh'},
},
]
code_page_something = "ones?"
if __name__ == '__main__':
unittest.main()
|
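The Octave 5 fix above swaps the completion fixture from 'one' to 'acos' because the set of names newer Octave offers for completion changed. Conceptually, completion assertions in jupyter_kernel_test-style suites reduce to checking that the expected matches are a subset of what the kernel returns; a rough sketch of that check, with get_completions as a hypothetical stand-in for the real kernel round-trip (this shows the idea, not the library's actual implementation):

def check_completion_sample(get_completions, sample):
    # sample mirrors the dicts in completion_samples: {'text': ..., 'matches': {...}}
    returned = set(get_completions(sample['text']))
    missing = sample['matches'] - returned
    assert not missing, 'kernel never offered: %s' % sorted(missing)

# A fake kernel that completes the way Octave 5 does passes the updated fixture:
fake = lambda text: ['acos', 'acosd', 'acosh'] if text == 'acos' else []
check_completion_sample(fake, {'text': 'acos', 'matches': {'acos', 'acosd', 'acosh'}})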
826e5cffbfc7ac3e3b3a138f290f3fcc50e2a187
|
scripts/insert_demo.py
|
scripts/insert_demo.py
|
"""Insert the demo into the codemirror site."""
import os
import fileinput
import shutil
proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
code_mirror_path = os.path.join(
proselint_path,
"plugins",
"webeditor")
code_mirror_demo_path = os.path.join(code_mirror_path, "index.html")
live_write_path = os.path.join(proselint_path, "site", "write")
shutil.copytree(code_mirror_path, live_write_path)
demo_path = os.path.join(proselint_path, "demo.md")
with open(demo_path, "r") as f:
demo = f.read()
for line in fileinput.input(code_mirror_demo_path, inplace=True):
if "##DEMO_PLACEHOLDER##" in line:
print demo,
else:
print line,
|
"""Insert the demo into the codemirror site."""
import os
import fileinput
import shutil
proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
code_mirror_path = os.path.join(
proselint_path,
"plugins",
"webeditor")
code_mirror_demo_path = os.path.join(code_mirror_path, "index.html")
live_write_path = os.path.join(proselint_path, "site", "write")
shutil.copytree(code_mirror_path, live_write_path)
demo_path = os.path.join(proselint_path, "demo.md")
with open(demo_path, "r") as f:
demo = f.read()
for line in fileinput.input(
os.path.join(live_write_path, "index.html"), inplace=True):
if "##DEMO_PLACEHOLDER##" in line:
print demo,
else:
print line,
|
Replace the placeholder in the live demo
|
Replace the placeholder in the live demo
|
Python
|
bsd-3-clause
|
jstewmon/proselint,amperser/proselint,jstewmon/proselint,amperser/proselint,amperser/proselint,amperser/proselint,amperser/proselint,jstewmon/proselint
|
"""Insert the demo into the codemirror site."""
import os
import fileinput
import shutil
proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
code_mirror_path = os.path.join(
proselint_path,
"plugins",
"webeditor")
code_mirror_demo_path = os.path.join(code_mirror_path, "index.html")
live_write_path = os.path.join(proselint_path, "site", "write")
shutil.copytree(code_mirror_path, live_write_path)
demo_path = os.path.join(proselint_path, "demo.md")
with open(demo_path, "r") as f:
demo = f.read()
for line in fileinput.input(code_mirror_demo_path, inplace=True):
if "##DEMO_PLACEHOLDER##" in line:
print demo,
else:
print line,
Replace the placeholder in the live demo
|
"""Insert the demo into the codemirror site."""
import os
import fileinput
import shutil
proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
code_mirror_path = os.path.join(
proselint_path,
"plugins",
"webeditor")
code_mirror_demo_path = os.path.join(code_mirror_path, "index.html")
live_write_path = os.path.join(proselint_path, "site", "write")
shutil.copytree(code_mirror_path, live_write_path)
demo_path = os.path.join(proselint_path, "demo.md")
with open(demo_path, "r") as f:
demo = f.read()
for line in fileinput.input(
os.path.join(live_write_path, "index.html"), inplace=True):
if "##DEMO_PLACEHOLDER##" in line:
print demo,
else:
print line,
|
<commit_before>"""Insert the demo into the codemirror site."""
import os
import fileinput
import shutil
proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
code_mirror_path = os.path.join(
proselint_path,
"plugins",
"webeditor")
code_mirror_demo_path = os.path.join(code_mirror_path, "index.html")
live_write_path = os.path.join(proselint_path, "site", "write")
shutil.copytree(code_mirror_path, live_write_path)
demo_path = os.path.join(proselint_path, "demo.md")
with open(demo_path, "r") as f:
demo = f.read()
for line in fileinput.input(code_mirror_demo_path, inplace=True):
if "##DEMO_PLACEHOLDER##" in line:
print demo,
else:
print line,
<commit_msg>Replace the placeholder in the live demo<commit_after>
|
"""Insert the demo into the codemirror site."""
import os
import fileinput
import shutil
proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
code_mirror_path = os.path.join(
proselint_path,
"plugins",
"webeditor")
code_mirror_demo_path = os.path.join(code_mirror_path, "index.html")
live_write_path = os.path.join(proselint_path, "site", "write")
shutil.copytree(code_mirror_path, live_write_path)
demo_path = os.path.join(proselint_path, "demo.md")
with open(demo_path, "r") as f:
demo = f.read()
for line in fileinput.input(
os.path.join(live_write_path, "index.html"), inplace=True):
if "##DEMO_PLACEHOLDER##" in line:
print demo,
else:
print line,
|
"""Insert the demo into the codemirror site."""
import os
import fileinput
import shutil
proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
code_mirror_path = os.path.join(
proselint_path,
"plugins",
"webeditor")
code_mirror_demo_path = os.path.join(code_mirror_path, "index.html")
live_write_path = os.path.join(proselint_path, "site", "write")
shutil.copytree(code_mirror_path, live_write_path)
demo_path = os.path.join(proselint_path, "demo.md")
with open(demo_path, "r") as f:
demo = f.read()
for line in fileinput.input(code_mirror_demo_path, inplace=True):
if "##DEMO_PLACEHOLDER##" in line:
print demo,
else:
print line,
Replace the placeholder in the live demo"""Insert the demo into the codemirror site."""
import os
import fileinput
import shutil
proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
code_mirror_path = os.path.join(
proselint_path,
"plugins",
"webeditor")
code_mirror_demo_path = os.path.join(code_mirror_path, "index.html")
live_write_path = os.path.join(proselint_path, "site", "write")
shutil.copytree(code_mirror_path, live_write_path)
demo_path = os.path.join(proselint_path, "demo.md")
with open(demo_path, "r") as f:
demo = f.read()
for line in fileinput.input(
os.path.join(live_write_path, "index.html"), inplace=True):
if "##DEMO_PLACEHOLDER##" in line:
print demo,
else:
print line,
|
<commit_before>"""Insert the demo into the codemirror site."""
import os
import fileinput
import shutil
proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
code_mirror_path = os.path.join(
proselint_path,
"plugins",
"webeditor")
code_mirror_demo_path = os.path.join(code_mirror_path, "index.html")
live_write_path = os.path.join(proselint_path, "site", "write")
shutil.copytree(code_mirror_path, live_write_path)
demo_path = os.path.join(proselint_path, "demo.md")
with open(demo_path, "r") as f:
demo = f.read()
for line in fileinput.input(code_mirror_demo_path, inplace=True):
if "##DEMO_PLACEHOLDER##" in line:
print demo,
else:
print line,
<commit_msg>Replace the placeholder in the live demo<commit_after>"""Insert the demo into the codemirror site."""
import os
import fileinput
import shutil
proselint_path = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
code_mirror_path = os.path.join(
proselint_path,
"plugins",
"webeditor")
code_mirror_demo_path = os.path.join(code_mirror_path, "index.html")
live_write_path = os.path.join(proselint_path, "site", "write")
shutil.copytree(code_mirror_path, live_write_path)
demo_path = os.path.join(proselint_path, "demo.md")
with open(demo_path, "r") as f:
demo = f.read()
for line in fileinput.input(
os.path.join(live_write_path, "index.html"), inplace=True):
if "##DEMO_PLACEHOLDER##" in line:
print demo,
else:
print line,
|
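The proselint fix above matters because fileinput was previously rewriting the template index.html inside plugins/webeditor instead of the copy placed under site/write, so the page actually served kept its placeholder. A Python 3 sketch of the same copy-then-substitute step using pathlib (paths mirror the record; the replace-based approach is an alternative to fileinput, not what the repo ships):

from pathlib import Path
import shutil

src = Path('plugins/webeditor')   # template directory
dst = Path('site/write')          # live copy that actually gets served
shutil.copytree(src, dst)         # fails if dst already exists, same as in the record

page = dst / 'index.html'
demo = Path('demo.md').read_text()
# Substitute in the copy, never in the template.
page.write_text(page.read_text().replace('##DEMO_PLACEHOLDER##', demo))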
39eea826a1f29c2bd77d5f4f5bead7011b47f0bb
|
sed/engine/__init__.py
|
sed/engine/__init__.py
|
from sed.engine.StreamEditor import StreamEditor
from sed.engine.sed_file_util import call_main
from sed.engine.match_engine import ACCEPT, REJECT, NEXT, REPEAT, CUT
from sed.engine.sed_regex import ANY
__all__ = [
"StreamEditor",
"call_main",
"ACCEPT", "REJECT", "NEXT", "REPEAT",
"ANY",
]
|
"""
Interface to sed engine
- defines objects exported from this module
"""
from sed.engine.StreamEditor import StreamEditor
from sed.engine.sed_file_util import call_main
from sed.engine.match_engine import (
ACCEPT,
REJECT,
NEXT,
REPEAT,
CUT,
)
from sed.engine.sed_regex import ANY
__all__ = [
"StreamEditor",
"call_main",
"ACCEPT", "REJECT", "NEXT", "REPEAT", "CUT",
"ANY",
]
|
Add CUT to list of externally visible objects
|
Add CUT to list of externally visible objects
|
Python
|
mit
|
hughdbrown/sed,hughdbrown/sed
|
from sed.engine.StreamEditor import StreamEditor
from sed.engine.sed_file_util import call_main
from sed.engine.match_engine import ACCEPT, REJECT, NEXT, REPEAT, CUT
from sed.engine.sed_regex import ANY
__all__ = [
"StreamEditor",
"call_main",
"ACCEPT", "REJECT", "NEXT", "REPEAT",
"ANY",
]
Add CUT to list of externally visible objects
|
"""
Interface to sed engine
- defines objects exported from this module
"""
from sed.engine.StreamEditor import StreamEditor
from sed.engine.sed_file_util import call_main
from sed.engine.match_engine import (
ACCEPT,
REJECT,
NEXT,
REPEAT,
CUT,
)
from sed.engine.sed_regex import ANY
__all__ = [
"StreamEditor",
"call_main",
"ACCEPT", "REJECT", "NEXT", "REPEAT", "CUT",
"ANY",
]
|
<commit_before>from sed.engine.StreamEditor import StreamEditor
from sed.engine.sed_file_util import call_main
from sed.engine.match_engine import ACCEPT, REJECT, NEXT, REPEAT, CUT
from sed.engine.sed_regex import ANY
__all__ = [
"StreamEditor",
"call_main",
"ACCEPT", "REJECT", "NEXT", "REPEAT",
"ANY",
]
<commit_msg>Add CUT to list of externally visible objects<commit_after>
|
"""
Interface to sed engine
- defines objects exported from this module
"""
from sed.engine.StreamEditor import StreamEditor
from sed.engine.sed_file_util import call_main
from sed.engine.match_engine import (
ACCEPT,
REJECT,
NEXT,
REPEAT,
CUT,
)
from sed.engine.sed_regex import ANY
__all__ = [
"StreamEditor",
"call_main",
"ACCEPT", "REJECT", "NEXT", "REPEAT", "CUT",
"ANY",
]
|
from sed.engine.StreamEditor import StreamEditor
from sed.engine.sed_file_util import call_main
from sed.engine.match_engine import ACCEPT, REJECT, NEXT, REPEAT, CUT
from sed.engine.sed_regex import ANY
__all__ = [
"StreamEditor",
"call_main",
"ACCEPT", "REJECT", "NEXT", "REPEAT",
"ANY",
]
Add CUT to list of externally visible objects"""
Interface to sed engine
- defines objects exported from this module
"""
from sed.engine.StreamEditor import StreamEditor
from sed.engine.sed_file_util import call_main
from sed.engine.match_engine import (
ACCEPT,
REJECT,
NEXT,
REPEAT,
CUT,
)
from sed.engine.sed_regex import ANY
__all__ = [
"StreamEditor",
"call_main",
"ACCEPT", "REJECT", "NEXT", "REPEAT", "CUT",
"ANY",
]
|
<commit_before>from sed.engine.StreamEditor import StreamEditor
from sed.engine.sed_file_util import call_main
from sed.engine.match_engine import ACCEPT, REJECT, NEXT, REPEAT, CUT
from sed.engine.sed_regex import ANY
__all__ = [
"StreamEditor",
"call_main",
"ACCEPT", "REJECT", "NEXT", "REPEAT",
"ANY",
]
<commit_msg>Add CUT to list of externally visible objects<commit_after>"""
Interface to sed engine
- defines objects exported from this module
"""
from sed.engine.StreamEditor import StreamEditor
from sed.engine.sed_file_util import call_main
from sed.engine.match_engine import (
ACCEPT,
REJECT,
NEXT,
REPEAT,
CUT,
)
from sed.engine.sed_regex import ANY
__all__ = [
"StreamEditor",
"call_main",
"ACCEPT", "REJECT", "NEXT", "REPEAT", "CUT",
"ANY",
]
|
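The sed-engine commit above fixes a quiet __all__ pitfall: CUT was already imported into the package, but because it was missing from __all__, a star-import (from sed.engine import *) silently refused to export it. A minimal demonstration of how __all__ gates star-imports, with an invented two-file layout:

# engine.py
ACCEPT, REJECT, CUT = 1, 2, 3
__all__ = ['ACCEPT', 'REJECT']    # CUT is defined but deliberately not exported

# client.py
from engine import *              # binds ACCEPT and REJECT only
# print(CUT)                      # NameError: __all__ hid it from the star-import
from engine import CUT            # explicit imports still work; __all__ only affects *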
2785c24a730b01678d3683becc5e41f4f27a3760
|
tests/database/conftest.py
|
tests/database/conftest.py
|
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database
from gold_digger.database.db_model import Base
@pytest.fixture(scope="module")
def db_connection(db_connection_string):
"""
Create one test database for all database tests.
"""
engine = create_engine(db_connection_string)
if not database_exists(engine.url):
create_database(engine.url)
connection = engine.connect()
yield connection
connection.close()
engine.dispose()
drop_database(engine.url)
@pytest.fixture
def db_session(db_connection):
"""
Drop and create all tables for every test, ie. every test starts with empty tables and new session.
"""
db_connection.execute("DROP TABLE IF EXISTS statistics_base CASCADE")
Base.metadata.drop_all(db_connection)
Base.metadata.create_all(db_connection)
session = scoped_session(sessionmaker(db_connection))
yield session
session.remove()
|
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database
from gold_digger.database.db_model import Base
@pytest.fixture(scope="module")
def db_connection(db_connection_string):
"""
Create one test database for all database tests.
"""
engine = create_engine(db_connection_string)
if not database_exists(engine.url):
create_database(engine.url)
connection = engine.connect()
yield connection
connection.close()
engine.dispose()
drop_database(engine.url)
@pytest.fixture
def db_session(db_connection):
"""
Drop and create all tables for every test, ie. every test starts with empty tables and new session.
"""
db_connection.execute("DROP TABLE IF EXISTS statistics_base CASCADE")
Base.metadata.drop_all(db_connection)
Base.metadata.create_all(db_connection)
session = scoped_session(sessionmaker(db_connection))
yield session()
session.remove()
|
Use SQLA Session directly in tests
|
Use SQLA Session directly in tests
|
Python
|
apache-2.0
|
business-factory/gold-digger
|
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database
from gold_digger.database.db_model import Base
@pytest.fixture(scope="module")
def db_connection(db_connection_string):
"""
Create one test database for all database tests.
"""
engine = create_engine(db_connection_string)
if not database_exists(engine.url):
create_database(engine.url)
connection = engine.connect()
yield connection
connection.close()
engine.dispose()
drop_database(engine.url)
@pytest.fixture
def db_session(db_connection):
"""
Drop and create all tables for every test, ie. every test starts with empty tables and new session.
"""
db_connection.execute("DROP TABLE IF EXISTS statistics_base CASCADE")
Base.metadata.drop_all(db_connection)
Base.metadata.create_all(db_connection)
session = scoped_session(sessionmaker(db_connection))
yield session
session.remove()
Use SQLA Session directly in tests
|
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database
from gold_digger.database.db_model import Base
@pytest.fixture(scope="module")
def db_connection(db_connection_string):
"""
Create one test database for all database tests.
"""
engine = create_engine(db_connection_string)
if not database_exists(engine.url):
create_database(engine.url)
connection = engine.connect()
yield connection
connection.close()
engine.dispose()
drop_database(engine.url)
@pytest.fixture
def db_session(db_connection):
"""
Drop and create all tables for every test, ie. every test starts with empty tables and new session.
"""
db_connection.execute("DROP TABLE IF EXISTS statistics_base CASCADE")
Base.metadata.drop_all(db_connection)
Base.metadata.create_all(db_connection)
session = scoped_session(sessionmaker(db_connection))
yield session()
session.remove()
|
<commit_before>import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database
from gold_digger.database.db_model import Base
@pytest.fixture(scope="module")
def db_connection(db_connection_string):
"""
Create one test database for all database tests.
"""
engine = create_engine(db_connection_string)
if not database_exists(engine.url):
create_database(engine.url)
connection = engine.connect()
yield connection
connection.close()
engine.dispose()
drop_database(engine.url)
@pytest.fixture
def db_session(db_connection):
"""
Drop and create all tables for every test, ie. every test starts with empty tables and new session.
"""
db_connection.execute("DROP TABLE IF EXISTS statistics_base CASCADE")
Base.metadata.drop_all(db_connection)
Base.metadata.create_all(db_connection)
session = scoped_session(sessionmaker(db_connection))
yield session
session.remove()
<commit_msg>Use SQLA Session directly in tests<commit_after>
|
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database
from gold_digger.database.db_model import Base
@pytest.fixture(scope="module")
def db_connection(db_connection_string):
"""
Create one test database for all database tests.
"""
engine = create_engine(db_connection_string)
if not database_exists(engine.url):
create_database(engine.url)
connection = engine.connect()
yield connection
connection.close()
engine.dispose()
drop_database(engine.url)
@pytest.fixture
def db_session(db_connection):
"""
Drop and create all tables for every test, ie. every test starts with empty tables and new session.
"""
db_connection.execute("DROP TABLE IF EXISTS statistics_base CASCADE")
Base.metadata.drop_all(db_connection)
Base.metadata.create_all(db_connection)
session = scoped_session(sessionmaker(db_connection))
yield session()
session.remove()
|
import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database
from gold_digger.database.db_model import Base
@pytest.fixture(scope="module")
def db_connection(db_connection_string):
"""
Create one test database for all database tests.
"""
engine = create_engine(db_connection_string)
if not database_exists(engine.url):
create_database(engine.url)
connection = engine.connect()
yield connection
connection.close()
engine.dispose()
drop_database(engine.url)
@pytest.fixture
def db_session(db_connection):
"""
Drop and create all tables for every test, ie. every test starts with empty tables and new session.
"""
db_connection.execute("DROP TABLE IF EXISTS statistics_base CASCADE")
Base.metadata.drop_all(db_connection)
Base.metadata.create_all(db_connection)
session = scoped_session(sessionmaker(db_connection))
yield session
session.remove()
Use SQLA Session directly in testsimport pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database
from gold_digger.database.db_model import Base
@pytest.fixture(scope="module")
def db_connection(db_connection_string):
"""
Create one test database for all database tests.
"""
engine = create_engine(db_connection_string)
if not database_exists(engine.url):
create_database(engine.url)
connection = engine.connect()
yield connection
connection.close()
engine.dispose()
drop_database(engine.url)
@pytest.fixture
def db_session(db_connection):
"""
Drop and create all tables for every test, ie. every test starts with empty tables and new session.
"""
db_connection.execute("DROP TABLE IF EXISTS statistics_base CASCADE")
Base.metadata.drop_all(db_connection)
Base.metadata.create_all(db_connection)
session = scoped_session(sessionmaker(db_connection))
yield session()
session.remove()
|
<commit_before>import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database
from gold_digger.database.db_model import Base
@pytest.fixture(scope="module")
def db_connection(db_connection_string):
"""
Create one test database for all database tests.
"""
engine = create_engine(db_connection_string)
if not database_exists(engine.url):
create_database(engine.url)
connection = engine.connect()
yield connection
connection.close()
engine.dispose()
drop_database(engine.url)
@pytest.fixture
def db_session(db_connection):
"""
Drop and create all tables for every test, ie. every test starts with empty tables and new session.
"""
db_connection.execute("DROP TABLE IF EXISTS statistics_base CASCADE")
Base.metadata.drop_all(db_connection)
Base.metadata.create_all(db_connection)
session = scoped_session(sessionmaker(db_connection))
yield session
session.remove()
<commit_msg>Use SQLA Session directly in tests<commit_after>import pytest
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session, sessionmaker
from sqlalchemy_utils import create_database, database_exists, drop_database
from gold_digger.database.db_model import Base
@pytest.fixture(scope="module")
def db_connection(db_connection_string):
"""
Create one test database for all database tests.
"""
engine = create_engine(db_connection_string)
if not database_exists(engine.url):
create_database(engine.url)
connection = engine.connect()
yield connection
connection.close()
engine.dispose()
drop_database(engine.url)
@pytest.fixture
def db_session(db_connection):
"""
Drop and create all tables for every test, ie. every test starts with empty tables and new session.
"""
db_connection.execute("DROP TABLE IF EXISTS statistics_base CASCADE")
Base.metadata.drop_all(db_connection)
Base.metadata.create_all(db_connection)
session = scoped_session(sessionmaker(db_connection))
yield session()
session.remove()
|
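The one-character change in the gold-digger fixture above (yield session() rather than yield session) hands each test a real Session object instead of the scoped_session registry, which proxies most Session methods but is not itself a Session. A sketch of the distinction against an in-memory SQLite engine:

from sqlalchemy import create_engine
from sqlalchemy.orm import Session, scoped_session, sessionmaker

engine = create_engine('sqlite://')
registry = scoped_session(sessionmaker(bind=engine))

assert not isinstance(registry, Session)   # the registry is a thread-local proxy
real = registry()                          # calling the registry returns the actual Session
assert isinstance(real, Session)
registry.remove()                          # closes and discards the thread's Session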
8b3a5bd9c28ba15e82215d4410b2952bcc81b917
|
tests/conftest.py
|
tests/conftest.py
|
# -*- coding: utf-8 -*-
import pytest
@pytest.yield_fixture
def tmpfile(request, tmpdir):
yield tmpdir.join('file.tmp').ensure().strpath
|
# -*- coding: utf-8 -*-
import pytest
from blox.file import File
@pytest.yield_fixture
def tmpfile(request, tmpdir):
filename = tmpdir.join('file.tmp').ensure().strpath
File(filename, 'w').close()
yield filename
|
Create a valid blox file in tmpfile fixture
|
Create a valid blox file in tmpfile fixture
|
Python
|
mit
|
aldanor/blox
|
# -*- coding: utf-8 -*-
import pytest
@pytest.yield_fixture
def tmpfile(request, tmpdir):
yield tmpdir.join('file.tmp').ensure().strpath
Create a valid blox file in tmpfile fixture
|
# -*- coding: utf-8 -*-
import pytest
from blox.file import File
@pytest.yield_fixture
def tmpfile(request, tmpdir):
filename = tmpdir.join('file.tmp').ensure().strpath
File(filename, 'w').close()
yield filename
|
<commit_before># -*- coding: utf-8 -*-
import pytest
@pytest.yield_fixture
def tmpfile(request, tmpdir):
yield tmpdir.join('file.tmp').ensure().strpath
<commit_msg>Create a valid blox file in tmpfile fixture<commit_after>
|
# -*- coding: utf-8 -*-
import pytest
from blox.file import File
@pytest.yield_fixture
def tmpfile(request, tmpdir):
filename = tmpdir.join('file.tmp').ensure().strpath
File(filename, 'w').close()
yield filename
|
# -*- coding: utf-8 -*-
import pytest
@pytest.yield_fixture
def tmpfile(request, tmpdir):
yield tmpdir.join('file.tmp').ensure().strpath
Create a valid blox file in tmpfile fixture# -*- coding: utf-8 -*-
import pytest
from blox.file import File
@pytest.yield_fixture
def tmpfile(request, tmpdir):
filename = tmpdir.join('file.tmp').ensure().strpath
File(filename, 'w').close()
yield filename
|
<commit_before># -*- coding: utf-8 -*-
import pytest
@pytest.yield_fixture
def tmpfile(request, tmpdir):
yield tmpdir.join('file.tmp').ensure().strpath
<commit_msg>Create a valid blox file in tmpfile fixture<commit_after># -*- coding: utf-8 -*-
import pytest
from blox.file import File
@pytest.yield_fixture
def tmpfile(request, tmpdir):
filename = tmpdir.join('file.tmp').ensure().strpath
File(filename, 'w').close()
yield filename
|
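The blox fixture change above ensures tmpfile yields a path that already holds a valid, empty blox file, so read-oriented tests are not handed a zero-byte file. The same idea expressed with the modern tmp_path fixture (plain pytest.fixture superseded yield_fixture in later pytest releases; blox.file.File is used exactly as in the record):

import pytest
from blox.file import File

@pytest.fixture
def tmpfile(tmp_path):
    filename = str(tmp_path / 'file.tmp')
    File(filename, 'w').close()   # write a valid file before handing the path to the test
    yield filename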
1e8f2c38cd83d23ad86ca898da9f6c7f7012da55
|
tests/get_data.py
|
tests/get_data.py
|
#!/usr/bin/env python
#
# PyUSBtmc
# get_data.py
#
# Copyright (c) 2011 Mike Hadmack
# This code is distributed under the MIT license
import numpy
import sys
import os
from matplotlib import pyplot
sys.path.append(os.path.expanduser('~/Source'))
sys.path.append(os.path.expanduser('~/src'))
sys.path.append('/var/local/src')
from pyoscope import RigolScope
from pyoscope import Waverunner
from pyoscope import makeDataFilePath
""" Capture data from Rigol oscilloscope and write to a file
usage: python save_channel.py <filename>
if filename is not given STDOUT will be used"""
SCOPE_ADDRESS = 'nigpib1'
try:
filename = sys.argv[1]
except:
filename = makeDataFilePath()
if filename == "--help":
print """Usage: 1%s [filename]\n Reads both traces from oscilloscope and writes as ASCII tabular data to filename. If no filename is given the program outputs to STDOUT. STDOUT can be directed into a file or piped into another application. For example:\n 1%s myfile\n 1%s > myfile\n 1%s | ./plot_data.py"""%sys.argv[0]
sys.exit(1)
print filename
#scope = RigolScope("/dev/usbtmc0")
scope = Waverunner(SCOPE_ADDRESS)
scope.grabData()
scope.writeWaveformToFile(filename)
scope.close()
|
#!/usr/bin/env python
#
# PyUSBtmc
# get_data.py
#
# Copyright (c) 2011 Mike Hadmack
# This code is distributed under the MIT license
import numpy
import sys
import os
from matplotlib import pyplot
sys.path.append(os.path.expanduser('.'))
from oscope import RigolScope
from oscope import Waverunner
from oscope import makeDataFilePath
""" Capture data from Rigol oscilloscope and write to a file
usage: python get_data.py <filename>
if filename is not given STDOUT will be used"""
SCOPE_ADDRESS = 'nigpib1'
try:
filename = sys.argv[1]
except:
filename = makeDataFilePath()
if filename == "--help":
print """Usage: 1%s [filename]\n Reads both traces from oscilloscope and writes as ASCII tabular data to filename. If no filename is given the program outputs to STDOUT. STDOUT can be directed into a file or piped into another application. For example:\n 1%s myfile\n 1%s > myfile\n 1%s | ./plot_data.py"""%sys.argv[0]
sys.exit(1)
print filename
#scope = RigolScope("/dev/usbtmc0")
scope = Waverunner(SCOPE_ADDRESS)
scope.grabData()
scope.writeWaveformToFile(filename)
scope.close()
|
Adjust paths and module name
|
Adjust paths and module name
|
Python
|
mit
|
niun/pyoscope,pklaus/pyoscope
|
#!/usr/bin/env python
#
# PyUSBtmc
# get_data.py
#
# Copyright (c) 2011 Mike Hadmack
# This code is distributed under the MIT license
import numpy
import sys
import os
from matplotlib import pyplot
sys.path.append(os.path.expanduser('~/Source'))
sys.path.append(os.path.expanduser('~/src'))
sys.path.append('/var/local/src')
from pyoscope import RigolScope
from pyoscope import Waverunner
from pyoscope import makeDataFilePath
""" Capture data from Rigol oscilloscope and write to a file
usage: python save_channel.py <filename>
if filename is not given STDOUT will be used"""
SCOPE_ADDRESS = 'nigpib1'
try:
filename = sys.argv[1]
except:
filename = makeDataFilePath()
if filename == "--help":
print """Usage: 1%s [filename]\n Reads both traces from oscilloscope and writes as ASCII tabular data to filename. If no filename is given the program outputs to STDOUT. STDOUT can be directed into a file or piped into another application. For example:\n 1%s myfile\n 1%s > myfile\n 1%s | ./plot_data.py"""%sys.argv[0]
sys.exit(1)
print filename
#scope = RigolScope("/dev/usbtmc0")
scope = Waverunner(SCOPE_ADDRESS)
scope.grabData()
scope.writeWaveformToFile(filename)
scope.close()
Adjust paths and module name
|
#!/usr/bin/env python
#
# PyUSBtmc
# get_data.py
#
# Copyright (c) 2011 Mike Hadmack
# This code is distributed under the MIT license
import numpy
import sys
import os
from matplotlib import pyplot
sys.path.append(os.path.expanduser('.'))
from oscope import RigolScope
from oscope import Waverunner
from oscope import makeDataFilePath
""" Capture data from Rigol oscilloscope and write to a file
usage: python get_data.py <filename>
if filename is not given STDOUT will be used"""
SCOPE_ADDRESS = 'nigpib1'
try:
filename = sys.argv[1]
except:
filename = makeDataFilePath()
if filename == "--help":
print """Usage: 1%s [filename]\n Reads both traces from oscilloscope and writes as ASCII tabular data to filename. If no filename is given the program outputs to STDOUT. STDOUT can be directed into a file or piped into another application. For example:\n 1%s myfile\n 1%s > myfile\n 1%s | ./plot_data.py"""%sys.argv[0]
sys.exit(1)
print filename
#scope = RigolScope("/dev/usbtmc0")
scope = Waverunner(SCOPE_ADDRESS)
scope.grabData()
scope.writeWaveformToFile(filename)
scope.close()
|
<commit_before>#!/usr/bin/env python
#
# PyUSBtmc
# get_data.py
#
# Copyright (c) 2011 Mike Hadmack
# This code is distributed under the MIT license
import numpy
import sys
import os
from matplotlib import pyplot
sys.path.append(os.path.expanduser('~/Source'))
sys.path.append(os.path.expanduser('~/src'))
sys.path.append('/var/local/src')
from pyoscope import RigolScope
from pyoscope import Waverunner
from pyoscope import makeDataFilePath
""" Capture data from Rigol oscilloscope and write to a file
usage: python save_channel.py <filename>
if filename is not given STDOUT will be used"""
SCOPE_ADDRESS = 'nigpib1'
try:
filename = sys.argv[1]
except:
filename = makeDataFilePath()
if filename == "--help":
print """Usage: 1%s [filename]\n Reads both traces from oscilloscope and writes as ASCII tabular data to filename. If no filename is given the program outputs to STDOUT. STDOUT can be directed into a file or piped into another application. For example:\n 1%s myfile\n 1%s > myfile\n 1%s | ./plot_data.py"""%sys.argv[0]
sys.exit(1)
print filename
#scope = RigolScope("/dev/usbtmc0")
scope = Waverunner(SCOPE_ADDRESS)
scope.grabData()
scope.writeWaveformToFile(filename)
scope.close()
<commit_msg>Adjust paths and module name<commit_after>
|
#!/usr/bin/env python
#
# PyUSBtmc
# get_data.py
#
# Copyright (c) 2011 Mike Hadmack
# This code is distributed under the MIT license
import numpy
import sys
import os
from matplotlib import pyplot
sys.path.append(os.path.expanduser('.'))
from oscope import RigolScope
from oscope import Waverunner
from oscope import makeDataFilePath
""" Capture data from Rigol oscilloscope and write to a file
usage: python get_data.py <filename>
if filename is not given STDOUT will be used"""
SCOPE_ADDRESS = 'nigpib1'
try:
filename = sys.argv[1]
except:
filename = makeDataFilePath()
if filename == "--help":
print """Usage: 1%s [filename]\n Reads both traces from oscilloscope and writes as ASCII tabular data to filename. If no filename is given the program outputs to STDOUT. STDOUT can be directed into a file or piped into another application. For example:\n 1%s myfile\n 1%s > myfile\n 1%s | ./plot_data.py"""%sys.argv[0]
sys.exit(1)
print filename
#scope = RigolScope("/dev/usbtmc0")
scope = Waverunner(SCOPE_ADDRESS)
scope.grabData()
scope.writeWaveformToFile(filename)
scope.close()
|
#!/usr/bin/env python
#
# PyUSBtmc
# get_data.py
#
# Copyright (c) 2011 Mike Hadmack
# This code is distributed under the MIT license
import numpy
import sys
import os
from matplotlib import pyplot
sys.path.append(os.path.expanduser('~/Source'))
sys.path.append(os.path.expanduser('~/src'))
sys.path.append('/var/local/src')
from pyoscope import RigolScope
from pyoscope import Waverunner
from pyoscope import makeDataFilePath
""" Capture data from Rigol oscilloscope and write to a file
usage: python save_channel.py <filename>
if filename is not given STDOUT will be used"""
SCOPE_ADDRESS = 'nigpib1'
try:
filename = sys.argv[1]
except:
filename = makeDataFilePath()
if filename == "--help":
print """Usage: 1%s [filename]\n Reads both traces from oscilloscope and writes as ASCII tabular data to filename. If no filename is given the program outputs to STDOUT. STDOUT can be directed into a file or piped into another application. For example:\n 1%s myfile\n 1%s > myfile\n 1%s | ./plot_data.py"""%sys.argv[0]
sys.exit(1)
print filename
#scope = RigolScope("/dev/usbtmc0")
scope = Waverunner(SCOPE_ADDRESS)
scope.grabData()
scope.writeWaveformToFile(filename)
scope.close()
Adjust paths and module name#!/usr/bin/env python
#
# PyUSBtmc
# get_data.py
#
# Copyright (c) 2011 Mike Hadmack
# This code is distributed under the MIT license
import numpy
import sys
import os
from matplotlib import pyplot
sys.path.append(os.path.expanduser('.'))
from oscope import RigolScope
from oscope import Waverunner
from oscope import makeDataFilePath
""" Capture data from Rigol oscilloscope and write to a file
usage: python get_data.py <filename>
if filename is not given STDOUT will be used"""
SCOPE_ADDRESS = 'nigpib1'
try:
filename = sys.argv[1]
except:
filename = makeDataFilePath()
if filename == "--help":
print """Usage: 1%s [filename]\n Reads both traces from oscilloscope and writes as ASCII tabular data to filename. If no filename is given the program outputs to STDOUT. STDOUT can be directed into a file or piped into another application. For example:\n 1%s myfile\n 1%s > myfile\n 1%s | ./plot_data.py"""%sys.argv[0]
sys.exit(1)
print filename
#scope = RigolScope("/dev/usbtmc0")
scope = Waverunner(SCOPE_ADDRESS)
scope.grabData()
scope.writeWaveformToFile(filename)
scope.close()
|
<commit_before>#!/usr/bin/env python
#
# PyUSBtmc
# get_data.py
#
# Copyright (c) 2011 Mike Hadmack
# This code is distributed under the MIT license
import numpy
import sys
import os
from matplotlib import pyplot
sys.path.append(os.path.expanduser('~/Source'))
sys.path.append(os.path.expanduser('~/src'))
sys.path.append('/var/local/src')
from pyoscope import RigolScope
from pyoscope import Waverunner
from pyoscope import makeDataFilePath
""" Capture data from Rigol oscilloscope and write to a file
usage: python save_channel.py <filename>
if filename is not given STDOUT will be used"""
SCOPE_ADDRESS = 'nigpib1'
try:
filename = sys.argv[1]
except:
filename = makeDataFilePath()
if filename == "--help":
print """Usage: 1%s [filename]\n Reads both traces from oscilloscope and writes as ASCII tabular data to filename. If no filename is given the program outputs to STDOUT. STDOUT can be directed into a file or piped into another application. For example:\n 1%s myfile\n 1%s > myfile\n 1%s | ./plot_data.py"""%sys.argv[0]
sys.exit(1)
print filename
#scope = RigolScope("/dev/usbtmc0")
scope = Waverunner(SCOPE_ADDRESS)
scope.grabData()
scope.writeWaveformToFile(filename)
scope.close()
<commit_msg>Adjust paths and module name<commit_after>#!/usr/bin/env python
#
# PyUSBtmc
# get_data.py
#
# Copyright (c) 2011 Mike Hadmack
# This code is distributed under the MIT license
import numpy
import sys
import os
from matplotlib import pyplot
sys.path.append(os.path.expanduser('.'))
from oscope import RigolScope
from oscope import Waverunner
from oscope import makeDataFilePath
""" Capture data from Rigol oscilloscope and write to a file
usage: python get_data.py <filename>
if filename is not given STDOUT will be used"""
SCOPE_ADDRESS = 'nigpib1'
try:
filename = sys.argv[1]
except:
filename = makeDataFilePath()
if filename == "--help":
print """Usage: 1%s [filename]\n Reads both traces from oscilloscope and writes as ASCII tabular data to filename. If no filename is given the program outputs to STDOUT. STDOUT can be directed into a file or piped into another application. For example:\n 1%s myfile\n 1%s > myfile\n 1%s | ./plot_data.py"""%sys.argv[0]
sys.exit(1)
print filename
#scope = RigolScope("/dev/usbtmc0")
scope = Waverunner(SCOPE_ADDRESS)
scope.grabData()
scope.writeWaveformToFile(filename)
scope.close()
|
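One latent bug survives both sides of the pyoscope record above: the usage string holds four %s placeholders (each prefixed with a stray 1) yet is formatted with the single value sys.argv[0], so Python raises TypeError: not enough arguments for format string the moment --help is used. A Python 3 sketch of the same argument handling with the formatting corrected (a hypothetical rewrite, not code from the repo):

import sys

USAGE = """Usage: {prog} [filename]
Reads both traces from the oscilloscope and writes ASCII tabular data to filename.
With no filename the program writes to STDOUT, for example:
  {prog} myfile
  {prog} > myfile
  {prog} | ./plot_data.py"""

def resolve_filename(argv):
    if len(argv) > 1 and argv[1] == '--help':
        print(USAGE.format(prog=argv[0]))
        sys.exit(1)
    return argv[1] if len(argv) > 1 else None   # None: caller falls back to a default path

filename = resolve_filename(sys.argv)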
bf4717f39aaf3cf70bf99648afd38cd8dd5c8ad3
|
src/main/python/systemml/__init__.py
|
src/main/python/systemml/__init__.py
|
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
from .mlcontext import *
from .defmatrix import *
from .converters import *
__all__ = mlcontext.__all__
__all__ += defmatrix.__all__
__all__ += converters.__all__
|
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
from .mlcontext import *
from .defmatrix import *
from .converters import *
from .classloader import *
__all__ = mlcontext.__all__
__all__ += defmatrix.__all__
__all__ += converters.__all__
__all__ += classloader.__all__
|
Allow access to classloaders methods
|
[MINOR] Allow access to classloaders methods
|
Python
|
apache-2.0
|
apache/incubator-systemml,niketanpansare/incubator-systemml,apache/incubator-systemml,apache/incubator-systemml,apache/incubator-systemml,niketanpansare/incubator-systemml,niketanpansare/systemml,niketanpansare/incubator-systemml,apache/incubator-systemml,niketanpansare/systemml,apache/incubator-systemml,niketanpansare/systemml,niketanpansare/incubator-systemml,niketanpansare/systemml,niketanpansare/systemml,niketanpansare/systemml
|
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
from .mlcontext import *
from .defmatrix import *
from .converters import *
__all__ = mlcontext.__all__
__all__ += defmatrix.__all__
__all__ += converters.__all__
[MINOR] Allow access to classloaders methods
|
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
from .mlcontext import *
from .defmatrix import *
from .converters import *
from .classloader import *
__all__ = mlcontext.__all__
__all__ += defmatrix.__all__
__all__ += converters.__all__
__all__ += classloader.__all__
|
<commit_before># -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
from .mlcontext import *
from .defmatrix import *
from .converters import *
__all__ = mlcontext.__all__
__all__ += defmatrix.__all__
__all__ += converters.__all__
<commit_msg>[MINOR] Allow access to classloaders methods<commit_after>
|
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
from .mlcontext import *
from .defmatrix import *
from .converters import *
from .classloader import *
__all__ = mlcontext.__all__
__all__ += defmatrix.__all__
__all__ += converters.__all__
__all__ += classloader.__all__
|
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
from .mlcontext import *
from .defmatrix import *
from .converters import *
__all__ = mlcontext.__all__
__all__ += defmatrix.__all__
__all__ += converters.__all__
[MINOR] Allow access to classloaders methods
# -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
from .mlcontext import *
from .defmatrix import *
from .converters import *
from .classloader import *
__all__ = mlcontext.__all__
__all__ += defmatrix.__all__
__all__ += converters.__all__
__all__ += classloader.__all__
|
<commit_before># -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
from .mlcontext import *
from .defmatrix import *
from .converters import *
__all__ = mlcontext.__all__
__all__ += defmatrix.__all__
__all__ += converters.__all__
<commit_msg>[MINOR] Allow access to classloaders methods<commit_after># -------------------------------------------------------------
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
# -------------------------------------------------------------
from .mlcontext import *
from .defmatrix import *
from .converters import *
from .classloader import *
__all__ = mlcontext.__all__
__all__ += defmatrix.__all__
__all__ += converters.__all__
__all__ += classloader.__all__
|
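The change above works by extending the package's __all__ with classloader.__all__, so a star-import of the package now re-exports the classloader helpers too. A minimal, self-contained sketch of how __all__ gates what a star-import picks up (the inline module below is purely illustrative, not SystemML code):

import types

# Build a throwaway module with one exported and one private name.
sub = types.ModuleType("sub")
sub.__all__ = ["visible"]
sub.visible = lambda: "exported"
sub._hidden = lambda: "skipped"

# Emulate "from sub import *" the way the package __init__ above does:
exported = {name: getattr(sub, name) for name in sub.__all__}
assert exported["visible"]() == "exported"
assert "_hidden" not in exported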
71251ba62843b4842055783941929884df38267d
|
tests/helper.py
|
tests/helper.py
|
import sublime
from unittest import TestCase
class TestHelper(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().run_command('close_file')
def set_text(self, lines):
for line in lines:
self.view.run_command('move_to', { 'to': 'bol', 'extend': True })
self.view.run_command('insert', { 'characters': line + "\n" })
def check_command(self, text, start, end, extend_selection=False, indent_offset=0):
tab_size = self.view.settings().get("tab_size")
self.set_text(text)
self.view.sel().clear()
self.view.sel().add(sublime.Region(start[0], start[1]))
self.view.run_command(self.command(), { 'extend_selection': extend_selection, 'indent_offset': indent_offset })
self.assertEqual(self.view.sel()[0], sublime.Region(end[0], end[1]))
|
import sublime
from unittest import TestCase
class TestHelper(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
self.view.settings().set("tab_size", 2)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().run_command('close_file')
def set_text(self, lines):
for line in lines:
self.view.run_command('move_to', { 'to': 'bol', 'extend': True })
self.view.run_command('insert', { 'characters': line + "\n" })
def check_command(self, text, start, end, extend_selection=False, indent_offset=0):
self.set_text(text)
self.view.sel().clear()
self.view.sel().add(sublime.Region(start[0], start[1]))
self.view.run_command(self.command(), { 'extend_selection': extend_selection, 'indent_offset': indent_offset })
self.assertEqual(self.view.sel()[0], sublime.Region(end[0], end[1]))
|
Fix tests failing from different tab_size
|
Fix tests failing from different tab_size
|
Python
|
mit
|
mwean/sublime_jump_along_indent
|
import sublime
from unittest import TestCase
class TestHelper(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().run_command('close_file')
def set_text(self, lines):
for line in lines:
self.view.run_command('move_to', { 'to': 'bol', 'extend': True })
self.view.run_command('insert', { 'characters': line + "\n" })
def check_command(self, text, start, end, extend_selection=False, indent_offset=0):
tab_size = self.view.settings().get("tab_size")
self.set_text(text)
self.view.sel().clear()
self.view.sel().add(sublime.Region(start[0], start[1]))
self.view.run_command(self.command(), { 'extend_selection': extend_selection, 'indent_offset': indent_offset })
self.assertEqual(self.view.sel()[0], sublime.Region(end[0], end[1]))
Fix tests failing from different tab_size
|
import sublime
from unittest import TestCase
class TestHelper(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
self.view.settings().set("tab_size", 2)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().run_command('close_file')
def set_text(self, lines):
for line in lines:
self.view.run_command('move_to', { 'to': 'bol', 'extend': True })
self.view.run_command('insert', { 'characters': line + "\n" })
def check_command(self, text, start, end, extend_selection=False, indent_offset=0):
self.set_text(text)
self.view.sel().clear()
self.view.sel().add(sublime.Region(start[0], start[1]))
self.view.run_command(self.command(), { 'extend_selection': extend_selection, 'indent_offset': indent_offset })
self.assertEqual(self.view.sel()[0], sublime.Region(end[0], end[1]))
|
<commit_before>import sublime
from unittest import TestCase
class TestHelper(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().run_command('close_file')
def set_text(self, lines):
for line in lines:
self.view.run_command('move_to', { 'to': 'bol', 'extend': True })
self.view.run_command('insert', { 'characters': line + "\n" })
def check_command(self, text, start, end, extend_selection=False, indent_offset=0):
tab_size = self.view.settings().get("tab_size")
self.set_text(text)
self.view.sel().clear()
self.view.sel().add(sublime.Region(start[0], start[1]))
self.view.run_command(self.command(), { 'extend_selection': extend_selection, 'indent_offset': indent_offset })
self.assertEqual(self.view.sel()[0], sublime.Region(end[0], end[1]))
<commit_msg>Fix tests failing from different tab_size<commit_after>
|
import sublime
from unittest import TestCase
class TestHelper(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
self.view.settings().set("tab_size", 2)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().run_command('close_file')
def set_text(self, lines):
for line in lines:
self.view.run_command('move_to', { 'to': 'bol', 'extend': True })
self.view.run_command('insert', { 'characters': line + "\n" })
def check_command(self, text, start, end, extend_selection=False, indent_offset=0):
self.set_text(text)
self.view.sel().clear()
self.view.sel().add(sublime.Region(start[0], start[1]))
self.view.run_command(self.command(), { 'extend_selection': extend_selection, 'indent_offset': indent_offset })
self.assertEqual(self.view.sel()[0], sublime.Region(end[0], end[1]))
|
import sublime
from unittest import TestCase
class TestHelper(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().run_command('close_file')
def set_text(self, lines):
for line in lines:
self.view.run_command('move_to', { 'to': 'bol', 'extend': True })
self.view.run_command('insert', { 'characters': line + "\n" })
def check_command(self, text, start, end, extend_selection=False, indent_offset=0):
tab_size = self.view.settings().get("tab_size")
self.set_text(text)
self.view.sel().clear()
self.view.sel().add(sublime.Region(start[0], start[1]))
self.view.run_command(self.command(), { 'extend_selection': extend_selection, 'indent_offset': indent_offset })
self.assertEqual(self.view.sel()[0], sublime.Region(end[0], end[1]))
Fix tests failing from different tab_size
import sublime
from unittest import TestCase
class TestHelper(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
self.view.settings().set("tab_size", 2)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().run_command('close_file')
def set_text(self, lines):
for line in lines:
self.view.run_command('move_to', { 'to': 'bol', 'extend': True })
self.view.run_command('insert', { 'characters': line + "\n" })
def check_command(self, text, start, end, extend_selection=False, indent_offset=0):
self.set_text(text)
self.view.sel().clear()
self.view.sel().add(sublime.Region(start[0], start[1]))
self.view.run_command(self.command(), { 'extend_selection': extend_selection, 'indent_offset': indent_offset })
self.assertEqual(self.view.sel()[0], sublime.Region(end[0], end[1]))
|
<commit_before>import sublime
from unittest import TestCase
class TestHelper(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().run_command('close_file')
def set_text(self, lines):
for line in lines:
self.view.run_command('move_to', { 'to': 'bol', 'extend': True })
self.view.run_command('insert', { 'characters': line + "\n" })
def check_command(self, text, start, end, extend_selection=False, indent_offset=0):
tab_size = self.view.settings().get("tab_size")
self.set_text(text)
self.view.sel().clear()
self.view.sel().add(sublime.Region(start[0], start[1]))
self.view.run_command(self.command(), { 'extend_selection': extend_selection, 'indent_offset': indent_offset })
self.assertEqual(self.view.sel()[0], sublime.Region(end[0], end[1]))
<commit_msg>Fix tests failing from different tab_size<commit_after>import sublime
from unittest import TestCase
class TestHelper(TestCase):
def setUp(self):
self.view = sublime.active_window().new_file()
self.view.settings().set("tab_size", 2)
def tearDown(self):
if self.view:
self.view.set_scratch(True)
self.view.window().run_command('close_file')
def set_text(self, lines):
for line in lines:
self.view.run_command('move_to', { 'to': 'bol', 'extend': True })
self.view.run_command('insert', { 'characters': line + "\n" })
def check_command(self, text, start, end, extend_selection=False, indent_offset=0):
self.set_text(text)
self.view.sel().clear()
self.view.sel().add(sublime.Region(start[0], start[1]))
self.view.run_command(self.command(), { 'extend_selection': extend_selection, 'indent_offset': indent_offset })
self.assertEqual(self.view.sel()[0], sublime.Region(end[0], end[1]))
|
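The fix pins tab_size in setUp instead of reading whatever value the developer's own editor settings supply, which is what made the column-based assertions flaky. A hedged sketch of the same idea with a stand-in settings object (FakeSettings is hypothetical, not part of the Sublime Text API):

from unittest import TestCase

class FakeSettings(dict):
    # Stand-in for view.settings(); only get/set are modelled here.
    def set(self, key, value):
        self[key] = value
    def get(self, key):
        return self[key]

class TestDeterministicIndent(TestCase):
    def setUp(self):
        self.settings = FakeSettings()
        self.settings.set("tab_size", 2)  # pinned, not inherited from the environment

    def test_one_indent_level_is_two_columns(self):
        self.assertEqual(self.settings.get("tab_size"), 2)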
9ef9724a21382d8c93bebfb8dc6e551b58e0a57c
|
py/testdir_multi_jvm/test_rf_200x4_fvec.py
|
py/testdir_multi_jvm/test_rf_200x4_fvec.py
|
import unittest, time, sys, os
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(4)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_rf_200x4_fvec(self):
h2o.beta_features = True
csvPathname = 'hhp.cut3.214.data.gz'
print "RF start on ", csvPathname, "this will probably take 1 minute.."
start = time.time()
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
h2o_cmd.runRF(parseResult=parseResult, ntrees=5,
timeoutSecs=400, retryDelaySecs=15)
print "RF end on ", csvPathname, 'took', time.time() - start, 'seconds'
if __name__ == '__main__':
h2o.unit_main()
|
import unittest, time, sys, os
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(4)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_rf_200x4_fvec(self):
h2o.beta_features = True
csvPathname = 'hhp.cut3.214.data.gz'
print "RF start on ", csvPathname, "this will probably take 1 minute.."
start = time.time()
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
h2o_cmd.runRF(parseResult=parseResult, ntrees=5,
timeoutSecs=800, retryDelaySecs=15)
print "RF end on ", csvPathname, 'took', time.time() - start, 'seconds'
if __name__ == '__main__':
h2o.unit_main()
|
Increase timeout from 400 to 800 seconds.
|
Increase timeout from 400 to 800 seconds.
|
Python
|
apache-2.0
|
h2oai/h2o-2,100star/h2o,h2oai/h2o,h2oai/h2o,vbelakov/h2o,elkingtonmcb/h2o-2,calvingit21/h2o-2,h2oai/h2o,rowhit/h2o-2,h2oai/h2o-2,h2oai/h2o-2,elkingtonmcb/h2o-2,h2oai/h2o,calvingit21/h2o-2,h2oai/h2o-2,rowhit/h2o-2,rowhit/h2o-2,vbelakov/h2o,eg-zhang/h2o-2,h2oai/h2o,calvingit21/h2o-2,h2oai/h2o,vbelakov/h2o,100star/h2o,calvingit21/h2o-2,h2oai/h2o,h2oai/h2o,eg-zhang/h2o-2,111t8e/h2o-2,rowhit/h2o-2,rowhit/h2o-2,calvingit21/h2o-2,111t8e/h2o-2,111t8e/h2o-2,vbelakov/h2o,h2oai/h2o-2,eg-zhang/h2o-2,100star/h2o,elkingtonmcb/h2o-2,h2oai/h2o,100star/h2o,111t8e/h2o-2,eg-zhang/h2o-2,h2oai/h2o-2,vbelakov/h2o,rowhit/h2o-2,100star/h2o,elkingtonmcb/h2o-2,111t8e/h2o-2,vbelakov/h2o,100star/h2o,vbelakov/h2o,rowhit/h2o-2,calvingit21/h2o-2,eg-zhang/h2o-2,111t8e/h2o-2,100star/h2o,rowhit/h2o-2,elkingtonmcb/h2o-2,eg-zhang/h2o-2,100star/h2o,calvingit21/h2o-2,h2oai/h2o-2,100star/h2o,h2oai/h2o,eg-zhang/h2o-2,vbelakov/h2o,h2oai/h2o-2,eg-zhang/h2o-2,vbelakov/h2o,calvingit21/h2o-2,calvingit21/h2o-2,h2oai/h2o-2,elkingtonmcb/h2o-2,elkingtonmcb/h2o-2,rowhit/h2o-2,111t8e/h2o-2,h2oai/h2o-2,111t8e/h2o-2,elkingtonmcb/h2o-2,111t8e/h2o-2,elkingtonmcb/h2o-2,calvingit21/h2o-2,rowhit/h2o-2,111t8e/h2o-2,eg-zhang/h2o-2,vbelakov/h2o,elkingtonmcb/h2o-2,eg-zhang/h2o-2
|
import unittest, time, sys, os
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(4)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_rf_200x4_fvec(self):
h2o.beta_features = True
csvPathname = 'hhp.cut3.214.data.gz'
print "RF start on ", csvPathname, "this will probably take 1 minute.."
start = time.time()
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
h2o_cmd.runRF(parseResult=parseResult, ntrees=5,
timeoutSecs=400, retryDelaySecs=15)
print "RF end on ", csvPathname, 'took', time.time() - start, 'seconds'
if __name__ == '__main__':
h2o.unit_main()
Increase timeout from 400 to 800 seconds.
|
import unittest, time, sys, os
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(4)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_rf_200x4_fvec(self):
h2o.beta_features = True
csvPathname = 'hhp.cut3.214.data.gz'
print "RF start on ", csvPathname, "this will probably take 1 minute.."
start = time.time()
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
h2o_cmd.runRF(parseResult=parseResult, ntrees=5,
timeoutSecs=800, retryDelaySecs=15)
print "RF end on ", csvPathname, 'took', time.time() - start, 'seconds'
if __name__ == '__main__':
h2o.unit_main()
|
<commit_before>import unittest, time, sys, os
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(4)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_rf_200x4_fvec(self):
h2o.beta_features = True
csvPathname = 'hhp.cut3.214.data.gz'
print "RF start on ", csvPathname, "this will probably take 1 minute.."
start = time.time()
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
h2o_cmd.runRF(parseResult=parseResult, ntrees=5,
timeoutSecs=400, retryDelaySecs=15)
print "RF end on ", csvPathname, 'took', time.time() - start, 'seconds'
if __name__ == '__main__':
h2o.unit_main()
<commit_msg>Increase timeout from 400 to 800 seconds.<commit_after>
|
import unittest, time, sys, os
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(4)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_rf_200x4_fvec(self):
h2o.beta_features = True
csvPathname = 'hhp.cut3.214.data.gz'
print "RF start on ", csvPathname, "this will probably take 1 minute.."
start = time.time()
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
h2o_cmd.runRF(parseResult=parseResult, ntrees=5,
timeoutSecs=800, retryDelaySecs=15)
print "RF end on ", csvPathname, 'took', time.time() - start, 'seconds'
if __name__ == '__main__':
h2o.unit_main()
|
import unittest, time, sys, os
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(4)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_rf_200x4_fvec(self):
h2o.beta_features = True
csvPathname = 'hhp.cut3.214.data.gz'
print "RF start on ", csvPathname, "this will probably take 1 minute.."
start = time.time()
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
h2o_cmd.runRF(parseResult=parseResult, ntrees=5,
timeoutSecs=400, retryDelaySecs=15)
print "RF end on ", csvPathname, 'took', time.time() - start, 'seconds'
if __name__ == '__main__':
h2o.unit_main()
Increase timeout from 400 to 800 seconds.
import unittest, time, sys, os
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(4)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_rf_200x4_fvec(self):
h2o.beta_features = True
csvPathname = 'hhp.cut3.214.data.gz'
print "RF start on ", csvPathname, "this will probably take 1 minute.."
start = time.time()
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
h2o_cmd.runRF(parseResult=parseResult, ntrees=5,
timeoutSecs=800, retryDelaySecs=15)
print "RF end on ", csvPathname, 'took', time.time() - start, 'seconds'
if __name__ == '__main__':
h2o.unit_main()
|
<commit_before>import unittest, time, sys, os
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(4)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_rf_200x4_fvec(self):
h2o.beta_features = True
csvPathname = 'hhp.cut3.214.data.gz'
print "RF start on ", csvPathname, "this will probably take 1 minute.."
start = time.time()
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
h2o_cmd.runRF(parseResult=parseResult, ntrees=5,
timeoutSecs=400, retryDelaySecs=15)
print "RF end on ", csvPathname, 'took', time.time() - start, 'seconds'
if __name__ == '__main__':
h2o.unit_main()
<commit_msg>Increase timeout from 400 to 800 seconds.<commit_after>import unittest, time, sys, os
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_hosts, h2o_import as h2i
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(4)
else:
h2o_hosts.build_cloud_with_hosts()
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_rf_200x4_fvec(self):
h2o.beta_features = True
csvPathname = 'hhp.cut3.214.data.gz'
print "RF start on ", csvPathname, "this will probably take 1 minute.."
start = time.time()
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, schema='put')
h2o_cmd.runRF(parseResult=parseResult, ntrees=5,
timeoutSecs=800, retryDelaySecs=15)
print "RF end on ", csvPathname, 'took', time.time() - start, 'seconds'
if __name__ == '__main__':
h2o.unit_main()
|
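Raising timeoutSecs from 400 to 800 only extends how long the harness keeps polling before declaring failure; retryDelaySecs still sets the poll interval. A rough sketch of that kind of polling loop (wait_for and its parameters are illustrative stand-ins, not the h2o API):

import time

def wait_for(predicate, timeout_secs=800, retry_delay_secs=15):
    # Poll until predicate() is truthy or the deadline passes.
    deadline = time.time() + timeout_secs
    while time.time() < deadline:
        if predicate():
            return True
        time.sleep(retry_delay_secs)
    raise TimeoutError("job did not finish within %d seconds" % timeout_secs)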
89a001a1c4b5f8726c710c0dd4046ceb8df1fe5b
|
tests/test_fields_virtual.py
|
tests/test_fields_virtual.py
|
# -*- coding: utf-8 -*-
import pytest
import odin
class MultiPartResource(odin.Resource):
id = odin.IntegerField()
code = odin.StringField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
class TestFields(object):
def test_multipartfield__get_value(self):
target = MultiPartResource(id=42, code='29A')
assert '42:29A' == target.two_parts
def test_multipartfield__unknown_fields(self):
with pytest.raises(AttributeError) as result:
class BadMultiPartResource(odin.Resource):
id = odin.IntegerField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
assert result.value.message.startswith("Attribute 'code' not found")
|
# -*- coding: utf-8 -*-
import pytest
import odin
class MultiPartResource(odin.Resource):
id = odin.IntegerField()
code = odin.StringField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
class TestFields(object):
def test_multipartfield__get_value(self):
target = MultiPartResource(id=42, code='29A')
assert '42:29A' == target.two_parts
def test_multipartfield__unknown_fields(self):
with pytest.raises(AttributeError) as result:
class BadMultiPartResource(odin.Resource):
id = odin.IntegerField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
assert str(result.value).startswith("Attribute 'code' not found")
|
Fix test for python 3
|
Fix test for python 3
|
Python
|
bsd-3-clause
|
python-odin/odin
|
# -*- coding: utf-8 -*-
import pytest
import odin
class MultiPartResource(odin.Resource):
id = odin.IntegerField()
code = odin.StringField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
class TestFields(object):
def test_multipartfield__get_value(self):
target = MultiPartResource(id=42, code='29A')
assert '42:29A' == target.two_parts
def test_multipartfield__unknown_fields(self):
with pytest.raises(AttributeError) as result:
class BadMultiPartResource(odin.Resource):
id = odin.IntegerField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
assert result.value.message.startswith("Attribute 'code' not found")
Fix test for python 3
|
# -*- coding: utf-8 -*-
import pytest
import odin
class MultiPartResource(odin.Resource):
id = odin.IntegerField()
code = odin.StringField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
class TestFields(object):
def test_multipartfield__get_value(self):
target = MultiPartResource(id=42, code='29A')
assert '42:29A' == target.two_parts
def test_multipartfield__unknown_fields(self):
with pytest.raises(AttributeError) as result:
class BadMultiPartResource(odin.Resource):
id = odin.IntegerField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
assert str(result.value).startswith("Attribute 'code' not found")
|
<commit_before># -*- coding: utf-8 -*-
import pytest
import odin
class MultiPartResource(odin.Resource):
id = odin.IntegerField()
code = odin.StringField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
class TestFields(object):
def test_multipartfield__get_value(self):
target = MultiPartResource(id=42, code='29A')
assert '42:29A' == target.two_parts
def test_multipartfield__unknown_fields(self):
with pytest.raises(AttributeError) as result:
class BadMultiPartResource(odin.Resource):
id = odin.IntegerField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
assert result.value.message.startswith("Attribute 'code' not found")
<commit_msg>Fix test for python 3<commit_after>
|
# -*- coding: utf-8 -*-
import pytest
import odin
class MultiPartResource(odin.Resource):
id = odin.IntegerField()
code = odin.StringField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
class TestFields(object):
def test_multipartfield__get_value(self):
target = MultiPartResource(id=42, code='29A')
assert '42:29A' == target.two_parts
def test_multipartfield__unknown_fields(self):
with pytest.raises(AttributeError) as result:
class BadMultiPartResource(odin.Resource):
id = odin.IntegerField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
assert str(result.value).startswith("Attribute 'code' not found")
|
# -*- coding: utf-8 -*-
import pytest
import odin
class MultiPartResource(odin.Resource):
id = odin.IntegerField()
code = odin.StringField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
class TestFields(object):
def test_multipartfield__get_value(self):
target = MultiPartResource(id=42, code='29A')
assert '42:29A' == target.two_parts
def test_multipartfield__unknown_fields(self):
with pytest.raises(AttributeError) as result:
class BadMultiPartResource(odin.Resource):
id = odin.IntegerField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
assert result.value.message.startswith("Attribute 'code' not found")
Fix test for python 3
# -*- coding: utf-8 -*-
import pytest
import odin
class MultiPartResource(odin.Resource):
id = odin.IntegerField()
code = odin.StringField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
class TestFields(object):
def test_multipartfield__get_value(self):
target = MultiPartResource(id=42, code='29A')
assert '42:29A' == target.two_parts
def test_multipartfield__unknown_fields(self):
with pytest.raises(AttributeError) as result:
class BadMultiPartResource(odin.Resource):
id = odin.IntegerField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
assert str(result.value).startswith("Attribute 'code' not found")
|
<commit_before># -*- coding: utf-8 -*-
import pytest
import odin
class MultiPartResource(odin.Resource):
id = odin.IntegerField()
code = odin.StringField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
class TestFields(object):
def test_multipartfield__get_value(self):
target = MultiPartResource(id=42, code='29A')
assert '42:29A' == target.two_parts
def test_multipartfield__unknown_fields(self):
with pytest.raises(AttributeError) as result:
class BadMultiPartResource(odin.Resource):
id = odin.IntegerField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
assert result.value.message.startswith("Attribute 'code' not found")
<commit_msg>Fix test for python 3<commit_after># -*- coding: utf-8 -*-
import pytest
import odin
class MultiPartResource(odin.Resource):
id = odin.IntegerField()
code = odin.StringField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
class TestFields(object):
def test_multipartfield__get_value(self):
target = MultiPartResource(id=42, code='29A')
assert '42:29A' == target.two_parts
def test_multipartfield__unknown_fields(self):
with pytest.raises(AttributeError) as result:
class BadMultiPartResource(odin.Resource):
id = odin.IntegerField()
two_parts = odin.MultiPartField(('id', 'code'), separator=':')
assert str(result.value).startswith("Attribute 'code' not found")
|
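The one-line change matters because BaseException.message was deprecated in Python 2.6 and removed entirely in Python 3, while str(exc) yields the message text on both lines. A minimal demonstration:

try:
    raise AttributeError("Attribute 'code' not found on resource")
except AttributeError as exc:
    assert str(exc).startswith("Attribute 'code' not found")
    # exc.message would itself raise AttributeError on Python 3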
92216b0f09ee7de1d43ef54f9a1c7072faedabb5
|
tests/test_tracker_stores.py
|
tests/test_tracker_stores.py
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.channels import UserMessage
from rasa_core.domain import TemplateDomain
from rasa_core.events import SlotSet
from rasa_core.tracker_store import InMemoryTrackerStore
domain = TemplateDomain.load("data/test_domains/default_with_topic.yml")
def test_get_or_create():
slot_key = 'location'
slot_val = 'Easter Island'
store = InMemoryTrackerStore(domain)
tracker = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER)
ev = SlotSet(slot_key, slot_val)
tracker.update(ev)
assert tracker.get_slot(slot_key) == slot_val
store.save(tracker)
again = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER)
assert again.get_slot(slot_key) == slot_val
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.channels import UserMessage
from rasa_core.domain import TemplateDomain
from rasa_core.events import SlotSet
from rasa_core.tracker_store import InMemoryTrackerStore
domain = TemplateDomain.load("data/test_domains/default_with_topic.yml")
def test_get_or_create():
slot_key = 'location'
slot_val = 'Easter Island'
store = InMemoryTrackerStore(domain)
tracker = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER_ID)
ev = SlotSet(slot_key, slot_val)
tracker.update(ev)
assert tracker.get_slot(slot_key) == slot_val
store.save(tracker)
again = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER_ID)
assert again.get_slot(slot_key) == slot_val
|
Update tracker store test for 0.7.5
|
Update tracker store test for 0.7.5
|
Python
|
apache-2.0
|
RasaHQ/rasa_nlu,RasaHQ/rasa_core,RasaHQ/rasa_nlu,RasaHQ/rasa_nlu,RasaHQ/rasa_core,RasaHQ/rasa_core
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.channels import UserMessage
from rasa_core.domain import TemplateDomain
from rasa_core.events import SlotSet
from rasa_core.tracker_store import InMemoryTrackerStore
domain = TemplateDomain.load("data/test_domains/default_with_topic.yml")
def test_get_or_create():
slot_key = 'location'
slot_val = 'Easter Island'
store = InMemoryTrackerStore(domain)
tracker = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER)
ev = SlotSet(slot_key, slot_val)
tracker.update(ev)
assert tracker.get_slot(slot_key) == slot_val
store.save(tracker)
again = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER)
assert again.get_slot(slot_key) == slot_val
Update tracker store test for 0.7.5
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.channels import UserMessage
from rasa_core.domain import TemplateDomain
from rasa_core.events import SlotSet
from rasa_core.tracker_store import InMemoryTrackerStore
domain = TemplateDomain.load("data/test_domains/default_with_topic.yml")
def test_get_or_create():
slot_key = 'location'
slot_val = 'Easter Island'
store = InMemoryTrackerStore(domain)
tracker = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER_ID)
ev = SlotSet(slot_key, slot_val)
tracker.update(ev)
assert tracker.get_slot(slot_key) == slot_val
store.save(tracker)
again = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER_ID)
assert again.get_slot(slot_key) == slot_val
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.channels import UserMessage
from rasa_core.domain import TemplateDomain
from rasa_core.events import SlotSet
from rasa_core.tracker_store import InMemoryTrackerStore
domain = TemplateDomain.load("data/test_domains/default_with_topic.yml")
def test_get_or_create():
slot_key = 'location'
slot_val = 'Easter Island'
store = InMemoryTrackerStore(domain)
tracker = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER)
ev = SlotSet(slot_key, slot_val)
tracker.update(ev)
assert tracker.get_slot(slot_key) == slot_val
store.save(tracker)
again = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER)
assert again.get_slot(slot_key) == slot_val
<commit_msg>Update tracker store test for 0.7.5<commit_after>
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.channels import UserMessage
from rasa_core.domain import TemplateDomain
from rasa_core.events import SlotSet
from rasa_core.tracker_store import InMemoryTrackerStore
domain = TemplateDomain.load("data/test_domains/default_with_topic.yml")
def test_get_or_create():
slot_key = 'location'
slot_val = 'Easter Island'
store = InMemoryTrackerStore(domain)
tracker = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER_ID)
ev = SlotSet(slot_key, slot_val)
tracker.update(ev)
assert tracker.get_slot(slot_key) == slot_val
store.save(tracker)
again = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER_ID)
assert again.get_slot(slot_key) == slot_val
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.channels import UserMessage
from rasa_core.domain import TemplateDomain
from rasa_core.events import SlotSet
from rasa_core.tracker_store import InMemoryTrackerStore
domain = TemplateDomain.load("data/test_domains/default_with_topic.yml")
def test_get_or_create():
slot_key = 'location'
slot_val = 'Easter Island'
store = InMemoryTrackerStore(domain)
tracker = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER)
ev = SlotSet(slot_key, slot_val)
tracker.update(ev)
assert tracker.get_slot(slot_key) == slot_val
store.save(tracker)
again = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER)
assert again.get_slot(slot_key) == slot_val
Update tracker store test for 0.7.5
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.channels import UserMessage
from rasa_core.domain import TemplateDomain
from rasa_core.events import SlotSet
from rasa_core.tracker_store import InMemoryTrackerStore
domain = TemplateDomain.load("data/test_domains/default_with_topic.yml")
def test_get_or_create():
slot_key = 'location'
slot_val = 'Easter Island'
store = InMemoryTrackerStore(domain)
tracker = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER_ID)
ev = SlotSet(slot_key, slot_val)
tracker.update(ev)
assert tracker.get_slot(slot_key) == slot_val
store.save(tracker)
again = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER_ID)
assert again.get_slot(slot_key) == slot_val
|
<commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.channels import UserMessage
from rasa_core.domain import TemplateDomain
from rasa_core.events import SlotSet
from rasa_core.tracker_store import InMemoryTrackerStore
domain = TemplateDomain.load("data/test_domains/default_with_topic.yml")
def test_get_or_create():
slot_key = 'location'
slot_val = 'Easter Island'
store = InMemoryTrackerStore(domain)
tracker = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER)
ev = SlotSet(slot_key, slot_val)
tracker.update(ev)
assert tracker.get_slot(slot_key) == slot_val
store.save(tracker)
again = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER)
assert again.get_slot(slot_key) == slot_val
<commit_msg>Update tracker store test for 0.7.5<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
from rasa_core.channels import UserMessage
from rasa_core.domain import TemplateDomain
from rasa_core.events import SlotSet
from rasa_core.tracker_store import InMemoryTrackerStore
domain = TemplateDomain.load("data/test_domains/default_with_topic.yml")
def test_get_or_create():
slot_key = 'location'
slot_val = 'Easter Island'
store = InMemoryTrackerStore(domain)
tracker = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER_ID)
ev = SlotSet(slot_key, slot_val)
tracker.update(ev)
assert tracker.get_slot(slot_key) == slot_val
store.save(tracker)
again = store.get_or_create_tracker(UserMessage.DEFAULT_SENDER_ID)
assert again.get_slot(slot_key) == slot_val
|
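The test broke on 0.7.5 purely because the constant was renamed from DEFAULT_SENDER to DEFAULT_SENDER_ID. Code that has to straddle both versions can fall back with getattr; the class below is a stub that mimics the rename, not rasa_core itself:

class UserMessage:
    DEFAULT_SENDER_ID = "default"  # the 0.7.5 spelling of the constant

sender = (getattr(UserMessage, "DEFAULT_SENDER_ID", None)
          or getattr(UserMessage, "DEFAULT_SENDER", None))
assert sender == "default"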
74fde273d79248d4ad1c0cfd47d2861c83b50cbd
|
kolibri/auth/migrations/0007_auto_20171226_1125.py
|
kolibri/auth/migrations/0007_auto_20171226_1125.py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-12-26 19:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0006_auto_20171206_1207'),
]
operations = [
migrations.AlterField(
model_name='facilitydataset',
name='preset',
field=models.CharField(choices=[('informal', 'Informal and personal use'), ('nonformal', 'Self-managed'), ('formal', 'Admin-managed')], default='nonformal', max_length=50),
),
migrations.AlterUniqueTogether(
name='facilityuser',
unique_together=set([]),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-12-26 19:25
from __future__ import unicode_literals
from django.db import migrations, models
# This is necessary because:
# 1. The list generator has an unpredictable order, and when items swap places
# then this would be picked up as a change in Django if we had used the generated list inline.
# 2. These choices can be changed in facility_configuration_presets.json
# and such change should not warrant warnings that models are inconsistent
# as it has no impact.
# Notice: The 'choices' property of a field does NOT have any impact on DB
# See: https://github.com/learningequality/kolibri/pull/3180
from ..constants.facility_presets import choices as facility_choices
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0006_auto_20171206_1207'),
]
operations = [
migrations.AlterField(
model_name='facilitydataset',
name='preset',
field=models.CharField(choices=facility_choices, default='nonformal', max_length=50),
),
migrations.AlterUniqueTogether(
name='facilityuser',
unique_together=set([]),
),
]
|
Fix for dynamic value of FacilityDataset.preset.choices causing migration inconsistencies
|
Fix for dynamic value of FacilityDataset.preset.choices causing migration inconsistencies
|
Python
|
mit
|
christianmemije/kolibri,christianmemije/kolibri,indirectlylit/kolibri,benjaoming/kolibri,lyw07/kolibri,learningequality/kolibri,mrpau/kolibri,mrpau/kolibri,lyw07/kolibri,indirectlylit/kolibri,christianmemije/kolibri,jonboiser/kolibri,jonboiser/kolibri,DXCanas/kolibri,lyw07/kolibri,mrpau/kolibri,mrpau/kolibri,benjaoming/kolibri,indirectlylit/kolibri,christianmemije/kolibri,jonboiser/kolibri,learningequality/kolibri,DXCanas/kolibri,lyw07/kolibri,learningequality/kolibri,benjaoming/kolibri,DXCanas/kolibri,DXCanas/kolibri,learningequality/kolibri,jonboiser/kolibri,indirectlylit/kolibri,benjaoming/kolibri
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-12-26 19:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0006_auto_20171206_1207'),
]
operations = [
migrations.AlterField(
model_name='facilitydataset',
name='preset',
field=models.CharField(choices=[('informal', 'Informal and personal use'), ('nonformal', 'Self-managed'), ('formal', 'Admin-managed')], default='nonformal', max_length=50),
),
migrations.AlterUniqueTogether(
name='facilityuser',
unique_together=set([]),
),
]
Fix for dynamic value of FacilityDataset.preset.choices causing migration inconsistencies
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-12-26 19:25
from __future__ import unicode_literals
from django.db import migrations, models
# This is necessary because:
# 1. The list generator has an unpredictable order, and when items swap places
# then this would be picked up as a change in Django if we had used the generated list inline.
# 2. These choices can be changed in facility_configuration_presets.json
# and such change should not warrant warnings that models are inconsistent
# as it has no impact.
# Notice: The 'choices' property of a field does NOT have any impact on DB
# See: https://github.com/learningequality/kolibri/pull/3180
from ..constants.facility_presets import choices as facility_choices
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0006_auto_20171206_1207'),
]
operations = [
migrations.AlterField(
model_name='facilitydataset',
name='preset',
field=models.CharField(choices=facility_choices, default='nonformal', max_length=50),
),
migrations.AlterUniqueTogether(
name='facilityuser',
unique_together=set([]),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-12-26 19:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0006_auto_20171206_1207'),
]
operations = [
migrations.AlterField(
model_name='facilitydataset',
name='preset',
field=models.CharField(choices=[('informal', 'Informal and personal use'), ('nonformal', 'Self-managed'), ('formal', 'Admin-managed')], default='nonformal', max_length=50),
),
migrations.AlterUniqueTogether(
name='facilityuser',
unique_together=set([]),
),
]
<commit_msg>Fix for dynamic value of FacilityDataset.preset.choices causing migration inconsistencies<commit_after>
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-12-26 19:25
from __future__ import unicode_literals
from django.db import migrations, models
# This is necessary because:
# 1. The list generator has an unpredictable order, and when items swap places
# then this would be picked up as a change in Django if we had used the generated list inline.
# 2. These choices can be changed in facility_configuration_presets.json
# and such change should not warrant warnings that models are inconsistent
# as it has no impact.
# Notice: The 'choices' property of a field does NOT have any impact on DB
# See: https://github.com/learningequality/kolibri/pull/3180
from ..constants.facility_presets import choices as facility_choices
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0006_auto_20171206_1207'),
]
operations = [
migrations.AlterField(
model_name='facilitydataset',
name='preset',
field=models.CharField(choices=facility_choices, default='nonformal', max_length=50),
),
migrations.AlterUniqueTogether(
name='facilityuser',
unique_together=set([]),
),
]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-12-26 19:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0006_auto_20171206_1207'),
]
operations = [
migrations.AlterField(
model_name='facilitydataset',
name='preset',
field=models.CharField(choices=[('informal', 'Informal and personal use'), ('nonformal', 'Self-managed'), ('formal', 'Admin-managed')], default='nonformal', max_length=50),
),
migrations.AlterUniqueTogether(
name='facilityuser',
unique_together=set([]),
),
]
Fix for dynamic value of FacilityDataset.preset.choices causing migration inconsistencies
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-12-26 19:25
from __future__ import unicode_literals
from django.db import migrations, models
# This is necessary because:
# 1. The list generator has an unpredictable order, and when items swap places
# then this would be picked up as a change in Django if we had used the generated list inline.
# 2. These choices can be changed in facility_configuration_presets.json
# and such change should not warrant warnings that models are inconsistent
# as it has no impact.
# Notice: The 'choices' property of a field does NOT have any impact on DB
# See: https://github.com/learningequality/kolibri/pull/3180
from ..constants.facility_presets import choices as facility_choices
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0006_auto_20171206_1207'),
]
operations = [
migrations.AlterField(
model_name='facilitydataset',
name='preset',
field=models.CharField(choices=facility_choices, default='nonformal', max_length=50),
),
migrations.AlterUniqueTogether(
name='facilityuser',
unique_together=set([]),
),
]
|
<commit_before># -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-12-26 19:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0006_auto_20171206_1207'),
]
operations = [
migrations.AlterField(
model_name='facilitydataset',
name='preset',
field=models.CharField(choices=[('informal', 'Informal and personal use'), ('nonformal', 'Self-managed'), ('formal', 'Admin-managed')], default='nonformal', max_length=50),
),
migrations.AlterUniqueTogether(
name='facilityuser',
unique_together=set([]),
),
]
<commit_msg>Fix for dynamic value of FacilityDataset.preset.choices causing migration inconsistencies<commit_after># -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-12-26 19:25
from __future__ import unicode_literals
from django.db import migrations, models
# This is necessary because:
# 1. The list generator has an unpredictable order, and when items swap places
# then this would be picked up as a change in Django if we had used the generated list inline.
# 2. These choices can be changed in facility_configuration_presets.json
# and such change should not warrant warnings that models are inconsistent
# as it has no impact.
# Notice: The 'choices' property of a field does NOT have any impact on DB
# See: https://github.com/learningequality/kolibri/pull/3180
from ..constants.facility_presets import choices as facility_choices
class Migration(migrations.Migration):
dependencies = [
('kolibriauth', '0006_auto_20171206_1207'),
]
operations = [
migrations.AlterField(
model_name='facilitydataset',
name='preset',
field=models.CharField(choices=facility_choices, default='nonformal', max_length=50),
),
migrations.AlterUniqueTogether(
name='facilityuser',
unique_together=set([]),
),
]
|
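Importing the choices from one shared constants module gives makemigrations a single stable value to compare against, so cosmetic reordering of the presets no longer diffs as a model change. A sketch of deriving a deterministic choices list from preset data (the JSON shape here is an assumption, not the real facility_configuration_presets.json schema):

import json

presets = json.loads(
    '{"informal": "Informal and personal use", "nonformal": "Self-managed"}'
)
# Sorting fixes the order, so regenerating the list is always identical.
choices = sorted(presets.items())
assert choices == [("informal", "Informal and personal use"),
                   ("nonformal", "Self-managed")]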
2320dd29d23d03562319cfbb5cdf46e46795d79b
|
trex/views/project.py
|
trex/views/project.py
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE coming with the source of 'trex' for details.
#
from rest_framework import generics
from trex.models.project import Project, Entry
from trex.serializers import (
ProjectSerializer, ProjectDetailSerializer, EntryDetailSerializer)
class ProjectListCreateAPIView(generics.ListCreateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Project.objects.all()
serializer_class = ProjectDetailSerializer
class EntryDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Entry.objects.all()
serializer_class = EntryDetailSerializer
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE coming with the source of 'trex' for details.
#
from rest_framework import generics, status
from rest_framework.response import Response
from trex.models.project import Project, Entry
from trex.parsers import PlainTextParser
from trex.serializers import (
ProjectSerializer, ProjectDetailSerializer, EntryDetailSerializer)
from trex.utils import Zeiterfassung
class ProjectListCreateAPIView(generics.ListCreateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Project.objects.all()
serializer_class = ProjectDetailSerializer
class ProjectZeiterfassungAPIView(generics.CreateAPIView):
queryset = Project.objects.all()
parser_classes = (PlainTextParser,)
serializer_class = ProjectDetailSerializer
def create(self, request, *args, **kwargs):
try:
proj = self.get_object()
except Project.DoesNotExist:
errors = self._create_errors("Project does not exist")
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
zeiterfassung = Zeiterfassung(request.DATA)
try:
proj.create_entries_from_zeiterfassung(zeiterfassung)
except Exception, e:
errors = self._create_errors(str(e))
# TODO review if e could contain info not suited for the user
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
serializer = self.get_serializer(proj)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED,
headers=headers)
def _create_errors(self, msg):
return {"non_field_errors": [msg]}
class EntryDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Entry.objects.all()
serializer_class = EntryDetailSerializer
|
Add view to add Zeiterfassung entries from a plain text submission
|
Add view to add Zeiterfassung entries from a plain text submission
|
Python
|
mit
|
bjoernricks/trex,bjoernricks/trex
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE coming with the source of 'trex' for details.
#
from rest_framework import generics
from trex.models.project import Project, Entry
from trex.serializers import (
ProjectSerializer, ProjectDetailSerializer, EntryDetailSerializer)
class ProjectListCreateAPIView(generics.ListCreateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Project.objects.all()
serializer_class = ProjectDetailSerializer
class EntryDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Entry.objects.all()
serializer_class = EntryDetailSerializer
Add view to add Zeiterfassung entries from a plain text submission
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE coming with the source of 'trex' for details.
#
from rest_framework import generics, status
from rest_framework.response import Response
from trex.models.project import Project, Entry
from trex.parsers import PlainTextParser
from trex.serializers import (
ProjectSerializer, ProjectDetailSerializer, EntryDetailSerializer)
from trex.utils import Zeiterfassung
class ProjectListCreateAPIView(generics.ListCreateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Project.objects.all()
serializer_class = ProjectDetailSerializer
class ProjectZeiterfassungAPIView(generics.CreateAPIView):
queryset = Project.objects.all()
parser_classes = (PlainTextParser,)
serializer_class = ProjectDetailSerializer
def create(self, request, *args, **kwargs):
try:
proj = self.get_object()
except Project.DoesNotExist:
errors = self._create_errors("Project does not exist")
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
zeiterfassung = Zeiterfassung(request.DATA)
try:
proj.create_entries_from_zeiterfassung(zeiterfassung)
except Exception, e:
errors = self._create_errors(str(e))
# TODO review if e could contain info not suited for the user
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
serializer = self.get_serializer(proj)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED,
headers=headers)
def _create_errors(self, msg):
return {"non_field_errors": [msg]}
class EntryDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Entry.objects.all()
serializer_class = EntryDetailSerializer
|
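The new view funnels every failure path into one response shape via _create_errors. A dependency-free sketch of that error convention (only the dict layout mirrors DRF's non_field_errors; Response and status are deliberately not imported here):

def create_errors(msg):
    # Mirrors the {"non_field_errors": [...]} shape returned by the view above.
    return {"non_field_errors": [msg]}

assert create_errors("Project does not exist") == {
    "non_field_errors": ["Project does not exist"],
}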
<commit_before># -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE coming with the source of 'trex' for details.
#
from rest_framework import generics
from trex.models.project import Project, Entry
from trex.serializers import (
ProjectSerializer, ProjectDetailSerializer, EntryDetailSerializer)
class ProjectListCreateAPIView(generics.ListCreateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Project.objects.all()
serializer_class = ProjectDetailSerializer
class EntryDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Entry.objects.all()
serializer_class = EntryDetailSerializer
<commit_msg>Add view to add Zeiterfassung entries from a plain text submission<commit_after>
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE coming with the source of 'trex' for details.
#
from rest_framework import generics, status
from rest_framework.response import Response
from trex.models.project import Project, Entry
from trex.parsers import PlainTextParser
from trex.serializers import (
ProjectSerializer, ProjectDetailSerializer, EntryDetailSerializer)
from trex.utils import Zeiterfassung
class ProjectListCreateAPIView(generics.ListCreateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Project.objects.all()
serializer_class = ProjectDetailSerializer
class ProjectZeiterfassungAPIView(generics.CreateAPIView):
queryset = Project.objects.all()
parser_classes = (PlainTextParser,)
serializer_class = ProjectDetailSerializer
def create(self, request, *args, **kwargs):
try:
proj = self.get_object()
except Project.DoesNotExist:
errors = self._create_errors("Project does not exist")
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
zeiterfassung = Zeiterfassung(request.DATA)
try:
proj.create_entries_from_zeiterfassung(zeiterfassung)
except Exception, e:
errors = self._create_errors(str(e))
# TODO review if e could contain info not suited for the user
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
serializer = self.get_serializer(proj)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED,
headers=headers)
def _create_errors(self, msg):
return {"non_field_errors": [msg]}
class EntryDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Entry.objects.all()
serializer_class = EntryDetailSerializer
|
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework import generics
from trex.models.project import Project, Entry
from trex.serializers import (
ProjectSerializer, ProjectDetailSerializer, EntryDetailSerializer)
class ProjectListCreateAPIView(generics.ListCreateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Project.objects.all()
serializer_class = ProjectDetailSerializer
class EntryDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Entry.objects.all()
serializer_class = EntryDetailSerializer
Add view to add Zeiterfassung entries from a plain text submission
# -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework import generics, status
from rest_framework.response import Response
from trex.models.project import Project, Entry
from trex.parsers import PlainTextParser
from trex.serializers import (
ProjectSerializer, ProjectDetailSerializer, EntryDetailSerializer)
from trex.utils import Zeiterfassung
class ProjectListCreateAPIView(generics.ListCreateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Project.objects.all()
serializer_class = ProjectDetailSerializer
class ProjectZeiterfassungAPIView(generics.CreateAPIView):
queryset = Project.objects.all()
parser_classes = (PlainTextParser,)
serializer_class = ProjectDetailSerializer
def create(self, request, *args, **kwargs):
try:
proj = self.get_object()
except Project.DoesNotExist:
errors = self._create_errors("Project does not exist")
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
zeiterfassung = Zeiterfassung(request.DATA)
try:
proj.create_entries_from_zeiterfassung(zeiterfassung)
except Exception, e:
errors = self._create_errors(str(e))
# TODO review if e could contain info not suited for the user
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
serializer = self.get_serializer(proj)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED,
headers=headers)
def _create_errors(self, msg):
return {"non_field_errors": [msg]}
class EntryDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Entry.objects.all()
serializer_class = EntryDetailSerializer
|
<commit_before># -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework import generics
from trex.models.project import Project, Entry
from trex.serializers import (
ProjectSerializer, ProjectDetailSerializer, EntryDetailSerializer)
class ProjectListCreateAPIView(generics.ListCreateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Project.objects.all()
serializer_class = ProjectDetailSerializer
class EntryDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Entry.objects.all()
serializer_class = EntryDetailSerializer
<commit_msg>Add view to add Zeiterfassung entries from a plain text submission<commit_after># -*- coding: utf-8 -*-
#
# (c) 2014 Bjoern Ricks <bjoern.ricks@gmail.com>
#
# See LICENSE comming with the source of 'trex' for details.
#
from rest_framework import generics, status
from rest_framework.response import Response
from trex.models.project import Project, Entry
from trex.parsers import PlainTextParser
from trex.serializers import (
ProjectSerializer, ProjectDetailSerializer, EntryDetailSerializer)
from trex.utils import Zeiterfassung
class ProjectListCreateAPIView(generics.ListCreateAPIView):
queryset = Project.objects.all()
serializer_class = ProjectSerializer
class ProjectDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Project.objects.all()
serializer_class = ProjectDetailSerializer
class ProjectZeiterfassungAPIView(generics.CreateAPIView):
queryset = Project.objects.all()
parser_classes = (PlainTextParser,)
serializer_class = ProjectDetailSerializer
def create(self, request, *args, **kwargs):
try:
proj = self.get_object()
except Project.DoesNotExist:
errors = self._create_errors("Project does not exist")
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
zeiterfassung = Zeiterfassung(request.DATA)
try:
proj.create_entries_from_zeiterfassung(zeiterfassung)
except Exception, e:
errors = self._create_errors(str(e))
# TODO review if e could contain info not suited for the user
return Response(errors, status=status.HTTP_400_BAD_REQUEST)
serializer = self.get_serializer(proj)
headers = self.get_success_headers(serializer.data)
return Response(serializer.data, status=status.HTTP_201_CREATED,
headers=headers)
def _create_errors(self, msg):
return {"non_field_errors": [msg]}
class EntryDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
queryset = Entry.objects.all()
serializer_class = EntryDetailSerializer
|
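As an aside, a minimal sketch of how a client could exercise the new endpoint; the URL path and the entry format are assumptions for illustration, not taken from the commit — only the parser, the status codes and the error shape come from the view above.
# Sketch only: the route and the plain-text entry format below are
# hypothetical.
import requests
entry = "2014-01-01;1:30;some work done"  # hypothetical Zeiterfassung line
resp = requests.post(
    "http://localhost:8000/api/projects/1/zeiterfassung/",  # assumed route
    data=entry.encode("utf-8"),
    headers={"Content-Type": "text/plain"},  # handled by PlainTextParser
)
if resp.status_code == 201:
    print(resp.json())                      # serialized project detail
else:
    print(resp.json()["non_field_errors"])  # error messages on 400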
a5ac21234cd8970112be12b1209886dd1208ad9c
|
troposphere/cloud9.py
|
troposphere/cloud9.py
|
# Copyright (c) 2012-2017, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .validators import integer
class Repository(AWSProperty):
props = {
"PathComponent": (str, True),
"RepositoryUrl": (str, True),
}
class EnvironmentEC2(AWSObject):
resource_type = "AWS::Cloud9::EnvironmentEC2"
props = {
"AutomaticStopTimeMinutes": (integer, False),
"Description": (str, False),
"InstanceType": (str, True),
"Name": (str, False),
"OwnerArn": (str, False),
"Repositories": ([Repository], False),
"SubnetId": (str, False),
}
|
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 35.0.0
from troposphere import Tags
from . import AWSObject, AWSProperty
from .validators import integer
class Repository(AWSProperty):
props = {
"PathComponent": (str, True),
"RepositoryUrl": (str, True),
}
class EnvironmentEC2(AWSObject):
resource_type = "AWS::Cloud9::EnvironmentEC2"
props = {
"AutomaticStopTimeMinutes": (integer, False),
"ConnectionType": (str, False),
"Description": (str, False),
"ImageId": (str, False),
"InstanceType": (str, True),
"Name": (str, False),
"OwnerArn": (str, False),
"Repositories": ([Repository], False),
"SubnetId": (str, False),
"Tags": (Tags, False),
}
|
Update Cloud9 per 2021-04-01 changes
|
Update Cloud9 per 2021-04-01 changes
|
Python
|
bsd-2-clause
|
cloudtools/troposphere,cloudtools/troposphere
|
# Copyright (c) 2012-2017, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .validators import integer
class Repository(AWSProperty):
props = {
"PathComponent": (str, True),
"RepositoryUrl": (str, True),
}
class EnvironmentEC2(AWSObject):
resource_type = "AWS::Cloud9::EnvironmentEC2"
props = {
"AutomaticStopTimeMinutes": (integer, False),
"Description": (str, False),
"InstanceType": (str, True),
"Name": (str, False),
"OwnerArn": (str, False),
"Repositories": ([Repository], False),
"SubnetId": (str, False),
}
Update Cloud9 per 2021-04-01 changes
|
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 35.0.0
from troposphere import Tags
from . import AWSObject, AWSProperty
from .validators import integer
class Repository(AWSProperty):
props = {
"PathComponent": (str, True),
"RepositoryUrl": (str, True),
}
class EnvironmentEC2(AWSObject):
resource_type = "AWS::Cloud9::EnvironmentEC2"
props = {
"AutomaticStopTimeMinutes": (integer, False),
"ConnectionType": (str, False),
"Description": (str, False),
"ImageId": (str, False),
"InstanceType": (str, True),
"Name": (str, False),
"OwnerArn": (str, False),
"Repositories": ([Repository], False),
"SubnetId": (str, False),
"Tags": (Tags, False),
}
|
<commit_before># Copyright (c) 2012-2017, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .validators import integer
class Repository(AWSProperty):
props = {
"PathComponent": (str, True),
"RepositoryUrl": (str, True),
}
class EnvironmentEC2(AWSObject):
resource_type = "AWS::Cloud9::EnvironmentEC2"
props = {
"AutomaticStopTimeMinutes": (integer, False),
"Description": (str, False),
"InstanceType": (str, True),
"Name": (str, False),
"OwnerArn": (str, False),
"Repositories": ([Repository], False),
"SubnetId": (str, False),
}
<commit_msg>Update Cloud9 per 2021-04-01 changes<commit_after>
|
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 35.0.0
from troposphere import Tags
from . import AWSObject, AWSProperty
from .validators import integer
class Repository(AWSProperty):
props = {
"PathComponent": (str, True),
"RepositoryUrl": (str, True),
}
class EnvironmentEC2(AWSObject):
resource_type = "AWS::Cloud9::EnvironmentEC2"
props = {
"AutomaticStopTimeMinutes": (integer, False),
"ConnectionType": (str, False),
"Description": (str, False),
"ImageId": (str, False),
"InstanceType": (str, True),
"Name": (str, False),
"OwnerArn": (str, False),
"Repositories": ([Repository], False),
"SubnetId": (str, False),
"Tags": (Tags, False),
}
|
# Copyright (c) 2012-2017, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .validators import integer
class Repository(AWSProperty):
props = {
"PathComponent": (str, True),
"RepositoryUrl": (str, True),
}
class EnvironmentEC2(AWSObject):
resource_type = "AWS::Cloud9::EnvironmentEC2"
props = {
"AutomaticStopTimeMinutes": (integer, False),
"Description": (str, False),
"InstanceType": (str, True),
"Name": (str, False),
"OwnerArn": (str, False),
"Repositories": ([Repository], False),
"SubnetId": (str, False),
}
Update Cloud9 per 2021-04-01 changes
# Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 35.0.0
from troposphere import Tags
from . import AWSObject, AWSProperty
from .validators import integer
class Repository(AWSProperty):
props = {
"PathComponent": (str, True),
"RepositoryUrl": (str, True),
}
class EnvironmentEC2(AWSObject):
resource_type = "AWS::Cloud9::EnvironmentEC2"
props = {
"AutomaticStopTimeMinutes": (integer, False),
"ConnectionType": (str, False),
"Description": (str, False),
"ImageId": (str, False),
"InstanceType": (str, True),
"Name": (str, False),
"OwnerArn": (str, False),
"Repositories": ([Repository], False),
"SubnetId": (str, False),
"Tags": (Tags, False),
}
|
<commit_before># Copyright (c) 2012-2017, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
from . import AWSObject, AWSProperty
from .validators import integer
class Repository(AWSProperty):
props = {
"PathComponent": (str, True),
"RepositoryUrl": (str, True),
}
class EnvironmentEC2(AWSObject):
resource_type = "AWS::Cloud9::EnvironmentEC2"
props = {
"AutomaticStopTimeMinutes": (integer, False),
"Description": (str, False),
"InstanceType": (str, True),
"Name": (str, False),
"OwnerArn": (str, False),
"Repositories": ([Repository], False),
"SubnetId": (str, False),
}
<commit_msg>Update Cloud9 per 2021-04-01 changes<commit_after># Copyright (c) 2012-2021, Mark Peek <mark@peek.org>
# All rights reserved.
#
# See LICENSE file for full license.
#
# *** Do not modify - this file is autogenerated ***
# Resource specification version: 35.0.0
from troposphere import Tags
from . import AWSObject, AWSProperty
from .validators import integer
class Repository(AWSProperty):
props = {
"PathComponent": (str, True),
"RepositoryUrl": (str, True),
}
class EnvironmentEC2(AWSObject):
resource_type = "AWS::Cloud9::EnvironmentEC2"
props = {
"AutomaticStopTimeMinutes": (integer, False),
"ConnectionType": (str, False),
"Description": (str, False),
"ImageId": (str, False),
"InstanceType": (str, True),
"Name": (str, False),
"OwnerArn": (str, False),
"Repositories": ([Repository], False),
"SubnetId": (str, False),
"Tags": (Tags, False),
}
|
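For context, a short sketch of using the updated resource; the property values are illustrative assumptions, not taken from the resource specification, while the property names match the props dict above.
from troposphere import Tags, Template
from troposphere.cloud9 import EnvironmentEC2
t = Template()
t.add_resource(EnvironmentEC2(
    "DevEnvironment",
    InstanceType="t3.small",          # the only required property
    ConnectionType="CONNECT_SSM",     # new in this update
    ImageId="amazonlinux-2-x86_64",   # new in this update
    Tags=Tags(Team="platform"),       # new in this update
))
print(t.to_json())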
ca8349a897c233d72ea74128dabdd1311f00c13c
|
tests/unittest.py
|
tests/unittest.py
|
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
pass
|
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestCase, self).__init__(*args, **kwargs)
level = getattr(self, "loglevel", NEVER)
orig_setUp = self.setUp
def setUp():
old_level = logging.getLogger().level
if old_level != level:
orig_tearDown = self.tearDown
def tearDown():
ret = orig_tearDown()
logging.getLogger().setLevel(old_level)
return ret
self.tearDown = tearDown
logging.getLogger().setLevel(level)
return orig_setUp()
self.setUp = setUp
|
Allow a TestCase to set a 'loglevel' attribute, which overrides the logging level while that testcase runs
|
Allow a TestCase to set a 'loglevel' attribute, which overrides the logging level while that testcase runs
|
Python
|
apache-2.0
|
illicitonion/synapse,TribeMedia/synapse,howethomas/synapse,iot-factory/synapse,howethomas/synapse,TribeMedia/synapse,rzr/synapse,rzr/synapse,illicitonion/synapse,illicitonion/synapse,illicitonion/synapse,TribeMedia/synapse,TribeMedia/synapse,iot-factory/synapse,rzr/synapse,rzr/synapse,matrix-org/synapse,iot-factory/synapse,howethomas/synapse,matrix-org/synapse,howethomas/synapse,iot-factory/synapse,matrix-org/synapse,matrix-org/synapse,illicitonion/synapse,howethomas/synapse,TribeMedia/synapse,rzr/synapse,matrix-org/synapse,matrix-org/synapse,iot-factory/synapse
|
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
pass
Allow a TestCase to set a 'loglevel' attribute, which overrides the logging level while that testcase runs
|
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestCase, self).__init__(*args, **kwargs)
level = getattr(self, "loglevel", NEVER)
orig_setUp = self.setUp
def setUp():
old_level = logging.getLogger().level
if old_level != level:
orig_tearDown = self.tearDown
def tearDown():
ret = orig_tearDown()
logging.getLogger().setLevel(old_level)
return ret
self.tearDown = tearDown
logging.getLogger().setLevel(level)
return orig_setUp()
self.setUp = setUp
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
pass
<commit_msg>Allow a TestCase to set a 'loglevel' attribute, which overrides the logging level while that testcase runs<commit_after>
|
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestCase, self).__init__(*args, **kwargs)
level = getattr(self, "loglevel", NEVER)
orig_setUp = self.setUp
def setUp():
old_level = logging.getLogger().level
if old_level != level:
orig_tearDown = self.tearDown
def tearDown():
ret = orig_tearDown()
logging.getLogger().setLevel(old_level)
return ret
self.tearDown = tearDown
logging.getLogger().setLevel(level)
return orig_setUp()
self.setUp = setUp
|
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
pass
Allow a TestCase to set a 'loglevel' attribute, which overrides the logging level while that testcase runs
# -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestCase, self).__init__(*args, **kwargs)
level = getattr(self, "loglevel", NEVER)
orig_setUp = self.setUp
def setUp():
old_level = logging.getLogger().level
if old_level != level:
orig_tearDown = self.tearDown
def tearDown():
ret = orig_tearDown()
logging.getLogger().setLevel(old_level)
return ret
self.tearDown = tearDown
logging.getLogger().setLevel(level)
return orig_setUp()
self.setUp = setUp
|
<commit_before># -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
pass
<commit_msg>Allow a TestCase to set a 'loglevel' attribute, which overrides the logging level while that testcase runs<commit_after># -*- coding: utf-8 -*-
# Copyright 2014 OpenMarket Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from twisted.trial import unittest
import logging
# logging doesn't have a "don't log anything at all EVARRRR setting,
# but since the highest value is 50, 1000000 should do ;)
NEVER = 1000000
logging.getLogger().addHandler(logging.StreamHandler())
logging.getLogger().setLevel(NEVER)
class TestCase(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(TestCase, self).__init__(*args, **kwargs)
level = getattr(self, "loglevel", NEVER)
orig_setUp = self.setUp
def setUp():
old_level = logging.getLogger().level
if old_level != level:
orig_tearDown = self.tearDown
def tearDown():
ret = orig_tearDown()
logging.getLogger().setLevel(old_level)
return ret
self.tearDown = tearDown
logging.getLogger().setLevel(level)
return orig_setUp()
self.setUp = setUp
|
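A sketch of the intended usage: any subclass that defines a loglevel attribute gets that level applied around its setUp/tearDown cycle. The import path assumes the tests/unittest.py module shown above; the test body is a placeholder.
import logging
from tests.unittest import TestCase  # the wrapper defined above
class MyNoisyTestCase(TestCase):
    # Picked up via getattr(self, "loglevel", NEVER) in __init__.
    loglevel = logging.DEBUG
    def test_something(self):
        self.assertTrue(True)  # logs at DEBUG while this case runs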
ab84c37195feb7ea19be810a7d1a899e5e53ee78
|
tests/test_pdfbuild.py
|
tests/test_pdfbuild.py
|
from latex import build_pdf
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
|
from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
|
Test whether or not the right exception is thrown.
|
Test whether or not the right exception is thrown.
|
Python
|
bsd-3-clause
|
mbr/latex
|
from latex import build_pdf
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
Test whether or not the right exception is thrown.
|
from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
|
<commit_before>from latex import build_pdf
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
<commit_msg>Test whether or not the right exception is thrown.<commit_after>
|
from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
|
from latex import build_pdf
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
Test whether or not the right exception is thrown.
from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
|
<commit_before>from latex import build_pdf
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
<commit_msg>Test whether or not the right exception is thrown.<commit_after>from latex import build_pdf
from latex.exc import LatexBuildError
import pytest
def test_generates_something():
min_latex = r"""
\documentclass{article}
\begin{document}
Hello, world!
\end{document}
"""
pdf = build_pdf(min_latex)
assert pdf
def test_raises_correct_exception_on_fail():
broken_latex = r"""foo"""
with pytest.raises(LatexBuildError):
build_pdf(broken_latex)
|
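If the test later needs to assert on the error itself, pytest.raises also exposes the captured exception as a context manager; a sketch, with a placeholder assertion:
import pytest
from latex import build_pdf
from latex.exc import LatexBuildError
def test_build_error_can_be_inspected():
    with pytest.raises(LatexBuildError) as excinfo:
        build_pdf(r"""foo""")
    # excinfo.value is the captured LatexBuildError instance; real
    # assertions on its contents would go here.
    assert excinfo.type is LatexBuildError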
edd92253a7f37f63021e8dff15372bbbbce63089
|
tfgraphviz/__init__.py
|
tfgraphviz/__init__.py
|
#!/usr/bin/env python
# coding: utf-8
from graphviz_wrapper import board
__author__ = 'akimacho'
__version__ = '0.0.1'
__license__ = 'MIT'
|
#!/usr/bin/env python
# coding: utf-8
from .graphviz_wrapper import board
__author__ = 'akimacho'
__version__ = '0.0.1'
__license__ = 'MIT'
|
Change IMPLICIT relative imports (not allowed in Python3k) to EXPLICIT relative imports (allowed in both Python2k/3k).
|
Change IMPLICIT relative imports (not allowed in Python3k) to EXPLICIT relative imports (allowed in both Python2k/3k).
|
Python
|
mit
|
akimach/tfgraphviz
|
#!/usr/bin/env python
# coding: utf-8
from graphviz_wrapper import board
__author__ = 'akimacho'
__version__ = '0.0.1'
__license__ = 'MIT'
Change IMPLICIT relative imports (not allowed in Python3k) to EXPLICIT relative imports (allowed in both Python2k/3k).
|
#!/usr/bin/env python
# coding: utf-8
from .graphviz_wrapper import board
__author__ = 'akimacho'
__version__ = '0.0.1'
__license__ = 'MIT'
|
<commit_before>#!/usr/bin/env python
# coding: utf-8
from graphviz_wrapper import board
__author__ = 'akimacho'
__version__ = '0.0.1'
__license__ = 'MIT'<commit_msg>Change IMPLICIT relative imports (not allowed in Python3k) to EXPLICIT relative imports (allowed in both Python2k/3k).<commit_after>
|
#!/usr/bin/env python
# coding: utf-8
from .graphviz_wrapper import board
__author__ = 'akimacho'
__version__ = '0.0.1'
__license__ = 'MIT'
|
#!/usr/bin/env python
# coding: utf-8
from graphviz_wrapper import board
__author__ = 'akimacho'
__version__ = '0.0.1'
__license__ = 'MIT'
Change IMPLICIT relative imports (not allowed in Python3k) to EXPLICIT relative imports (allowed in both Python2k/3k).
#!/usr/bin/env python
# coding: utf-8
from .graphviz_wrapper import board
__author__ = 'akimacho'
__version__ = '0.0.1'
__license__ = 'MIT'
|
<commit_before>#!/usr/bin/env python
# coding: utf-8
from graphviz_wrapper import board
__author__ = 'akimacho'
__version__ = '0.0.1'
__license__ = 'MIT'<commit_msg>Change IMPLICIT relative imports (not allowed in Python3k) to EXPLICIT relative imports (allowed in both Python2k/3k).<commit_after>#!/usr/bin/env python
# coding: utf-8
from .graphviz_wrapper import board
__author__ = 'akimacho'
__version__ = '0.0.1'
__license__ = 'MIT'
|
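The distinction the commit message draws, sketched out: inside a package, a bare module name is resolved against the package directory only on Python 2, so intra-package imports must be spelled explicitly to run on both versions.
# Inside a package module such as tfgraphviz/__init__.py:
# Implicit relative import -- Python 2 only, resolved against the
# package directory first:
# from graphviz_wrapper import board
# Explicit relative import -- valid on both Python 2 and Python 3:
from .graphviz_wrapper import board
# Absolute import -- also portable, at the cost of repeating the
# package name:
# from tfgraphviz.graphviz_wrapper import board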
98c7f3afb2276012f22ad50e77fef60d7d71ee5f
|
qtpy/QtSvg.py
|
qtpy/QtSvg.py
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtSvg classes and functions."""
# Local imports
from . import PYQT4, PYQT5, PYSIDE, PythonQtError
if PYQT5:
from PyQt5.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
elif PYQT4:
from PyQt4.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
elif PYSIDE:
from PySide.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
else:
raise PythonQtError('No Qt bindings could be found')
del PYQT4, PYQT5, PYSIDE
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtSvg classes and functions."""
# Local imports
from . import PYQT4, PYQT5, PYSIDE, PythonQtError
if PYQT5:
from PyQt5.QtSvg import *
elif PYQT4:
from PyQt4.QtSvg import *
elif PYSIDE:
from PySide.QtSvg import *
else:
raise PythonQtError('No Qt bindings could be found')
del PYQT4, PYQT5, PYSIDE
|
Use star imports again instead of direct ones
|
QtSvg: Use star imports again instead of direct ones
|
Python
|
mit
|
goanpeca/qtpy,davvid/qtpy,spyder-ide/qtpy,goanpeca/qtpy,davvid/qtpy
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtSvg classes and functions."""
# Local imports
from . import PYQT4, PYQT5, PYSIDE, PythonQtError
if PYQT5:
from PyQt5.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
elif PYQT4:
from PyQt4.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
elif PYSIDE:
from PySide.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
else:
raise PythonQtError('No Qt bindings could be found')
del PYQT4, PYQT5, PYSIDE
QtSvg: Use star imports again instead of direct ones
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtSvg classes and functions."""
# Local imports
from . import PYQT4, PYQT5, PYSIDE, PythonQtError
if PYQT5:
from PyQt5.QtSvg import *
elif PYQT4:
from PyQt4.QtSvg import *
elif PYSIDE:
from PySide.QtSvg import *
else:
raise PythonQtError('No Qt bindings could be found')
del PYQT4, PYQT5, PYSIDE
|
<commit_before># -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtSvg classes and functions."""
# Local imports
from . import PYQT4, PYQT5, PYSIDE, PythonQtError
if PYQT5:
from PyQt5.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
elif PYQT4:
from PyQt4.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
elif PYSIDE:
from PySide.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
else:
raise PythonQtError('No Qt bindings could be found')
del PYQT4, PYQT5, PYSIDE
<commit_msg>QtSvg: Use star imports again instead of direct ones<commit_after>
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtSvg classes and functions."""
# Local imports
from . import PYQT4, PYQT5, PYSIDE, PythonQtError
if PYQT5:
from PyQt5.QtSvg import *
elif PYQT4:
from PyQt4.QtSvg import *
elif PYSIDE:
from PySide.QtSvg import *
else:
raise PythonQtError('No Qt bindings could be found')
del PYQT4, PYQT5, PYSIDE
|
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtSvg classes and functions."""
# Local imports
from . import PYQT4, PYQT5, PYSIDE, PythonQtError
if PYQT5:
from PyQt5.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
elif PYQT4:
from PyQt4.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
elif PYSIDE:
from PySide.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
else:
raise PythonQtError('No Qt bindings could be found')
del PYQT4, PYQT5, PYSIDE
QtSvg: Use star imports again instead of direct ones
# -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtSvg classes and functions."""
# Local imports
from . import PYQT4, PYQT5, PYSIDE, PythonQtError
if PYQT5:
from PyQt5.QtSvg import *
elif PYQT4:
from PyQt4.QtSvg import *
elif PYSIDE:
from PySide.QtSvg import *
else:
raise PythonQtError('No Qt bindings could be found')
del PYQT4, PYQT5, PYSIDE
|
<commit_before># -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtSvg classes and functions."""
# Local imports
from . import PYQT4, PYQT5, PYSIDE, PythonQtError
if PYQT5:
from PyQt5.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
elif PYQT4:
from PyQt4.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
elif PYSIDE:
from PySide.QtSvg import (QGraphicsSvgItem, QSvgGenerator, QSvgRenderer,
QSvgWidget)
else:
raise PythonQtError('No Qt bindings could be found')
del PYQT4, PYQT5, PYSIDE
<commit_msg>QtSvg: Use star imports again instead of direct ones<commit_after># -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright © 2009- The Spyder Development Team
#
# Licensed under the terms of the MIT License
# (see LICENSE.txt for details)
# -----------------------------------------------------------------------------
"""Provides QtSvg classes and functions."""
# Local imports
from . import PYQT4, PYQT5, PYSIDE, PythonQtError
if PYQT5:
from PyQt5.QtSvg import *
elif PYQT4:
from PyQt4.QtSvg import *
elif PYSIDE:
from PySide.QtSvg import *
else:
raise PythonQtError('No Qt bindings could be found')
del PYQT4, PYQT5, PYSIDE
|
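For callers the shim behaves the same either way, except that star imports re-export every QtSvg name from the active binding rather than only the four classes listed before; a sketch:
# Sketch: the names resolve from PyQt5, PyQt4 or PySide at import time.
from qtpy.QtSvg import QSvgRenderer, QSvgWidget
print(QSvgRenderer, QSvgWidget)  # same import line regardless of binding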
4ebdd73bab19e83d52e03ac4afb7e1b3f78004f5
|
drftutorial/catalog/views.py
|
drftutorial/catalog/views.py
|
from django.http import HttpResponse
from django.http import Http404
from rest_framework import generics
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from .permissions import IsAdminOrReadOnly
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListCreateAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
class ProductDetail(APIView):
def get_object(self, pk):
try:
return Product.objects.get(pk=pk)
except Product.DoesNotExist:
raise Http404
def get(self, request, pk, format=None):
product = self.get_object(pk)
serializer = ProductSerializer(product)
return Response(serializer.data)
def put(self, request, pk, format=None):
product = self.get_object(pk)
serializer = ProductSerializer(product, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, pk, format=None):
product = self.get_object(pk)
product.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
|
from rest_framework import generics
from .permissions import IsAdminOrReadOnly
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListCreateAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
class ProductDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
|
Implement ProductDetail with a generic RetrieveUpdateDestroyAPIView class
|
Implement ProductDetail with a generic RetrieveUpdateDestroyAPIView class
|
Python
|
mit
|
andreagrandi/drf-tutorial
|
from django.http import HttpResponse
from django.http import Http404
from rest_framework import generics
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from .permissions import IsAdminOrReadOnly
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListCreateAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
class ProductDetail(APIView):
def get_object(self, pk):
try:
return Product.objects.get(pk=pk)
except Product.DoesNotExist:
raise Http404
def get(self, request, pk, format=None):
product = self.get_object(pk)
serializer = ProductSerializer(product)
return Response(serializer.data)
def put(self, request, pk, format=None):
product = self.get_object(pk)
serializer = ProductSerializer(product, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, pk, format=None):
product = self.get_object(pk)
product.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
Implement ProductDetail with a generic RetrieveUpdateDestroyAPIView class
|
from rest_framework import generics
from .permissions import IsAdminOrReadOnly
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListCreateAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
class ProductDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
|
<commit_before>from django.http import HttpResponse
from django.http import Http404
from rest_framework import generics
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from .permissions import IsAdminOrReadOnly
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListCreateAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
class ProductDetail(APIView):
def get_object(self, pk):
try:
return Product.objects.get(pk=pk)
except Product.DoesNotExist:
raise Http404
def get(self, request, pk, format=None):
product = self.get_object(pk)
serializer = ProductSerializer(product)
return Response(serializer.data)
def put(self, request, pk, format=None):
product = self.get_object(pk)
serializer = ProductSerializer(product, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, pk, format=None):
product = self.get_object(pk)
product.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
<commit_msg>Implement ProductDetail with a generic RetrieveUpdateDestroyAPIView class<commit_after>
|
from rest_framework import generics
from .permissions import IsAdminOrReadOnly
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListCreateAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
class ProductDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
|
from django.http import HttpResponse
from django.http import Http404
from rest_framework import generics
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from .permissions import IsAdminOrReadOnly
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListCreateAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
class ProductDetail(APIView):
def get_object(self, pk):
try:
return Product.objects.get(pk=pk)
except Product.DoesNotExist:
raise Http404
def get(self, request, pk, format=None):
product = self.get_object(pk)
serializer = ProductSerializer(product)
return Response(serializer.data)
def put(self, request, pk, format=None):
product = self.get_object(pk)
serializer = ProductSerializer(product, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, pk, format=None):
product = self.get_object(pk)
product.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
Implement ProductDetail with a generic RetrieveUpdateDestroyAPIView class
from rest_framework import generics
from .permissions import IsAdminOrReadOnly
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListCreateAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
class ProductDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
|
<commit_before>from django.http import HttpResponse
from django.http import Http404
from rest_framework import generics
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework import status
from .permissions import IsAdminOrReadOnly
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListCreateAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
class ProductDetail(APIView):
def get_object(self, pk):
try:
return Product.objects.get(pk=pk)
except Product.DoesNotExist:
raise Http404
def get(self, request, pk, format=None):
product = self.get_object(pk)
serializer = ProductSerializer(product)
return Response(serializer.data)
def put(self, request, pk, format=None):
product = self.get_object(pk)
serializer = ProductSerializer(product, data=request.data)
if serializer.is_valid():
serializer.save()
return Response(serializer.data)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def delete(self, request, pk, format=None):
product = self.get_object(pk)
product.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
<commit_msg>Implement ProductDetail with a generic RetrieveUpdateDestroyAPIView class<commit_after>from rest_framework import generics
from .permissions import IsAdminOrReadOnly
from .models import Product
from .serializers import ProductSerializer
class ProductList(generics.ListCreateAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
class ProductDetail(generics.RetrieveUpdateDestroyAPIView):
queryset = Product.objects.all()
serializer_class = ProductSerializer
permission_classes = (IsAdminOrReadOnly, )
|
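A sketch of the URL configuration such generic views are typically wired to; the path patterns are assumptions, not taken from the project's urls.py.
# Hypothetical urls.py; the pk capture group matches the default
# lookup field of RetrieveUpdateDestroyAPIView.
from django.conf.urls import url
from drftutorial.catalog import views
urlpatterns = [
    url(r'^products/$', views.ProductList.as_view(), name='product-list'),
    url(r'^products/(?P<pk>[0-9]+)/$', views.ProductDetail.as_view(),
        name='product-detail'),
]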
b3670094a44fc0fb07a91e1dc0ffb1a17001f855
|
botcommands/vimtips.py
|
botcommands/vimtips.py
|
# coding: utf-8
import requests
from redis_wrap import get_hash
from rq.decorators import job
def vimtips(msg=None):
try:
existing_tips = get_hash('vimtips')
_len = len(existing_tips)
if _len > 0:
_index = randint(0, _len - 1)
_k = existing_tips.keys()[_index]
_v = existing_tips[_k]
tip = {
'Content': _k,
'Comment': _v
}
else:
tip = requests.get('http://vim-tips.com/random_tips/json').json()
existing_tips.update({
tip['Content']: tip['Comment']
})
collect_tip()
except Exception as e:
return u'哦,不小心玩坏了……'
return u'%s\n%s' % (tip['Content'], tip['Comment'], )
@job('default')
def collect_tip():
tip = requests.get('http://vim-tips.com/random_tips/json').json()
get_hash('vimtips').update({
tip['Content']: tip['Comment']
})
|
# coding: utf-8
from random import randint
import requests
from redis_wrap import get_hash, SYSTEMS
from rq.decorators import job
def vimtips(msg=None):
try:
existing_tips = get_hash('vimtips')
_len = len(existing_tips)
if _len > 0:
_index = randint(0, _len - 1)
_k = existing_tips.keys()[_index]
_v = existing_tips[_k]
tip = {
'Content': _k,
'Comment': _v
}
else:
tip = requests.get('http://vim-tips.com/random_tips/json').json()
existing_tips.update({
tip['Content']: tip['Comment']
})
collect_tip.delay()
except Exception as e:
return '哦,不小心玩坏了……'
return '%s\n%s' % (tip['Content'], tip['Comment'], )
# Fetch a new tip in RQ queue
@job('default', connection=SYSTEMS['default'], result_ttl=5)
def collect_tip():
tip = requests.get('http://vim-tips.com/random_tips/json').json()
get_hash('vimtips').update({
tip['Content']: tip['Comment']
})
|
Fix import and use queue the right way
|
Fix import and use queue the right way
|
Python
|
bsd-2-clause
|
JokerQyou/bot
|
# coding: utf-8
import requests
from redis_wrap import get_hash
from rq.decorators import job
def vimtips(msg=None):
try:
existing_tips = get_hash('vimtips')
_len = len(existing_tips)
if _len > 0:
_index = randint(0, _len - 1)
_k = existing_tips.keys()[_index]
_v = existing_tips[_k]
tip = {
'Content': _k,
'Comment': _v
}
else:
tip = requests.get('http://vim-tips.com/random_tips/json').json()
existing_tips.update({
tip['Content']: tip['Comment']
})
collect_tip()
except Exception as e:
return u'哦,不小心玩坏了……'
return u'%s\n%s' % (tip['Content'], tip['Comment'], )
@job('default')
def collect_tip():
tip = requests.get('http://vim-tips.com/random_tips/json').json()
get_hash('vimtips').update({
tip['Content']: tip['Comment']
})
Fix import and use queue the right way
|
# coding: utf-8
from random import randint
import requests
from redis_wrap import get_hash, SYSTEMS
from rq.decorators import job
def vimtips(msg=None):
try:
existing_tips = get_hash('vimtips')
_len = len(existing_tips)
if _len > 0:
_index = randint(0, _len - 1)
_k = existing_tips.keys()[_index]
_v = existing_tips[_k]
tip = {
'Content': _k,
'Comment': _v
}
else:
tip = requests.get('http://vim-tips.com/random_tips/json').json()
existing_tips.update({
tip['Content']: tip['Comment']
})
collect_tip.delay()
except Exception as e:
return '哦,不小心玩坏了……'
return '%s\n%s' % (tip['Content'], tip['Comment'], )
# Fetch a new tip in RQ queue
@job('default', connection=SYSTEMS['default'], result_ttl=5)
def collect_tip():
tip = requests.get('http://vim-tips.com/random_tips/json').json()
get_hash('vimtips').update({
tip['Content']: tip['Comment']
})
|
<commit_before># coding: utf-8
import requests
from redis_wrap import get_hash
from rq.decorators import job
def vimtips(msg=None):
try:
existing_tips = get_hash('vimtips')
_len = len(existing_tips)
if _len > 0:
_index = randint(0, _len - 1)
_k = existing_tips.keys()[_index]
_v = existing_tips[_k]
tip = {
'Content': _k,
'Comment': _v
}
else:
tip = requests.get('http://vim-tips.com/random_tips/json').json()
existing_tips.update({
tip['Content']: tip['Comment']
})
collect_tip()
except Exception as e:
return u'哦,不小心玩坏了……'
return u'%s\n%s' % (tip['Content'], tip['Comment'], )
@job('default')
def collect_tip():
tip = requests.get('http://vim-tips.com/random_tips/json').json()
get_hash('vimtips').update({
tip['Content']: tip['Comment']
})
<commit_msg>Fix import and use queue the right way<commit_after>
|
# coding: utf-8
from random import randint
import requests
from redis_wrap import get_hash, SYSTEMS
from rq.decorators import job
def vimtips(msg=None):
try:
existing_tips = get_hash('vimtips')
_len = len(existing_tips)
if _len > 0:
_index = randint(0, _len - 1)
_k = existing_tips.keys()[_index]
_v = existing_tips[_k]
tip = {
'Content': _k,
'Comment': _v
}
else:
tip = requests.get('http://vim-tips.com/random_tips/json').json()
existing_tips.update({
tip['Content']: tip['Comment']
})
collect_tip.delay()
except Exception as e:
return '哦,不小心玩坏了……'
return '%s\n%s' % (tip['Content'], tip['Comment'], )
# Fetch a new tip in RQ queue
@job('default', connection=SYSTEMS['default'], result_ttl=5)
def collect_tip():
tip = requests.get('http://vim-tips.com/random_tips/json').json()
get_hash('vimtips').update({
tip['Content']: tip['Comment']
})
|
# coding: utf-8
import requests
from redis_wrap import get_hash
from rq.decorators import job
def vimtips(msg=None):
try:
existing_tips = get_hash('vimtips')
_len = len(existing_tips)
if _len > 0:
_index = randint(0, _len - 1)
_k = existing_tips.keys()[_index]
_v = existing_tips[_k]
tip = {
'Content': _k,
'Comment': _v
}
else:
tip = requests.get('http://vim-tips.com/random_tips/json').json()
existing_tips.update({
tip['Content']: tip['Comment']
})
collect_tip()
except Exception as e:
return u'哦,不小心玩坏了……'
return u'%s\n%s' % (tip['Content'], tip['Comment'], )
@job('default')
def collect_tip():
tip = requests.get('http://vim-tips.com/random_tips/json').json()
get_hash('vimtips').update({
tip['Content']: tip['Comment']
})
Fix import and use queue the right way
# coding: utf-8
from random import randint
import requests
from redis_wrap import get_hash, SYSTEMS
from rq.decorators import job
def vimtips(msg=None):
try:
existing_tips = get_hash('vimtips')
_len = len(existing_tips)
if _len > 0:
_index = randint(0, _len - 1)
_k = existing_tips.keys()[_index]
_v = existing_tips[_k]
tip = {
'Content': _k,
'Comment': _v
}
else:
tip = requests.get('http://vim-tips.com/random_tips/json').json()
existing_tips.update({
tip['Content']: tip['Comment']
})
collect_tip.delay()
except Exception as e:
return '哦,不小心玩坏了……'
return '%s\n%s' % (tip['Content'], tip['Comment'], )
# Fetch a new tip in RQ queue
@job('default', connection=SYSTEMS['default'], result_ttl=5)
def collect_tip():
tip = requests.get('http://vim-tips.com/random_tips/json').json()
get_hash('vimtips').update({
tip['Content']: tip['Comment']
})
|
<commit_before># coding: utf-8
import requests
from redis_wrap import get_hash
from rq.decorators import job
def vimtips(msg=None):
try:
existing_tips = get_hash('vimtips')
_len = len(existing_tips)
if _len > 0:
_index = randint(0, _len - 1)
_k = existing_tips.keys()[_index]
_v = existing_tips[_k]
tip = {
'Content': _k,
'Comment': _v
}
else:
tip = requests.get('http://vim-tips.com/random_tips/json').json()
existing_tips.update({
tip['Content']: tip['Comment']
})
collect_tip()
except Exception as e:
return u'哦,不小心玩坏了……'
return u'%s\n%s' % (tip['Content'], tip['Comment'], )
@job('default')
def collect_tip():
tip = requests.get('http://vim-tips.com/random_tips/json').json()
get_hash('vimtips').update({
tip['Content']: tip['Comment']
})
<commit_msg>Fix import and use queue the right way<commit_after># coding: utf-8
from random import randint
import requests
from redis_wrap import get_hash, SYSTEMS
from rq.decorators import job
def vimtips(msg=None):
try:
existing_tips = get_hash('vimtips')
_len = len(existing_tips)
if _len > 0:
_index = randint(0, _len - 1)
_k = existing_tips.keys()[_index]
_v = existing_tips[_k]
tip = {
'Content': _k,
'Comment': _v
}
else:
tip = requests.get('http://vim-tips.com/random_tips/json').json()
existing_tips.update({
tip['Content']: tip['Comment']
})
collect_tip.delay()
except Exception as e:
return '哦,不小心玩坏了……'
return '%s\n%s' % (tip['Content'], tip['Comment'], )
# Fetch a new tip in RQ queue
@job('default', connection=SYSTEMS['default'], result_ttl=5)
def collect_tip():
tip = requests.get('http://vim-tips.com/random_tips/json').json()
get_hash('vimtips').update({
tip['Content']: tip['Comment']
})
|
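For context, a sketch of how the corrected job is exercised: .delay() enqueues the call instead of running it inline, and a separate worker drains the queue. The worker command reflects standard RQ usage, not anything in the commit.
from botcommands.vimtips import collect_tip
collect_tip.delay()  # enqueue on the 'default' queue via SYSTEMS['default']
# collect_tip()      # would instead run synchronously in-process
# A worker process must be running to execute the job, e.g.:
#   $ rq worker default
# result_ttl=5 keeps the job's (unused) return value for only 5 seconds.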
0ad0004d6460908d8b882d7da1086fc77e6c9635
|
src/reversion/middleware.py
|
src/reversion/middleware.py
|
"""Middleware used by Reversion."""
from __future__ import unicode_literals
from reversion.revisions import revision_context_manager
REVISION_MIDDLEWARE_FLAG = "reversion.revision_middleware_active"
class RevisionMiddleware(object):
"""Wraps the entire request in a revision."""
def process_request(self, request):
"""Starts a new revision."""
request.META[(REVISION_MIDDLEWARE_FLAG, self)] = True
revision_context_manager.start()
def _close_revision(self, request):
"""Closes the revision."""
if request.META.get((REVISION_MIDDLEWARE_FLAG, self), False):
del request.META[(REVISION_MIDDLEWARE_FLAG, self)]
revision_context_manager.end()
def process_response(self, request, response):
"""Closes the revision."""
# look to see if the session has been accessed before looking for user to stop Vary: Cookie
if hasattr(request, 'session') and request.session.accessed \
and hasattr(request, "user") and request.user.is_authenticated():
revision_context_manager.set_user(request.user)
self._close_revision(request)
return response
def process_exception(self, request, exception):
"""Closes the revision."""
revision_context_manager.invalidate()
self._close_revision(request)
|
"""Middleware used by Reversion."""
from __future__ import unicode_literals
from reversion.revisions import revision_context_manager
REVISION_MIDDLEWARE_FLAG = "reversion.revision_middleware_active"
class RevisionMiddleware(object):
"""Wraps the entire request in a revision."""
def process_request(self, request):
"""Starts a new revision."""
request.META[(REVISION_MIDDLEWARE_FLAG, self)] = True
revision_context_manager.start()
def _close_revision(self, request):
"""Closes the revision."""
if request.META.get((REVISION_MIDDLEWARE_FLAG, self), False):
del request.META[(REVISION_MIDDLEWARE_FLAG, self)]
revision_context_manager.end()
def process_response(self, request, response):
"""Closes the revision."""
# look to see if the session has been accessed before looking for user to stop Vary: Cookie
if hasattr(request, 'session') and request.session.accessed \
and hasattr(request, "user") and request.user.is_authenticated() \
and revision_context_manager.is_active():
revision_context_manager.set_user(request.user)
self._close_revision(request)
return response
def process_exception(self, request, exception):
"""Closes the revision."""
revision_context_manager.invalidate()
self._close_revision(request)
|
Fix bug handling exceptions in RevisionMiddleware
|
Fix bug handling exceptions in RevisionMiddleware
Recently the RevisionMiddleware was modified to avoid accessing
request.user unnecessarily for caching purposes. This works well
except in some cases it can obscure errors generated elsewhere in a
project.
The RevisionContextManager has "active" and "inactive" states.
If there is an exception handler elsewhere in the middleware stack
that generates a new HTTP response, the
RevisionMiddleware.process_exception() method will inactivate the
RevisionContextManager via _close_revision(). Then the HTTP response will
be provided to process_response(). If a user is logged in the middleware
will call RevisionContextManager.set_user(), which expects to be in the
active state, and throws an error ("no active revision for this thread").
To fix this, check if the RevisionContextManager is in the active state
before calling set_user.
|
Python
|
bsd-3-clause
|
ixc/django-reversion,etianen/django-reversion,etianen/django-reversion,adonm/django-reversion,MikeAmy/django-reversion,ixc/django-reversion,Beauhurst/django-reversion,MikeAmy/django-reversion,mkebri/django-reversion,adonm/django-reversion,blag/django-reversion,talpor/django-reversion,Govexec/django-reversion,mkebri/django-reversion,matllubos/django-reversion,fladi/django-reversion,matllubos/django-reversion,IanLee1521/django-reversion,pydanny/django-reversion,Govexec/django-reversion,IanLee1521/django-reversion,Beauhurst/django-reversion,talpor/django-reversion,pydanny/django-reversion,lutoma/django-reversion,fladi/django-reversion,lutoma/django-reversion,blag/django-reversion
|
"""Middleware used by Reversion."""
from __future__ import unicode_literals
from reversion.revisions import revision_context_manager
REVISION_MIDDLEWARE_FLAG = "reversion.revision_middleware_active"
class RevisionMiddleware(object):
"""Wraps the entire request in a revision."""
def process_request(self, request):
"""Starts a new revision."""
request.META[(REVISION_MIDDLEWARE_FLAG, self)] = True
revision_context_manager.start()
def _close_revision(self, request):
"""Closes the revision."""
if request.META.get((REVISION_MIDDLEWARE_FLAG, self), False):
del request.META[(REVISION_MIDDLEWARE_FLAG, self)]
revision_context_manager.end()
def process_response(self, request, response):
"""Closes the revision."""
# look to see if the session has been accessed before looking for user to stop Vary: Cookie
if hasattr(request, 'session') and request.session.accessed \
and hasattr(request, "user") and request.user.is_authenticated():
revision_context_manager.set_user(request.user)
self._close_revision(request)
return response
def process_exception(self, request, exception):
"""Closes the revision."""
revision_context_manager.invalidate()
self._close_revision(request)
Fix bug handling exceptions in RevisionMiddleware
Recently the RevisionMiddleware was modified to avoid accessing
request.user unnecessarily for caching purposes. This works well
except in some cases it can obscure errors generated elsewhere in a
project.
The RevisionContextManager has "active" and "inactive" states.
If there is an exception handler elsewhere in the middleware stack
that generates a new HTTP response, the
RevisionMiddleware.process_exception() method will inactivate the
RevisionContextManager via _close_revision(). Then the HTTP response will
be provided to process_response(). If a user is logged in the middleware
will call RevisionContextManager.set_user(), which expects to be in the
active state, and throws an error ("no active revision for this thread").
To fix this, check if the RevisionContextManager is in the active state
before calling set_user.
|
"""Middleware used by Reversion."""
from __future__ import unicode_literals
from reversion.revisions import revision_context_manager
REVISION_MIDDLEWARE_FLAG = "reversion.revision_middleware_active"
class RevisionMiddleware(object):
"""Wraps the entire request in a revision."""
def process_request(self, request):
"""Starts a new revision."""
request.META[(REVISION_MIDDLEWARE_FLAG, self)] = True
revision_context_manager.start()
def _close_revision(self, request):
"""Closes the revision."""
if request.META.get((REVISION_MIDDLEWARE_FLAG, self), False):
del request.META[(REVISION_MIDDLEWARE_FLAG, self)]
revision_context_manager.end()
def process_response(self, request, response):
"""Closes the revision."""
# look to see if the session has been accessed before looking for user to stop Vary: Cookie
if hasattr(request, 'session') and request.session.accessed \
and hasattr(request, "user") and request.user.is_authenticated() \
and revision_context_manager.is_active():
revision_context_manager.set_user(request.user)
self._close_revision(request)
return response
def process_exception(self, request, exception):
"""Closes the revision."""
revision_context_manager.invalidate()
self._close_revision(request)
|
<commit_before>"""Middleware used by Reversion."""
from __future__ import unicode_literals
from reversion.revisions import revision_context_manager
REVISION_MIDDLEWARE_FLAG = "reversion.revision_middleware_active"
class RevisionMiddleware(object):
"""Wraps the entire request in a revision."""
def process_request(self, request):
"""Starts a new revision."""
request.META[(REVISION_MIDDLEWARE_FLAG, self)] = True
revision_context_manager.start()
def _close_revision(self, request):
"""Closes the revision."""
if request.META.get((REVISION_MIDDLEWARE_FLAG, self), False):
del request.META[(REVISION_MIDDLEWARE_FLAG, self)]
revision_context_manager.end()
def process_response(self, request, response):
"""Closes the revision."""
# look to see if the session has been accessed before looking for user to stop Vary: Cookie
if hasattr(request, 'session') and request.session.accessed \
and hasattr(request, "user") and request.user.is_authenticated():
revision_context_manager.set_user(request.user)
self._close_revision(request)
return response
def process_exception(self, request, exception):
"""Closes the revision."""
revision_context_manager.invalidate()
self._close_revision(request)
<commit_msg>Fix bug handling exceptions in RevisionMiddleware
Recently the RevisionMiddleware was modified to avoid accessing
request.user unnecessarily for caching purposes. This works well
except in some cases it can obscure errors generated elsewhere in a
project.
The RevisionContextManager has "active" and "inactive" states.
If there is an exception handler elsewhere in the middleware stack
that generates a new HTTP response, the
RevisionMiddleware.process_exception() method will inactivate the
RevisionContextManager via _close_revision(). Then the HTTP response will
be provided to process_response(). If a user is logged in the middleware
will call RevisionContextManager.set_user(), which expects to be in the
active state, and throws an error ("no active revision for this thread").
To fix this, check if the RevisionContextManager is in the active state
before calling set_user.<commit_after>
|
"""Middleware used by Reversion."""
from __future__ import unicode_literals
from reversion.revisions import revision_context_manager
REVISION_MIDDLEWARE_FLAG = "reversion.revision_middleware_active"
class RevisionMiddleware(object):
"""Wraps the entire request in a revision."""
def process_request(self, request):
"""Starts a new revision."""
request.META[(REVISION_MIDDLEWARE_FLAG, self)] = True
revision_context_manager.start()
def _close_revision(self, request):
"""Closes the revision."""
if request.META.get((REVISION_MIDDLEWARE_FLAG, self), False):
del request.META[(REVISION_MIDDLEWARE_FLAG, self)]
revision_context_manager.end()
def process_response(self, request, response):
"""Closes the revision."""
# look to see if the session has been accessed before looking for user to stop Vary: Cookie
if hasattr(request, 'session') and request.session.accessed \
and hasattr(request, "user") and request.user.is_authenticated() \
and revision_context_manager.is_active():
revision_context_manager.set_user(request.user)
self._close_revision(request)
return response
def process_exception(self, request, exception):
"""Closes the revision."""
revision_context_manager.invalidate()
self._close_revision(request)
|
"""Middleware used by Reversion."""
from __future__ import unicode_literals
from reversion.revisions import revision_context_manager
REVISION_MIDDLEWARE_FLAG = "reversion.revision_middleware_active"
class RevisionMiddleware(object):
"""Wraps the entire request in a revision."""
def process_request(self, request):
"""Starts a new revision."""
request.META[(REVISION_MIDDLEWARE_FLAG, self)] = True
revision_context_manager.start()
def _close_revision(self, request):
"""Closes the revision."""
if request.META.get((REVISION_MIDDLEWARE_FLAG, self), False):
del request.META[(REVISION_MIDDLEWARE_FLAG, self)]
revision_context_manager.end()
def process_response(self, request, response):
"""Closes the revision."""
# look to see if the session has been accessed before looking for user to stop Vary: Cookie
if hasattr(request, 'session') and request.session.accessed \
and hasattr(request, "user") and request.user.is_authenticated():
revision_context_manager.set_user(request.user)
self._close_revision(request)
return response
def process_exception(self, request, exception):
"""Closes the revision."""
revision_context_manager.invalidate()
self._close_revision(request)
Fix bug handling exceptions in RevisionMiddleware
Recently the RevisionMiddleware was modified to avoid accessing
request.user unnecessarily for caching purposes. This works well
except in some cases it can obscure errors generated elsewhere in a
project.
The RevisionContextManager has "active" and "inactive" states.
If there is an exception handler elsewhere in the middleware stack
that generates a new HTTP response, the
RevisionMiddleware.process_exception() method will inactivate the
RevisionContextManager via _close_revision(). Then the HTTP response will
be provided to process_response(). If a user is logged in the middleware
will call RevisionContextManager.set_user(), which expects to be in the
active state, and throws an error ("no active revision for this thread").
To fix this, check if the RevisionContextManager is in the active state
before calling set_user."""Middleware used by Reversion."""
from __future__ import unicode_literals
from reversion.revisions import revision_context_manager
REVISION_MIDDLEWARE_FLAG = "reversion.revision_middleware_active"
class RevisionMiddleware(object):
"""Wraps the entire request in a revision."""
def process_request(self, request):
"""Starts a new revision."""
request.META[(REVISION_MIDDLEWARE_FLAG, self)] = True
revision_context_manager.start()
def _close_revision(self, request):
"""Closes the revision."""
if request.META.get((REVISION_MIDDLEWARE_FLAG, self), False):
del request.META[(REVISION_MIDDLEWARE_FLAG, self)]
revision_context_manager.end()
def process_response(self, request, response):
"""Closes the revision."""
# look to see if the session has been accessed before looking for user to stop Vary: Cookie
if hasattr(request, 'session') and request.session.accessed \
and hasattr(request, "user") and request.user.is_authenticated() \
and revision_context_manager.is_active():
revision_context_manager.set_user(request.user)
self._close_revision(request)
return response
def process_exception(self, request, exception):
"""Closes the revision."""
revision_context_manager.invalidate()
self._close_revision(request)
|
<commit_before>"""Middleware used by Reversion."""
from __future__ import unicode_literals
from reversion.revisions import revision_context_manager
REVISION_MIDDLEWARE_FLAG = "reversion.revision_middleware_active"
class RevisionMiddleware(object):
"""Wraps the entire request in a revision."""
def process_request(self, request):
"""Starts a new revision."""
request.META[(REVISION_MIDDLEWARE_FLAG, self)] = True
revision_context_manager.start()
def _close_revision(self, request):
"""Closes the revision."""
if request.META.get((REVISION_MIDDLEWARE_FLAG, self), False):
del request.META[(REVISION_MIDDLEWARE_FLAG, self)]
revision_context_manager.end()
def process_response(self, request, response):
"""Closes the revision."""
# look to see if the session has been accessed before looking for user to stop Vary: Cookie
if hasattr(request, 'session') and request.session.accessed \
and hasattr(request, "user") and request.user.is_authenticated():
revision_context_manager.set_user(request.user)
self._close_revision(request)
return response
def process_exception(self, request, exception):
"""Closes the revision."""
revision_context_manager.invalidate()
self._close_revision(request)
<commit_msg>Fix bug handling exceptions in RevisionMiddleware
Recently the RevisionMiddleware was modified to avoid accessing
request.user unnecessarily for caching purposes. This works well
except in some cases it can obscure errors generated elsewhere in a
project.
The RevisionContextManager has "active" and "inactive" states.
If there is an exception handler elsewhere in the middleware stack
that generates a new HTTP response, the
RevisionMiddleware.process_exception() method will inactivate the
RevisionContextManager via _close_revision(). Then the HTTP response will
be provided to process_response(). If a user is logged in the middleware
will call RevisionContextManager.set_user(), which expects to be in the
active state, and throws an error ("no active revision for this thread").
To fix this, check if the RevisionContextManager is in the active state
before calling set_user.<commit_after>"""Middleware used by Reversion."""
from __future__ import unicode_literals
from reversion.revisions import revision_context_manager
REVISION_MIDDLEWARE_FLAG = "reversion.revision_middleware_active"
class RevisionMiddleware(object):
"""Wraps the entire request in a revision."""
def process_request(self, request):
"""Starts a new revision."""
request.META[(REVISION_MIDDLEWARE_FLAG, self)] = True
revision_context_manager.start()
def _close_revision(self, request):
"""Closes the revision."""
if request.META.get((REVISION_MIDDLEWARE_FLAG, self), False):
del request.META[(REVISION_MIDDLEWARE_FLAG, self)]
revision_context_manager.end()
def process_response(self, request, response):
"""Closes the revision."""
# look to see if the session has been accessed before looking for user to stop Vary: Cookie
if hasattr(request, 'session') and request.session.accessed \
and hasattr(request, "user") and request.user.is_authenticated() \
and revision_context_manager.is_active():
revision_context_manager.set_user(request.user)
self._close_revision(request)
return response
def process_exception(self, request, exception):
"""Closes the revision."""
revision_context_manager.invalidate()
self._close_revision(request)
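The race the commit message describes is easy to model without Django. The toy context manager below illustrates the active/inactive states only; it is not the real django-reversion implementation, but it shows why guarding set_user() with is_active() avoids the "no active revision" error.

# Toy model of the revision context; names mirror the real manager
# but the implementation is illustrative only.
class ToyRevisionContext(object):
    def __init__(self):
        self._depth = 0
        self.user = None

    def start(self):
        self._depth += 1

    def end(self):
        self._depth -= 1

    def invalidate(self):
        # The real manager marks the revision invalid; the toy just closes it.
        while self._depth:
            self.end()

    def is_active(self):
        return self._depth > 0

    def set_user(self, user):
        if not self.is_active():
            raise RuntimeError('no active revision for this thread')
        self.user = user

ctx = ToyRevisionContext()
ctx.start()
ctx.invalidate()           # an exception handler already closed the revision
if ctx.is_active():        # the guard this commit adds
    ctx.set_user('alice')  # skipped, so no RuntimeError is raised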
|
bd4b122f72ad09245ba57acbd717e7e6d1126b88
|
src/calc_perplexity.py
|
src/calc_perplexity.py
|
#! /usr/bin/python2
from __future__ import division
import numpy
# JAKE
def calc_perplexity(test_counts_dict, trigram_probs_dict):
'''
# Calculates perplexity of contents of file_string
# according to probabilities in trigram_probs_dict.
'''
test_probs = []
for trigram, count in test_counts_dict.items():
for n in range(count):
logprob = numpy.log10(trigram_probs_dict[trigram])
test_probs.append(logprob)
logprob = sum(test_probs)
print "LOGPROB: {0}" .format(logprob)
norm = logprob / len(test_probs)
perplexity = numpy.power(2, -norm)
return perplexity
|
#! /usr/bin/python2
from __future__ import division
import numpy
# JAKE
def calc_perplexity(test_counts_dict, trigram_probs_dict):
'''
# Calculates perplexity of contents of file_string
# according to probabilities in trigram_probs_dict.
'''
test_probs = []
for trigram, count in test_counts_dict.items():
for n in range(count):
logprob = numpy.log2(trigram_probs_dict[trigram])
test_probs.append(logprob)
logprob = sum(test_probs)
print "LOGPROB: {0}" .format(logprob)
entropy = logprob / len(test_probs)
perplexity = numpy.power(2, -entropy)
return perplexity
|
Use log2 instead of log10.
|
Use log2 instead of log10.
|
Python
|
unlicense
|
jvasilakes/language_detector,jvasilakes/language_detector
|
#! /usr/bin/python2
from __future__ import division
import numpy
# JAKE
def calc_perplexity(test_counts_dict, trigram_probs_dict):
'''
# Calculates perplexity of contents of file_string
# according to probabilities in trigram_probs_dict.
'''
test_probs = []
for trigram, count in test_counts_dict.items():
for n in range(count):
logprob = numpy.log10(trigram_probs_dict[trigram])
test_probs.append(logprob)
logprob = sum(test_probs)
print "LOGPROB: {0}" .format(logprob)
norm = logprob / len(test_probs)
perplexity = numpy.power(2, -norm)
return perplexity
Use log2 instead of log10.
|
#! /usr/bin/python2
from __future__ import division
import numpy
# JAKE
def calc_perplexity(test_counts_dict, trigram_probs_dict):
'''
# Calculates perplexity of contents of file_string
# according to probabilities in trigram_probs_dict.
'''
test_probs = []
for trigram, count in test_counts_dict.items():
for n in range(count):
logprob = numpy.log2(trigram_probs_dict[trigram])
test_probs.append(logprob)
logprob = sum(test_probs)
print "LOGPROB: {0}" .format(logprob)
entropy = logprob / len(test_probs)
perplexity = numpy.power(2, -entropy)
return perplexity
|
<commit_before>#! /usr/bin/python2
from __future__ import division
import numpy
# JAKE
def calc_perplexity(test_counts_dict, trigram_probs_dict):
'''
# Calculates perplexity of contents of file_string
# according to probabilities in trigram_probs_dict.
'''
test_probs = []
for trigram, count in test_counts_dict.items():
for n in range(count):
logprob = numpy.log10(trigram_probs_dict[trigram])
test_probs.append(logprob)
logprob = sum(test_probs)
print "LOGPROB: {0}" .format(logprob)
norm = logprob / len(test_probs)
perplexity = numpy.power(2, -norm)
return perplexity
<commit_msg>Use log2 instead of log10.<commit_after>
|
#! /usr/bin/python2
from __future__ import division
import numpy
# JAKE
def calc_perplexity(test_counts_dict, trigram_probs_dict):
'''
# Calculates perplexity of contents of file_string
# according to probabilities in trigram_probs_dict.
'''
test_probs = []
for trigram, count in test_counts_dict.items():
for n in range(count):
logprob = numpy.log2(trigram_probs_dict[trigram])
test_probs.append(logprob)
logprob = sum(test_probs)
print "LOGPROB: {0}" .format(logprob)
entropy = logprob / len(test_probs)
perplexity = numpy.power(2, -entropy)
return perplexity
|
#! /usr/bin/python2
from __future__ import division
import numpy
# JAKE
def calc_perplexity(test_counts_dict, trigram_probs_dict):
'''
# Calculates perplexity of contents of file_string
# according to probabilities in trigram_probs_dict.
'''
test_probs = []
for trigram, count in test_counts_dict.items():
for n in range(count):
logprob = numpy.log10(trigram_probs_dict[trigram])
test_probs.append(logprob)
logprob = sum(test_probs)
print "LOGPROB: {0}" .format(logprob)
norm = logprob / len(test_probs)
perplexity = numpy.power(2, -norm)
return perplexity
Use log2 instead of log10.#! /usr/bin/python2
from __future__ import division
import numpy
# JAKE
def calc_perplexity(test_counts_dict, trigram_probs_dict):
'''
# Calculates perplexity of contents of file_string
# according to probabilities in trigram_probs_dict.
'''
test_probs = []
for trigram, count in test_counts_dict.items():
for n in range(count):
logprob = numpy.log2(trigram_probs_dict[trigram])
test_probs.append(logprob)
logprob = sum(test_probs)
print "LOGPROB: {0}" .format(logprob)
entropy = logprob / len(test_probs)
perplexity = numpy.power(2, -entropy)
return perplexity
|
<commit_before>#! /usr/bin/python2
from __future__ import division
import numpy
# JAKE
def calc_perplexity(test_counts_dict, trigram_probs_dict):
'''
# Calculates perplexity of contents of file_string
# according to probabilities in trigram_probs_dict.
'''
test_probs = []
for trigram, count in test_counts_dict.items():
for n in range(count):
logprob = numpy.log10(trigram_probs_dict[trigram])
test_probs.append(logprob)
logprob = sum(test_probs)
print "LOGPROB: {0}" .format(logprob)
norm = logprob / len(test_probs)
perplexity = numpy.power(2, -norm)
return perplexity
<commit_msg>Use log2 instead of log10.<commit_after>#! /usr/bin/python2
from __future__ import division
import numpy
# JAKE
def calc_perplexity(test_counts_dict, trigram_probs_dict):
'''
# Calculates perplexity of contents of file_string
# according to probabilities in trigram_probs_dict.
'''
test_probs = []
for trigram, count in test_counts_dict.items():
for n in range(count):
logprob = numpy.log2(trigram_probs_dict[trigram])
test_probs.append(logprob)
logprob = sum(test_probs)
print "LOGPROB: {0}" .format(logprob)
entropy = logprob / len(test_probs)
perplexity = numpy.power(2, -entropy)
return perplexity
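A quick sanity check of why the base matters: perplexity is 2 raised to the entropy only when the log-probabilities are taken base 2, so a uniform model over 8 trigrams must score exactly 8.

# Uniform model over 8 trigrams: entropy is 3 bits, perplexity is 8.
import numpy

probs = [1.0 / 8] * 8
entropy = -sum(numpy.log2(p) for p in probs) / len(probs)
print(numpy.power(2, entropy))  # 8.0
# With numpy.log10 the same formula yields 2 ** 0.903 ~= 1.87,
# which is not a meaningful perplexity.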
|
12b8cd254bad5c2cb15de3f0c3e69ab78083fc48
|
server/app.py
|
server/app.py
|
"""This module contains basic functions to instantiate the BigchainDB API.
The application is implemented in Flask and runs using Gunicorn.
"""
import os
from flask import Flask
from flask.ext.cors import CORS
from server.lib.api.views import api_views
def create_app(debug):
"""Return an instance of the Flask application.
Args:
debug (bool): a flag to activate the debug mode for the app
(default: False).
"""
app = Flask(__name__)
CORS(app,
origins=("^(https?://)?(www\.)?(" +
os.environ.get('DOCKER_MACHINE_IP', 'localhost') +
"0|0.0.0.0|dimi-bat.local|localhost|127.0.0.1)(\.com)?:\d{1,5}$"),
headers=(
'x-requested-with',
'content-type',
'accept',
'origin',
'authorization',
'x-csrftoken',
'withcredentials',
'cache-control',
'cookie',
'session-id',
),
supports_credentials=True,
)
app.debug = debug
app.register_blueprint(api_views, url_prefix='/api')
return app
if __name__ == '__main__':
app = create_app(debug=True)
app.run(host=os.environ.get('FLASK_HOST', '127.0.0.1'), port=os.environ.get('FLASK_PORT', 8000))
app.run()
|
"""This module contains basic functions to instantiate the BigchainDB API.
The application is implemented in Flask and runs using Gunicorn.
"""
import os
from flask import Flask
from flask.ext.cors import CORS
from server.lib.api.views import api_views
def create_app(debug):
"""Return an instance of the Flask application.
Args:
debug (bool): a flag to activate the debug mode for the app
(default: False).
"""
app = Flask(__name__)
CORS(app,
origins=("^(https?://)?(www\.)?(" +
os.environ.get('DOCKER_MACHINE_IP', 'localhost') +
"|0|0.0.0.0|dimi-bat.local|localhost|127.0.0.1)(\.com)?:\d{1,5}$"),
headers=(
'x-requested-with',
'content-type',
'accept',
'origin',
'authorization',
'x-csrftoken',
'withcredentials',
'cache-control',
'cookie',
'session-id',
),
supports_credentials=True,
)
app.debug = debug
app.register_blueprint(api_views, url_prefix='/api')
return app
if __name__ == '__main__':
app = create_app(debug=True)
app.run(host=os.environ.get('FLASK_HOST', '127.0.0.1'), port=os.environ.get('FLASK_PORT', 8000))
app.run()
|
Fix CORS when running the api server with Docker
|
Fix CORS when running the api server with Docker
|
Python
|
apache-2.0
|
bigchaindb/bigchaindb-examples,bigchaindb/bigchaindb-examples,bigchaindb/bigchaindb-examples
|
"""This module contains basic functions to instantiate the BigchainDB API.
The application is implemented in Flask and runs using Gunicorn.
"""
import os
from flask import Flask
from flask.ext.cors import CORS
from server.lib.api.views import api_views
def create_app(debug):
"""Return an instance of the Flask application.
Args:
debug (bool): a flag to activate the debug mode for the app
(default: False).
"""
app = Flask(__name__)
CORS(app,
origins=("^(https?://)?(www\.)?(" +
os.environ.get('DOCKER_MACHINE_IP', 'localhost') +
"0|0.0.0.0|dimi-bat.local|localhost|127.0.0.1)(\.com)?:\d{1,5}$"),
headers=(
'x-requested-with',
'content-type',
'accept',
'origin',
'authorization',
'x-csrftoken',
'withcredentials',
'cache-control',
'cookie',
'session-id',
),
supports_credentials=True,
)
app.debug = debug
app.register_blueprint(api_views, url_prefix='/api')
return app
if __name__ == '__main__':
app = create_app(debug=True)
app.run(host=os.environ.get('FLASK_HOST', '127.0.0.1'), port=os.environ.get('FLASK_PORT', 8000))
app.run()
Fix CORS when running the api server with Docker
|
"""This module contains basic functions to instantiate the BigchainDB API.
The application is implemented in Flask and runs using Gunicorn.
"""
import os
from flask import Flask
from flask.ext.cors import CORS
from server.lib.api.views import api_views
def create_app(debug):
"""Return an instance of the Flask application.
Args:
debug (bool): a flag to activate the debug mode for the app
(default: False).
"""
app = Flask(__name__)
CORS(app,
origins=("^(https?://)?(www\.)?(" +
os.environ.get('DOCKER_MACHINE_IP', 'localhost') +
"|0|0.0.0.0|dimi-bat.local|localhost|127.0.0.1)(\.com)?:\d{1,5}$"),
headers=(
'x-requested-with',
'content-type',
'accept',
'origin',
'authorization',
'x-csrftoken',
'withcredentials',
'cache-control',
'cookie',
'session-id',
),
supports_credentials=True,
)
app.debug = debug
app.register_blueprint(api_views, url_prefix='/api')
return app
if __name__ == '__main__':
app = create_app(debug=True)
app.run(host=os.environ.get('FLASK_HOST', '127.0.0.1'), port=os.environ.get('FLASK_PORT', 8000))
app.run()
|
<commit_before>"""This module contains basic functions to instantiate the BigchainDB API.
The application is implemented in Flask and runs using Gunicorn.
"""
import os
from flask import Flask
from flask.ext.cors import CORS
from server.lib.api.views import api_views
def create_app(debug):
"""Return an instance of the Flask application.
Args:
debug (bool): a flag to activate the debug mode for the app
(default: False).
"""
app = Flask(__name__)
CORS(app,
origins=("^(https?://)?(www\.)?(" +
os.environ.get('DOCKER_MACHINE_IP', 'localhost') +
"0|0.0.0.0|dimi-bat.local|localhost|127.0.0.1)(\.com)?:\d{1,5}$"),
headers=(
'x-requested-with',
'content-type',
'accept',
'origin',
'authorization',
'x-csrftoken',
'withcredentials',
'cache-control',
'cookie',
'session-id',
),
supports_credentials=True,
)
app.debug = debug
app.register_blueprint(api_views, url_prefix='/api')
return app
if __name__ == '__main__':
app = create_app(debug=True)
app.run(host=os.environ.get('FLASK_HOST', '127.0.0.1'), port=os.environ.get('FLASK_PORT', 8000))
app.run()
<commit_msg>Fix CORS when running the api server with Docker<commit_after>
|
"""This module contains basic functions to instantiate the BigchainDB API.
The application is implemented in Flask and runs using Gunicorn.
"""
import os
from flask import Flask
from flask.ext.cors import CORS
from server.lib.api.views import api_views
def create_app(debug):
"""Return an instance of the Flask application.
Args:
debug (bool): a flag to activate the debug mode for the app
(default: False).
"""
app = Flask(__name__)
CORS(app,
origins=("^(https?://)?(www\.)?(" +
os.environ.get('DOCKER_MACHINE_IP', 'localhost') +
"|0|0.0.0.0|dimi-bat.local|localhost|127.0.0.1)(\.com)?:\d{1,5}$"),
headers=(
'x-requested-with',
'content-type',
'accept',
'origin',
'authorization',
'x-csrftoken',
'withcredentials',
'cache-control',
'cookie',
'session-id',
),
supports_credentials=True,
)
app.debug = debug
app.register_blueprint(api_views, url_prefix='/api')
return app
if __name__ == '__main__':
app = create_app(debug=True)
app.run(host=os.environ.get('FLASK_HOST', '127.0.0.1'), port=os.environ.get('FLASK_PORT', 8000))
app.run()
|
"""This module contains basic functions to instantiate the BigchainDB API.
The application is implemented in Flask and runs using Gunicorn.
"""
import os
from flask import Flask
from flask.ext.cors import CORS
from server.lib.api.views import api_views
def create_app(debug):
"""Return an instance of the Flask application.
Args:
debug (bool): a flag to activate the debug mode for the app
(default: False).
"""
app = Flask(__name__)
CORS(app,
origins=("^(https?://)?(www\.)?(" +
os.environ.get('DOCKER_MACHINE_IP', 'localhost') +
"0|0.0.0.0|dimi-bat.local|localhost|127.0.0.1)(\.com)?:\d{1,5}$"),
headers=(
'x-requested-with',
'content-type',
'accept',
'origin',
'authorization',
'x-csrftoken',
'withcredentials',
'cache-control',
'cookie',
'session-id',
),
supports_credentials=True,
)
app.debug = debug
app.register_blueprint(api_views, url_prefix='/api')
return app
if __name__ == '__main__':
app = create_app(debug=True)
app.run(host=os.environ.get('FLASK_HOST', '127.0.0.1'), port=os.environ.get('FLASK_PORT', 8000))
app.run()
Fix CORS when running the api server with Docker"""This module contains basic functions to instantiate the BigchainDB API.
The application is implemented in Flask and runs using Gunicorn.
"""
import os
from flask import Flask
from flask.ext.cors import CORS
from server.lib.api.views import api_views
def create_app(debug):
"""Return an instance of the Flask application.
Args:
debug (bool): a flag to activate the debug mode for the app
(default: False).
"""
app = Flask(__name__)
CORS(app,
origins=("^(https?://)?(www\.)?(" +
os.environ.get('DOCKER_MACHINE_IP', 'localhost') +
"|0|0.0.0.0|dimi-bat.local|localhost|127.0.0.1)(\.com)?:\d{1,5}$"),
headers=(
'x-requested-with',
'content-type',
'accept',
'origin',
'authorization',
'x-csrftoken',
'withcredentials',
'cache-control',
'cookie',
'session-id',
),
supports_credentials=True,
)
app.debug = debug
app.register_blueprint(api_views, url_prefix='/api')
return app
if __name__ == '__main__':
app = create_app(debug=True)
app.run(host=os.environ.get('FLASK_HOST', '127.0.0.1'), port=os.environ.get('FLASK_PORT', 8000))
app.run()
|
<commit_before>"""This module contains basic functions to instantiate the BigchainDB API.
The application is implemented in Flask and runs using Gunicorn.
"""
import os
from flask import Flask
from flask.ext.cors import CORS
from server.lib.api.views import api_views
def create_app(debug):
"""Return an instance of the Flask application.
Args:
debug (bool): a flag to activate the debug mode for the app
(default: False).
"""
app = Flask(__name__)
CORS(app,
origins=("^(https?://)?(www\.)?(" +
os.environ.get('DOCKER_MACHINE_IP', 'localhost') +
"0|0.0.0.0|dimi-bat.local|localhost|127.0.0.1)(\.com)?:\d{1,5}$"),
headers=(
'x-requested-with',
'content-type',
'accept',
'origin',
'authorization',
'x-csrftoken',
'withcredentials',
'cache-control',
'cookie',
'session-id',
),
supports_credentials=True,
)
app.debug = debug
app.register_blueprint(api_views, url_prefix='/api')
return app
if __name__ == '__main__':
app = create_app(debug=True)
app.run(host=os.environ.get('FLASK_HOST', '127.0.0.1'), port=os.environ.get('FLASK_PORT', 8000))
app.run()
<commit_msg>Fix CORS when running the api server with Docker<commit_after>"""This module contains basic functions to instantiate the BigchainDB API.
The application is implemented in Flask and runs using Gunicorn.
"""
import os
from flask import Flask
from flask.ext.cors import CORS
from server.lib.api.views import api_views
def create_app(debug):
"""Return an instance of the Flask application.
Args:
debug (bool): a flag to activate the debug mode for the app
(default: False).
"""
app = Flask(__name__)
CORS(app,
origins=("^(https?://)?(www\.)?(" +
os.environ.get('DOCKER_MACHINE_IP', 'localhost') +
"|0|0.0.0.0|dimi-bat.local|localhost|127.0.0.1)(\.com)?:\d{1,5}$"),
headers=(
'x-requested-with',
'content-type',
'accept',
'origin',
'authorization',
'x-csrftoken',
'withcredentials',
'cache-control',
'cookie',
'session-id',
),
supports_credentials=True,
)
app.debug = debug
app.register_blueprint(api_views, url_prefix='/api')
return app
if __name__ == '__main__':
app = create_app(debug=True)
app.run(host=os.environ.get('FLASK_HOST', '127.0.0.1'), port=os.environ.get('FLASK_PORT', 8000))
app.run()
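The one-character diff is easiest to see by exercising both patterns directly. The host value below is an assumption standing in for DOCKER_MACHINE_IP, and the dimi-bat alternative is dropped for brevity.

# Without the "|", the interpolated host and "0" fuse into one bogus
# alternative such as "localhost0".
import re

host = 'localhost'
broken = ('^(https?://)?(www\\.)?(' + host +
          '0|0.0.0.0|localhost|127.0.0.1)(\\.com)?:\\d{1,5}$')
fixed = ('^(https?://)?(www\\.)?(' + host +
         '|0|0.0.0.0|localhost|127.0.0.1)(\\.com)?:\\d{1,5}$')

print(bool(re.match(broken, 'http://localhost0:8000')))  # True (unintended origin)
print(bool(re.match(fixed, 'http://localhost0:8000')))   # False
print(bool(re.match(fixed, 'http://localhost:8000')))    # True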
|
b98d7312019d041415d3d10d003267f03dddbf38
|
eva/layers/residual_block.py
|
eva/layers/residual_block.py
|
from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from eva.layers.masked_convolution2d import MaskedConvolution2D
def ResidualBlock(model, filters):
# 2h -> h
block = PReLU()(model)
block = MaskedConvolution2D(filters//2, 1, 1)(block)
# h 3x3 -> h
block = PReLU()(block)
block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block)
# h -> 2h
block = PReLU()(block)
block = MaskedConvolution2D(filters, 1, 1)(block)
return Merge(mode='sum')([model, block])
def ResidualBlockList(model, filters, length):
for _ in range(length):
model = ResidualBlock(model, filters)
return model
|
from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from eva.layers.masked_convolution2d import MaskedConvolution2D
class ResidualBlock(object):
def __init__(self, filters):
self.filters = filters
def __call__(self, model):
# 2h -> h
block = PReLU()(model)
block = MaskedConvolution2D(self.filters//2, 1, 1)(block)
# h 3x3 -> h
block = PReLU()(block)
block = MaskedConvolution2D(self.filters//2, 3, 3, border_mode='same')(block)
# h -> 2h
block = PReLU()(block)
block = MaskedConvolution2D(self.filters, 1, 1)(block)
return Merge(mode='sum')([model, block])
class ResidualBlockList(object):
def __init__(self, filters, length):
self.filters = filters
self.length = length
def __call__(self, model):
for _ in range(self.length):
model = ResidualBlock(self.filters)(model)
return model
|
Rewrite residual block as class rather than method
|
Rewrite residual block as class rather than method
|
Python
|
apache-2.0
|
israelg99/eva
|
from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from eva.layers.masked_convolution2d import MaskedConvolution2D
def ResidualBlock(model, filters):
# 2h -> h
block = PReLU()(model)
block = MaskedConvolution2D(filters//2, 1, 1)(block)
# h 3x3 -> h
block = PReLU()(block)
block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block)
# h -> 2h
block = PReLU()(block)
block = MaskedConvolution2D(filters, 1, 1)(block)
return Merge(mode='sum')([model, block])
def ResidualBlockList(model, filters, length):
for _ in range(length):
model = ResidualBlock(model, filters)
return model
Rewrite residual block as class rather than method
|
from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from eva.layers.masked_convolution2d import MaskedConvolution2D
class ResidualBlock(object):
def __init__(self, filters):
self.filters = filters
def __call__(self, model):
# 2h -> h
block = PReLU()(model)
block = MaskedConvolution2D(self.filters//2, 1, 1)(block)
# h 3x3 -> h
block = PReLU()(block)
block = MaskedConvolution2D(self.filters//2, 3, 3, border_mode='same')(block)
# h -> 2h
block = PReLU()(block)
block = MaskedConvolution2D(self.filters, 1, 1)(block)
return Merge(mode='sum')([model, block])
class ResidualBlockList(object):
def __init__(self, filters, length):
self.filters = filters
self.length = length
def __call__(self, model):
for _ in range(self.length):
model = ResidualBlock(self.filters)(model)
return model
|
<commit_before>from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from eva.layers.masked_convolution2d import MaskedConvolution2D
def ResidualBlock(model, filters):
# 2h -> h
block = PReLU()(model)
block = MaskedConvolution2D(filters//2, 1, 1)(block)
# h 3x3 -> h
block = PReLU()(block)
block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block)
# h -> 2h
block = PReLU()(block)
block = MaskedConvolution2D(filters, 1, 1)(block)
return Merge(mode='sum')([model, block])
def ResidualBlockList(model, filters, length):
for _ in range(length):
model = ResidualBlock(model, filters)
return model
<commit_msg>Rewrite residual block as class rather than method<commit_after>
|
from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from eva.layers.masked_convolution2d import MaskedConvolution2D
class ResidualBlock(object):
def __init__(self, filters):
self.filters = filters
def __call__(self, model):
# 2h -> h
block = PReLU()(model)
block = MaskedConvolution2D(self.filters//2, 1, 1)(block)
# h 3x3 -> h
block = PReLU()(block)
block = MaskedConvolution2D(self.filters//2, 3, 3, border_mode='same')(block)
# h -> 2h
block = PReLU()(block)
block = MaskedConvolution2D(self.filters, 1, 1)(block)
return Merge(mode='sum')([model, block])
class ResidualBlockList(object):
def __init__(self, filters, length):
self.filters = filters
self.length = length
def __call__(self, model):
for _ in range(self.length):
model = ResidualBlock(self.filters)(model)
return model
|
from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from eva.layers.masked_convolution2d import MaskedConvolution2D
def ResidualBlock(model, filters):
# 2h -> h
block = PReLU()(model)
block = MaskedConvolution2D(filters//2, 1, 1)(block)
# h 3x3 -> h
block = PReLU()(block)
block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block)
# h -> 2h
block = PReLU()(block)
block = MaskedConvolution2D(filters, 1, 1)(block)
return Merge(mode='sum')([model, block])
def ResidualBlockList(model, filters, length):
for _ in range(length):
model = ResidualBlock(model, filters)
return model
Rewrite residual block as class rather than methodfrom keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from eva.layers.masked_convolution2d import MaskedConvolution2D
class ResidualBlock(object):
def __init__(self, filters):
self.filters = filters
def __call__(self, model):
# 2h -> h
block = PReLU()(model)
block = MaskedConvolution2D(self.filters//2, 1, 1)(block)
# h 3x3 -> h
block = PReLU()(block)
block = MaskedConvolution2D(self.filters//2, 3, 3, border_mode='same')(block)
# h -> 2h
block = PReLU()(block)
block = MaskedConvolution2D(self.filters, 1, 1)(block)
return Merge(mode='sum')([model, block])
class ResidualBlockList(object):
def __init__(self, filters, length):
self.filters = filters
self.length = length
def __call__(self, model):
for _ in range(self.length):
model = ResidualBlock(self.filters)(model)
return model
|
<commit_before>from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from eva.layers.masked_convolution2d import MaskedConvolution2D
def ResidualBlock(model, filters):
# 2h -> h
block = PReLU()(model)
block = MaskedConvolution2D(filters//2, 1, 1)(block)
# h 3x3 -> h
block = PReLU()(block)
block = MaskedConvolution2D(filters//2, 3, 3, border_mode='same')(block)
# h -> 2h
block = PReLU()(block)
block = MaskedConvolution2D(filters, 1, 1)(block)
return Merge(mode='sum')([model, block])
def ResidualBlockList(model, filters, length):
for _ in range(length):
model = ResidualBlock(model, filters)
return model
<commit_msg>Rewrite residual block as class rather than method<commit_after>from keras.layers import Convolution2D, Merge
from keras.layers.advanced_activations import PReLU
from eva.layers.masked_convolution2d import MaskedConvolution2D
class ResidualBlock(object):
def __init__(self, filters):
self.filters = filters
def __call__(self, model):
# 2h -> h
block = PReLU()(model)
block = MaskedConvolution2D(self.filters//2, 1, 1)(block)
# h 3x3 -> h
block = PReLU()(block)
block = MaskedConvolution2D(self.filters//2, 3, 3, border_mode='same')(block)
# h -> 2h
block = PReLU()(block)
block = MaskedConvolution2D(self.filters, 1, 1)(block)
return Merge(mode='sum')([model, block])
class ResidualBlockList(object):
def __init__(self, filters, length):
self.filters = filters
self.length = length
def __call__(self, model):
for _ in range(self.length):
model = ResidualBlock(self.filters)(model)
return model
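The rewrite adopts the convention Keras layers themselves follow: configuration in __init__, application in __call__, so one configured block can be applied to any tensor. A dependency-free illustration of the pattern:

# Configure once, call many times -- the shape of the classes above.
class Scale(object):
    def __init__(self, factor):
        self.factor = factor        # configuration, stored once

    def __call__(self, value):
        return value * self.factor  # application, reusable

double = Scale(2)
print(double(3), double(10))  # 6 20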
|
f2ab04ec2eb870e661223fd397d7c5a23935a233
|
src/apps/employees/schema/types.py
|
src/apps/employees/schema/types.py
|
import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
DjangoFilterPaginateListField, LimitOffsetGraphqlPagination
)
from apps.employees import models
class EmployeeType(DjangoObjectType):
class Meta:
model = models.Employee
filter_fields = {
'first_name': ['icontains', 'istartswith'],
'last_name': ['icontains', 'istartswith'],
'position': ['exact'],
'id': ['exact']
}
interfaces = (graphene.relay.Node,)
class PositionType(DjangoObjectType):
"""
    Position GraphQL type.
    Implements total_employees and employees objects.
"""
employees = DjangoFilterPaginateListField(
EmployeeType,
pagination=LimitOffsetGraphqlPagination()
)
total_employees = graphene.Int()
def resolve_total_employees(self, info):
return self.employees.count()
def resolve_employees(self, info):
return self.employees.all()
class Meta:
model = models.Position
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact']
}
interfaces = (graphene.relay.Node,)
class SpecializationType(DjangoObjectType):
class Meta:
model = models.Specialization
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact'],
}
interfaces = (graphene.relay.Node,)
|
import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
DjangoFilterPaginateListField, LimitOffsetGraphqlPagination
)
from apps.employees import models
class EmployeeType(DjangoObjectType):
class Meta:
model = models.Employee
filter_fields = {
'first_name': ['icontains', 'istartswith'],
'last_name': ['icontains', 'istartswith'],
'position': ['exact'],
'id': ['exact']
}
class PositionType(DjangoObjectType):
"""
    Position GraphQL type.
    Implements total_employees and employees objects.
"""
employees = DjangoFilterPaginateListField(
EmployeeType,
pagination=LimitOffsetGraphqlPagination()
)
total_employees = graphene.Int()
def resolve_total_employees(self, info):
return self.employees.count()
def resolve_employees(self, info):
return self.employees.all()
class Meta:
model = models.Position
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact']
}
class SpecializationType(DjangoObjectType):
class Meta:
model = models.Specialization
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact'],
}
|
Remove Node interfaces (use origin id for objects)
|
Remove Node interfaces (use origin id for objects)
|
Python
|
mit
|
wis-software/office-manager
|
import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
DjangoFilterPaginateListField, LimitOffsetGraphqlPagination
)
from apps.employees import models
class EmployeeType(DjangoObjectType):
class Meta:
model = models.Employee
filter_fields = {
'first_name': ['icontains', 'istartswith'],
'last_name': ['icontains', 'istartswith'],
'position': ['exact'],
'id': ['exact']
}
interfaces = (graphene.relay.Node,)
class PositionType(DjangoObjectType):
"""
    Position GraphQL type.
    Implements total_employees and employees objects.
"""
employees = DjangoFilterPaginateListField(
EmployeeType,
pagination=LimitOffsetGraphqlPagination()
)
total_employees = graphene.Int()
def resolve_total_employees(self, info):
return self.employees.count()
def resolve_employees(self, info):
return self.employees.all()
class Meta:
model = models.Position
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact']
}
interfaces = (graphene.relay.Node,)
class SpecializationType(DjangoObjectType):
class Meta:
model = models.Specialization
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact'],
}
interfaces = (graphene.relay.Node,)
Remove Node interfaces (use origin id for objects)
|
import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
DjangoFilterPaginateListField, LimitOffsetGraphqlPagination
)
from apps.employees import models
class EmployeeType(DjangoObjectType):
class Meta:
model = models.Employee
filter_fields = {
'first_name': ['icontains', 'istartswith'],
'last_name': ['icontains', 'istartswith'],
'position': ['exact'],
'id': ['exact']
}
class PositionType(DjangoObjectType):
"""
    Position GraphQL type.
    Implements total_employees and employees objects.
"""
employees = DjangoFilterPaginateListField(
EmployeeType,
pagination=LimitOffsetGraphqlPagination()
)
total_employees = graphene.Int()
def resolve_total_employees(self, info):
return self.employees.count()
def resolve_employees(self, info):
return self.employees.all()
class Meta:
model = models.Position
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact']
}
class SpecializationType(DjangoObjectType):
class Meta:
model = models.Specialization
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact'],
}
|
<commit_before>import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
DjangoFilterPaginateListField, LimitOffsetGraphqlPagination
)
from apps.employees import models
class EmployeeType(DjangoObjectType):
class Meta:
model = models.Employee
filter_fields = {
'first_name': ['icontains', 'istartswith'],
'last_name': ['icontains', 'istartswith'],
'position': ['exact'],
'id': ['exact']
}
interfaces = (graphene.relay.Node,)
class PositionType(DjangoObjectType):
"""
    Position GraphQL type.
    Implements total_employees and employees objects.
"""
employees = DjangoFilterPaginateListField(
EmployeeType,
pagination=LimitOffsetGraphqlPagination()
)
total_employees = graphene.Int()
def resolve_total_employees(self, info):
return self.employees.count()
def resolve_employees(self, info):
return self.employees.all()
class Meta:
model = models.Position
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact']
}
interfaces = (graphene.relay.Node,)
class SpecializationType(DjangoObjectType):
class Meta:
model = models.Specialization
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact'],
}
interfaces = (graphene.relay.Node,)
<commit_msg>Remove Node interfaces (use origin id for objects)<commit_after>
|
import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
DjangoFilterPaginateListField, LimitOffsetGraphqlPagination
)
from apps.employees import models
class EmployeeType(DjangoObjectType):
class Meta:
model = models.Employee
filter_fields = {
'first_name': ['icontains', 'istartswith'],
'last_name': ['icontains', 'istartswith'],
'position': ['exact'],
'id': ['exact']
}
class PositionType(DjangoObjectType):
"""
    Position GraphQL type.
    Implements total_employees and employees objects.
"""
employees = DjangoFilterPaginateListField(
EmployeeType,
pagination=LimitOffsetGraphqlPagination()
)
total_employees = graphene.Int()
def resolve_total_employees(self, info):
return self.employees.count()
def resolve_employees(self, info):
return self.employees.all()
class Meta:
model = models.Position
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact']
}
class SpecializationType(DjangoObjectType):
class Meta:
model = models.Specialization
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact'],
}
|
import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
DjangoFilterPaginateListField, LimitOffsetGraphqlPagination
)
from apps.employees import models
class EmployeeType(DjangoObjectType):
class Meta:
model = models.Employee
filter_fields = {
'first_name': ['icontains', 'istartswith'],
'last_name': ['icontains', 'istartswith'],
'position': ['exact'],
'id': ['exact']
}
interfaces = (graphene.relay.Node,)
class PositionType(DjangoObjectType):
"""
    Position GraphQL type.
    Implements total_employees and employees objects.
"""
employees = DjangoFilterPaginateListField(
EmployeeType,
pagination=LimitOffsetGraphqlPagination()
)
total_employees = graphene.Int()
def resolve_total_employees(self, info):
return self.employees.count()
def resolve_employees(self, info):
return self.employees.all()
class Meta:
model = models.Position
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact']
}
interfaces = (graphene.relay.Node,)
class SpecializationType(DjangoObjectType):
class Meta:
model = models.Specialization
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact'],
}
interfaces = (graphene.relay.Node,)
Remove Node interfaces (use origin id for objects)import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
DjangoFilterPaginateListField, LimitOffsetGraphqlPagination
)
from apps.employees import models
class EmployeeType(DjangoObjectType):
class Meta:
model = models.Employee
filter_fields = {
'first_name': ['icontains', 'istartswith'],
'last_name': ['icontains', 'istartswith'],
'position': ['exact'],
'id': ['exact']
}
class PositionType(DjangoObjectType):
"""
    Position GraphQL type.
    Implements total_employees and employees objects.
"""
employees = DjangoFilterPaginateListField(
EmployeeType,
pagination=LimitOffsetGraphqlPagination()
)
total_employees = graphene.Int()
def resolve_total_employees(self, info):
return self.employees.count()
def resolve_employees(self, info):
return self.employees.all()
class Meta:
model = models.Position
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact']
}
class SpecializationType(DjangoObjectType):
class Meta:
model = models.Specialization
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact'],
}
|
<commit_before>import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
DjangoFilterPaginateListField, LimitOffsetGraphqlPagination
)
from apps.employees import models
class EmployeeType(DjangoObjectType):
class Meta:
model = models.Employee
filter_fields = {
'first_name': ['icontains', 'istartswith'],
'last_name': ['icontains', 'istartswith'],
'position': ['exact'],
'id': ['exact']
}
interfaces = (graphene.relay.Node,)
class PositionType(DjangoObjectType):
"""
    Position GraphQL type.
    Implements total_employees and employees objects.
"""
employees = DjangoFilterPaginateListField(
EmployeeType,
pagination=LimitOffsetGraphqlPagination()
)
total_employees = graphene.Int()
def resolve_total_employees(self, info):
return self.employees.count()
def resolve_employees(self, info):
return self.employees.all()
class Meta:
model = models.Position
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact']
}
interfaces = (graphene.relay.Node,)
class SpecializationType(DjangoObjectType):
class Meta:
model = models.Specialization
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact'],
}
interfaces = (graphene.relay.Node,)
<commit_msg>Remove Node interfaces (use origin id for objects)<commit_after>import graphene
from graphene_django.types import DjangoObjectType, ObjectType
from graphene_django_extras import (
DjangoFilterPaginateListField, LimitOffsetGraphqlPagination
)
from apps.employees import models
class EmployeeType(DjangoObjectType):
class Meta:
model = models.Employee
filter_fields = {
'first_name': ['icontains', 'istartswith'],
'last_name': ['icontains', 'istartswith'],
'position': ['exact'],
'id': ['exact']
}
class PositionType(DjangoObjectType):
"""
    Position GraphQL type.
    Implements total_employees and employees objects.
"""
employees = DjangoFilterPaginateListField(
EmployeeType,
pagination=LimitOffsetGraphqlPagination()
)
total_employees = graphene.Int()
def resolve_total_employees(self, info):
return self.employees.count()
def resolve_employees(self, info):
return self.employees.all()
class Meta:
model = models.Position
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact']
}
class SpecializationType(DjangoObjectType):
class Meta:
model = models.Specialization
filter_fields = {
'name': ['exact', 'icontains', 'istartswith'],
'id': ['exact'],
}
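For context on "origin id": with the Relay Node interface, graphene exposes opaque global ids that are just base64 of "TypeName:pk", so clients had to decode or pass blobs around; removing the interface lets the id: ['exact'] filters above take the raw primary key. A small sketch of the encoding, with the type name taken from the schema above:

# Relay global ids are base64("TypeName:pk"); after this commit the
# plain pk (e.g. 42) is used directly.
import base64

global_id = base64.b64encode(b'EmployeeType:42').decode()
print(global_id)                    # RW1wbG95ZWVUeXBlOjQy
print(base64.b64decode(global_id))  # b'EmployeeType:42'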
|
866af848f8468966ea7d9a020d46e88d7d780b2d
|
pytac/cs.py
|
pytac/cs.py
|
"""
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device.
    It uses channel access to communicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
raise NotImplementedError()
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
raise NotImplementedError()
class NullControlSystem(ControlSystem):
""" Dummy control system to set the value of a pv."""
def __init__(self):
pass
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
pass
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
pass
|
"""
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device.
    It uses channel access to communicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
raise NotImplementedError()
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
raise NotImplementedError()
|
Remove the null control system
|
Remove the null control system
|
Python
|
apache-2.0
|
willrogers/pytac,willrogers/pytac
|
"""
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device.
    It uses channel access to communicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
raise NotImplementedError()
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
raise NotImplementedError()
class NullControlSystem(ControlSystem):
""" Dummy control system to set the value of a pv."""
def __init__(self):
pass
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
pass
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
pass
Remove the null control system
|
"""
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device.
    It uses channel access to communicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
raise NotImplementedError()
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
raise NotImplementedError()
|
<commit_before>"""
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device.
    It uses channel access to communicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
raise NotImplementedError()
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
raise NotImplementedError()
class NullControlSystem(ControlSystem):
""" Dummy control system to set the value of a pv."""
def __init__(self):
pass
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
pass
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
pass
<commit_msg>Remove the null control system<commit_after>
|
"""
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device.
    It uses channel access to communicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
raise NotImplementedError()
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
raise NotImplementedError()
|
"""
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device.
    It uses channel access to communicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
raise NotImplementedError()
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
raise NotImplementedError()
class NullControlSystem(ControlSystem):
""" Dummy control system to set the value of a pv."""
def __init__(self):
pass
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
pass
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
pass
Remove the null control system"""
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device.
    It uses channel access to communicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
raise NotImplementedError()
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
raise NotImplementedError()
|
<commit_before>"""
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device.
    It uses channel access to communicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
raise NotImplementedError()
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
raise NotImplementedError()
class NullControlSystem(ControlSystem):
""" Dummy control system to set the value of a pv."""
def __init__(self):
pass
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
pass
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
pass
<commit_msg>Remove the null control system<commit_after>"""
Template module to define control systems.
"""
class ControlSystem(object):
""" Define a control system to be used with a device.
    It uses channel access to communicate over the network with
the hardware.
"""
def __init__(self):
raise NotImplementedError()
def get(self, pv):
""" Get the value of the given pv.
Args:
pv(string): The Pv to get the value of.
Returns:
Number: The numeric value of the pv.
"""
raise NotImplementedError()
def put(self, pv, value):
""" Put the value of a given pv.
Args:
pv(string): The string to put the value for.
value(Number): The value to be set.
"""
raise NotImplementedError()
|
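With NullControlSystem removed, tests that used it as a stand-in need their own implementation of the ControlSystem interface above. A minimal in-memory sketch, not part of the commit:

class DictControlSystem(ControlSystem):
    """Keep pv values in a plain dict instead of talking to hardware."""

    def __init__(self):
        self._values = {}

    def get(self, pv):
        # return the stored value, defaulting to 0.0 for unknown pvs
        return self._values.get(pv, 0.0)

    def put(self, pv, value):
        self._values[pv] = value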
3977b36760afa2407c5e98926a6c3c1f926f5493
|
x64/expand.py
|
x64/expand.py
|
import sys
def expand(filename):
for dir in ('.', '../common', '../anstests/'):
try:
f = open(dir + "/" + filename)
except IOError:
continue
for line in f:
line = line.replace('\r', '')
if line.strip().startswith('#bye'):
sys.exit(0)
if line.strip().startswith('include '):
expand(line.split()[1])
else:
sys.stdout.write(line)
print
return
    assert 0, filename + ' not found'
if __name__ == '__main__':
for a in sys.argv[1:]:
expand(a)
|
import sys
def expand(filename):
for dir in ('.', '../common', '../anstests/'):
try:
f = open(dir + "/" + filename)
except IOError:
continue
for line in f:
line = line.replace('\r', '')
if line.strip().startswith('#bye'):
sys.exit(0)
if line.strip().startswith('include '):
expand(line.split()[1])
else:
sys.stdout.write(line)
sys.stdout.write('\n')
return
    assert 0, filename + ' not found'
if __name__ == '__main__':
for a in sys.argv[1:]:
expand(a)
|
Fix missing newlines with Python3
|
Fix missing newlines with Python3
|
Python
|
bsd-3-clause
|
jamesbowman/swapforth,zuloloxi/swapforth,jamesbowman/swapforth,zuloloxi/swapforth,zuloloxi/swapforth,zuloloxi/swapforth,RGD2/swapforth,jamesbowman/swapforth,RGD2/swapforth,jamesbowman/swapforth,RGD2/swapforth,RGD2/swapforth
|
import sys
def expand(filename):
for dir in ('.', '../common', '../anstests/'):
try:
f = open(dir + "/" + filename)
except IOError:
continue
for line in f:
line = line.replace('\r', '')
if line.strip().startswith('#bye'):
sys.exit(0)
if line.strip().startswith('include '):
expand(line.split()[1])
else:
sys.stdout.write(line)
print
return
    assert 0, filename + ' not found'
if __name__ == '__main__':
for a in sys.argv[1:]:
expand(a)
Fix missing newlines with Python3
|
import sys
def expand(filename):
for dir in ('.', '../common', '../anstests/'):
try:
f = open(dir + "/" + filename)
except IOError:
continue
for line in f:
line = line.replace('\r', '')
if line.strip().startswith('#bye'):
sys.exit(0)
if line.strip().startswith('include '):
expand(line.split()[1])
else:
sys.stdout.write(line)
sys.stdout.write('\n')
return
    assert 0, filename + ' not found'
if __name__ == '__main__':
for a in sys.argv[1:]:
expand(a)
|
<commit_before>import sys
def expand(filename):
for dir in ('.', '../common', '../anstests/'):
try:
f = open(dir + "/" + filename)
except IOError:
continue
for line in f:
line = line.replace('\r', '')
if line.strip().startswith('#bye'):
sys.exit(0)
if line.strip().startswith('include '):
expand(line.split()[1])
else:
sys.stdout.write(line)
print
return
    assert 0, filename + ' not found'
if __name__ == '__main__':
for a in sys.argv[1:]:
expand(a)
<commit_msg>Fix missing newlines with Python3<commit_after>
|
import sys
def expand(filename):
for dir in ('.', '../common', '../anstests/'):
try:
f = open(dir + "/" + filename)
except IOError:
continue
for line in f:
line = line.replace('\r', '')
if line.strip().startswith('#bye'):
sys.exit(0)
if line.strip().startswith('include '):
expand(line.split()[1])
else:
sys.stdout.write(line)
sys.stdout.write('\n')
return
    assert 0, filename + ' not found'
if __name__ == '__main__':
for a in sys.argv[1:]:
expand(a)
|
import sys
def expand(filename):
for dir in ('.', '../common', '../anstests/'):
try:
f = open(dir + "/" + filename)
except IOError:
continue
for line in f:
line = line.replace('\r', '')
if line.strip().startswith('#bye'):
sys.exit(0)
if line.strip().startswith('include '):
expand(line.split()[1])
else:
sys.stdout.write(line)
print
return
    assert 0, filename + ' not found'
if __name__ == '__main__':
for a in sys.argv[1:]:
expand(a)
Fix missing newlines with Python3import sys
def expand(filename):
for dir in ('.', '../common', '../anstests/'):
try:
f = open(dir + "/" + filename)
except IOError:
continue
for line in f:
line = line.replace('\r', '')
if line.strip().startswith('#bye'):
sys.exit(0)
if line.strip().startswith('include '):
expand(line.split()[1])
else:
sys.stdout.write(line)
sys.stdout.write('\n')
return
    assert 0, filename + ' not found'
if __name__ == '__main__':
for a in sys.argv[1:]:
expand(a)
|
<commit_before>import sys
def expand(filename):
for dir in ('.', '../common', '../anstests/'):
try:
f = open(dir + "/" + filename)
except IOError:
continue
for line in f:
line = line.replace('\r', '')
if line.strip().startswith('#bye'):
sys.exit(0)
if line.strip().startswith('include '):
expand(line.split()[1])
else:
sys.stdout.write(line)
print
return
    assert 0, filename + ' not found'
if __name__ == '__main__':
for a in sys.argv[1:]:
expand(a)
<commit_msg>Fix missing newlines with Python3<commit_after>import sys
def expand(filename):
for dir in ('.', '../common', '../anstests/'):
try:
f = open(dir + "/" + filename)
except IOError:
continue
for line in f:
line = line.replace('\r', '')
if line.strip().startswith('#bye'):
sys.exit(0)
if line.strip().startswith('include '):
expand(line.split()[1])
else:
sys.stdout.write(line)
sys.stdout.write('\n')
return
    assert 0, filename + ' not found'
if __name__ == '__main__':
for a in sys.argv[1:]:
expand(a)
|
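The bug fixed here is a Python 2/3 trap: a bare print statement emits a newline in Python 2, but in Python 3 the same line is just an expression that evaluates the built-in function object and writes nothing. A tiny illustration, not from the commit:

import sys

sys.stdout.write("no newline yet")
print                    # Python 2: writes "\n"; Python 3: evaluates print itself, writes nothing
sys.stdout.write("\n")   # portable replacement, which is exactly what the fix uses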
d8d9b16e7264a6b2936b4920ca97f4dd923f29a3
|
crankycoin/services/queue.py
|
crankycoin/services/queue.py
|
import zmq
from crankycoin import config, logger
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e.message)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
|
import sys
import zmq
from crankycoin import config, logger
WIN32 = 'win32' in sys.platform
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in'] if not WIN32 else config['user']['win_queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out'] if not WIN32 else config['user']['win_queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
|
Fix `protocol not supported` on Windows
|
Fix `protocol not supported` on Windows
|
Python
|
mit
|
cranklin/crankycoin
|
import zmq
from crankycoin import config, logger
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e.message)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
Fix `protocol not supported` on Windows
|
import sys
import zmq
from crankycoin import config, logger
WIN32 = 'win32' in sys.platform
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in'] if not WIN32 else config['user']['win_queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out'] if not WIN32 else config['user']['win_queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
|
<commit_before>import zmq
from crankycoin import config, logger
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e.message)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
<commit_msg>Fix `protocol not supported` on Windows<commit_after>
|
import sys
import zmq
from crankycoin import config, logger
WIN32 = 'win32' in sys.platform
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in'] if not WIN32 else config['user']['win_queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out'] if not WIN32 else config['user']['win_queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
|
import zmq
from crankycoin import config, logger
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e.message)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
Fix `protocol not supported` on Windowsimport sys
import zmq
from crankycoin import config, logger
WIN32 = 'win32' in sys.platform
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in'] if not WIN32 else config['user']['win_queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out'] if not WIN32 else config['user']['win_queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
|
<commit_before>import zmq
from crankycoin import config, logger
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e.message)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
<commit_msg>Fix `protocol not supported` on Windows<commit_after>import sys
import zmq
from crankycoin import config, logger
WIN32 = 'win32' in sys.platform
class Queue(object):
QUEUE_BIND_IN = config['user']['queue_bind_in'] if not WIN32 else config['user']['win_queue_bind_in']
QUEUE_BIND_OUT = config['user']['queue_bind_out'] if not WIN32 else config['user']['win_queue_bind_out']
QUEUE_PROCESSING_WORKERS = config['user']['queue_processing_workers']
@classmethod
def start_queue(cls):
try:
context = zmq.Context(1)
# Socket facing producers
frontend = context.socket(zmq.PULL)
frontend.bind(cls.QUEUE_BIND_IN)
# Socket facing consumers
backend = context.socket(zmq.PUSH)
backend.bind(cls.QUEUE_BIND_OUT)
zmq.proxy(frontend, backend)
except Exception as e:
logger.error("could not start queue: %s", e)
raise
@classmethod
def enqueue(cls, msg):
context = zmq.Context()
socket = context.socket(zmq.PUSH)
socket.connect(cls.QUEUE_BIND_IN)
socket.send_json(msg)
@classmethod
def dequeue(cls):
context = zmq.Context()
socket = context.socket(zmq.PULL)
socket.connect(cls.QUEUE_BIND_OUT)
return socket.recv_json()
|
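The `protocol not supported` error comes from zmq's ipc:// transport, which is unavailable on Windows, hence the separate win_* bind addresses. A hypothetical config fragment (the keys match the code above; the addresses are illustrative):

config = {
    "user": {
        "queue_bind_in": "ipc:///tmp/crankycoin_in",     # POSIX-only transport
        "queue_bind_out": "ipc:///tmp/crankycoin_out",
        "win_queue_bind_in": "tcp://127.0.0.1:5557",     # Windows falls back to TCP
        "win_queue_bind_out": "tcp://127.0.0.1:5558",
        "queue_processing_workers": 2,
    }
}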
0b845f6beaec8f7ce8e4cd473ed50fe1202b5139
|
seabird/qc.py
|
seabird/qc.py
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
from cotede.qc import ProfileQC
from . import fCNV
from .exceptions import CNVError
class fProfileQC(ProfileQC):
""" Apply ProfileQC from CoTeDe straight from a file.
"""
def __init__(self, inputfile, cfg=None, saveauxiliary=True, verbose=True,
logger=None):
"""
"""
self.logger = logging.getLogger(logger or 'seabird.qc.fProfileQC')
self.name = 'fProfileQC'
try:
# Not the best way, but will work for now. I should pass
# the reference for the logger being used.
profile = fCNV(inputfile, logger=None)
except CNVError as e:
#self.attributes['filename'] = basename(inputfile)
logging.error(e.msg)
raise
super(fProfileQC, self).__init__(profile, cfg=cfg,
saveauxiliary=saveauxiliary, verbose=verbose,
logger=logger)
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
from os.path import basename
from cotede.qc import ProfileQC
from . import fCNV
from .exceptions import CNVError
class fProfileQC(ProfileQC):
""" Apply ProfileQC from CoTeDe straight from a file.
"""
def __init__(self, inputfile, cfg=None, saveauxiliary=True, verbose=True,
logger=None):
"""
"""
self.logger = logging.getLogger(logger or 'seabird.qc.fProfileQC')
self.name = 'fProfileQC'
try:
# Not the best way, but will work for now. I should pass
# the reference for the logger being used.
profile = fCNV(inputfile, logger=None)
except CNVError as e:
self.attributes['filename'] = basename(inputfile)
logging.error(e.msg)
raise
super(fProfileQC, self).__init__(profile, cfg=cfg,
saveauxiliary=saveauxiliary, verbose=verbose,
logger=logger)
|
Add filename in attrs if fails to load it.
|
Add filename in attrs if fails to load it.
Filename in attrs helps to debug.
|
Python
|
bsd-3-clause
|
castelao/seabird
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
from cotede.qc import ProfileQC
from . import fCNV
from .exceptions import CNVError
class fProfileQC(ProfileQC):
""" Apply ProfileQC from CoTeDe straight from a file.
"""
def __init__(self, inputfile, cfg=None, saveauxiliary=True, verbose=True,
logger=None):
"""
"""
self.logger = logging.getLogger(logger or 'seabird.qc.fProfileQC')
self.name = 'fProfileQC'
try:
# Not the best way, but will work for now. I should pass
# the reference for the logger being used.
profile = fCNV(inputfile, logger=None)
except CNVError as e:
#self.attributes['filename'] = basename(inputfile)
logging.error(e.msg)
raise
super(fProfileQC, self).__init__(profile, cfg=cfg,
saveauxiliary=saveauxiliary, verbose=verbose,
logger=logger)
Add filename in attrs if fails to load it.
Filename in attrs helps to debug.
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
from os.path import basename
from cotede.qc import ProfileQC
from . import fCNV
from .exceptions import CNVError
class fProfileQC(ProfileQC):
""" Apply ProfileQC from CoTeDe straight from a file.
"""
def __init__(self, inputfile, cfg=None, saveauxiliary=True, verbose=True,
logger=None):
"""
"""
self.logger = logging.getLogger(logger or 'seabird.qc.fProfileQC')
self.name = 'fProfileQC'
try:
# Not the best way, but will work for now. I should pass
# the reference for the logger being used.
profile = fCNV(inputfile, logger=None)
except CNVError as e:
self.attributes['filename'] = basename(inputfile)
logging.error(e.msg)
raise
super(fProfileQC, self).__init__(profile, cfg=cfg,
saveauxiliary=saveauxiliary, verbose=verbose,
logger=logger)
|
<commit_before># -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
from cotede.qc import ProfileQC
from . import fCNV
from .exceptions import CNVError
class fProfileQC(ProfileQC):
""" Apply ProfileQC from CoTeDe straight from a file.
"""
def __init__(self, inputfile, cfg=None, saveauxiliary=True, verbose=True,
logger=None):
"""
"""
self.logger = logging.getLogger(logger or 'seabird.qc.fProfileQC')
self.name = 'fProfileQC'
try:
# Not the best way, but will work for now. I should pass
# the reference for the logger being used.
profile = fCNV(inputfile, logger=None)
except CNVError as e:
#self.attributes['filename'] = basename(inputfile)
logging.error(e.msg)
raise
super(fProfileQC, self).__init__(profile, cfg=cfg,
saveauxiliary=saveauxiliary, verbose=verbose,
logger=logger)
<commit_msg>Add filename in attrs if fails to load it.
Filename in attrs helps to debug.<commit_after>
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
from os.path import basename
from cotede.qc import ProfileQC
from . import fCNV
from .exceptions import CNVError
class fProfileQC(ProfileQC):
""" Apply ProfileQC from CoTeDe straight from a file.
"""
def __init__(self, inputfile, cfg=None, saveauxiliary=True, verbose=True,
logger=None):
"""
"""
self.logger = logging.getLogger(logger or 'seabird.qc.fProfileQC')
self.name = 'fProfileQC'
try:
# Not the best way, but will work for now. I should pass
# the reference for the logger being used.
profile = fCNV(inputfile, logger=None)
except CNVError as e:
self.attributes['filename'] = basename(inputfile)
logging.error(e.msg)
raise
super(fProfileQC, self).__init__(profile, cfg=cfg,
saveauxiliary=saveauxiliary, verbose=verbose,
logger=logger)
|
# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
from cotede.qc import ProfileQC
from . import fCNV
from .exceptions import CNVError
class fProfileQC(ProfileQC):
""" Apply ProfileQC from CoTeDe straight from a file.
"""
def __init__(self, inputfile, cfg=None, saveauxiliary=True, verbose=True,
logger=None):
"""
"""
self.logger = logging.getLogger(logger or 'seabird.qc.fProfileQC')
self.name = 'fProfileQC'
try:
# Not the best way, but will work for now. I should pass
# the reference for the logger being used.
profile = fCNV(inputfile, logger=None)
except CNVError as e:
#self.attributes['filename'] = basename(inputfile)
logging.error(e.msg)
raise
super(fProfileQC, self).__init__(profile, cfg=cfg,
saveauxiliary=saveauxiliary, verbose=verbose,
logger=logger)
Add filename in attrs if fails to load it.
Filename in attrs helps to debug.# -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
from os.path import basename
from cotede.qc import ProfileQC
from . import fCNV
from .exceptions import CNVError
class fProfileQC(ProfileQC):
""" Apply ProfileQC from CoTeDe straight from a file.
"""
def __init__(self, inputfile, cfg=None, saveauxiliary=True, verbose=True,
logger=None):
"""
"""
self.logger = logging.getLogger(logger or 'seabird.qc.fProfileQC')
self.name = 'fProfileQC'
try:
# Not the best way, but will work for now. I should pass
# the reference for the logger being used.
profile = fCNV(inputfile, logger=None)
except CNVError as e:
self.attributes['filename'] = basename(inputfile)
logging.error(e.msg)
raise
super(fProfileQC, self).__init__(profile, cfg=cfg,
saveauxiliary=saveauxiliary, verbose=verbose,
logger=logger)
|
<commit_before># -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
from cotede.qc import ProfileQC
from . import fCNV
from .exceptions import CNVError
class fProfileQC(ProfileQC):
""" Apply ProfileQC from CoTeDe straight from a file.
"""
def __init__(self, inputfile, cfg=None, saveauxiliary=True, verbose=True,
logger=None):
"""
"""
self.logger = logging.getLogger(logger or 'seabird.qc.fProfileQC')
self.name = 'fProfileQC'
try:
# Not the best way, but will work for now. I should pass
# the reference for the logger being used.
profile = fCNV(inputfile, logger=None)
except CNVError as e:
#self.attributes['filename'] = basename(inputfile)
logging.error(e.msg)
raise
super(fProfileQC, self).__init__(profile, cfg=cfg,
saveauxiliary=saveauxiliary, verbose=verbose,
logger=logger)
<commit_msg>Add filename in attrs if fails to load it.
Filename in attrs helps to debug.<commit_after># -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import logging
from os.path import basename
from cotede.qc import ProfileQC
from . import fCNV
from .exceptions import CNVError
class fProfileQC(ProfileQC):
""" Apply ProfileQC from CoTeDe straight from a file.
"""
def __init__(self, inputfile, cfg=None, saveauxiliary=True, verbose=True,
logger=None):
"""
"""
self.logger = logging.getLogger(logger or 'seabird.qc.fProfileQC')
self.name = 'fProfileQC'
try:
# Not the best way, but will work for now. I should pass
# the reference for the logger being used.
profile = fCNV(inputfile, logger=None)
except CNVError as e:
self.attributes['filename'] = basename(inputfile)
logging.error(e.msg)
raise
super(fProfileQC, self).__init__(profile, cfg=cfg,
saveauxiliary=saveauxiliary, verbose=verbose,
logger=logger)
|
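Recording the basename before re-raising pays off in batch runs, where one malformed CNV file should not silently derail the whole set. A usage sketch under assumed import paths and directory layout:

from glob import glob

from seabird.qc import fProfileQC
from seabird.exceptions import CNVError

for fname in glob("data/*.cnv"):       # hypothetical input directory
    try:
        pqc = fProfileQC(fname)
    except CNVError:
        # the failing file was already logged with its basename; skip it
        continue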
e8cf948ec22312e61548f5cb96bea3669a64f33c
|
id/migrations/0012_delete_externaldatabase.py
|
id/migrations/0012_delete_externaldatabase.py
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django.db
class Migration(migrations.Migration):
dependencies = [
('id', '0011_auto_20150916_1546'),
]
database_operations = [
migrations.AlterModelTable('ExternalDatabase', 'databases_externaldatabase'),
# If running on PostgreSQL, rename the sequencer also - no sequencer left behind!
migrations.RunSQL(
"ALTER SEQUENCE id_externaldatabase_id_seq RENAME TO databases_externaldatabase_id_seq" if django.db.connection.vendor == "postgresql" \
else "SELECT 1"
)
]
state_operations = [
migrations.DeleteModel('ExternalDatabase')
]
operations = [
migrations.SeparateDatabaseAndState(
database_operations=database_operations,
state_operations=state_operations)
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('id', '0011_auto_20150916_1546'),
]
database_operations = [
migrations.AlterModelTable('ExternalDatabase', 'databases_externaldatabase'),
]
state_operations = [
migrations.DeleteModel('ExternalDatabase')
]
operations = [
migrations.SeparateDatabaseAndState(
database_operations=database_operations,
state_operations=state_operations
),
]
|
Revoke earlier change: This migration has already been applied.
|
Revoke earlier change: This migration has already been applied.
|
Python
|
mit
|
occrp/id-backend
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django.db
class Migration(migrations.Migration):
dependencies = [
('id', '0011_auto_20150916_1546'),
]
database_operations = [
migrations.AlterModelTable('ExternalDatabase', 'databases_externaldatabase'),
        # If running on PostgreSQL, rename the sequence also - no sequence left behind!
migrations.RunSQL(
"ALTER SEQUENCE id_externaldatabase_id_seq RENAME TO databases_externaldatabase_id_seq" if django.db.connection.vendor == "postgresql" \
else "SELECT 1"
)
]
state_operations = [
migrations.DeleteModel('ExternalDatabase')
]
operations = [
migrations.SeparateDatabaseAndState(
database_operations=database_operations,
state_operations=state_operations)
]
Revoke earlier change: This migration has already been applied.
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('id', '0011_auto_20150916_1546'),
]
database_operations = [
migrations.AlterModelTable('ExternalDatabase', 'databases_externaldatabase'),
]
state_operations = [
migrations.DeleteModel('ExternalDatabase')
]
operations = [
migrations.SeparateDatabaseAndState(
database_operations=database_operations,
state_operations=state_operations
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django.db
class Migration(migrations.Migration):
dependencies = [
('id', '0011_auto_20150916_1546'),
]
database_operations = [
migrations.AlterModelTable('ExternalDatabase', 'databases_externaldatabase'),
        # If running on PostgreSQL, rename the sequence also - no sequence left behind!
migrations.RunSQL(
"ALTER SEQUENCE id_externaldatabase_id_seq RENAME TO databases_externaldatabase_id_seq" if django.db.connection.vendor == "postgresql" \
else "SELECT 1"
)
]
state_operations = [
migrations.DeleteModel('ExternalDatabase')
]
operations = [
migrations.SeparateDatabaseAndState(
database_operations=database_operations,
state_operations=state_operations)
]
<commit_msg>Revoke earlier change: This migration has already been applied.<commit_after>
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('id', '0011_auto_20150916_1546'),
]
database_operations = [
migrations.AlterModelTable('ExternalDatabase', 'databases_externaldatabase'),
]
state_operations = [
migrations.DeleteModel('ExternalDatabase')
]
operations = [
migrations.SeparateDatabaseAndState(
database_operations=database_operations,
state_operations=state_operations
),
]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django.db
class Migration(migrations.Migration):
dependencies = [
('id', '0011_auto_20150916_1546'),
]
database_operations = [
migrations.AlterModelTable('ExternalDatabase', 'databases_externaldatabase'),
        # If running on PostgreSQL, rename the sequence also - no sequence left behind!
migrations.RunSQL(
"ALTER SEQUENCE id_externaldatabase_id_seq RENAME TO databases_externaldatabase_id_seq" if django.db.connection.vendor == "postgresql" \
else "SELECT 1"
)
]
state_operations = [
migrations.DeleteModel('ExternalDatabase')
]
operations = [
migrations.SeparateDatabaseAndState(
database_operations=database_operations,
state_operations=state_operations)
]
Revoke earlier change: This migration has already been applied.# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('id', '0011_auto_20150916_1546'),
]
database_operations = [
migrations.AlterModelTable('ExternalDatabase', 'databases_externaldatabase'),
]
state_operations = [
migrations.DeleteModel('ExternalDatabase')
]
operations = [
migrations.SeparateDatabaseAndState(
database_operations=database_operations,
state_operations=state_operations
),
]
|
<commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
import django.db
class Migration(migrations.Migration):
dependencies = [
('id', '0011_auto_20150916_1546'),
]
database_operations = [
migrations.AlterModelTable('ExternalDatabase', 'databases_externaldatabase'),
        # If running on PostgreSQL, rename the sequence also - no sequence left behind!
migrations.RunSQL(
"ALTER SEQUENCE id_externaldatabase_id_seq RENAME TO databases_externaldatabase_id_seq" if django.db.connection.vendor == "postgresql" \
else "SELECT 1"
)
]
state_operations = [
migrations.DeleteModel('ExternalDatabase')
]
operations = [
migrations.SeparateDatabaseAndState(
database_operations=database_operations,
state_operations=state_operations)
]
<commit_msg>Revoke earlier change: This migration has already been applied.<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('id', '0011_auto_20150916_1546'),
]
database_operations = [
migrations.AlterModelTable('ExternalDatabase', 'databases_externaldatabase'),
]
state_operations = [
migrations.DeleteModel('ExternalDatabase')
]
operations = [
migrations.SeparateDatabaseAndState(
database_operations=database_operations,
state_operations=state_operations
),
]
|
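This migration is one half of the standard recipe for moving a model between Django apps: the table survives under its new name while this app's state drops the model. The receiving side, hypothetical here and not shown in the record, would do the mirror image and register the model in state without touching the database:

from django.db import migrations, models

class Migration(migrations.Migration):
    dependencies = []   # would depend on the databases app's initial migration

    state_operations = [
        migrations.CreateModel(
            name='ExternalDatabase',
            fields=[('id', models.AutoField(primary_key=True))],  # abridged field list
            options={'db_table': 'databases_externaldatabase'},
        ),
    ]

    operations = [
        migrations.SeparateDatabaseAndState(state_operations=state_operations),
    ]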
573055a80ef19f2b743ef3bfc08c40e8738c5bb1
|
libtree/utils.py
|
libtree/utils.py
|
# Copyright (c) 2016 Fabian Kochem
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, first_run=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if first_run is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
|
# Copyright (c) 2016 Fabian Kochem
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, create_copy=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if create_copy is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
|
Rename 'first_run' -> 'create_copy' in recursive_dict_merge()
|
Rename 'first_run' -> 'create_copy' in recursive_dict_merge()
|
Python
|
mit
|
conceptsandtraining/libtree
|
# Copyright (c) 2016 Fabian Kochem
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, first_run=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if first_run is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
Rename 'first_run' -> 'create_copy' in recursive_dict_merge()
|
# Copyright (c) 2016 Fabian Kochem
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, create_copy=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if create_copy is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
|
<commit_before># Copyright (c) 2016 Fabian Kochem
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, first_run=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if first_run is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
<commit_msg>Rename 'first_run' -> 'create_copy' in recursive_dict_merge()<commit_after>
|
# Copyright (c) 2016 Fabian Kochem
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, create_copy=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if create_copy is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
|
# Copyright (c) 2016 Fabian Kochem
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, first_run=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if first_run is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
Rename 'first_run' -> 'create_copy' in recursive_dict_merge()# Copyright (c) 2016 Fabian Kochem
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, create_copy=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if create_copy is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
|
<commit_before># Copyright (c) 2016 Fabian Kochem
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, first_run=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if first_run is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
<commit_msg>Rename 'first_run' -> 'create_copy' in recursive_dict_merge()<commit_after># Copyright (c) 2016 Fabian Kochem
import collections
from copy import deepcopy
def recursive_dict_merge(left, right, create_copy=True):
"""
Merge ``right`` into ``left`` and return a new dictionary.
"""
if create_copy is True:
left = deepcopy(left)
for key in right:
if key in left:
if isinstance(left[key], dict) and isinstance(right[key], dict):
recursive_dict_merge(left[key], right[key], False)
else:
left[key] = right[key]
else:
left[key] = right[key]
return left
def vectorize_nodes(*nodes):
if len(nodes) == 1 and isinstance(nodes[0], collections.Iterable):
nodes = nodes[0]
ret = []
parents = {node.parent: node for node in nodes}
last_parent = None
for _ in range(len(parents)):
node = parents[last_parent]
ret.append(node)
last_parent = node.id
return ret
|
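A quick usage sketch of recursive_dict_merge as defined above, with illustrative values:

defaults = {'db': {'host': 'localhost', 'port': 5432}, 'debug': False}
overrides = {'db': {'port': 6432}, 'debug': True}

merged = recursive_dict_merge(defaults, overrides)
# merged == {'db': {'host': 'localhost', 'port': 6432}, 'debug': True}
# defaults is left untouched because create_copy=True deep-copies it first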
19733452008845419aea36e13d68494d931e44e6
|
settings.py
|
settings.py
|
"""settings.py - settings and configuration used over all modules :
Copyright (c) 2018 Heinrich Widmann (DKRZ)
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time
def init():
# check the version from :
global B2FINDVersion
B2FINDVersion = '2.4.0'
global TimeStart
TimeStart = time.time()
global ckanorg
ckanorg = 'rda'
|
"""settings.py - settings and configuration used over all modules :
Copyright (c) 2018 Heinrich Widmann (DKRZ)
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time
def init():
# check the version from :
global B2FINDVersion
B2FINDVersion = '2.4.0'
global TimeStart
TimeStart = time.time()
global ckanorg
ckanorg = 'eudat-b2find'
|
Set the right CKAN organization
|
Set the right CKAN organization
|
Python
|
mit
|
EUDAT-Training/B2FIND-Training
|
"""settings.py - settings and configuration used over all modules :
Copyright (c) 2018 Heinrich Widmann (DKRZ)
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time
def init():
# check the version from :
global B2FINDVersion
B2FINDVersion = '2.4.0'
global TimeStart
TimeStart = time.time()
global ckanorg
ckanorg = 'rda'
Set the right CKAN organization
|
"""settings.py - settings and configuration used over all modules :
Copyright (c) 2018 Heinrich Widmann (DKRZ)
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time
def init():
# check the version from :
global B2FINDVersion
B2FINDVersion = '2.4.0'
global TimeStart
TimeStart = time.time()
global ckanorg
ckanorg = 'eudat-b2find'
|
<commit_before>"""settings.py - settings and configuration used over all modules :
Copyright (c) 2018 Heinrich Widmann (DKRZ)
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time
def init():
# check the version from :
global B2FINDVersion
B2FINDVersion = '2.4.0'
global TimeStart
TimeStart = time.time()
global ckanorg
ckanorg = 'rda'
<commit_msg>Set the right CKAN organization<commit_after>
|
"""settings.py - settings and configuration used over all modules :
Copyright (c) 2018 Heinrich Widmann (DKRZ)
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time
def init():
# check the version from :
global B2FINDVersion
B2FINDVersion = '2.4.0'
global TimeStart
TimeStart = time.time()
global ckanorg
ckanorg = 'eudat-b2find'
|
"""settings.py - settings and configuration used over all modules :
Copyright (c) 2018 Heinrich Widmann (DKRZ)
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time
def init():
# check the version from :
global B2FINDVersion
B2FINDVersion = '2.4.0'
global TimeStart
TimeStart = time.time()
global ckanorg
ckanorg = 'rda'
Set the right CKAN organization"""settings.py - settings and configuration used over all modules :
Copyright (c) 2018 Heinrich Widmann (DKRZ)
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time
def init():
# check the version from :
global B2FINDVersion
B2FINDVersion = '2.4.0'
global TimeStart
TimeStart = time.time()
global ckanorg
ckanorg = 'eudat-b2find'
|
<commit_before>"""settings.py - settings and configuration used over all modules :
Copyright (c) 2018 Heinrich Widmann (DKRZ)
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time
def init():
# check the version from :
global B2FINDVersion
B2FINDVersion = '2.4.0'
global TimeStart
TimeStart = time.time()
global ckanorg
ckanorg = 'rda'
<commit_msg>Set the right CKAN organization<commit_after>"""settings.py - settings and configuration used over all modules :
Copyright (c) 2018 Heinrich Widmann (DKRZ)
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import time
def init():
# check the version from :
global B2FINDVersion
B2FINDVersion = '2.4.0'
global TimeStart
TimeStart = time.time()
global ckanorg
ckanorg = 'eudat-b2find'
|
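A usage sketch, assumed rather than taken from the record: consumers call init() once at startup and then read the module-level globals it defines.

import time

import settings

settings.init()
print(settings.B2FINDVersion)             # '2.4.0'
print(settings.ckanorg)                   # 'eudat-b2find' after this commit
print(time.time() - settings.TimeStart)   # seconds elapsed since init()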
72b9aebb3cddf999bfaea0b3452cfa670b9ed269
|
utils/config.py
|
utils/config.py
|
import yaml
def from_config(config):
config = dict(config) # shallow copy
return config.pop('class').from_config(config)
def from_yaml(yaml_string):
config = yaml.load(yaml_string)
return from_config(config)
class ConfigObject:
def get_config(self):
return dict(class_name=self.__class__.__name__)
@classmethod
def from_config(cls, config):
# TODO pop class?
return cls(**config)
def to_yaml(self, *args, **kwargs):
config = self.get_config()
return yaml.dump(config, *args, **kwargs)
@classmethod
def from_yaml(cls, yaml_string):
config = yaml.load(yaml_string)
return cls.from_config(config)
|
import yaml
def from_config(config):
config = dict(config) # shallow copy
return config.pop('class').from_config(config)
def from_yaml(yaml_string):
config = yaml.load(yaml_string)
return from_config(config)
class ConfigObject:
def get_config(self):
return {'class': self.__class__}
@classmethod
def from_config(cls, config):
# TODO pop class?
return cls(**config)
def to_yaml(self, *args, width=float('inf'), **kwargs):
config = self.get_config()
return yaml.dump(config, *args, width=width, **kwargs)
@classmethod
def from_yaml(cls, yaml_string):
config = yaml.load(yaml_string)
return cls.from_config(config)
|
Fix ConfigObject to use class instead of class_name. Pass infinite width when dumping to yaml.
|
Fix ConfigObject to use class instead of class_name. Pass infinite width when dumping to yaml.
|
Python
|
mit
|
alexlee-gk/visual_dynamics
|
import yaml
def from_config(config):
config = dict(config) # shallow copy
return config.pop('class').from_config(config)
def from_yaml(yaml_string):
config = yaml.load(yaml_string)
return from_config(config)
class ConfigObject:
def get_config(self):
return dict(class_name=self.__class__.__name__)
@classmethod
def from_config(cls, config):
# TODO pop class?
return cls(**config)
def to_yaml(self, *args, **kwargs):
config = self.get_config()
return yaml.dump(config, *args, **kwargs)
@classmethod
def from_yaml(cls, yaml_string):
config = yaml.load(yaml_string)
return cls.from_config(config)
Fix ConfigObject to use class instead of class_name. Pass infinite width when dumping to yaml.
|
import yaml
def from_config(config):
config = dict(config) # shallow copy
return config.pop('class').from_config(config)
def from_yaml(yaml_string):
config = yaml.load(yaml_string)
return from_config(config)
class ConfigObject:
def get_config(self):
return {'class': self.__class__}
@classmethod
def from_config(cls, config):
# TODO pop class?
return cls(**config)
def to_yaml(self, *args, width=float('inf'), **kwargs):
config = self.get_config()
return yaml.dump(config, *args, width=width, **kwargs)
@classmethod
def from_yaml(cls, yaml_string):
config = yaml.load(yaml_string)
return cls.from_config(config)
|
<commit_before>import yaml
def from_config(config):
config = dict(config) # shallow copy
return config.pop('class').from_config(config)
def from_yaml(yaml_string):
config = yaml.load(yaml_string)
return from_config(config)
class ConfigObject:
def get_config(self):
return dict(class_name=self.__class__.__name__)
@classmethod
def from_config(cls, config):
# TODO pop class?
return cls(**config)
def to_yaml(self, *args, **kwargs):
config = self.get_config()
return yaml.dump(config, *args, **kwargs)
@classmethod
def from_yaml(cls, yaml_string):
config = yaml.load(yaml_string)
return cls.from_config(config)
<commit_msg>Fix ConfigObject to use class instead of class_name. Pass infinite width when dumping to yaml.<commit_after>
|
import yaml
def from_config(config):
config = dict(config) # shallow copy
return config.pop('class').from_config(config)
def from_yaml(yaml_string):
config = yaml.load(yaml_string)
return from_config(config)
class ConfigObject:
def get_config(self):
return {'class': self.__class__}
@classmethod
def from_config(cls, config):
# TODO pop class?
return cls(**config)
def to_yaml(self, *args, width=float('inf'), **kwargs):
config = self.get_config()
return yaml.dump(config, *args, width=width, **kwargs)
@classmethod
def from_yaml(cls, yaml_string):
config = yaml.load(yaml_string)
return cls.from_config(config)
|
import yaml
def from_config(config):
config = dict(config) # shallow copy
return config.pop('class').from_config(config)
def from_yaml(yaml_string):
config = yaml.load(yaml_string)
return from_config(config)
class ConfigObject:
def get_config(self):
return dict(class_name=self.__class__.__name__)
@classmethod
def from_config(cls, config):
# TODO pop class?
return cls(**config)
def to_yaml(self, *args, **kwargs):
config = self.get_config()
return yaml.dump(config, *args, **kwargs)
@classmethod
def from_yaml(cls, yaml_string):
config = yaml.load(yaml_string)
return cls.from_config(config)
Fix ConfigObject to use class instead of class_name. Pass infinite width when dumping to yaml.import yaml
def from_config(config):
config = dict(config) # shallow copy
return config.pop('class').from_config(config)
def from_yaml(yaml_string):
config = yaml.load(yaml_string)
return from_config(config)
class ConfigObject:
def get_config(self):
return {'class': self.__class__}
@classmethod
def from_config(cls, config):
# TODO pop class?
return cls(**config)
def to_yaml(self, *args, width=float('inf'), **kwargs):
config = self.get_config()
return yaml.dump(config, *args, width=width, **kwargs)
@classmethod
def from_yaml(cls, yaml_string):
config = yaml.load(yaml_string)
return cls.from_config(config)
|
<commit_before>import yaml
def from_config(config):
config = dict(config) # shallow copy
return config.pop('class').from_config(config)
def from_yaml(yaml_string):
config = yaml.load(yaml_string)
return from_config(config)
class ConfigObject:
def get_config(self):
return dict(class_name=self.__class__.__name__)
@classmethod
def from_config(cls, config):
# TODO pop class?
return cls(**config)
def to_yaml(self, *args, **kwargs):
config = self.get_config()
return yaml.dump(config, *args, **kwargs)
@classmethod
def from_yaml(cls, yaml_string):
config = yaml.load(yaml_string)
return cls.from_config(config)
<commit_msg>Fix ConfigObject to use class instead of class_name. Pass infinite width when dumping to yaml.<commit_after>import yaml
def from_config(config):
config = dict(config) # shallow copy
return config.pop('class').from_config(config)
def from_yaml(yaml_string):
config = yaml.load(yaml_string)
return from_config(config)
class ConfigObject:
def get_config(self):
return {'class': self.__class__}
@classmethod
def from_config(cls, config):
# TODO pop class?
return cls(**config)
def to_yaml(self, *args, width=float('inf'), **kwargs):
config = self.get_config()
return yaml.dump(config, *args, width=width, **kwargs)
@classmethod
def from_yaml(cls, yaml_string):
config = yaml.load(yaml_string)
return cls.from_config(config)
|
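A minimal sketch of the round trip this change enables, assuming a hypothetical Point subclass (not from the repository). Storing the class object itself under 'class' makes PyYAML emit a !!python/name tag, and width=float('inf') keeps long lines from being wrapped; resolving the tag back into a class needs an unsafe loader in PyYAML >= 5.1.

import yaml

class Point:  # hypothetical stand-in for a ConfigObject subclass
    def __init__(self, x, y):
        self.x, self.y = x, y

config = {'class': Point, 'x': 1, 'y': 2}

# width=float('inf') stops PyYAML from wrapping long lines in the output.
dumped = yaml.dump(config, width=float('inf'))

# The class is serialized as "!!python/name:__main__.Point"; loading such a
# tag requires an unsafe loader in modern PyYAML versions.
loaded = yaml.load(dumped, Loader=yaml.UnsafeLoader)
instance = loaded.pop('class')(**loaded)  # mirrors from_config() above
print(instance.x, instance.y)  # -> 1 2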
56ca0dce01ad76934ae850ea20ab25adbcc751d1
|
conf_site/proposals/admin.py
|
conf_site/proposals/admin.py
|
from django.contrib import admin
from .models import Proposal, ProposalKeyword
@admin.register(ProposalKeyword)
class KeywordAdmin(admin.ModelAdmin):
list_display = ("name", "slug", "official",)
list_filter = ("official",)
@admin.register(Proposal)
class ProposalAdmin(admin.ModelAdmin):
exclude = (
"under_represented_population",
"under_represented_details",
"under_represented_other",
)
list_display = (
'number',
'title',
'speaker_email',
'speaker',
'kind',
'audience_level',
'cancelled',
"date_created",
"date_last_modified",
)
list_display_links = ("title",)
list_filter = (
'kind',
'audience_level',
'cancelled',
'recording_release',
)
search_fields = ("title", "speaker__name")
date_hierarchy = "date_created"
|
from django.contrib import admin
from .models import Proposal, ProposalKeyword
@admin.register(ProposalKeyword)
class KeywordAdmin(admin.ModelAdmin):
list_display = ("name", "slug", "official",)
list_filter = ("official",)
@admin.register(Proposal)
class ProposalAdmin(admin.ModelAdmin):
exclude = (
"under_represented_population",
"under_represented_details",
"under_represented_other",
)
list_display = (
'number',
'title',
'speaker',
'kind',
'audience_level',
'cancelled',
"date_created",
"date_last_modified",
)
list_display_links = ("title",)
list_filter = (
'kind',
'audience_level',
'cancelled',
'recording_release',
)
search_fields = ("title", "speaker__name")
date_hierarchy = "date_created"
|
Remove speaker email field from proposal listing.
|
Remove speaker email field from proposal listing.
Save space in admin proposal listing by removing the speaker email
field.
|
Python
|
mit
|
pydata/conf_site,pydata/conf_site,pydata/conf_site
|
from django.contrib import admin
from .models import Proposal, ProposalKeyword
@admin.register(ProposalKeyword)
class KeywordAdmin(admin.ModelAdmin):
list_display = ("name", "slug", "official",)
list_filter = ("official",)
@admin.register(Proposal)
class ProposalAdmin(admin.ModelAdmin):
exclude = (
"under_represented_population",
"under_represented_details",
"under_represented_other",
)
list_display = (
'number',
'title',
'speaker_email',
'speaker',
'kind',
'audience_level',
'cancelled',
"date_created",
"date_last_modified",
)
list_display_links = ("title",)
list_filter = (
'kind',
'audience_level',
'cancelled',
'recording_release',
)
search_fields = ("title", "speaker__name")
date_hierarchy = "date_created"
Remove speaker email field from proposal listing.
Save space in admin proposal listing by removing the speaker email
field.
|
from django.contrib import admin
from .models import Proposal, ProposalKeyword
@admin.register(ProposalKeyword)
class KeywordAdmin(admin.ModelAdmin):
list_display = ("name", "slug", "official",)
list_filter = ("official",)
@admin.register(Proposal)
class ProposalAdmin(admin.ModelAdmin):
exclude = (
"under_represented_population",
"under_represented_details",
"under_represented_other",
)
list_display = (
'number',
'title',
'speaker',
'kind',
'audience_level',
'cancelled',
"date_created",
"date_last_modified",
)
list_display_links = ("title",)
list_filter = (
'kind',
'audience_level',
'cancelled',
'recording_release',
)
search_fields = ("title", "speaker__name")
date_hierarchy = "date_created"
|
<commit_before>from django.contrib import admin
from .models import Proposal, ProposalKeyword
@admin.register(ProposalKeyword)
class KeywordAdmin(admin.ModelAdmin):
list_display = ("name", "slug", "official",)
list_filter = ("official",)
@admin.register(Proposal)
class ProposalAdmin(admin.ModelAdmin):
exclude = (
"under_represented_population",
"under_represented_details",
"under_represented_other",
)
list_display = (
'number',
'title',
'speaker_email',
'speaker',
'kind',
'audience_level',
'cancelled',
"date_created",
"date_last_modified",
)
list_display_links = ("title",)
list_filter = (
'kind',
'audience_level',
'cancelled',
'recording_release',
)
search_fields = ("title", "speaker__name")
date_hierarchy = "date_created"
<commit_msg>Remove speaker email field from proposal listing.
Save space in admin proposal listing by removing the speaker email
field.<commit_after>
|
from django.contrib import admin
from .models import Proposal, ProposalKeyword
@admin.register(ProposalKeyword)
class KeywordAdmin(admin.ModelAdmin):
list_display = ("name", "slug", "official",)
list_filter = ("official",)
@admin.register(Proposal)
class ProposalAdmin(admin.ModelAdmin):
exclude = (
"under_represented_population",
"under_represented_details",
"under_represented_other",
)
list_display = (
'number',
'title',
'speaker',
'kind',
'audience_level',
'cancelled',
"date_created",
"date_last_modified",
)
list_display_links = ("title",)
list_filter = (
'kind',
'audience_level',
'cancelled',
'recording_release',
)
search_fields = ("title", "speaker__name")
date_hierarchy = "date_created"
|
from django.contrib import admin
from .models import Proposal, ProposalKeyword
@admin.register(ProposalKeyword)
class KeywordAdmin(admin.ModelAdmin):
list_display = ("name", "slug", "official",)
list_filter = ("official",)
@admin.register(Proposal)
class ProposalAdmin(admin.ModelAdmin):
exclude = (
"under_represented_population",
"under_represented_details",
"under_represented_other",
)
list_display = (
'number',
'title',
'speaker_email',
'speaker',
'kind',
'audience_level',
'cancelled',
"date_created",
"date_last_modified",
)
list_display_links = ("title",)
list_filter = (
'kind',
'audience_level',
'cancelled',
'recording_release',
)
search_fields = ("title", "speaker__name")
date_hierarchy = "date_created"
Remove speaker email field from proposal listing.
Save space in admin proposal listing by removing the speaker email
field.from django.contrib import admin
from .models import Proposal, ProposalKeyword
@admin.register(ProposalKeyword)
class KeywordAdmin(admin.ModelAdmin):
list_display = ("name", "slug", "official",)
list_filter = ("official",)
@admin.register(Proposal)
class ProposalAdmin(admin.ModelAdmin):
exclude = (
"under_represented_population",
"under_represented_details",
"under_represented_other",
)
list_display = (
'number',
'title',
'speaker',
'kind',
'audience_level',
'cancelled',
"date_created",
"date_last_modified",
)
list_display_links = ("title",)
list_filter = (
'kind',
'audience_level',
'cancelled',
'recording_release',
)
search_fields = ("title", "speaker__name")
date_hierarchy = "date_created"
|
<commit_before>from django.contrib import admin
from .models import Proposal, ProposalKeyword
@admin.register(ProposalKeyword)
class KeywordAdmin(admin.ModelAdmin):
list_display = ("name", "slug", "official",)
list_filter = ("official",)
@admin.register(Proposal)
class ProposalAdmin(admin.ModelAdmin):
exclude = (
"under_represented_population",
"under_represented_details",
"under_represented_other",
)
list_display = (
'number',
'title',
'speaker_email',
'speaker',
'kind',
'audience_level',
'cancelled',
"date_created",
"date_last_modified",
)
list_display_links = ("title",)
list_filter = (
'kind',
'audience_level',
'cancelled',
'recording_release',
)
search_fields = ("title", "speaker__name")
date_hierarchy = "date_created"
<commit_msg>Remove speaker email field from proposal listing.
Save space in admin proposal listing by removing the speaker email
field.<commit_after>from django.contrib import admin
from .models import Proposal, ProposalKeyword
@admin.register(ProposalKeyword)
class KeywordAdmin(admin.ModelAdmin):
list_display = ("name", "slug", "official",)
list_filter = ("official",)
@admin.register(Proposal)
class ProposalAdmin(admin.ModelAdmin):
exclude = (
"under_represented_population",
"under_represented_details",
"under_represented_other",
)
list_display = (
'number',
'title',
'speaker',
'kind',
'audience_level',
'cancelled',
"date_created",
"date_last_modified",
)
list_display_links = ("title",)
list_filter = (
'kind',
'audience_level',
'cancelled',
'recording_release',
)
search_fields = ("title", "speaker__name")
date_hierarchy = "date_created"
|
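Since list_display accepts callables as well as field names, the email could also have been folded into the speaker column rather than occupying its own. A hypothetical sketch of that alternative (not part of the commit), assuming Proposal still exposes speaker and speaker_email:

from django.contrib import admin

# Hypothetical alternative, not part of the commit: fold the email into the
# speaker column instead of dropping it, keeping the listing narrow.
class ProposalAdmin(admin.ModelAdmin):
    list_display = ("number", "title", "speaker_with_email", "kind")

    def speaker_with_email(self, obj):
        # assumes Proposal still exposes speaker and speaker_email
        return "%s <%s>" % (obj.speaker, obj.speaker_email)
    speaker_with_email.short_description = "Speaker"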
d76b7525e60767c0fc73c67ebe458329a3ae2426
|
tests/parser/test_parse_inreach.py
|
tests/parser/test_parse_inreach.py
|
import unittest
from ogn.parser.aprs_comment.inreach_parser import InreachParser
class TestStringMethods(unittest.TestCase):
def test_position_comment(self):
message = InreachParser().parse_position("id300434060496190 inReac True")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], None)
message = InreachParser().parse_position("id300434060496190 inReac True Jim Bob")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], "Jim Bob")
if __name__ == '__main__':
unittest.main()
|
import unittest
from ogn.parser.aprs_comment.inreach_parser import InreachParser
class TestStringMethods(unittest.TestCase):
def test_position_comment(self):
message = InreachParser().parse_position("id300434060496190 inReac True")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], None)
message = InreachParser().parse_position("id300434060496190 inReac True Jim Bob")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], "Jim Bob")
if __name__ == '__main__':
unittest.main()
|
Add a blank line in the test case for CI?
|
Add a blank line in the test case for CI?
|
Python
|
agpl-3.0
|
glidernet/python-ogn-client
|
import unittest
from ogn.parser.aprs_comment.inreach_parser import InreachParser
class TestStringMethods(unittest.TestCase):
def test_position_comment(self):
message = InreachParser().parse_position("id300434060496190 inReac True")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], None)
message = InreachParser().parse_position("id300434060496190 inReac True Jim Bob")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], "Jim Bob")
if __name__ == '__main__':
unittest.main()
Add a blank line in the test case for CI?
|
import unittest
from ogn.parser.aprs_comment.inreach_parser import InreachParser
class TestStringMethods(unittest.TestCase):
def test_position_comment(self):
message = InreachParser().parse_position("id300434060496190 inReac True")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], None)
message = InreachParser().parse_position("id300434060496190 inReac True Jim Bob")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], "Jim Bob")
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from ogn.parser.aprs_comment.inreach_parser import InreachParser
class TestStringMethods(unittest.TestCase):
def test_position_comment(self):
message = InreachParser().parse_position("id300434060496190 inReac True")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], None)
message = InreachParser().parse_position("id300434060496190 inReac True Jim Bob")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], "Jim Bob")
if __name__ == '__main__':
unittest.main()
<commit_msg>Add a blank line in the test case for CI?<commit_after>
|
import unittest
from ogn.parser.aprs_comment.inreach_parser import InreachParser
class TestStringMethods(unittest.TestCase):
def test_position_comment(self):
message = InreachParser().parse_position("id300434060496190 inReac True")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], None)
message = InreachParser().parse_position("id300434060496190 inReac True Jim Bob")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], "Jim Bob")
if __name__ == '__main__':
unittest.main()
|
import unittest
from ogn.parser.aprs_comment.inreach_parser import InreachParser
class TestStringMethods(unittest.TestCase):
def test_position_comment(self):
message = InreachParser().parse_position("id300434060496190 inReac True")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], None)
message = InreachParser().parse_position("id300434060496190 inReac True Jim Bob")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], "Jim Bob")
if __name__ == '__main__':
unittest.main()
Add a blank line in the test case for CI?import unittest
from ogn.parser.aprs_comment.inreach_parser import InreachParser
class TestStringMethods(unittest.TestCase):
def test_position_comment(self):
message = InreachParser().parse_position("id300434060496190 inReac True")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], None)
message = InreachParser().parse_position("id300434060496190 inReac True Jim Bob")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], "Jim Bob")
if __name__ == '__main__':
unittest.main()
|
<commit_before>import unittest
from ogn.parser.aprs_comment.inreach_parser import InreachParser
class TestStringMethods(unittest.TestCase):
def test_position_comment(self):
message = InreachParser().parse_position("id300434060496190 inReac True")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], None)
message = InreachParser().parse_position("id300434060496190 inReac True Jim Bob")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], "Jim Bob")
if __name__ == '__main__':
unittest.main()
<commit_msg>Add a blank line in the test case for CI?<commit_after>import unittest
from ogn.parser.aprs_comment.inreach_parser import InreachParser
class TestStringMethods(unittest.TestCase):
def test_position_comment(self):
message = InreachParser().parse_position("id300434060496190 inReac True")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], None)
message = InreachParser().parse_position("id300434060496190 inReac True Jim Bob")
self.assertEqual(message['address'], "300434060496190")
self.assertEqual(message['model'], 'inReac')
self.assertEqual(message['status'], "True")
self.assertEqual(message['pilot_name'], "Jim Bob")
if __name__ == '__main__':
unittest.main()
|
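Because this dump flattens blank lines, the before and after versions of this record render identically here; the added blank line is the entire diff. The likely CI motivation is pycodestyle's E305 ("expected 2 blank lines after class or function definition"), which flake8 reports when top-level code follows a class with fewer than two blank lines. A sketch of the layout the linter accepts:

import unittest


class TestNothing(unittest.TestCase):
    def test_truth(self):
        self.assertTrue(True)


if __name__ == '__main__':  # the two blank lines above satisfy E305
    unittest.main()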
f81ddc6297b1372cdba3a5161b4f30d0a42d2f58
|
src/factor.py
|
src/factor.py
|
from collections import Counter
from functools import (
lru_cache,
reduce,
)
from itertools import combinations
from prime import Prime
@lru_cache(maxsize=None)
def get_prime_factors(n):
""" Returns the counts of each prime factor of n
"""
if n == 1:
return Counter()
divisor = 2
while n % divisor != 0:
divisor = Prime.after(divisor)
return Counter({divisor: 1}) + get_prime_factors(n // divisor)
def get_flat_prime_factors(n):
""" Returns a sorted list of n's prime_factor, where each
prime factor is repeated the number of times it divides n
"""
prime_factors = get_prime_factors(n)
return sorted([
x for list_ in (
[factor] * count for
factor, count in prime_factors.items()
) for x in list_
])
def get_divisors(n):
""" Returns a set of all divisors of n
"""
if n < 1:
return set()
flat_factors = get_flat_prime_factors(n)
divisors = set([1, n])
for i in range(len(flat_factors)):
for comb in combinations(flat_factors, i + 1):
divisors.add(reduce(lambda x, y: x * y, comb))
return divisors
def get_proper_divisors(n):
""" Returns a set of all proper divisors of n
"""
return get_divisors(n) - {n}
|
from collections import Counter
from functools import (
lru_cache,
reduce,
)
from itertools import combinations
from prime import Prime
@lru_cache(maxsize=None)
def get_prime_factors(n):
""" Returns the counts of each prime factor of n
"""
if n < 1:
raise ValueError
if n == 1:
return Counter()
divisor = 2
while n % divisor != 0:
divisor = Prime.after(divisor)
return Counter({divisor: 1}) + get_prime_factors(n // divisor)
def get_flat_prime_factors(n):
""" Returns a sorted list of n's prime_factor, where each
prime factor is repeated the number of times it divides n
"""
prime_factors = get_prime_factors(n)
return sorted([
x for list_ in (
[factor] * count for
factor, count in prime_factors.items()
) for x in list_
])
def get_divisors(n):
""" Returns a set of all divisors of n
"""
if n < 1:
return set()
flat_factors = get_flat_prime_factors(n)
divisors = set([1, n])
for i in range(len(flat_factors)):
for comb in combinations(flat_factors, i + 1):
divisors.add(reduce(lambda x, y: x * y, comb))
return divisors
def get_proper_divisors(n):
""" Returns a set of all proper divisors of n
"""
return get_divisors(n) - {n}
|
Raise ValueError if n < 1
|
Raise ValueError if n < 1
|
Python
|
mit
|
mackorone/euler
|
from collections import Counter
from functools import (
lru_cache,
reduce,
)
from itertools import combinations
from prime import Prime
@lru_cache(maxsize=None)
def get_prime_factors(n):
""" Returns the counts of each prime factor of n
"""
if n == 1:
return Counter()
divisor = 2
while n % divisor != 0:
divisor = Prime.after(divisor)
return Counter({divisor: 1}) + get_prime_factors(n // divisor)
def get_flat_prime_factors(n):
""" Returns a sorted list of n's prime_factor, where each
prime factor is repeated the number of times it divides n
"""
prime_factors = get_prime_factors(n)
return sorted([
x for list_ in (
[factor] * count for
factor, count in prime_factors.items()
) for x in list_
])
def get_divisors(n):
""" Returns a set of all divisors of n
"""
if n < 1:
return set()
flat_factors = get_flat_prime_factors(n)
divisors = set([1, n])
for i in range(len(flat_factors)):
for comb in combinations(flat_factors, i + 1):
divisors.add(reduce(lambda x, y: x * y, comb))
return divisors
def get_proper_divisors(n):
""" Returns a set of all proper divisors of n
"""
return get_divisors(n) - {n}
Raise ValueError if n < 1
|
from collections import Counter
from functools import (
lru_cache,
reduce,
)
from itertools import combinations
from prime import Prime
@lru_cache(maxsize=None)
def get_prime_factors(n):
""" Returns the counts of each prime factor of n
"""
if n < 1:
raise ValueError
if n == 1:
return Counter()
divisor = 2
while n % divisor != 0:
divisor = Prime.after(divisor)
return Counter({divisor: 1}) + get_prime_factors(n // divisor)
def get_flat_prime_factors(n):
""" Returns a sorted list of n's prime_factor, where each
prime factor is repeated the number of times it divides n
"""
prime_factors = get_prime_factors(n)
return sorted([
x for list_ in (
[factor] * count for
factor, count in prime_factors.items()
) for x in list_
])
def get_divisors(n):
""" Returns a set of all divisors of n
"""
if n < 1:
return set()
flat_factors = get_flat_prime_factors(n)
divisors = set([1, n])
for i in range(len(flat_factors)):
for comb in combinations(flat_factors, i + 1):
divisors.add(reduce(lambda x, y: x * y, comb))
return divisors
def get_proper_divisors(n):
""" Returns a set of all proper divisors of n
"""
return get_divisors(n) - {n}
|
<commit_before>from collections import Counter
from functools import (
lru_cache,
reduce,
)
from itertools import combinations
from prime import Prime
@lru_cache(maxsize=None)
def get_prime_factors(n):
""" Returns the counts of each prime factor of n
"""
if n == 1:
return Counter()
divisor = 2
while n % divisor != 0:
divisor = Prime.after(divisor)
return Counter({divisor: 1}) + get_prime_factors(n // divisor)
def get_flat_prime_factors(n):
""" Returns a sorted list of n's prime_factor, where each
prime factor is repeated the number of times it divides n
"""
prime_factors = get_prime_factors(n)
return sorted([
x for list_ in (
[factor] * count for
factor, count in prime_factors.items()
) for x in list_
])
def get_divisors(n):
""" Returns a set of all divisors of n
"""
if n < 1:
return set()
flat_factors = get_flat_prime_factors(n)
divisors = set([1, n])
for i in range(len(flat_factors)):
for comb in combinations(flat_factors, i + 1):
divisors.add(reduce(lambda x, y: x * y, comb))
return divisors
def get_proper_divisors(n):
""" Returns a set of all proper divisors of n
"""
return get_divisors(n) - {n}
<commit_msg>Raise ValueError if n < 1<commit_after>
|
from collections import Counter
from functools import (
lru_cache,
reduce,
)
from itertools import combinations
from prime import Prime
@lru_cache(maxsize=None)
def get_prime_factors(n):
""" Returns the counts of each prime factor of n
"""
if n < 1:
raise ValueError
if n == 1:
return Counter()
divisor = 2
while n % divisor != 0:
divisor = Prime.after(divisor)
return Counter({divisor: 1}) + get_prime_factors(n // divisor)
def get_flat_prime_factors(n):
""" Returns a sorted list of n's prime_factor, where each
prime factor is repeated the number of times it divides n
"""
prime_factors = get_prime_factors(n)
return sorted([
x for list_ in (
[factor] * count for
factor, count in prime_factors.items()
) for x in list_
])
def get_divisors(n):
""" Returns a set of all divisors of n
"""
if n < 1:
return set()
flat_factors = get_flat_prime_factors(n)
divisors = set([1, n])
for i in range(len(flat_factors)):
for comb in combinations(flat_factors, i + 1):
divisors.add(reduce(lambda x, y: x * y, comb))
return divisors
def get_proper_divisors(n):
""" Returns a set of all proper divisors of n
"""
return get_divisors(n) - {n}
|
from collections import Counter
from functools import (
lru_cache,
reduce,
)
from itertools import combinations
from prime import Prime
@lru_cache(maxsize=None)
def get_prime_factors(n):
""" Returns the counts of each prime factor of n
"""
if n == 1:
return Counter()
divisor = 2
while n % divisor != 0:
divisor = Prime.after(divisor)
return Counter({divisor: 1}) + get_prime_factors(n // divisor)
def get_flat_prime_factors(n):
""" Returns a sorted list of n's prime_factor, where each
prime factor is repeated the number of times it divides n
"""
prime_factors = get_prime_factors(n)
return sorted([
x for list_ in (
[factor] * count for
factor, count in prime_factors.items()
) for x in list_
])
def get_divisors(n):
""" Returns a set of all divisors of n
"""
if n < 1:
return set()
flat_factors = get_flat_prime_factors(n)
divisors = set([1, n])
for i in range(len(flat_factors)):
for comb in combinations(flat_factors, i + 1):
divisors.add(reduce(lambda x, y: x * y, comb))
return divisors
def get_proper_divisors(n):
""" Returns a set of all proper divisors of n
"""
return get_divisors(n) - {n}
Raise ValueError if n < 1from collections import Counter
from functools import (
lru_cache,
reduce,
)
from itertools import combinations
from prime import Prime
@lru_cache(maxsize=None)
def get_prime_factors(n):
""" Returns the counts of each prime factor of n
"""
if n < 1:
raise ValueError
if n == 1:
return Counter()
divisor = 2
while n % divisor != 0:
divisor = Prime.after(divisor)
return Counter({divisor: 1}) + get_prime_factors(n // divisor)
def get_flat_prime_factors(n):
""" Returns a sorted list of n's prime_factor, where each
prime factor is repeated the number of times it divides n
"""
prime_factors = get_prime_factors(n)
return sorted([
x for list_ in (
[factor] * count for
factor, count in prime_factors.items()
) for x in list_
])
def get_divisors(n):
""" Returns a set of all divisors of n
"""
if n < 1:
return set()
flat_factors = get_flat_prime_factors(n)
divisors = set([1, n])
for i in range(len(flat_factors)):
for comb in combinations(flat_factors, i + 1):
divisors.add(reduce(lambda x, y: x * y, comb))
return divisors
def get_proper_divisors(n):
""" Returns a set of all proper divisors of n
"""
return get_divisors(n) - {n}
|
<commit_before>from collections import Counter
from functools import (
lru_cache,
reduce,
)
from itertools import combinations
from prime import Prime
@lru_cache(maxsize=None)
def get_prime_factors(n):
""" Returns the counts of each prime factor of n
"""
if n == 1:
return Counter()
divisor = 2
while n % divisor != 0:
divisor = Prime.after(divisor)
return Counter({divisor: 1}) + get_prime_factors(n // divisor)
def get_flat_prime_factors(n):
""" Returns a sorted list of n's prime_factor, where each
prime factor is repeated the number of times it divides n
"""
prime_factors = get_prime_factors(n)
return sorted([
x for list_ in (
[factor] * count for
factor, count in prime_factors.items()
) for x in list_
])
def get_divisors(n):
""" Returns a set of all divisors of n
"""
if n < 1:
return set()
flat_factors = get_flat_prime_factors(n)
divisors = set([1, n])
for i in range(len(flat_factors)):
for comb in combinations(flat_factors, i + 1):
divisors.add(reduce(lambda x, y: x * y, comb))
return divisors
def get_proper_divisors(n):
""" Returns a set of all proper divisors of n
"""
return get_divisors(n) - {n}
<commit_msg>Raise ValueError if n < 1<commit_after>from collections import Counter
from functools import (
lru_cache,
reduce,
)
from itertools import combinations
from prime import Prime
@lru_cache(maxsize=None)
def get_prime_factors(n):
""" Returns the counts of each prime factor of n
"""
if n < 1:
raise ValueError
if n == 1:
return Counter()
divisor = 2
while n % divisor != 0:
divisor = Prime.after(divisor)
return Counter({divisor: 1}) + get_prime_factors(n // divisor)
def get_flat_prime_factors(n):
""" Returns a sorted list of n's prime_factor, where each
prime factor is repeated the number of times it divides n
"""
prime_factors = get_prime_factors(n)
return sorted([
x for list_ in (
[factor] * count for
factor, count in prime_factors.items()
) for x in list_
])
def get_divisors(n):
""" Returns a set of all divisors of n
"""
if n < 1:
return set()
flat_factors = get_flat_prime_factors(n)
divisors = set([1, n])
for i in range(len(flat_factors)):
for comb in combinations(flat_factors, i + 1):
divisors.add(reduce(lambda x, y: x * y, comb))
return divisors
def get_proper_divisors(n):
""" Returns a set of all proper divisors of n
"""
return get_divisors(n) - {n}
|
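A short usage sketch of the module above (illustrative; it assumes factor.py and its prime.Prime dependency are importable). The new guard turns the previous failure mode for n < 1, unbounded recursion ending in RecursionError, into an immediate ValueError:

from factor import get_prime_factors, get_divisors, get_proper_divisors

print(get_prime_factors(12))            # Counter({2: 2, 3: 1})
print(sorted(get_divisors(12)))         # [1, 2, 3, 4, 6, 12]
print(sorted(get_proper_divisors(12)))  # [1, 2, 3, 4, 6]

try:
    get_prime_factors(0)  # used to recurse until RecursionError
except ValueError:
    print("n must be a positive integer")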
caf48c98f0cb176c2cd0302d1667d2272a192c91
|
WebSphere/checkAppStatus.py
|
WebSphere/checkAppStatus.py
|
# Author: Christoph Stoettner
# E-Mail: christoph.stoettner@stoeps.de
# Blog: http://www.stoeps.de
# Check if applications are running
print "Getting application status of all installed applications..."
applications = AdminApp.list().splitlines();
for application in applications:
applName = AdminControl.completeObjectName( 'type=Application,name=' + application + ',*' )
if applName != '':
aStatus = 'running';
else:
aStatus = 'stopped';
print 'Application: ' + application + ' is ' + aStatus
# ToDo: Change Script to write Application name and status to a dictionary, then sort this dict and print
# grouped Applications, first running, after this stopped applications.
# If this works -> change DataSource Script to print grouped status.
# ToDo: Put this in a function, that function can be used in other scripts.
|
# Author: Christoph Stoettner
# E-Mail: christoph.stoettner@stoeps.de
# Blog: http://www.stoeps.de
# Check if applications are running
print "Getting application status of all installed applications..."
applications = AdminApp.list().splitlines();
runningApps = []
stoppedApps = []
for application in applications:
applName = AdminControl.completeObjectName( 'type=Application,name=' + application + ',*' )
if applName != '':
aStatus = 'running';
runningApps.append( application )
else:
aStatus = 'stopped';
stoppedApps.append( application )
print ''
print '\tRUNNING APPLICATIONS: \n'
for app in runningApps:
print '\t\t' + app
print ''
print '\tSTOPPED APPLICATIONS: \n'
for app in stoppedApps:
print '\t\t' + app
print ''
|
Print running and stopped Applications
|
Print running and stopped Applications
|
Python
|
apache-2.0
|
stoeps13/ibmcnxscripting,stoeps13/ibmcnxscripting,stoeps13/ibmcnxscripting
|
# Author: Christoph Stoettner
# E-Mail: christoph.stoettner@stoeps.de
# Blog: http://www.stoeps.de
# Check if applications are running
print "Getting application status of all installed applications..."
applications = AdminApp.list().splitlines();
for application in applications:
applName = AdminControl.completeObjectName( 'type=Application,name=' + application + ',*' )
if applName != '':
aStatus = 'running';
else:
aStatus = 'stopped';
print 'Application: ' + application + ' is ' + aStatus
# ToDo: Change Script to write Application name and status to a dictionary, then sort this dict and print
# grouped Applications, first running, after this stopped applications.
# If this works -> change DataSource Script to print grouped status.
# ToDo: Put this in a function, that function can be used in other scripts.
Print running and stopped Applications
|
# Author: Christoph Stoettner
# E-Mail: christoph.stoettner@stoeps.de
# Blog: http://www.stoeps.de
# Check if applications are running
print "Getting application status of all installed applications..."
applications = AdminApp.list().splitlines();
runningApps = []
stoppedApps = []
for application in applications:
applName = AdminControl.completeObjectName( 'type=Application,name=' + application + ',*' )
if applName != '':
aStatus = 'running';
runningApps.append( application )
else:
aStatus = 'stopped';
stoppedApps.append( application )
print ''
print '\tRUNNING APPLICATIONS: \n'
for app in runningApps:
print '\t\t' + app
print ''
print '\tSTOPPED APPLICATIONS: \n'
for app in stoppedApps:
print '\t\t' + app
print ''
|
<commit_before># Author: Christoph Stoettner
# E-Mail: christoph.stoettner@stoeps.de
# Blog: http://www.stoeps.de
# Check if applications are running
print "Getting application status of all installed applications..."
applications = AdminApp.list().splitlines();
for application in applications:
applName = AdminControl.completeObjectName( 'type=Application,name=' + application + ',*' )
if applName != '':
aStatus = 'running';
else:
aStatus = 'stopped';
print 'Application: ' + application + ' is ' + aStatus
# ToDo: Change Script to write Application name and status to a dictionary, then sort this dict and print
# grouped Applications, first running, after this stopped applications.
# If this works -> change DataSource Script to print grouped status.
# ToDo: Put this in a function, that function can be used in other scripts.
<commit_msg>Print running and stopped Applications<commit_after>
|
# Author: Christoph Stoettner
# E-Mail: christoph.stoettner@stoeps.de
# Blog: http://www.stoeps.de
# Check if applications are running
print "Getting application status of all installed applications..."
applications = AdminApp.list().splitlines();
runningApps = []
stoppedApps = []
for application in applications:
applName = AdminControl.completeObjectName( 'type=Application,name=' + application + ',*' )
if applName != '':
aStatus = 'running';
runningApps.append( application )
else:
aStatus = 'stopped';
stoppedApps.append( application )
print ''
print '\tRUNNING APPLICATIONS: \n'
for app in runningApps:
print '\t\t' + app
print ''
print '\tSTOPPED APPLICATIONS: \n'
for app in stoppedApps:
print '\t\t' + app
print ''
|
# Author: Christoph Stoettner
# E-Mail: christoph.stoettner@stoeps.de
# Blog: http://www.stoeps.de
# Check if applications are running
print "Getting application status of all installed applications..."
applications = AdminApp.list().splitlines();
for application in applications:
applName = AdminControl.completeObjectName( 'type=Application,name=' + application + ',*' )
if applName != '':
aStatus = 'running';
else:
aStatus = 'stopped';
print 'Application: ' + application + ' is ' + aStatus
# ToDo: Change Script to write Application name and status to a dictionary, then sort this dict and print
# grouped Applications, first running, after this stopped applications.
# If this works -> change DataSource Script to print grouped status.
# ToDo: Put this in a function, that function can be used in other scripts.
Print running and stopped Applications# Author: Christoph Stoettner
# E-Mail: christoph.stoettner@stoeps.de
# Blog: http://www.stoeps.de
# Check if applications are running
print "Getting application status of all installed applications..."
applications = AdminApp.list().splitlines();
runningApps = []
stoppedApps = []
for application in applications:
applName = AdminControl.completeObjectName( 'type=Application,name=' + application + ',*' )
if applName != '':
aStatus = 'running';
runningApps.append( application )
else:
aStatus = 'stopped';
stoppedApps.append( application )
print ''
print '\tRUNNING APPLICATIONS: \n'
for app in runningApps:
print '\t\t' + app
print ''
print '\tSTOPPED APPLICATIONS: \n'
for app in stoppedApps:
print '\t\t' + app
print ''
|
<commit_before># Author: Christoph Stoettner
# E-Mail: christoph.stoettner@stoeps.de
# Blog: http://www.stoeps.de
# Check if applications are running
print "Getting application status of all installed applications..."
applications = AdminApp.list().splitlines();
for application in applications:
applName = AdminControl.completeObjectName( 'type=Application,name=' + application + ',*' )
if applName != '':
aStatus = 'running';
else:
aStatus = 'stopped';
print 'Application: ' + application + ' is ' + aStatus
# ToDo: Change Script to write Application name and status to a dictionary, then sort this dict and print
# grouped Applications, first running, after this stopped applications.
# If this works -> change DataSource Script to print grouped status.
# ToDo: Put this in a function, that function can be used in other scripts.
<commit_msg>Print running and stopped Applications<commit_after># Author: Christoph Stoettner
# E-Mail: christoph.stoettner@stoeps.de
# Blog: http://www.stoeps.de
# Check if applications are running
print "Getting application status of all installed applications..."
applications = AdminApp.list().splitlines();
runningApps = []
stoppedApps = []
for application in applications:
applName = AdminControl.completeObjectName( 'type=Application,name=' + application + ',*' )
if applName != '':
aStatus = 'running';
runningApps.append( application )
else:
aStatus = 'stopped';
stoppedApps.append( application )
print ''
print '\tRUNNING APPLICATIONS: \n'
for app in runningApps:
print '\t\t' + app
print ''
print '\tSTOPPED APPLICATIONS: \n'
for app in stoppedApps:
print '\t\t' + app
print ''
|
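The old file's ToDo asks for this logic to become a reusable function; a sketch of that next step is below. It assumes the wsadmin environment (AdminApp and AdminControl are injected by WebSphere) and uses Jython 2.x print statements to match the script above.

# Sketch of the remaining ToDo: same grouping, packaged as a function.
# AdminApp/AdminControl exist only inside wsadmin (Jython 2.x).
def get_application_status():
    running = []
    stopped = []
    for application in AdminApp.list().splitlines():
        mbean = AdminControl.completeObjectName(
            'type=Application,name=' + application + ',*')
        if mbean != '':
            running.append(application)
        else:
            stopped.append(application)
    return running, stopped

running, stopped = get_application_status()
print '\tRUNNING APPLICATIONS:\n\t\t' + '\n\t\t'.join(running)
print '\tSTOPPED APPLICATIONS:\n\t\t' + '\n\t\t'.join(stopped)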
420153447bdd069153cf58c36d6b6cb51259ca14
|
tba_config.py
|
tba_config.py
|
import json
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# For choosing what the main landing page displays
KICKOFF = 1
BUILDSEASON = 2
COMPETITIONSEASON = 3
OFFSEASON = 4
# The CONFIG variables should have exactly the same structure between environments
# Eventually a test environment should be added. -gregmarra 17 Jul 2012
if DEBUG:
CONFIG = {
"env": "dev",
"memcache": False,
"firebase-url": "https://thebluealliance-dev.firebaseio.com/{}.json?print=silent&auth={}"
}
else:
CONFIG = {
"env": "prod",
"memcache": True,
"firebase-url": "https://thebluealliance.firebaseio.com/{}.json?print=silent&auth={}"
}
CONFIG['landing_handler'] = COMPETITIONSEASON
CONFIG["static_resource_version"] = 3
|
import json
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# For choosing what the main landing page displays
KICKOFF = 1
BUILDSEASON = 2
COMPETITIONSEASON = 3
OFFSEASON = 4
# The CONFIG variables should have exactly the same structure between environments
# Eventually a test environment should be added. -gregmarra 17 Jul 2012
if DEBUG:
CONFIG = {
"env": "dev",
"memcache": False,
"firebase-url": "https://thebluealliance-dev.firebaseio.com/{}.json?print=silent&auth={}"
}
else:
CONFIG = {
"env": "prod",
"memcache": True,
"firebase-url": "https://thebluealliance.firebaseio.com/{}.json?print=silent&auth={}"
}
CONFIG['landing_handler'] = COMPETITIONSEASON
CONFIG["static_resource_version"] = 4
|
Increment static resources for Firebase.
|
Increment static resources for Firebase.
|
Python
|
mit
|
1fish2/the-blue-alliance,tsteward/the-blue-alliance,nwalters512/the-blue-alliance,verycumbersome/the-blue-alliance,bvisness/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,1fish2/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,the-blue-alliance/the-blue-alliance,tsteward/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,josephbisch/the-blue-alliance,fangeugene/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,josephbisch/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,tsteward/the-blue-alliance,1fish2/the-blue-alliance,jaredhasenklein/the-blue-alliance,bdaroz/the-blue-alliance,1fish2/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,verycumbersome/the-blue-alliance
|
import json
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# For choosing what the main landing page displays
KICKOFF = 1
BUILDSEASON = 2
COMPETITIONSEASON = 3
OFFSEASON = 4
# The CONFIG variables should have exactly the same structure between environments
# Eventually a test environment should be added. -gregmarra 17 Jul 2012
if DEBUG:
CONFIG = {
"env": "dev",
"memcache": False,
"firebase-url": "https://thebluealliance-dev.firebaseio.com/{}.json?print=silent&auth={}"
}
else:
CONFIG = {
"env": "prod",
"memcache": True,
"firebase-url": "https://thebluealliance.firebaseio.com/{}.json?print=silent&auth={}"
}
CONFIG['landing_handler'] = COMPETITIONSEASON
CONFIG["static_resource_version"] = 3
Increment static resources for Firebase.
|
import json
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# For choosing what the main landing page displays
KICKOFF = 1
BUILDSEASON = 2
COMPETITIONSEASON = 3
OFFSEASON = 4
# The CONFIG variables should have exactly the same structure between environments
# Eventually a test environment should be added. -gregmarra 17 Jul 2012
if DEBUG:
CONFIG = {
"env": "dev",
"memcache": False,
"firebase-url": "https://thebluealliance-dev.firebaseio.com/{}.json?print=silent&auth={}"
}
else:
CONFIG = {
"env": "prod",
"memcache": True,
"firebase-url": "https://thebluealliance.firebaseio.com/{}.json?print=silent&auth={}"
}
CONFIG['landing_handler'] = COMPETITIONSEASON
CONFIG["static_resource_version"] = 4
|
<commit_before>import json
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# For choosing what the main landing page displays
KICKOFF = 1
BUILDSEASON = 2
COMPETITIONSEASON = 3
OFFSEASON = 4
# The CONFIG variables should have exactly the same structure between environments
# Eventually a test environment should be added. -gregmarra 17 Jul 2012
if DEBUG:
CONFIG = {
"env": "dev",
"memcache": False,
"firebase-url": "https://thebluealliance-dev.firebaseio.com/{}.json?print=silent&auth={}"
}
else:
CONFIG = {
"env": "prod",
"memcache": True,
"firebase-url": "https://thebluealliance.firebaseio.com/{}.json?print=silent&auth={}"
}
CONFIG['landing_handler'] = COMPETITIONSEASON
CONFIG["static_resource_version"] = 3
<commit_msg>Increment static resources for Firebase.<commit_after>
|
import json
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# For choosing what the main landing page displays
KICKOFF = 1
BUILDSEASON = 2
COMPETITIONSEASON = 3
OFFSEASON = 4
# The CONFIG variables should have exactly the same structure between environments
# Eventually a test environment should be added. -gregmarra 17 Jul 2012
if DEBUG:
CONFIG = {
"env": "dev",
"memcache": False,
"firebase-url": "https://thebluealliance-dev.firebaseio.com/{}.json?print=silent&auth={}"
}
else:
CONFIG = {
"env": "prod",
"memcache": True,
"firebase-url": "https://thebluealliance.firebaseio.com/{}.json?print=silent&auth={}"
}
CONFIG['landing_handler'] = COMPETITIONSEASON
CONFIG["static_resource_version"] = 4
|
import json
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# For choosing what the main landing page displays
KICKOFF = 1
BUILDSEASON = 2
COMPETITIONSEASON = 3
OFFSEASON = 4
# The CONFIG variables should have exactly the same structure between environments
# Eventually a test environment should be added. -gregmarra 17 Jul 2012
if DEBUG:
CONFIG = {
"env": "dev",
"memcache": False,
"firebase-url": "https://thebluealliance-dev.firebaseio.com/{}.json?print=silent&auth={}"
}
else:
CONFIG = {
"env": "prod",
"memcache": True,
"firebase-url": "https://thebluealliance.firebaseio.com/{}.json?print=silent&auth={}"
}
CONFIG['landing_handler'] = COMPETITIONSEASON
CONFIG["static_resource_version"] = 3
Increment static resources for Firebase.import json
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# For choosing what the main landing page displays
KICKOFF = 1
BUILDSEASON = 2
COMPETITIONSEASON = 3
OFFSEASON = 4
# The CONFIG variables should have exactly the same structure between environments
# Eventually a test environment should be added. -gregmarra 17 Jul 2012
if DEBUG:
CONFIG = {
"env": "dev",
"memcache": False,
"firebase-url": "https://thebluealliance-dev.firebaseio.com/{}.json?print=silent&auth={}"
}
else:
CONFIG = {
"env": "prod",
"memcache": True,
"firebase-url": "https://thebluealliance.firebaseio.com/{}.json?print=silent&auth={}"
}
CONFIG['landing_handler'] = COMPETITIONSEASON
CONFIG["static_resource_version"] = 4
|
<commit_before>import json
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# For choosing what the main landing page displays
KICKOFF = 1
BUILDSEASON = 2
COMPETITIONSEASON = 3
OFFSEASON = 4
# The CONFIG variables should have exactly the same structure between environments
# Eventually a test environment should be added. -gregmarra 17 Jul 2012
if DEBUG:
CONFIG = {
"env": "dev",
"memcache": False,
"firebase-url": "https://thebluealliance-dev.firebaseio.com/{}.json?print=silent&auth={}"
}
else:
CONFIG = {
"env": "prod",
"memcache": True,
"firebase-url": "https://thebluealliance.firebaseio.com/{}.json?print=silent&auth={}"
}
CONFIG['landing_handler'] = COMPETITIONSEASON
CONFIG["static_resource_version"] = 3
<commit_msg>Increment static resources for Firebase.<commit_after>import json
import os
DEBUG = os.environ.get('SERVER_SOFTWARE', '').startswith('Dev')
# For choosing what the main landing page displays
KICKOFF = 1
BUILDSEASON = 2
COMPETITIONSEASON = 3
OFFSEASON = 4
# The CONFIG variables should have exactly the same structure between environments
# Eventually a test environment should be added. -gregmarra 17 Jul 2012
if DEBUG:
CONFIG = {
"env": "dev",
"memcache": False,
"firebase-url": "https://thebluealliance-dev.firebaseio.com/{}.json?print=silent&auth={}"
}
else:
CONFIG = {
"env": "prod",
"memcache": True,
"firebase-url": "https://thebluealliance.firebaseio.com/{}.json?print=silent&auth={}"
}
CONFIG['landing_handler'] = COMPETITIONSEASON
CONFIG["static_resource_version"] = 4
|
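Bumping static_resource_version is a cache-busting release step: templates append it to static asset URLs so browsers discard stale cached copies after a deploy. A sketch of the usual pattern (the helper name is illustrative, not from the repository):

import tba_config

def static_url(path):
    # Appending the version invalidates browser caches on each bump.
    return '%s?v=%d' % (path, tba_config.CONFIG['static_resource_version'])

print(static_url('/javascript/tba.min.js'))  # -> /javascript/tba.min.js?v=4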
2fec68d8cf1bf2726488730c369aad7b8b96b167
|
openacademy/wizard/openacademy_wizard.py
|
openacademy/wizard/openacademy_wizard.py
|
# -*- coding: utf-8 -*-
from openerp import fields, models, api
"""
This module create model of Wizard
"""
class Wizard(models.TransientModel):
""""
This class create model of Wizard
"""
_name = 'openacademy.wizard'
def _default_sessions(self):
return self.env['openacademy.session'].browse(
self._context.get('active_ids'))
session_ids = fields.Many2many('openacademy.session',
string="Sessions", required=True,
default=_default_sessions)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
@api.multi
def subscribe(self):
for session in self.session_ids:
session.attendee_ids |= self.attendee_ids
return {}
|
# -*- coding: utf-8 -*-
"""
This module create model of Wizard
"""
from openerp import fields, models, api
class Wizard(models.TransientModel):
""""
This class create model of Wizard
"""
_name = 'openacademy.wizard'
def _default_sessions(self):
return self.env['openacademy.session'].browse(
self._context.get('active_ids'))
session_ids = fields.Many2many('openacademy.session',
string="Sessions", required=True,
default=_default_sessions)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
@api.multi
def subscribe(self):
for session in self.session_ids:
session.attendee_ids |= self.attendee_ids
return {}
|
Fix error String statement has no effect
|
[FIX] pylint: Fix error String statement has no effect
|
Python
|
apache-2.0
|
JesusZapata/openacademy
|
# -*- coding: utf-8 -*-
from openerp import fields, models, api
"""
This module create model of Wizard
"""
class Wizard(models.TransientModel):
""""
This class create model of Wizard
"""
_name = 'openacademy.wizard'
def _default_sessions(self):
return self.env['openacademy.session'].browse(
self._context.get('active_ids'))
session_ids = fields.Many2many('openacademy.session',
string="Sessions", required=True,
default=_default_sessions)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
@api.multi
def subscribe(self):
for session in self.session_ids:
session.attendee_ids |= self.attendee_ids
return {}
[FIX] pylint: Fix error String statement has no effect
|
# -*- coding: utf-8 -*-
"""
This module create model of Wizard
"""
from openerp import fields, models, api
class Wizard(models.TransientModel):
""""
This class create model of Wizard
"""
_name = 'openacademy.wizard'
def _default_sessions(self):
return self.env['openacademy.session'].browse(
self._context.get('active_ids'))
session_ids = fields.Many2many('openacademy.session',
string="Sessions", required=True,
default=_default_sessions)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
@api.multi
def subscribe(self):
for session in self.session_ids:
session.attendee_ids |= self.attendee_ids
return {}
|
<commit_before># -*- coding: utf-8 -*-
from openerp import fields, models, api
"""
This module create model of Wizard
"""
class Wizard(models.TransientModel):
""""
This class create model of Wizard
"""
_name = 'openacademy.wizard'
def _default_sessions(self):
return self.env['openacademy.session'].browse(
self._context.get('active_ids'))
session_ids = fields.Many2many('openacademy.session',
string="Sessions", required=True,
default=_default_sessions)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
@api.multi
def subscribe(self):
for session in self.session_ids:
session.attendee_ids |= self.attendee_ids
return {}
<commit_msg>[FIX] pylint: Fix error String statement has no effect<commit_after>
|
# -*- coding: utf-8 -*-
"""
This module create model of Wizard
"""
from openerp import fields, models, api
class Wizard(models.TransientModel):
""""
This class create model of Wizard
"""
_name = 'openacademy.wizard'
def _default_sessions(self):
return self.env['openacademy.session'].browse(
self._context.get('active_ids'))
session_ids = fields.Many2many('openacademy.session',
string="Sessions", required=True,
default=_default_sessions)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
@api.multi
def subscribe(self):
for session in self.session_ids:
session.attendee_ids |= self.attendee_ids
return {}
|
# -*- coding: utf-8 -*-
from openerp import fields, models, api
"""
This module create model of Wizard
"""
class Wizard(models.TransientModel):
""""
This class create model of Wizard
"""
_name = 'openacademy.wizard'
def _default_sessions(self):
return self.env['openacademy.session'].browse(
self._context.get('active_ids'))
session_ids = fields.Many2many('openacademy.session',
string="Sessions", required=True,
default=_default_sessions)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
@api.multi
def subscribe(self):
for session in self.session_ids:
session.attendee_ids |= self.attendee_ids
return {}
[FIX] pylint: Fix error String statement has no effect# -*- coding: utf-8 -*-
"""
This module create model of Wizard
"""
from openerp import fields, models, api
class Wizard(models.TransientModel):
""""
This class create model of Wizard
"""
_name = 'openacademy.wizard'
def _default_sessions(self):
return self.env['openacademy.session'].browse(
self._context.get('active_ids'))
session_ids = fields.Many2many('openacademy.session',
string="Sessions", required=True,
default=_default_sessions)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
@api.multi
def subscribe(self):
for session in self.session_ids:
session.attendee_ids |= self.attendee_ids
return {}
|
<commit_before># -*- coding: utf-8 -*-
from openerp import fields, models, api
"""
This module create model of Wizard
"""
class Wizard(models.TransientModel):
""""
This class create model of Wizard
"""
_name = 'openacademy.wizard'
def _default_sessions(self):
return self.env['openacademy.session'].browse(
self._context.get('active_ids'))
session_ids = fields.Many2many('openacademy.session',
string="Sessions", required=True,
default=_default_sessions)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
@api.multi
def subscribe(self):
for session in self.session_ids:
session.attendee_ids |= self.attendee_ids
return {}
<commit_msg>[FIX] pylint: Fix error String statement has no effect<commit_after># -*- coding: utf-8 -*-
"""
This module create model of Wizard
"""
from openerp import fields, models, api
class Wizard(models.TransientModel):
""""
This class create model of Wizard
"""
_name = 'openacademy.wizard'
def _default_sessions(self):
return self.env['openacademy.session'].browse(
self._context.get('active_ids'))
session_ids = fields.Many2many('openacademy.session',
string="Sessions", required=True,
default=_default_sessions)
attendee_ids = fields.Many2many('res.partner', string="Attendees")
@api.multi
def subscribe(self):
for session in self.session_ids:
session.attendee_ids |= self.attendee_ids
return {}
|
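The warning fixed here is pylint's W0105 ("String statement has no effect"): a string literal counts as the module docstring only when it is the first statement of the file, so placed after the imports it is a no-op expression. A small demonstration via the ast module:

import ast

good = '"""Module docstring."""\nimport os\n'
bad = 'import os\n"""Not a docstring."""\n'

# Only the string that is the first statement becomes __doc__:
print(ast.get_docstring(ast.parse(good)))  # -> Module docstring.
print(ast.get_docstring(ast.parse(bad)))   # -> None (pylint flags W0105)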
f00cb6c748e2eda022a7f9f739b60b98a0308eb7
|
github3/search/repository.py
|
github3/search/repository.py
|
# -*- coding: utf-8 -*-
from github3.models import GitHubCore
from github3.repos import Repository
class RepositorySearchResult(GitHubCore):
def __init__(self, data, session=None):
result = data.copy()
#: Score of the result
self.score = result.pop('score')
#: Text matches
self.text_matches = result.pop('text_matches', [])
#: Repository object
self.repository = Repository(result, self)
|
# -*- coding: utf-8 -*-
from github3.models import GitHubCore
from github3.repos import Repository
class RepositorySearchResult(GitHubCore):
def __init__(self, data, session=None):
result = data.copy()
#: Score of the result
self.score = result.pop('score')
#: Text matches
self.text_matches = result.pop('text_matches', [])
#: Repository object
self.repository = Repository(result, self)
def __repr__(self):
return '<RepositorySearchResult [{0}]>'.format(self.repository)
|
Add a __repr__ for RepositorySearchResult
|
Add a __repr__ for RepositorySearchResult
|
Python
|
bsd-3-clause
|
ueg1990/github3.py,icio/github3.py,christophelec/github3.py,jim-minter/github3.py,agamdua/github3.py,krxsky/github3.py,itsmemattchung/github3.py,sigmavirus24/github3.py,h4ck3rm1k3/github3.py,balloob/github3.py,wbrefvem/github3.py,degustaf/github3.py
|
# -*- coding: utf-8 -*-
from github3.models import GitHubCore
from github3.repos import Repository
class RepositorySearchResult(GitHubCore):
def __init__(self, data, session=None):
result = data.copy()
#: Score of the result
self.score = result.pop('score')
#: Text matches
self.text_matches = result.pop('text_matches', [])
#: Repository object
self.repository = Repository(result, self)
Add a __repr__ for RepositorySearchResult
|
# -*- coding: utf-8 -*-
from github3.models import GitHubCore
from github3.repos import Repository
class RepositorySearchResult(GitHubCore):
def __init__(self, data, session=None):
result = data.copy()
#: Score of the result
self.score = result.pop('score')
#: Text matches
self.text_matches = result.pop('text_matches', [])
#: Repository object
self.repository = Repository(result, self)
def __repr__(self):
return '<RepositorySearchResult [{0}]>'.format(self.repository)
|
<commit_before># -*- coding: utf-8 -*-
from github3.models import GitHubCore
from github3.repos import Repository
class RepositorySearchResult(GitHubCore):
def __init__(self, data, session=None):
result = data.copy()
#: Score of the result
self.score = result.pop('score')
#: Text matches
self.text_matches = result.pop('text_matches', [])
#: Repository object
self.repository = Repository(result, self)
<commit_msg>Add a __repr__ for RepositorySearchResult<commit_after>
|
# -*- coding: utf-8 -*-
from github3.models import GitHubCore
from github3.repos import Repository
class RepositorySearchResult(GitHubCore):
def __init__(self, data, session=None):
result = data.copy()
#: Score of the result
self.score = result.pop('score')
#: Text matches
self.text_matches = result.pop('text_matches', [])
#: Repository object
self.repository = Repository(result, self)
def __repr__(self):
return '<RepositorySearchResult [{0}]>'.format(self.repository)
|
# -*- coding: utf-8 -*-
from github3.models import GitHubCore
from github3.repos import Repository
class RepositorySearchResult(GitHubCore):
def __init__(self, data, session=None):
result = data.copy()
#: Score of the result
self.score = result.pop('score')
#: Text matches
self.text_matches = result.pop('text_matches', [])
#: Repository object
self.repository = Repository(result, self)
Add a __repr__ for RepositorySearchResult# -*- coding: utf-8 -*-
from github3.models import GitHubCore
from github3.repos import Repository
class RepositorySearchResult(GitHubCore):
def __init__(self, data, session=None):
result = data.copy()
#: Score of the result
self.score = result.pop('score')
#: Text matches
self.text_matches = result.pop('text_matches', [])
#: Repository object
self.repository = Repository(result, self)
def __repr__(self):
return '<RepositorySearchResult [{0}]>'.format(self.repository)
|
<commit_before># -*- coding: utf-8 -*-
from github3.models import GitHubCore
from github3.repos import Repository
class RepositorySearchResult(GitHubCore):
def __init__(self, data, session=None):
result = data.copy()
#: Score of the result
self.score = result.pop('score')
#: Text matches
self.text_matches = result.pop('text_matches', [])
#: Repository object
self.repository = Repository(result, self)
<commit_msg>Add a __repr__ for RepositorySearchResult<commit_after># -*- coding: utf-8 -*-
from github3.models import GitHubCore
from github3.repos import Repository
class RepositorySearchResult(GitHubCore):
def __init__(self, data, session=None):
result = data.copy()
#: Score of the result
self.score = result.pop('score')
#: Text matches
self.text_matches = result.pop('text_matches', [])
#: Repository object
self.repository = Repository(result, self)
def __repr__(self):
return '<RepositorySearchResult [{0}]>'.format(self.repository)
|
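A quick note on the pattern this record adds: a __repr__ that embeds the wrapped object makes search results readable in logs and interactive sessions instead of showing a bare memory address. A toy stand-in (hypothetical class, not github3's API):

class SearchHit(object):
    """Hypothetical result wrapper following the same repr convention."""
    def __init__(self, repository):
        self.repository = repository

    def __repr__(self):
        return '<SearchHit [{0}]>'.format(self.repository)

print(SearchHit('octocat/Hello-World'))   # -> <SearchHit [octocat/Hello-World]>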
e66bd19fc4baae27f40b1b63bdc0a3280d8d25e9
|
src/heap.py
|
src/heap.py
|
# -*- coding: utf-8 -*-
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=[]):
"""Creates a new heap.
Args:
initial: (Optional) A contiguous array containing the data with which to
initialize the new heap.
"""
self.__heap = []
|
# -*- coding: utf-8 -*-
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=None):
"""Creates a new heap.
Args:
initial: (Optional) A contiguous array containing the data with which to
initialize the new heap.
"""
if isinstance(initial, list) or isinstance(initial, tuple):
self.__heap = initial
|
Fix massive bug in initialization
|
Fix massive bug in initialization
|
Python
|
mit
|
DasAllFolks/PyAlgo
|
# -*- coding: utf-8 -*-
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=[]):
"""Creates a new heap.
Args:
initial: (Optional) A contiguous array containing the data with which to
initialize the new heap.
"""
self.__heap = []
Fix massive bug in initialization
|
# -*- coding: utf-8 -*-
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=None):
"""Creates a new heap.
Args:
initial: (Optional) A contiguous array containing the data with which to
initialize the new heap.
"""
if isinstance(initial, list) or isinstance(initial, tuple):
self.__heap = initial
|
<commit_before># -*- coding: utf-8 -*-
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=[]):
"""Creates a new heap.
Args:
initial: (Optional) A contiguous array containing the data with which to
initialize the new heap.
"""
self.__heap = []
<commit_msg>Fix massive bug in initialization<commit_after>
|
# -*- coding: utf-8 -*-
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=None):
"""Creates a new heap.
Args:
initial: (Optional) A contiguous array containing the data with which to
initialize the new heap.
"""
if isinstance(initial, list) or isinstance(initial, tuple):
self.__heap = initial
|
# -*- coding: utf-8 -*-
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=[]):
"""Creates a new heap.
Args:
initial: (Optional) A contiguous array containing the data with which to
initialize the new heap.
"""
self.__heap = []
Fix massive bug in initialization# -*- coding: utf-8 -*-
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=None):
"""Creates a new heap.
Args:
initial: (Optional) A contiguous array containing the data with which to
initialize the new heap.
"""
if isinstance(initial, list) or isinstance(initial, tuple):
self.__heap = initial
|
<commit_before># -*- coding: utf-8 -*-
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=[]):
"""Creates a new heap.
Args:
initial: (Optional) A contiguous array containing the data with which to
initialize the new heap.
"""
self.__heap = []
<commit_msg>Fix massive bug in initialization<commit_after># -*- coding: utf-8 -*-
class Heap(object):
"""Implements a heap data structure in Python.
The underlying data structure used to hold the data is an array.
"""
__heap = []
def __init__(self, initial=None):
"""Creates a new heap.
Args:
initial: (Optional) A contiguous array containing the data with which to
initialize the new heap.
"""
if isinstance(initial, list) or isinstance(initial, tuple):
self.__heap = initial
|
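The bug in this record is the classic mutable-default-argument trap: initial=[] is evaluated once at function definition, so every call shares one list. A standalone sketch of the failure and the None-sentinel fix (which also copies the caller's list, something the record's version still does not do):

def push_bad(value, items=[]):      # one shared list for every call
    items.append(value)
    return items

def push_good(value, items=None):   # sentinel pattern, plus a defensive copy
    items = [] if items is None else list(items)
    items.append(value)
    return items

print(push_bad(1), push_bad(2))     # -> [1, 2] [1, 2]  (both calls returned the same list)
print(push_good(1), push_good(2))   # -> [1] [2]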
54d5db67523deea7e34f784df667ffb705f3bb16
|
TWLight/resources/admin.py
|
TWLight/resources/admin.py
|
from django.contrib import admin
from .models import Partner, Stream, Contact, Language
class LanguageAdmin(admin.ModelAdmin):
search_fields = ('language',)
list_display = ('language',)
admin.site.register(Language, LanguageAdmin)
class PartnerAdmin(admin.ModelAdmin):
search_fields = ('company_name',)
list_display = ('company_name', 'description', 'id', 'get_languages')
admin.site.register(Partner, PartnerAdmin)
class StreamAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'name',)
list_display = ('id', 'partner', 'name', 'description', 'get_languages')
admin.site.register(Stream, StreamAdmin)
class ContactAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'full_name', 'short_name',)
list_display = ('id', 'title', 'full_name', 'partner', 'email',)
admin.site.register(Contact, ContactAdmin)
|
from django import forms
from django.contrib import admin
from TWLight.users.groups import get_coordinators
from .models import Partner, Stream, Contact, Language
class LanguageAdmin(admin.ModelAdmin):
search_fields = ('language',)
list_display = ('language',)
admin.site.register(Language, LanguageAdmin)
class PartnerAdmin(admin.ModelAdmin):
class CustomModelChoiceField(forms.ModelChoiceField):
"""
This lets us relabel the users in the dropdown with their recognizable
wikipedia usernames, rather than their cryptic local IDs. It should be
used only for the coordinator field.
"""
def label_from_instance(self, obj):
return '{editor.wp_username} ({editor.home_wiki})'.format(
editor=obj.editor)
def formfield_for_foreignkey(self, db_field, request, **kwargs):
"""
The coordinator dropdown should limit choices to actual coordinators,
for admin ease of use.
"""
if db_field.name == "coordinator":
return self.CustomModelChoiceField(
queryset=get_coordinators().user_set.all())
return super(PartnerAdmin, self).formfield_for_foreignkey(
db_field, request, **kwargs)
search_fields = ('company_name',)
list_display = ('company_name', 'description', 'id', 'get_languages')
admin.site.register(Partner, PartnerAdmin)
class StreamAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'name',)
list_display = ('id', 'partner', 'name', 'description', 'get_languages')
admin.site.register(Stream, StreamAdmin)
class ContactAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'full_name', 'short_name',)
list_display = ('id', 'title', 'full_name', 'partner', 'email',)
admin.site.register(Contact, ContactAdmin)
|
Improve usability of coordinator designation interaction
|
Improve usability of coordinator designation interaction
I'm limiting the dropdown to actual coordinators so that admins don't
have to scroll through a giant list. I don't want to enforce/validate
this on the database level, though, as people may proceed through the
coordinator designation process in different orders, and also the exact
set of coordinators may change over time (and therefore the database
might be left with a previously-valid coordinator in an instance
record).
|
Python
|
mit
|
WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight,WikipediaLibrary/TWLight
|
from django.contrib import admin
from .models import Partner, Stream, Contact, Language
class LanguageAdmin(admin.ModelAdmin):
search_fields = ('language',)
list_display = ('language',)
admin.site.register(Language, LanguageAdmin)
class PartnerAdmin(admin.ModelAdmin):
search_fields = ('company_name',)
list_display = ('company_name', 'description', 'id', 'get_languages')
admin.site.register(Partner, PartnerAdmin)
class StreamAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'name',)
list_display = ('id', 'partner', 'name', 'description', 'get_languages')
admin.site.register(Stream, StreamAdmin)
class ContactAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'full_name', 'short_name',)
list_display = ('id', 'title', 'full_name', 'partner', 'email',)
admin.site.register(Contact, ContactAdmin)
Improve usability of coordinator designation interaction
I'm limiting the dropdown to actual coordinators so that admins don't
have to scroll through a giant list. I don't want to enforce/validate
this on the database level, though, as people may proceed through the
coordinator designation process in different orders, and also the exact
set of coordinators may change over time (and therefore the database
might be left with a previously-valid coordinator in an instance
record).
|
from django import forms
from django.contrib import admin
from TWLight.users.groups import get_coordinators
from .models import Partner, Stream, Contact, Language
class LanguageAdmin(admin.ModelAdmin):
search_fields = ('language',)
list_display = ('language',)
admin.site.register(Language, LanguageAdmin)
class PartnerAdmin(admin.ModelAdmin):
class CustomModelChoiceField(forms.ModelChoiceField):
"""
This lets us relabel the users in the dropdown with their recognizable
wikipedia usernames, rather than their cryptic local IDs. It should be
used only for the coordinator field.
"""
def label_from_instance(self, obj):
return '{editor.wp_username} ({editor.home_wiki})'.format(
editor=obj.editor)
def formfield_for_foreignkey(self, db_field, request, **kwargs):
"""
The coordinator dropdown should limit choices to actual coordinators,
for admin ease of use.
"""
if db_field.name == "coordinator":
return self.CustomModelChoiceField(
queryset=get_coordinators().user_set.all())
return super(PartnerAdmin, self).formfield_for_foreignkey(
db_field, request, **kwargs)
search_fields = ('company_name',)
list_display = ('company_name', 'description', 'id', 'get_languages')
admin.site.register(Partner, PartnerAdmin)
class StreamAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'name',)
list_display = ('id', 'partner', 'name', 'description', 'get_languages')
admin.site.register(Stream, StreamAdmin)
class ContactAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'full_name', 'short_name',)
list_display = ('id', 'title', 'full_name', 'partner', 'email',)
admin.site.register(Contact, ContactAdmin)
|
<commit_before>from django.contrib import admin
from .models import Partner, Stream, Contact, Language
class LanguageAdmin(admin.ModelAdmin):
search_fields = ('language',)
list_display = ('language',)
admin.site.register(Language, LanguageAdmin)
class PartnerAdmin(admin.ModelAdmin):
search_fields = ('company_name',)
list_display = ('company_name', 'description', 'id', 'get_languages')
admin.site.register(Partner, PartnerAdmin)
class StreamAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'name',)
list_display = ('id', 'partner', 'name', 'description', 'get_languages')
admin.site.register(Stream, StreamAdmin)
class ContactAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'full_name', 'short_name',)
list_display = ('id', 'title', 'full_name', 'partner', 'email',)
admin.site.register(Contact, ContactAdmin)
<commit_msg>Improve usability of coordinator designation interaction
I'm limiting the dropdown to actual coordinators so that admins don't
have to scroll through a giant list. I don't want to enforce/validate
this on the database level, though, as people may proceed through the
coordinator designation process in different orders, and also the exact
set of coordinators may change over time (and therefore the database
might be left with a previously-valid coordinator in an instance
record).<commit_after>
|
from django import forms
from django.contrib import admin
from TWLight.users.groups import get_coordinators
from .models import Partner, Stream, Contact, Language
class LanguageAdmin(admin.ModelAdmin):
search_fields = ('language',)
list_display = ('language',)
admin.site.register(Language, LanguageAdmin)
class PartnerAdmin(admin.ModelAdmin):
class CustomModelChoiceField(forms.ModelChoiceField):
"""
This lets us relabel the users in the dropdown with their recognizable
wikipedia usernames, rather than their cryptic local IDs. It should be
used only for the coordinator field.
"""
def label_from_instance(self, obj):
return '{editor.wp_username} ({editor.home_wiki})'.format(
editor=obj.editor)
def formfield_for_foreignkey(self, db_field, request, **kwargs):
"""
The coordinator dropdown should limit choices to actual coordinators,
for admin ease of use.
"""
if db_field.name == "coordinator":
return self.CustomModelChoiceField(
queryset=get_coordinators().user_set.all())
return super(PartnerAdmin, self).formfield_for_foreignkey(
db_field, request, **kwargs)
search_fields = ('company_name',)
list_display = ('company_name', 'description', 'id', 'get_languages')
admin.site.register(Partner, PartnerAdmin)
class StreamAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'name',)
list_display = ('id', 'partner', 'name', 'description', 'get_languages')
admin.site.register(Stream, StreamAdmin)
class ContactAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'full_name', 'short_name',)
list_display = ('id', 'title', 'full_name', 'partner', 'email',)
admin.site.register(Contact, ContactAdmin)
|
from django.contrib import admin
from .models import Partner, Stream, Contact, Language
class LanguageAdmin(admin.ModelAdmin):
search_fields = ('language',)
list_display = ('language',)
admin.site.register(Language, LanguageAdmin)
class PartnerAdmin(admin.ModelAdmin):
search_fields = ('company_name',)
list_display = ('company_name', 'description', 'id', 'get_languages')
admin.site.register(Partner, PartnerAdmin)
class StreamAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'name',)
list_display = ('id', 'partner', 'name', 'description', 'get_languages')
admin.site.register(Stream, StreamAdmin)
class ContactAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'full_name', 'short_name',)
list_display = ('id', 'title', 'full_name', 'partner', 'email',)
admin.site.register(Contact, ContactAdmin)
Improve usability of coordinator designation interaction
I'm limiting the dropdown to actual coordinators so that admins don't
have to scroll through a giant list. I don't want to enforce/validate
this on the database level, though, as people may proceed through the
coordinator designation process in different orders, and also the exact
set of coordinators may change over time (and therefore the database
might be left with a previously-valid coordinator in an instance
record).from django import forms
from django.contrib import admin
from TWLight.users.groups import get_coordinators
from .models import Partner, Stream, Contact, Language
class LanguageAdmin(admin.ModelAdmin):
search_fields = ('language',)
list_display = ('language',)
admin.site.register(Language, LanguageAdmin)
class PartnerAdmin(admin.ModelAdmin):
class CustomModelChoiceField(forms.ModelChoiceField):
"""
This lets us relabel the users in the dropdown with their recognizable
wikipedia usernames, rather than their cryptic local IDs. It should be
used only for the coordinator field.
"""
def label_from_instance(self, obj):
return '{editor.wp_username} ({editor.home_wiki})'.format(
editor=obj.editor)
def formfield_for_foreignkey(self, db_field, request, **kwargs):
"""
The coordinator dropdown should limit choices to actual coordinators,
for admin ease of use.
"""
if db_field.name == "coordinator":
return self.CustomModelChoiceField(
queryset=get_coordinators().user_set.all())
return super(PartnerAdmin, self).formfield_for_foreignkey(
db_field, request, **kwargs)
search_fields = ('company_name',)
list_display = ('company_name', 'description', 'id', 'get_languages')
admin.site.register(Partner, PartnerAdmin)
class StreamAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'name',)
list_display = ('id', 'partner', 'name', 'description', 'get_languages')
admin.site.register(Stream, StreamAdmin)
class ContactAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'full_name', 'short_name',)
list_display = ('id', 'title', 'full_name', 'partner', 'email',)
admin.site.register(Contact, ContactAdmin)
|
<commit_before>from django.contrib import admin
from .models import Partner, Stream, Contact, Language
class LanguageAdmin(admin.ModelAdmin):
search_fields = ('language',)
list_display = ('language',)
admin.site.register(Language, LanguageAdmin)
class PartnerAdmin(admin.ModelAdmin):
search_fields = ('company_name',)
list_display = ('company_name', 'description', 'id', 'get_languages')
admin.site.register(Partner, PartnerAdmin)
class StreamAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'name',)
list_display = ('id', 'partner', 'name', 'description', 'get_languages')
admin.site.register(Stream, StreamAdmin)
class ContactAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'full_name', 'short_name',)
list_display = ('id', 'title', 'full_name', 'partner', 'email',)
admin.site.register(Contact, ContactAdmin)
<commit_msg>Improve usability of coordinator designation interaction
I'm limiting the dropdown to actual coordinators so that admins don't
have to scroll through a giant list. I don't want to enforce/validate
this on the database level, though, as people may proceed through the
coordinator designation process in different orders, and also the exact
set of coordinators may change over time (and therefore the database
might be left with a previously-valid coordinator in an instance
record).<commit_after>from django import forms
from django.contrib import admin
from TWLight.users.groups import get_coordinators
from .models import Partner, Stream, Contact, Language
class LanguageAdmin(admin.ModelAdmin):
search_fields = ('language',)
list_display = ('language',)
admin.site.register(Language, LanguageAdmin)
class PartnerAdmin(admin.ModelAdmin):
class CustomModelChoiceField(forms.ModelChoiceField):
"""
This lets us relabel the users in the dropdown with their recognizable
wikipedia usernames, rather than their cryptic local IDs. It should be
used only for the coordinator field.
"""
def label_from_instance(self, obj):
return '{editor.wp_username} ({editor.home_wiki})'.format(
editor=obj.editor)
def formfield_for_foreignkey(self, db_field, request, **kwargs):
"""
The coordinator dropdown should limit choices to actual coordinators,
for admin ease of use.
"""
if db_field.name == "coordinator":
return self.CustomModelChoiceField(
queryset=get_coordinators().user_set.all())
return super(PartnerAdmin, self).formfield_for_foreignkey(
db_field, request, **kwargs)
search_fields = ('company_name',)
list_display = ('company_name', 'description', 'id', 'get_languages')
admin.site.register(Partner, PartnerAdmin)
class StreamAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'name',)
list_display = ('id', 'partner', 'name', 'description', 'get_languages')
admin.site.register(Stream, StreamAdmin)
class ContactAdmin(admin.ModelAdmin):
search_fields = ('partner__company_name', 'full_name', 'short_name',)
list_display = ('id', 'title', 'full_name', 'partner', 'email',)
admin.site.register(Contact, ContactAdmin)
|
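The two hooks used above are generic ModelAdmin machinery: formfield_for_foreignkey swaps the form field for one foreign key, and label_from_instance controls the dropdown text. A minimal sketch with hypothetical models (a Ticket whose assignee is a ForeignKey to the auth User; none of these names come from the record):

from django import forms
from django.contrib import admin
from django.contrib.auth.models import User
from myapp.models import Ticket   # hypothetical model with an 'assignee' FK to User

class AssigneeChoiceField(forms.ModelChoiceField):
    def label_from_instance(self, obj):
        return obj.get_username()             # readable label instead of str(obj)

class TicketAdmin(admin.ModelAdmin):
    def formfield_for_foreignkey(self, db_field, request, **kwargs):
        if db_field.name == 'assignee':       # restrict the dropdown to staff only
            return AssigneeChoiceField(queryset=User.objects.filter(is_staff=True))
        return super(TicketAdmin, self).formfield_for_foreignkey(
            db_field, request, **kwargs)

admin.site.register(Ticket, TicketAdmin)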
f1cb71f6647a843f519606da4a8f652fd3f8a172
|
yithlibraryserver/config.py
|
yithlibraryserver/config.py
|
import os
def read_setting_from_env(settings, key, default=None):
env_variable = key.upper()
if env_variable in os.environ:
return os.environ[env_variable]
else:
return settings.get(key, default)
|
import logging
import os
log = logging.getLogger(__name__)
def read_setting_from_env(settings, key, default=None):
env_variable = key.upper()
if env_variable in os.environ:
log.debug('Setting %s found in the environment: %s' %
(key, os.environ[env_variable]))
return os.environ[env_variable]
else:
log.debug('Looking for setting %s in the selected .ini file: %s' %
(key, settings.get(key, default)))
return settings.get(key, default)
|
Add some logging calls to the setting reading
|
Add some logging calls to the setting reading
|
Python
|
agpl-3.0
|
lorenzogil/yith-library-server,Yaco-Sistemas/yith-library-server,lorenzogil/yith-library-server,Yaco-Sistemas/yith-library-server,Yaco-Sistemas/yith-library-server,lorenzogil/yith-library-server
|
import os
def read_setting_from_env(settings, key, default=None):
env_variable = key.upper()
if env_variable in os.environ:
return os.environ[env_variable]
else:
return settings.get(key, default)
Add some logging calls to the setting reading
|
import logging
import os
log = logging.getLogger(__name__)
def read_setting_from_env(settings, key, default=None):
env_variable = key.upper()
if env_variable in os.environ:
log.debug('Setting %s found in the environment: %s' %
(key, os.environ[env_variable]))
return os.environ[env_variable]
else:
log.debug('Looking for setting %s in the selected .ini file: %s' %
(key, settings.get(key, default)))
return settings.get(key, default)
|
<commit_before>import os
def read_setting_from_env(settings, key, default=None):
env_variable = key.upper()
if env_variable in os.environ:
return os.environ[env_variable]
else:
return settings.get(key, default)
<commit_msg>Add some logging calls to the setting reading<commit_after>
|
import logging
import os
log = logging.getLogger(__name__)
def read_setting_from_env(settings, key, default=None):
env_variable = key.upper()
if env_variable in os.environ:
log.debug('Setting %s found in the environment: %s' %
(key, os.environ[env_variable]))
return os.environ[env_variable]
else:
log.debug('Looking for setting %s in the selected .ini file: %s' %
(key, settings.get(key, default)))
return settings.get(key, default)
|
import os
def read_setting_from_env(settings, key, default=None):
env_variable = key.upper()
if env_variable in os.environ:
return os.environ[env_variable]
else:
return settings.get(key, default)
Add some logging calls to the setting readingimport logging
import os
log = logging.getLogger(__name__)
def read_setting_from_env(settings, key, default=None):
env_variable = key.upper()
if env_variable in os.environ:
log.debug('Setting %s found in the environment: %s' %
(key, os.environ[env_variable]))
return os.environ[env_variable]
else:
log.debug('Looking for setting %s in the selected .ini file: %s' %
(key, settings.get(key, default)))
return settings.get(key, default)
|
<commit_before>import os
def read_setting_from_env(settings, key, default=None):
env_variable = key.upper()
if env_variable in os.environ:
return os.environ[env_variable]
else:
return settings.get(key, default)
<commit_msg>Add some logging calls to the setting reading<commit_after>import logging
import os
log = logging.getLogger(__name__)
def read_setting_from_env(settings, key, default=None):
env_variable = key.upper()
if env_variable in os.environ:
log.debug('Setting %s found in the environment: %s' %
(key, os.environ[env_variable]))
return os.environ[env_variable]
else:
log.debug('Looking for setting %s in the selected .ini file: %s' %
(key, settings.get(key, default)))
return settings.get(key, default)
|
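One refinement worth knowing over the %-interpolated messages in this record: pass the arguments to the logging call instead of formatting eagerly, so the string is only built when the DEBUG level is actually enabled. A self-contained sketch (the function name is made up):

import logging
import os

log = logging.getLogger(__name__)

def read_int_from_env(name, default=0):
    raw = os.environ.get(name)
    if raw is None:
        log.debug('%s not set; falling back to %r', name, default)  # lazy args
        return default
    log.debug('%s found in the environment: %s', name, raw)
    return int(raw)

logging.basicConfig(level=logging.DEBUG)
print(read_int_from_env('WORKER_COUNT', 4))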
35b965a645955bbb757f4e6854edc7744a42e3bc
|
tests/test_settings.py
|
tests/test_settings.py
|
SECRET_KEY = 'dog'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'asgiref.inmemory.ChannelLayer',
'ROUTING': [],
},
}
MIDDLEWARE_CLASSES = []
INSTALLED_APPS = ('tests', )
|
SECRET_KEY = 'dog'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'asgiref.inmemory.ChannelLayer',
'ROUTING': [],
},
}
MIDDLEWARE_CLASSES = []
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'channels',
'tests'
)
|
Add contrib.auth to test settings
|
Add contrib.auth to test settings
|
Python
|
mit
|
linuxlewis/channels-api,linuxlewis/channels-api
|
SECRET_KEY = 'dog'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'asgiref.inmemory.ChannelLayer',
'ROUTING': [],
},
}
MIDDLEWARE_CLASSES = []
INSTALLED_APPS = ('tests', )
Add contrib.auth to test settings
|
SECRET_KEY = 'dog'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'asgiref.inmemory.ChannelLayer',
'ROUTING': [],
},
}
MIDDLEWARE_CLASSES = []
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'channels',
'tests'
)
|
<commit_before>SECRET_KEY = 'dog'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'asgiref.inmemory.ChannelLayer',
'ROUTING': [],
},
}
MIDDLEWARE_CLASSES = []
INSTALLED_APPS = ('tests', )
<commit_msg>Add contrib.auth to test settings<commit_after>
|
SECRET_KEY = 'dog'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'asgiref.inmemory.ChannelLayer',
'ROUTING': [],
},
}
MIDDLEWARE_CLASSES = []
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'channels',
'tests'
)
|
SECRET_KEY = 'dog'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'asgiref.inmemory.ChannelLayer',
'ROUTING': [],
},
}
MIDDLEWARE_CLASSES = []
INSTALLED_APPS = ('tests', )
Add contrib.auth to test settingsSECRET_KEY = 'dog'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'asgiref.inmemory.ChannelLayer',
'ROUTING': [],
},
}
MIDDLEWARE_CLASSES = []
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'channels',
'tests'
)
|
<commit_before>SECRET_KEY = 'dog'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'asgiref.inmemory.ChannelLayer',
'ROUTING': [],
},
}
MIDDLEWARE_CLASSES = []
INSTALLED_APPS = ('tests', )
<commit_msg>Add contrib.auth to test settings<commit_after>SECRET_KEY = 'dog'
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
}
}
CHANNEL_LAYERS = {
'default': {
'BACKEND': 'asgiref.inmemory.ChannelLayer',
'ROUTING': [],
},
}
MIDDLEWARE_CLASSES = []
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'channels',
'tests'
)
|
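Why the extra apps matter here: django.contrib.auth ships models (User, Permission) whose tables reference django.contrib.contenttypes, so the two must be installed together before tests can create users or sessions. A minimal illustrative settings module (all values are placeholders):

# test_settings_sketch.py -- placeholder values only
SECRET_KEY = 'not-a-real-key'
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': ':memory:',
    }
}
INSTALLED_APPS = (
    'django.contrib.contenttypes',   # Permission has a FK to ContentType
    'django.contrib.auth',           # provides User for authenticated tests
)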
0471c689bbe4e5b1116c25a6ccea58588c09d4d7
|
jasmin_notifications/urls.py
|
jasmin_notifications/urls.py
|
"""
URL configuration for the JASMIN notifications app.
"""
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.conf.urls import url, include
from . import views
app_name = 'jasmin_notifications'
urlpatterns = [
url(r'^(?P<uuid>[a-zA-Z0-9-]+)/$', views.follow, name = 'follow'),
]
|
"""
URL configuration for the JASMIN notifications app.
"""
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.conf.urls import url, include
from . import views
app_name = 'jasmin_notifications'
urlpatterns = [
url(
r'^(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/$',
views.follow,
name = 'follow'
),
]
|
Update regex to match only UUIDs
|
Update regex to match only UUIDs
|
Python
|
mit
|
cedadev/jasmin-notifications,cedadev/jasmin-notifications
|
"""
URL configuration for the JASMIN notifications app.
"""
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.conf.urls import url, include
from . import views
app_name = 'jasmin_notifications'
urlpatterns = [
url(r'^(?P<uuid>[a-zA-Z0-9-]+)/$', views.follow, name = 'follow'),
]
Update regex to match only UUIDs
|
"""
URL configuration for the JASMIN notifications app.
"""
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.conf.urls import url, include
from . import views
app_name = 'jasmin_notifications'
urlpatterns = [
url(
r'^(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/$',
views.follow,
name = 'follow'
),
]
|
<commit_before>"""
URL configuration for the JASMIN notifications app.
"""
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.conf.urls import url, include
from . import views
app_name = 'jasmin_notifications'
urlpatterns = [
url(r'^(?P<uuid>[a-zA-Z0-9-]+)/$', views.follow, name = 'follow'),
]
<commit_msg>Update regex to match only UUIDs<commit_after>
|
"""
URL configuration for the JASMIN notifications app.
"""
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.conf.urls import url, include
from . import views
app_name = 'jasmin_notifications'
urlpatterns = [
url(
r'^(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/$',
views.follow,
name = 'follow'
),
]
|
"""
URL configuration for the JASMIN notifications app.
"""
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.conf.urls import url, include
from . import views
app_name = 'jasmin_notifications'
urlpatterns = [
url(r'^(?P<uuid>[a-zA-Z0-9-]+)/$', views.follow, name = 'follow'),
]
Update regex to match only UUIDs"""
URL configuration for the JASMIN notifications app.
"""
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.conf.urls import url, include
from . import views
app_name = 'jasmin_notifications'
urlpatterns = [
url(
r'^(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/$',
views.follow,
name = 'follow'
),
]
|
<commit_before>"""
URL configuration for the JASMIN notifications app.
"""
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.conf.urls import url, include
from . import views
app_name = 'jasmin_notifications'
urlpatterns = [
url(r'^(?P<uuid>[a-zA-Z0-9-]+)/$', views.follow, name = 'follow'),
]
<commit_msg>Update regex to match only UUIDs<commit_after>"""
URL configuration for the JASMIN notifications app.
"""
__author__ = "Matt Pryor"
__copyright__ = "Copyright 2015 UK Science and Technology Facilities Council"
from django.conf.urls import url, include
from . import views
app_name = 'jasmin_notifications'
urlpatterns = [
url(
r'^(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})/$',
views.follow,
name = 'follow'
),
]
|
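The tightened pattern matches the canonical 8-4-4-4-12 lowercase-hex UUID form and nothing else, where the old [a-zA-Z0-9-]+ accepted any dash-and-alphanumeric string. A standalone check:

import re

UUID_RE = re.compile(
    r'^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$')

print(bool(UUID_RE.match('123e4567-e89b-12d3-a456-426614174000')))  # True
print(bool(UUID_RE.match('not-a-uuid')))  # False here, though the old loose
                                          # pattern would have routed it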
2e3119b5f45a65f585e34b1239764d73b41c65fd
|
misp_modules/modules/expansion/__init__.py
|
misp_modules/modules/expansion/__init__.py
|
from . import _vmray
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl', 'countrycode', 'cve', 'dns',
'eupi', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal', 'whois', 'shodan', 'reversedns', 'wiki']
|
from . import _vmray
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'domaintools', 'eupi', 'ipasn', 'passivetotal', 'sourcecache',
'virustotal', 'whois', 'shodan', 'reversedns', 'wiki']
|
Add domaintools to the import list
|
Add domaintools to the import list
|
Python
|
agpl-3.0
|
Rafiot/misp-modules,MISP/misp-modules,Rafiot/misp-modules,amuehlem/misp-modules,Rafiot/misp-modules,amuehlem/misp-modules,VirusTotal/misp-modules,VirusTotal/misp-modules,MISP/misp-modules,amuehlem/misp-modules,VirusTotal/misp-modules,MISP/misp-modules
|
from . import _vmray
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl', 'countrycode', 'cve', 'dns',
'eupi', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal', 'whois', 'shodan', 'reversedns', 'wiki']
Add domaintools to the import list
|
from . import _vmray
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'domaintools', 'eupi', 'ipasn', 'passivetotal', 'sourcecache',
'virustotal', 'whois', 'shodan', 'reversedns', 'wiki']
|
<commit_before>from . import _vmray
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl', 'countrycode', 'cve', 'dns',
'eupi', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal', 'whois', 'shodan', 'reversedns', 'wiki']
<commit_msg>Add domaintools to the import list<commit_after>
|
from . import _vmray
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'domaintools', 'eupi', 'ipasn', 'passivetotal', 'sourcecache',
'virustotal', 'whois', 'shodan', 'reversedns', 'wiki']
|
from . import _vmray
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl', 'countrycode', 'cve', 'dns',
'eupi', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal', 'whois', 'shodan', 'reversedns', 'wiki']
Add domaintools to the import listfrom . import _vmray
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'domaintools', 'eupi', 'ipasn', 'passivetotal', 'sourcecache',
'virustotal', 'whois', 'shodan', 'reversedns', 'wiki']
|
<commit_before>from . import _vmray
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl', 'countrycode', 'cve', 'dns',
'eupi', 'ipasn', 'passivetotal', 'sourcecache', 'virustotal', 'whois', 'shodan', 'reversedns', 'wiki']
<commit_msg>Add domaintools to the import list<commit_after>from . import _vmray
__all__ = ['vmray_submit', 'asn_history', 'circl_passivedns', 'circl_passivessl',
'countrycode', 'cve', 'dns', 'domaintools', 'eupi', 'ipasn', 'passivetotal', 'sourcecache',
'virustotal', 'whois', 'shodan', 'reversedns', 'wiki']
|
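A small guard that pairs well with hand-maintained __all__ lists like the one above: import every listed name at package load so a typo fails immediately rather than at "from pkg import *" time. A sketch, with the real call left commented because it only works inside the package:

import importlib

def verify_all(package, names):
    """Raise ImportError up front for any missing submodule in __all__."""
    for name in names:
        importlib.import_module('.' + name, package)

# verify_all('misp_modules.modules.expansion', __all__)  # call inside the package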
a4a8e3a8ed6753c5d4a51c90c5f68f76e7372f2a
|
selvbetjening/sadmin2/tests/ui/common.py
|
selvbetjening/sadmin2/tests/ui/common.py
|
from splinter import Browser
import urlparse
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
class UITestCase(LiveServerTestCase):
@classmethod
def setUpClass(cls):
cls.wd = Browser()
super(UITestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.wd.quit()
super(UITestCase, cls).tearDownClass()
def open(self, url):
self.wd.visit(urlparse.urljoin(self.live_server_url, url))
def login_admin(self):
self.open(reverse('sadmin2:dashboard'))
self.wd.fill('username', 'admin')
self.wd.fill('password', 'admin')
self.wd.find_by_name('login').first.click()
self.wd.is_element_not_present_by_name('login', wait_time=10) # wait for the page to start reloading
|
from selenium.common.exceptions import WebDriverException
from splinter import Browser
import urlparse
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
class UITestCase(LiveServerTestCase):
@classmethod
def setUpClass(cls):
try:
cls.wd = Browser('phantomjs')
except WebDriverException:
cls.wd = Browser() # fall back to the default (firefox) if phantomjs is unavailable
super(UITestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.wd.quit()
super(UITestCase, cls).tearDownClass()
def open(self, url):
self.wd.visit(urlparse.urljoin(self.live_server_url, url))
def login_admin(self):
self.open(reverse('sadmin2:dashboard'))
self.wd.fill('username', 'admin')
self.wd.fill('password', 'admin')
self.wd.find_by_name('login').first.click()
self.wd.is_element_not_present_by_name('login', wait_time=10) # wait for the page to start reloading
|
Switch to headless UI testing by default
|
Switch to headless UI testing by default
|
Python
|
mit
|
animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening,animekita/selvbetjening
|
from splinter import Browser
import urlparse
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
class UITestCase(LiveServerTestCase):
@classmethod
def setUpClass(cls):
cls.wd = Browser()
super(UITestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.wd.quit()
super(UITestCase, cls).tearDownClass()
def open(self, url):
self.wd.visit(urlparse.urljoin(self.live_server_url, url))
def login_admin(self):
self.open(reverse('sadmin2:dashboard'))
self.wd.fill('username', 'admin')
self.wd.fill('password', 'admin')
self.wd.find_by_name('login').first.click()
self.wd.is_element_not_present_by_name('login', wait_time=10) # wait for the page to start reloading
Switch to headless UI testing by default
|
from selenium.common.exceptions import WebDriverException
from splinter import Browser
import urlparse
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
class UITestCase(LiveServerTestCase):
@classmethod
def setUpClass(cls):
try:
cls.wd = Browser('phantomjs')
except WebDriverException:
cls.wd = Browser() # fall back to the default (firefox) if phantomjs is unavailable
super(UITestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.wd.quit()
super(UITestCase, cls).tearDownClass()
def open(self, url):
self.wd.visit(urlparse.urljoin(self.live_server_url, url))
def login_admin(self):
self.open(reverse('sadmin2:dashboard'))
self.wd.fill('username', 'admin')
self.wd.fill('password', 'admin')
self.wd.find_by_name('login').first.click()
self.wd.is_element_not_present_by_name('login', wait_time=10) # wait for the page to start reloading
|
<commit_before>
from splinter import Browser
import urlparse
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
class UITestCase(LiveServerTestCase):
@classmethod
def setUpClass(cls):
cls.wd = Browser()
super(UITestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.wd.quit()
super(UITestCase, cls).tearDownClass()
def open(self, url):
self.wd.visit(urlparse.urljoin(self.live_server_url, url))
def login_admin(self):
self.open(reverse('sadmin2:dashboard'))
self.wd.fill('username', 'admin')
self.wd.fill('password', 'admin')
self.wd.find_by_name('login').first.click()
self.wd.is_element_not_present_by_name('login', wait_time=10) # wait for the page to start reloading
<commit_msg>Switch to headless UI testing by default<commit_after>
|
from selenium.common.exceptions import WebDriverException
from splinter import Browser
import urlparse
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
class UITestCase(LiveServerTestCase):
@classmethod
def setUpClass(cls):
try:
cls.wd = Browser('phantomjs')
except WebDriverException:
cls.wd = Browser() # fall back to the default (firefox) if phantomjs is unavailable
super(UITestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.wd.quit()
super(UITestCase, cls).tearDownClass()
def open(self, url):
self.wd.visit(urlparse.urljoin(self.live_server_url, url))
def login_admin(self):
self.open(reverse('sadmin2:dashboard'))
self.wd.fill('username', 'admin')
self.wd.fill('password', 'admin')
self.wd.find_by_name('login').first.click()
self.wd.is_element_not_present_by_name('login', wait_time=10) # wait for the page to start reloading
|
from splinter import Browser
import urlparse
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
class UITestCase(LiveServerTestCase):
@classmethod
def setUpClass(cls):
cls.wd = Browser()
super(UITestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.wd.quit()
super(UITestCase, cls).tearDownClass()
def open(self, url):
self.wd.visit(urlparse.urljoin(self.live_server_url, url))
def login_admin(self):
self.open(reverse('sadmin2:dashboard'))
self.wd.fill('username', 'admin')
self.wd.fill('password', 'admin')
self.wd.find_by_name('login').first.click()
self.wd.is_element_not_present_by_name('login', wait_time=10) # wait for the page to start reloading
Switch to headless UI testing by defaultfrom selenium.common.exceptions import WebDriverException
from splinter import Browser
import urlparse
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
class UITestCase(LiveServerTestCase):
@classmethod
def setUpClass(cls):
try:
cls.wd = Browser('phantomjs')
except WebDriverException:
cls.wd = Browser() # fall back to the default (firefox) if phantomjs is unavailable
super(UITestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.wd.quit()
super(UITestCase, cls).tearDownClass()
def open(self, url):
self.wd.visit(urlparse.urljoin(self.live_server_url, url))
def login_admin(self):
self.open(reverse('sadmin2:dashboard'))
self.wd.fill('username', 'admin')
self.wd.fill('password', 'admin')
self.wd.find_by_name('login').first.click()
self.wd.is_element_not_present_by_name('login', wait_time=10) # wait for the page to start reloading
|
<commit_before>
from splinter import Browser
import urlparse
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
class UITestCase(LiveServerTestCase):
@classmethod
def setUpClass(cls):
cls.wd = Browser()
super(UITestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.wd.quit()
super(UITestCase, cls).tearDownClass()
def open(self, url):
self.wd.visit(urlparse.urljoin(self.live_server_url, url))
def login_admin(self):
self.open(reverse('sadmin2:dashboard'))
self.wd.fill('username', 'admin')
self.wd.fill('password', 'admin')
self.wd.find_by_name('login').first.click()
self.wd.is_element_not_present_by_name('login', wait_time=10) # wait for the page to start reloading
<commit_msg>Switch to headless UI testing by default<commit_after>from selenium.common.exceptions import WebDriverException
from splinter import Browser
import urlparse
from django.core.urlresolvers import reverse
from django.test import LiveServerTestCase
class UITestCase(LiveServerTestCase):
@classmethod
def setUpClass(cls):
try:
cls.wd = Browser('phantomjs')
except WebDriverException:
cls.wd = Browser() # fall back to the default (firefox) if phantomjs is unavailable
super(UITestCase, cls).setUpClass()
@classmethod
def tearDownClass(cls):
cls.wd.quit()
super(UITestCase, cls).tearDownClass()
def open(self, url):
self.wd.visit(urlparse.urljoin(self.live_server_url, url))
def login_admin(self):
self.open(reverse('sadmin2:dashboard'))
self.wd.fill('username', 'admin')
self.wd.fill('password', 'admin')
self.wd.find_by_name('login').first.click()
self.wd.is_element_not_present_by_name('login', wait_time=10) # wait for the page to start reloading
|
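The same try/except driver fallback, sketched with plain Selenium since PhantomJS has long been discontinued and headless Firefox is its usual replacement (assumptions: geckodriver is available, and splinter is not used here):

from selenium import webdriver
from selenium.common.exceptions import WebDriverException

def make_browser():
    options = webdriver.FirefoxOptions()
    options.add_argument('-headless')      # prefer a headless window
    try:
        return webdriver.Firefox(options=options)
    except WebDriverException:
        return webdriver.Firefox()         # fall back to a visible browser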
9c348c88771acb49f820cbb2fa16ce318068b777
|
groundstation/peer_socket.py
|
groundstation/peer_socket.py
|
from sockets.socket_closed_exception import SocketClosedException
from sockets.stream_socket import StreamSocket
import socket.error
from groundstation import settings
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
class PeerSocket(StreamSocket):
"""Wrapper for a peer who just connected, or one we've connected to
Since the communication protocol should be implicitly bidirectional, the
factory methods should be the only instantiation methods"""
def __init__(self, conn, peer):
self._sock = conn
super(PeerSocket, self).__init__()
self.peer = peer
@classmethod
def from_accept(klass, args):
return klass(*args)
@classmethod
def from_connect(klass, args):
return klass(*args)
def __repr__(self):
return "<%s: from %s>" % (self.__class__, self.peer)
# Wrap StreamSocket's send and recv in exception handling
def send(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
def recv(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
class PeerSocketClosedException(SocketClosedException):
"""Raised when a peer closes their socket"""
pass
|
from sockets.socket_closed_exception import SocketClosedException
from sockets.stream_socket import StreamSocket
import socket
from groundstation import settings
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
class PeerSocket(StreamSocket):
"""Wrapper for a peer who just connected, or one we've connected to
Since the communication protocol should be implicitly bidirectional, the
factory methods should be the only instantiation methods"""
def __init__(self, conn, peer):
self._sock = conn
super(PeerSocket, self).__init__()
self.peer = peer
@classmethod
def from_accept(klass, args):
return klass(*args)
@classmethod
def from_connect(klass, args):
return klass(*args)
def __repr__(self):
return "<%s: from %s>" % (self.__class__, self.peer)
# Wrap StreamSocket's send and recv in exception handling
def send(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
def recv(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
class PeerSocketClosedException(SocketClosedException):
"""Raised when a peer closes their socket"""
pass
|
Fix broken import of socket errors
|
Fix broken import of socket errors
|
Python
|
mit
|
richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation,richo/groundstation
|
from sockets.socket_closed_exception import SocketClosedException
from sockets.stream_socket import StreamSocket
import socket.error
from groundstation import settings
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
class PeerSocket(StreamSocket):
"""Wrapper for a peer who just connected, or one we've connected to
Since the communication protocol should be implicitly bidirectional, the
factory methods should be the only instantiation methods"""
def __init__(self, conn, peer):
self._sock = conn
super(PeerSocket, self).__init__()
self.peer = peer
@classmethod
def from_accept(klass, args):
return klass(*args)
@classmethod
def from_connect(klass, args):
return klass(*args)
def __repr__(self):
return "<%s: from %s>" % (self.__class__, self.peer)
# Wrap StreamSocket's send and recv in exception handling
def send(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
def recv(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
class PeerSocketClosedException(SocketClosedException):
"""Raised when a peer closes their socket"""
pass
Fix broken import of socket errors
|
from sockets.socket_closed_exception import SocketClosedException
from sockets.stream_socket import StreamSocket
import socket
from groundstation import settings
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
class PeerSocket(StreamSocket):
"""Wrapper for a peer who just connected, or one we've connected to
Since the communication protocol should be implicitly bidirectional, the
factory methods should be the only instantiation methods"""
def __init__(self, conn, peer):
self._sock = conn
super(PeerSocket, self).__init__()
self.peer = peer
@classmethod
def from_accept(klass, args):
return klass(*args)
@classmethod
def from_connect(klass, args):
return klass(*args)
def __repr__(self):
return "<%s: from %s>" % (self.__class__, self.peer)
# Wrap StreamSocket's send and recv in exception handling
def send(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
def recv(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
class PeerSocketClosedException(SocketClosedException):
"""Raised when a peer closes their socket"""
pass
|
<commit_before>from sockets.socket_closed_exception import SocketClosedException
from sockets.stream_socket import StreamSocket
import socket.error
from groundstation import settings
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
class PeerSocket(StreamSocket):
"""Wrapper for a peer who just connected, or one we've connected to
Since the communication protocol should be implicitly bidirectional, the
factory methods should be the only instantiation methods"""
def __init__(self, conn, peer):
self._sock = conn
super(PeerSocket, self).__init__()
self.peer = peer
@classmethod
def from_accept(klass, args):
return klass(*args)
@classmethod
def from_connect(klass, args):
return klass(*args)
def __repr__(self):
return "<%s: from %s>" % (self.__class__, self.peer)
# Wrap StreamSocket's send and recv in exception handling
def send(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
def recv(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
class PeerSocketClosedException(SocketClosedException):
"""Raised when a peer closes their socket"""
pass
<commit_msg>Fix broken import of socket errors<commit_after>
|
from sockets.socket_closed_exception import SocketClosedException
from sockets.stream_socket import StreamSocket
import socket
from groundstation import settings
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
class PeerSocket(StreamSocket):
"""Wrapper for a peer who just connected, or one we've connected to
Since the communication protocol should be implicitly bidirectional, the
factory methods should be the only instantiation methods"""
def __init__(self, conn, peer):
self._sock = conn
super(PeerSocket, self).__init__()
self.peer = peer
@classmethod
def from_accept(klass, args):
return klass(*args)
@classmethod
def from_connect(klass, args):
return klass(*args)
def __repr__(self):
return "<%s: from %s>" % (self.__class__, self.peer)
# Wrap StreamSocket's send and recv in exception handling
def send(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
def recv(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
class PeerSocketClosedException(SocketClosedException):
"""Raised when a peer closes their socket"""
pass
|
from sockets.socket_closed_exception import SocketClosedException
from sockets.stream_socket import StreamSocket
import socket.error
from groundstation import settings
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
class PeerSocket(StreamSocket):
"""Wrapper for a peer who just connected, or one we've connected to
Since the communication protocol should be implicitly bidirectional, the
factory methods should be the only instantiation methods"""
def __init__(self, conn, peer):
self._sock = conn
super(PeerSocket, self).__init__()
self.peer = peer
@classmethod
def from_accept(klass, args):
return klass(*args)
@classmethod
def from_connect(klass, args):
return klass(*args)
def __repr__(self):
return "<%s: from %s>" % (self.__class__, self.peer)
# Wrap StreamSocket's send and recv in exception handling
def send(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
def recv(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
class PeerSocketClosedException(SocketClosedException):
"""Raised when a peer closes their socket"""
pass
Fix broken import of socket errorsfrom sockets.socket_closed_exception import SocketClosedException
from sockets.stream_socket import StreamSocket
import socket
from groundstation import settings
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
class PeerSocket(StreamSocket):
"""Wrapper for a peer who just connected, or one we've connected to
Since the communication protocol should be implicitly bidirectional, the
factory methods should be the only instantiation methods"""
def __init__(self, conn, peer):
self._sock = conn
super(PeerSocket, self).__init__()
self.peer = peer
@classmethod
def from_accept(klass, args):
return klass(*args)
@classmethod
def from_connect(klass, args):
return klass(*args)
def __repr__(self):
return "<%s: from %s>" % (self.__class__, self.peer)
# Wrap StreamSocket's send and recv in exception handling
def send(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
def recv(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
class PeerSocketClosedException(SocketClosedException):
"""Raised when a peer closes their socket"""
pass
|
<commit_before>from sockets.socket_closed_exception import SocketClosedException
from sockets.stream_socket import StreamSocket
import socket.error
from groundstation import settings
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
class PeerSocket(StreamSocket):
"""Wrapper for a peer who just connected, or one we've connected to
Since the communication protocol should be implicitly bidirectional, the
factory methods should be the only instanciation methods"""
def __init__(self, conn, peer):
self._sock = conn
super(PeerSocket, self).__init__()
self.peer = peer
@classmethod
def from_accept(klass, args):
return klass(*args)
@classmethod
def from_connect(klass, args):
return klass(*args)
def __repr__(self):
return "<%s: from %s>" % (self.__class__, self.peer)
# Wrap StreamSocket's send and recv in exception handling
def send(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
def recv(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
class PeerSocketClosedException(SocketClosedException):
"""Raised when a peer closes their socket"""
pass
<commit_msg>Fix broken import of socket errors<commit_after>from sockets.socket_closed_exception import SocketClosedException
from sockets.stream_socket import StreamSocket
import socket
from groundstation import settings
import groundstation.logger
log = groundstation.logger.getLogger(__name__)
class PeerSocket(StreamSocket):
"""Wrapper for a peer who just connected, or one we've connected to
Since the communication protocol should be implicitly bidirectional, the
factory methods should be the only instanciation methods"""
def __init__(self, conn, peer):
self._sock = conn
super(PeerSocket, self).__init__()
self.peer = peer
@classmethod
def from_accept(klass, args):
return klass(*args)
@classmethod
def from_connect(klass, args):
return klass(*args)
def __repr__(self):
return "<%s: from %s>" % (self.__class__, self.peer)
# Wrap StreamSocket's send and recv in exception handling
def send(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
def recv(self, *args, **kwargs):
try:
return super(PeerSocket, self).send(*args, **kwargs)
except socket.error as e:
raise PeerSocketClosedException(e)
class PeerSocketClosedException(SocketClosedException):
"""Raised when a peer closes their socket"""
pass
|
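An aside on the fix recorded above: socket.error is an exception attribute of the socket module (an alias of OSError since Python 3.3), not a submodule, so "import socket.error" fails at startup with ModuleNotFoundError; "import socket" is the only working form. Note also that recv in both versions still delegates to send -- an apparent copy-paste slip preserved here from the source repo. A minimal, standalone illustration of the import point:

import socket

# "import socket.error" would raise ModuleNotFoundError: socket.error is an
# attribute of the socket module, not a package submodule.
print(socket.error is OSError)  # True on Python 3.3+

try:
    raise socket.error("connection reset by peer")
except socket.error as exc:
    print("caught:", exc)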
f2426c54a07f4492bfc23936fe4b1970315c6890
|
MessageClient.py
|
MessageClient.py
|
import time
from twilio.rest import TwilioRestClient
import smtplib
from email.mime.text import MIMEText
from Environ import config
class MessageClient(object):
def __init__(self):
self.twilioClient = TwilioRestClient(config["twilioAccount"], config["twilioToken"])
pass
def alertSMS(self, sendto, msg):
print("SMS to :" + sendto)
print("Message: " + msg)
#resp = self.twilioClient.messages.create(to=sendto, from_=config["twilioNumber"], body=msg)
#print(resp)
return
def alertEMail(self, sendto, msg):
print("Email to :" + sendto)
print("Message: " + msg)
msg = MIMEText(msg)
msg['Subject'] = "Alert: Critcal Care Control Center"
msg['From'] = "criticalcare@hexaware.com"
msg['To'] = sendto
s = smtplib.SMTP('localhost')
s.send_message(msg)
s.quit()
return
|
import time
from twilio.rest import TwilioRestClient
import smtplib
from email.mime.text import MIMEText
from Environ import config
class MessageClient(object):
def __init__(self):
self.twilioClient = TwilioRestClient(config["twilioAccount"], config["twilioToken"])
pass
def alertSMS(self, sendto, msg):
print("SMS to :" + sendto)
print("Message: " + msg)
resp = self.twilioClient.messages.create(to=sendto, from_=config["twilioNumber"], body=msg)
print(resp)
return
def alertEMail(self, sendto, msg):
print("Email to :" + sendto)
print("Message: " + msg)
msg = MIMEText(msg)
msg['Subject'] = "Alert: Critcal Care Control Center"
msg['From'] = "criticalcare@hexaware.com"
msg['To'] = sendto
#s = smtplib.SMTP('localhost')
#s.send_message(msg)
#s.quit()
return
|
Comment email and activated SMS
|
Comment email and activated SMS
|
Python
|
apache-2.0
|
johnfelixc/CriticalMonitor,johnfelixc/CriticalMonitor,johnfelixc/CriticalMonitor
|
import time
from twilio.rest import TwilioRestClient
import smtplib
from email.mime.text import MIMEText
from Environ import config
class MessageClient(object):
def __init__(self):
self.twilioClient = TwilioRestClient(config["twilioAccount"], config["twilioToken"])
pass
def alertSMS(self, sendto, msg):
print("SMS to :" + sendto)
print("Message: " + msg)
#resp = self.twilioClient.messages.create(to=sendto, from_=config["twilioNumber"], body=msg)
#print(resp)
return
def alertEMail(self, sendto, msg):
print("Email to :" + sendto)
print("Message: " + msg)
msg = MIMEText(msg)
msg['Subject'] = "Alert: Critcal Care Control Center"
msg['From'] = "criticalcare@hexaware.com"
msg['To'] = sendto
s = smtplib.SMTP('localhost')
s.send_message(msg)
s.quit()
return
Comment email and activated SMS
|
import time
from twilio.rest import TwilioRestClient
import smtplib
from email.mime.text import MIMEText
from Environ import config
class MessageClient(object):
def __init__(self):
self.twilioClient = TwilioRestClient(config["twilioAccount"], config["twilioToken"])
pass
def alertSMS(self, sendto, msg):
print("SMS to :" + sendto)
print("Message: " + msg)
resp = self.twilioClient.messages.create(to=sendto, from_=config["twilioNumber"], body=msg)
print(resp)
return
def alertEMail(self, sendto, msg):
print("Email to :" + sendto)
print("Message: " + msg)
msg = MIMEText(msg)
msg['Subject'] = "Alert: Critcal Care Control Center"
msg['From'] = "criticalcare@hexaware.com"
msg['To'] = sendto
#s = smtplib.SMTP('localhost')
#s.send_message(msg)
#s.quit()
return
|
<commit_before>
import time
from twilio.rest import TwilioRestClient
import smtplib
from email.mime.text import MIMEText
from Environ import config
class MessageClient(object):
def __init__(self):
self.twilioClient = TwilioRestClient(config["twilioAccount"], config["twilioToken"])
pass
def alertSMS(self, sendto, msg):
print("SMS to :" + sendto)
print("Message: " + msg)
#resp = self.twilioClient.messages.create(to=sendto, from_=config["twilioNumber"], body=msg)
#print(resp)
return
def alertEMail(self, sendto, msg):
print("Email to :" + sendto)
print("Message: " + msg)
msg = MIMEText(msg)
msg['Subject'] = "Alert: Critcal Care Control Center"
msg['From'] = "criticalcare@hexaware.com"
msg['To'] = sendto
s = smtplib.SMTP('localhost')
s.send_message(msg)
s.quit()
return<commit_msg>Comment email and activated SMS<commit_after>
|
import time
from twilio.rest import TwilioRestClient
import smtplib
from email.mime.text import MIMEText
from Environ import config
class MessageClient(object):
def __init__(self):
self.twilioClient = TwilioRestClient(config["twilioAccount"], config["twilioToken"])
pass
def alertSMS(self, sendto, msg):
print("SMS to :" + sendto)
print("Message: " + msg)
resp = self.twilioClient.messages.create(to=sendto, from_=config["twilioNumber"], body=msg)
print(resp)
return
def alertEMail(self, sendto, msg):
print("Email to :" + sendto)
print("Message: " + msg)
msg = MIMEText(msg)
msg['Subject'] = "Alert: Critcal Care Control Center"
msg['From'] = "criticalcare@hexaware.com"
msg['To'] = sendto
#s = smtplib.SMTP('localhost')
#s.send_message(msg)
#s.quit()
return
|
import time
from twilio.rest import TwilioRestClient
import smtplib
from email.mime.text import MIMEText
from Environ import config
class MessageClient(object):
def __init__(self):
self.twilioClient = TwilioRestClient(config["twilioAccount"], config["twilioToken"])
pass
def alertSMS(self, sendto, msg):
print("SMS to :" + sendto)
print("Message: " + msg)
#resp = self.twilioClient.messages.create(to=sendto, from_=config["twilioNumber"], body=msg)
#print(resp)
return
def alertEMail(self, sendto, msg):
print("Email to :" + sendto)
print("Message: " + msg)
msg = MIMEText(msg)
msg['Subject'] = "Alert: Critcal Care Control Center"
msg['From'] = "criticalcare@hexaware.com"
msg['To'] = sendto
s = smtplib.SMTP('localhost')
s.send_message(msg)
s.quit()
return
Comment email and activated SMS
import time
from twilio.rest import TwilioRestClient
import smtplib
from email.mime.text import MIMEText
from Environ import config
class MessageClient(object):
def __init__(self):
self.twilioClient = TwilioRestClient(config["twilioAccount"], config["twilioToken"])
pass
def alertSMS(self, sendto, msg):
print("SMS to :" + sendto)
print("Message: " + msg)
resp = self.twilioClient.messages.create(to=sendto, from_=config["twilioNumber"], body=msg)
print(resp)
return
def alertEMail(self, sendto, msg):
print("Email to :" + sendto)
print("Message: " + msg)
msg = MIMEText(msg)
msg['Subject'] = "Alert: Critcal Care Control Center"
msg['From'] = "criticalcare@hexaware.com"
msg['To'] = sendto
#s = smtplib.SMTP('localhost')
#s.send_message(msg)
#s.quit()
return
|
<commit_before>
import time
from twilio.rest import TwilioRestClient
import smtplib
from email.mime.text import MIMEText
from Environ import config
class MessageClient(object):
def __init__(self):
self.twilioClient = TwilioRestClient(config["twilioAccount"], config["twilioToken"])
pass
def alertSMS(self, sendto, msg):
print("SMS to :" + sendto)
print("Message: " + msg)
#resp = self.twilioClient.messages.create(to=sendto, from_=config["twilioNumber"], body=msg)
#print(resp)
return
def alertEMail(self, sendto, msg):
print("Email to :" + sendto)
print("Message: " + msg)
msg = MIMEText(msg)
msg['Subject'] = "Alert: Critcal Care Control Center"
msg['From'] = "criticalcare@hexaware.com"
msg['To'] = sendto
s = smtplib.SMTP('localhost')
s.send_message(msg)
s.quit()
return<commit_msg>Comment email and activated SMS<commit_after>
import time
from twilio.rest import TwilioRestClient
import smtplib
from email.mime.text import MIMEText
from Environ import config
class MessageClient(object):
def __init__(self):
self.twilioClient = TwilioRestClient(config["twilioAccount"], config["twilioToken"])
pass
def alertSMS(self, sendto, msg):
print("SMS to :" + sendto)
print("Message: " + msg)
resp = self.twilioClient.messages.create(to=sendto, from_=config["twilioNumber"], body=msg)
print(resp)
return
def alertEMail(self, sendto, msg):
print("Email to :" + sendto)
print("Message: " + msg)
msg = MIMEText(msg)
msg['Subject'] = "Alert: Critcal Care Control Center"
msg['From'] = "criticalcare@hexaware.com"
msg['To'] = sendto
#s = smtplib.SMTP('localhost')
#s.send_message(msg)
#s.quit()
return
|
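The commit above enables SMS and disables email by commenting lines in and out, which is easy to get wrong across environments. A config-driven toggle is one alternative; the sketch below is hypothetical (the enable_sms and twilio_number keys and the stub client are illustrative, not part of the real Environ config or the Twilio API):

class StubTwilioClient(object):
    """Stand-in so the sketch runs without Twilio credentials."""
    class _Messages(object):
        def create(self, to, from_, body):
            return "queued SMS to %s: %s" % (to, body)
    def __init__(self):
        self.messages = self._Messages()

class ConfigurableMessageClient(object):
    def __init__(self, config, twilio_client):
        self.config = config
        self.twilio_client = twilio_client  # injected, easy to stub in tests

    def alert_sms(self, sendto, msg):
        if not self.config.get("enable_sms", False):
            return "SMS disabled; would have sent to %s" % sendto
        return self.twilio_client.messages.create(
            to=sendto, from_=self.config["twilio_number"], body=msg)

config = {"enable_sms": True, "twilio_number": "+15005550006"}
client = ConfigurableMessageClient(config, StubTwilioClient())
print(client.alert_sms("+15005550001", "Alert: vitals out of range"))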
de66fe28d2bd3e118a468257601d2bdfcc4341ed
|
niche_vlaanderen/__init__.py
|
niche_vlaanderen/__init__.py
|
from .acidity import Acidity # noqa
from .niche import Niche, NicheDelta # noqa
from .nutrient_level import NutrientLevel # noqa
from .vegetation import Vegetation # noqa
from .version import __version__ # noqa
|
from .acidity import Acidity # noqa
from .niche import Niche, NicheDelta # noqa
from .nutrient_level import NutrientLevel # noqa
from .vegetation import Vegetation # noqa
from .version import __version__ # noqa
from .floodplain import FloodPlain
|
Add FloodPlain class to module namespace
|
Add FloodPlain class to module namespace
|
Python
|
mit
|
johanvdw/niche_vlaanderen
|
from .acidity import Acidity # noqa
from .niche import Niche, NicheDelta # noqa
from .nutrient_level import NutrientLevel # noqa
from .vegetation import Vegetation # noqa
from .version import __version__ # noqa
Add FloodPlain class to module namespace
|
from .acidity import Acidity # noqa
from .niche import Niche, NicheDelta # noqa
from .nutrient_level import NutrientLevel # noqa
from .vegetation import Vegetation # noqa
from .version import __version__ # noqa
from .floodplain import FloodPlain
|
<commit_before>from .acidity import Acidity # noqa
from .niche import Niche, NicheDelta # noqa
from .nutrient_level import NutrientLevel # noqa
from .vegetation import Vegetation # noqa
from .version import __version__ # noqa
<commit_msg>Add FloodPlain class to module namespace<commit_after>
|
from .acidity import Acidity # noqa
from .niche import Niche, NicheDelta # noqa
from .nutrient_level import NutrientLevel # noqa
from .vegetation import Vegetation # noqa
from .version import __version__ # noqa
from .floodplain import FloodPlain
|
from .acidity import Acidity # noqa
from .niche import Niche, NicheDelta # noqa
from .nutrient_level import NutrientLevel # noqa
from .vegetation import Vegetation # noqa
from .version import __version__ # noqa
Add FloodPlain class to module namespace
from .acidity import Acidity # noqa
from .niche import Niche, NicheDelta # noqa
from .nutrient_level import NutrientLevel # noqa
from .vegetation import Vegetation # noqa
from .version import __version__ # noqa
from .floodplain import FloodPlain
|
<commit_before>from .acidity import Acidity # noqa
from .niche import Niche, NicheDelta # noqa
from .nutrient_level import NutrientLevel # noqa
from .vegetation import Vegetation # noqa
from .version import __version__ # noqa
<commit_msg>Add FloodPlain class to module namespace<commit_after>from .acidity import Acidity # noqa
from .niche import Niche, NicheDelta # noqa
from .nutrient_level import NutrientLevel # noqa
from .vegetation import Vegetation # noqa
from .version import __version__ # noqa
from .floodplain import FloodPlain
|
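The one-line change above is the standard re-export pattern: assigning a name in a package's __init__.py makes it importable from the package root, so callers can write "from niche_vlaanderen import FloodPlain" instead of reaching into the submodule. One small review note: unlike its siblings, the new import carries no "# noqa" marker, so flake8's unused-import check (F401) may flag it. A self-contained sketch of the pattern, building a throwaway package at runtime rather than touching the real layout:

import sys
import types

# Simulate pkg/floodplain.py defining the class.
floodplain = types.ModuleType("pkg.floodplain")
exec("class FloodPlain(object):\n    pass", floodplain.__dict__)

# Simulate pkg/__init__.py re-exporting it into the package namespace.
pkg = types.ModuleType("pkg")
pkg.FloodPlain = floodplain.FloodPlain

sys.modules["pkg"] = pkg
sys.modules["pkg.floodplain"] = floodplain

from pkg import FloodPlain  # resolves via the re-export in __init__
print(FloodPlain().__class__.__name__)  # FloodPlain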
b1c67321e5eec29b9fd91d728bd8e63382dc063a
|
src/keybar/conf/test.py
|
src/keybar/conf/test.py
|
from keybar.conf.base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'keybar_test',
}
}
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
KEYBAR_VERIFY_CLIENT_CERTIFICATE = True
KEYBAR_DOMAIN = 'local.keybar.io'
KEYBAR_HOST = 'local.keybar.io:8443'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
KEYBAR_HOST = 'local.keybar.io:9999'
KEYBAR_KDF_ITERATIONS = 100
|
from keybar.conf.base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'keybar_test',
}
}
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
KEYBAR_VERIFY_CLIENT_CERTIFICATE = True
KEYBAR_DOMAIN = 'local.keybar.io'
KEYBAR_HOST = 'local.keybar.io:9999'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
KEYBAR_KDF_ITERATIONS = 100
|
Remove duplicate keybar host value
|
Remove duplicate keybar host value
|
Python
|
bsd-3-clause
|
keybar/keybar
|
from keybar.conf.base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'keybar_test',
}
}
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
KEYBAR_VERIFY_CLIENT_CERTIFICATE = True
KEYBAR_DOMAIN = 'local.keybar.io'
KEYBAR_HOST = 'local.keybar.io:8443'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
KEYBAR_HOST = 'local.keybar.io:9999'
KEYBAR_KDF_ITERATIONS = 100
Remove duplicate keybar host value
|
from keybar.conf.base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'keybar_test',
}
}
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
KEYBAR_VERIFY_CLIENT_CERTIFICATE = True
KEYBAR_DOMAIN = 'local.keybar.io'
KEYBAR_HOST = 'local.keybar.io:9999'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
KEYBAR_KDF_ITERATIONS = 100
|
<commit_before>from keybar.conf.base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'keybar_test',
}
}
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
KEYBAR_VERIFY_CLIENT_CERTIFICATE = True
KEYBAR_DOMAIN = 'local.keybar.io'
KEYBAR_HOST = 'local.keybar.io:8443'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
KEYBAR_HOST = 'local.keybar.io:9999'
KEYBAR_KDF_ITERATIONS = 100
<commit_msg>Remove duplicate keybar host value<commit_after>
|
from keybar.conf.base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'keybar_test',
}
}
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
KEYBAR_VERIFY_CLIENT_CERTIFICATE = True
KEYBAR_DOMAIN = 'local.keybar.io'
KEYBAR_HOST = 'local.keybar.io:9999'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
KEYBAR_KDF_ITERATIONS = 100
|
from keybar.conf.base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'keybar_test',
}
}
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
KEYBAR_VERIFY_CLIENT_CERTIFICATE = True
KEYBAR_DOMAIN = 'local.keybar.io'
KEYBAR_HOST = 'local.keybar.io:8443'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
KEYBAR_HOST = 'local.keybar.io:9999'
KEYBAR_KDF_ITERATIONS = 100
Remove duplicate keybar host value
from keybar.conf.base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'keybar_test',
}
}
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
KEYBAR_VERIFY_CLIENT_CERTIFICATE = True
KEYBAR_DOMAIN = 'local.keybar.io'
KEYBAR_HOST = 'local.keybar.io:9999'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
KEYBAR_KDF_ITERATIONS = 100
|
<commit_before>from keybar.conf.base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'keybar_test',
}
}
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
KEYBAR_VERIFY_CLIENT_CERTIFICATE = True
KEYBAR_DOMAIN = 'local.keybar.io'
KEYBAR_HOST = 'local.keybar.io:8443'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
KEYBAR_HOST = 'local.keybar.io:9999'
KEYBAR_KDF_ITERATIONS = 100
<commit_msg>Remove duplicate keybar host value<commit_after>from keybar.conf.base import *
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': 'keybar_test',
}
}
certificates_dir = os.path.join(BASE_DIR, 'tests', 'resources', 'certificates')
KEYBAR_SERVER_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.cert')
KEYBAR_SERVER_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-SERVER.key')
KEYBAR_CLIENT_CERTIFICATE = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.cert')
KEYBAR_CLIENT_KEY = os.path.join(certificates_dir, 'KEYBAR-intermediate-CLIENT.key')
KEYBAR_CA_BUNDLE = os.path.join(certificates_dir, 'KEYBAR-ca-bundle.crt')
KEYBAR_VERIFY_CLIENT_CERTIFICATE = True
KEYBAR_DOMAIN = 'local.keybar.io'
KEYBAR_HOST = 'local.keybar.io:9999'
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher',
)
CELERY_ALWAYS_EAGER = True
CELERY_EAGER_PROPAGATES_EXCEPTIONS = True
KEYBAR_KDF_ITERATIONS = 100
|
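Worth spelling out why the duplicate was harmless but confusing: a Django settings file is plain Python executed top to bottom, so the second KEYBAR_HOST assignment silently shadowed the first and ':9999' won either way; the commit just deletes the dead ':8443' line. The shadowing is trivial to demonstrate:

# Module-level settings are ordinary assignments: last write wins, silently.
KEYBAR_HOST = 'local.keybar.io:8443'   # dead value, overwritten below
KEYBAR_HOST = 'local.keybar.io:9999'   # this is what the app actually sees
print(KEYBAR_HOST)  # local.keybar.io:9999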
7f0ab3d1db2257a630df44ad92b4f094f6a61894
|
application.py
|
application.py
|
import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
sentry_sdk.set_level('error') # only record error logs or exceptions
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
|
import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
if 'SENTRY_DSN' in os.environ:
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
|
Tweak Sentry config to work in development
|
Tweak Sentry config to work in development
This makes a couple of changes:
- Most importantly, it wraps the setup code in a conditional so that
developers don't need to have a DSN set to start the app locally.
- Secondly, it removes the redundant call to "set_level". Originally
I thought the integration was sending info/warning events, but this
isn't the case [1] and even if it was, "set_level" affects the level
of custom events [2], not the level they are dispatched at.
[1]: https://github.com/getsentry/sentry-python/blob/4c09f3203d6d19789c6fa729a2e46557ad4ea913/sentry_sdk/integrations/logging.py#L56
[2]: https://docs.sentry.io/platforms/python/guides/logging/usage/set-level/
|
Python
|
mit
|
alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin,alphagov/notifications-admin
|
import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
sentry_sdk.set_level('error') # only record error logs or exceptions
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
Tweak Sentry config to work in development
This makes a couple of changes:
- Most importantly, it wraps the setup code in a conditional so that
developers don't need to have a DSN set to start the app locally.
- Secondly, it removes the redundant call to "set_level". Originally
I thought the integration was sending info/warning events, but this
isn't the case [1] and even if it was, "set_level" affects the level
of custom events [2], not the level they are dispatched at.
[1]: https://github.com/getsentry/sentry-python/blob/4c09f3203d6d19789c6fa729a2e46557ad4ea913/sentry_sdk/integrations/logging.py#L56
[2]: https://docs.sentry.io/platforms/python/guides/logging/usage/set-level/
|
import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
if 'SENTRY_DSN' in os.environ:
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
|
<commit_before>import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
sentry_sdk.set_level('error') # only record error logs or exceptions
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
<commit_msg>Tweak Sentry config to work in development
This makes a couple of changes:
- Most importantly, it wraps the setup code in a conditional so that
developers don't need to have a DSN set to start the app locally.
- Secondly, it removes the redundant call to "set_level". Originally
I thought the integration was sending info/warning events, but this
isn't the case [1] and even if it was, "set_level" affects the level
of custom events [2], not the level they are dispatched at.
[1]: https://github.com/getsentry/sentry-python/blob/4c09f3203d6d19789c6fa729a2e46557ad4ea913/sentry_sdk/integrations/logging.py#L56
[2]: https://docs.sentry.io/platforms/python/guides/logging/usage/set-level/<commit_after>
|
import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
if 'SENTRY_DSN' in os.environ:
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
|
import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
sentry_sdk.set_level('error') # only record error logs or exceptions
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
Tweak Sentry config to work in development
This makes a couple of changes:
- Most importantly, it wraps the setup code in a conditional so that
developers don't need to have a DSN set to start the app locally.
- Secondly, it removes the redundant call to "set_level". Originally
I thought the integration was sending info/warning events, but this
isn't the case [1] and even if it was, "set_level" affects the level
of custom events [2], not the level they are dispatched at.
[1]: https://github.com/getsentry/sentry-python/blob/4c09f3203d6d19789c6fa729a2e46557ad4ea913/sentry_sdk/integrations/logging.py#L56
[2]: https://docs.sentry.io/platforms/python/guides/logging/usage/set-level/
import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
if 'SENTRY_DSN' in os.environ:
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
|
<commit_before>import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
sentry_sdk.set_level('error') # only record error logs or exceptions
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
<commit_msg>Tweak Sentry config to work in development
This makes a couple of changes:
- Most importantly, it wraps the setup code in a conditional so that
developers don't need to have a DSN set to start the app locally.
- Secondly, it removes the redundant call to "set_level". Originally
I thought the integration was sending info/warning events, but this
isn't the case [1] and even if it was, "set_level" affects the level
of custom events [2], not the level they are dispatched at.
[1]: https://github.com/getsentry/sentry-python/blob/4c09f3203d6d19789c6fa729a2e46557ad4ea913/sentry_sdk/integrations/logging.py#L56
[2]: https://docs.sentry.io/platforms/python/guides/logging/usage/set-level/<commit_after>import os
import sentry_sdk
from flask import Flask
from sentry_sdk.integrations.flask import FlaskIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations import logging
from app import create_app
if 'SENTRY_DSN' in os.environ:
sentry_sdk.init(
dsn=os.environ['SENTRY_DSN'],
integrations=[FlaskIntegration(), RedisIntegration()],
environment=os.environ['NOTIFY_ENVIRONMENT'],
attach_stacktrace=True,
traces_sample_rate=0.00005 # avoid exceeding rate limits in Production
)
logging.ignore_logger('notifications_python_client.*') # ignore logs about 404s, etc.
application = Flask('app')
create_app(application)
|
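The guard added above generalises to any optional integration: check the environment once and skip initialisation when the variable is absent, so local developers need no DSN. A dependency-free sketch of the same shape (init_monitoring is a hypothetical stand-in for sentry_sdk.init, not a real API):

import os

def init_monitoring(dsn, environment, traces_sample_rate):
    # Stand-in for sentry_sdk.init; a real setup would configure the SDK here.
    print("monitoring on for %s (sampling %s)" % (environment, traces_sample_rate))

if 'SENTRY_DSN' in os.environ:
    init_monitoring(
        dsn=os.environ['SENTRY_DSN'],
        environment=os.environ.get('NOTIFY_ENVIRONMENT', 'development'),
        traces_sample_rate=0.00005,  # keep well under Production rate limits
    )
else:
    print("SENTRY_DSN unset -- running without error reporting")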
d945090bda715d1d3b8c610f4017542eed06e73e
|
src/runtime/pcode_io.py
|
src/runtime/pcode_io.py
|
# pcode_io.py 19/01/2016 D.J.Whale
# simplest possible implementation. Only really works well
# for small files.
def readline(filename, lineno):
f = open(filename)
lines = f.readlines()
f.close()
return lines[lineno-1] # runtime error if does not exist
def writeline(filename, lineno, data):
# read all lines in
f = open(filename)
lines = f.readlines()
f.close()
# modify in-memory copy first
lineno -= 1
if lineno >= len(lines):
# pad out extra lines as blanks
for i in range(1+lineno-len(lines)):
lines.append("")
lines[lineno] = data
# now create a brand new file and write all the lines out
f = open(filename, "w")
f.writelines(lines)
f.close()
# END
|
# pcode_io.py 19/01/2016 D.J.Whale
# simplest possible implementation. Only really works well
# for small files. Poor efficiency on large files.
def readline(filename, lineno):
f = open(filename)
lines = f.readlines()
f.close()
return lines[lineno-1] # runtime error if does not exist
def writeline(filename, lineno, data):
# read all lines in
f = open(filename)
lines = f.readlines()
f.close()
# modify in-memory copy first
lineno -= 1
if lineno >= len(lines):
# pad out extra lines as blanks
for i in range(1+lineno-len(lines)):
lines.append("")
lines[lineno] = data
# now create a brand new file and write all the lines out
f = open(filename, "w")
f.writelines(lines)
f.close()
#----- TEST HARNESS -----------------------------------------------------------
def tests():
pass
# write to a file that does not exist, to create it
# write to a file that does exist, to modify it
# write to a file that is locked, get an error
# write to a file that does not exist, no dir permissions, get error
# write to a file that adds a new line at the end
# write to a file that adds a new line way past the end (padding)
# write to a file that modifies a line to make it longer
# write to a file that modifies a line to make it shorter
# read from a file that does not exist
# read from a file in a dir with no permissions, get error
# read from a file without read permissions, get error
# read from a file that exists
# read a line that does not exist
# read a line that does exist
if __name__ == "__main__":
tests()
# END
|
Test cases specified for io
|
Test cases specified for io
|
Python
|
mit
|
whaleygeek/pc_parser,whaleygeek/pc_parser
|
# pcode_io.py 19/01/2016 D.J.Whale
# simplest possible implementation. Only really works well
# for small files.
def readline(filename, lineno):
f = open(filename)
lines = f.readlines()
f.close()
return lines[lineno-1] # runtime error if does not exist
def writeline(filename, lineno, data):
# read all lines in
f = open(filename)
lines = f.readlines()
f.close()
# modify in-memory copy first
lineno -= 1
if lineno >= len(lines):
# pad out extra lines as blanks
for i in range(1+lineno-len(lines)):
lines.append("")
lines[lineno] = data
# now create a brand new file and write all the lines out
f = open(filename, "w")
f.writelines(lines)
f.close()
# END
Test cases specified for io
|
# pcode_io.py 19/01/2016 D.J.Whale
# simplest possible implementation. Only really works well
# for small files. Poor efficiency on large files.
def readline(filename, lineno):
f = open(filename)
lines = f.readlines()
f.close()
return lines[lineno-1] # runtime error if does not exist
def writeline(filename, lineno, data):
# read all lines in
f = open(filename)
lines = f.readlines()
f.close()
# modify in-memory copy first
lineno -= 1
if lineno >= len(lines):
# pad out extra lines as blanks
for i in range(1+lineno-len(lines)):
lines.append("")
lines[lineno] = data
# now create a brand new file and write all the lines out
f = open(filename, "w")
f.writelines(lines)
f.close()
#----- TEST HARNESS -----------------------------------------------------------
def tests():
pass
# write to a file that does not exist, to create it
# write to a file that does exist, to modify it
# write to a file that is locked, get an error
# write to a file that does not exist, no dir permissions, get error
# write to a file that adds a new line at the end
# write to a file that adds a new line way past the end (padding)
# write to a file that modifies a line to make it longer
# write to a file that modifies a line to make it shorter
# read from a file that does not exist
# read from a file in a dir with no permissions, get error
# read from a file without read permissions, get error
# read from a file that exists
# read a line that does not exist
# read a line that does exist
if __name__ == "__main__":
tests()
# END
|
<commit_before># pcode_io.py 19/01/2016 D.J.Whale
# simplest possible implementation. Only really works well
# for small files.
def readline(filename, lineno):
f = open(filename)
lines = f.readlines()
f.close()
return lines[lineno-1] # runtime error if does not exist
def writeline(filename, lineno, data):
# read all lines in
f = open(filename)
lines = f.readlines()
f.close()
# modify in-memory copy first
lineno -= 1
if lineno >= len(lines):
# pad out extra lines as blanks
for i in range(1+lineno-len(lines)):
lines.append("")
lines[lineno] = data
# now create a brand new file and write all the lines out
f = open(filename, "w")
f.writelines(lines)
f.close()
# END
<commit_msg>Test cases specified for io<commit_after>
|
# pcode_io.py 19/01/2016 D.J.Whale
# simplest possible implementation. Only really works well
# for small files. Poor efficiency on large files.
def readline(filename, lineno):
f = open(filename)
lines = f.readlines()
f.close()
return lines[lineno-1] # runtime error if does not exist
def writeline(filename, lineno, data):
# read all lines in
f = open(filename)
lines = f.readlines()
f.close()
# modify in-memory copy first
lineno -= 1
if lineno >= len(lines):
# pad out extra lines as blanks
for i in range(1+lineno-len(lines)):
lines.append("")
lines[lineno] = data
# now create a brand new file and write all the lines out
f = open(filename, "w")
f.writelines(lines)
f.close()
#----- TEST HARNESS -----------------------------------------------------------
def tests():
pass
# write to a file that does not exist, to create it
# write to a file that does exist, to modify it
# write to a file that is locked, get an error
# write to a file that does not exist, no dir permissions, get error
# write to a file that adds a new line at the end
# write to a file that adds a new line way past the end (padding)
# write to a file that modifies a line to make it longer
# write to a file that modifies a line to make it shorter
# read from a file that does not exist
# read from a file in a dir with no permissions, get error
# read from a file without read permissions, get error
# read from a file that exists
# read a line that does not exist
# read a line that does exist
if __name__ == "__main__":
tests()
# END
|
# pcode_io.py 19/01/2016 D.J.Whale
# simplest possible implementation. Only really works well
# for small files.
def readline(filename, lineno):
f = open(filename)
lines = f.readlines()
f.close()
return lines[lineno-1] # runtime error if does not exist
def writeline(filename, lineno, data):
# read all lines in
f = open(filename)
lines = f.readlines()
f.close()
# modify in-memory copy first
lineno -= 1
if lineno >= len(lines):
# pad out extra lines as blanks
for i in range(1+lineno-len(lines)):
lines.append("")
lines[lineno] = data
# now create a brand new file and write all the lines out
f = open(filename, "w")
f.writelines(lines)
f.close()
# END
Test cases specified for io
# pcode_io.py 19/01/2016 D.J.Whale
# simplest possible implementation. Only really works well
# for small files. Poor efficiency on large files.
def readline(filename, lineno):
f = open(filename)
lines = f.readlines()
f.close()
return lines[lineno-1] # runtime error if does not exist
def writeline(filename, lineno, data):
# read all lines in
f = open(filename)
lines = f.readlines()
f.close()
# modify in-memory copy first
lineno -= 1
if lineno >= len(lines):
# pad out extra lines as blanks
for i in range(1+lineno-len(lines)):
lines.append("")
lines[lineno] = data
# now create a brand new file and write all the lines out
f = open(filename, "w")
f.writelines(lines)
f.close()
#----- TEST HARNESS -----------------------------------------------------------
def tests():
pass
# write to a file that does not exist, to create it
# write to a file that does exist, to modify it
# write to a file that is locked, get an error
# write to a file that does not exist, no dir permissions, get error
# write to a file that adds a new line at the end
# write to a file that adds a new line way past the end (padding)
# write to a file that modifies a line to make it longer
# write to a file that modifies a line to make it shorter
# read from a file that does not exist
# read from a file in a dir with no permissions, get error
# read from a file without read permissions, get error
# read from a file that exists
# read a line that does not exist
# read a line that does exist
if __name__ == "__main__":
tests()
# END
|
<commit_before># pcode_io.py 19/01/2016 D.J.Whale
# simplest possible implementation. Only really works well
# for small files.
def readline(filename, lineno):
f = open(filename)
lines = f.readlines()
f.close()
return lines[lineno-1] # runtime error if does not exist
def writeline(filename, lineno, data):
# read all lines in
f = open(filename)
lines = f.readlines()
f.close()
# modify in-memory copy first
lineno -= 1
if lineno >= len(lines):
# pad out extra lines as blanks
for i in range(1+lineno-len(lines)):
lines.append("")
lines[lineno] = data
# now create a brand new file and write all the lines out
f = open(filename, "w")
f.writelines(lines)
f.close()
# END
<commit_msg>Test cases specified for io<commit_after># pcode_io.py 19/01/2016 D.J.Whale
# simplest possible implementation. Only really works well
# for small files. Poor efficiency on large files.
def readline(filename, lineno):
f = open(filename)
lines = f.readlines()
f.close()
return lines[lineno-1] # runtime error if does not exist
def writeline(filename, lineno, data):
# read all lines in
f = open(filename)
lines = f.readlines()
f.close()
# modify in-memory copy first
lineno -= 1
if lineno >= len(lines):
# pad out extra lines as blanks
for i in range(1+lineno-len(lines)):
lines.append("")
lines[lineno] = data
# now create a brand new file and write all the lines out
f = open(filename, "w")
f.writelines(lines)
f.close()
#----- TEST HARNESS -----------------------------------------------------------
def tests():
pass
# write to a file that does not exist, to create it
# write to a file that does exist, to modify it
# write to a file that is locked, get an error
# write to a file that does not exist, no dir permissions, get error
# write to a file that adds a new line at the end
# write to a file that adds a new line way past the end (padding)
# write to a file that modifies a line to make it longer
# write to a file that modifies a line to make it shorter
# read from a file that does not exist
# read from a file in a dir with no permissions, get error
# read from a file without read permissions, get error
# read from a file that exists
# read a line that does not exist
# read a line that does exist
if __name__ == "__main__":
tests()
# END
|
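One case on the new list -- "write to a file that adds a new line way past the end (padding)" -- would expose a latent problem in writeline: the padded entries are empty strings with no trailing newline, so f.writelines(lines) collapses them into the final line. A self-contained reproduction (a trimmed copy of the function, with an existence guard added so it runs against a fresh temp file):

import os
import tempfile

def writeline(filename, lineno, data):
    # Trimmed from pcode_io.writeline; the existence guard is an addition so
    # the demo works on a file that does not exist yet.
    lines = []
    if os.path.exists(filename):
        with open(filename) as f:
            lines = f.readlines()
    lineno -= 1
    if lineno >= len(lines):
        for i in range(1 + lineno - len(lines)):
            lines.append("")  # no newline: padded lines merge on write
    lines[lineno] = data
    with open(filename, "w") as f:
        f.writelines(lines)

path = os.path.join(tempfile.mkdtemp(), "demo.txt")
writeline(path, 3, "third\n")
with open(path) as f:
    print(f.readlines())  # ['third\n'] -- the two padded lines vanished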
539f4baccb968d9d222f2f62573da34d85699f91
|
comment_parser/parsers/common.py
|
comment_parser/parsers/common.py
|
#!/usr/bin/python
"""This module provides constructs common to all comment parsers."""
class Error(Exception):
"""Base Error class for all comment parsers."""
pass
class FileError(Error):
"""Raised if there is an issue reading a given file."""
pass
class UnterminatedCommentError(Error):
"""Raised if an Unterminated multi-line comment is encountered."""
pass
class Comment(object):
"""Represents comments found in source files."""
def __init__(self, text, line_number, multiline=False):
"""Initializes Comment.
Args:
text: String text of comment.
line_number: Line number (int) comment was found on.
multiline: Boolean whether this comment was a multiline comment.
"""
self._text = text
self._line_number = line_number
self._multiline = multiline
def text(self):
"""Returns the comment's text.
Returns:
String
"""
return self._text
def line_number(self):
"""Returns the line number the comment was found on.
Returns:
Int
"""
return self._line_number
def is_multiline(self):
"""Returns whether this comment was a multiline comment.
Returns:
True if comment was a multiline comment, False if not.
"""
return self._multiline
def __str__(self):
return self._text
def __eq__(self, other):
if isinstance(other, self.__class__):
if self.__dict__ == other.__dict__:
return True
return False
|
#!/usr/bin/python
"""This module provides constructs common to all comment parsers."""
class Error(Exception):
"""Base Error class for all comment parsers."""
pass
class FileError(Error):
"""Raised if there is an issue reading a given file."""
pass
class UnterminatedCommentError(Error):
"""Raised if an Unterminated multi-line comment is encountered."""
pass
class Comment(object):
"""Represents comments found in source files."""
def __init__(self, text, line_number, multiline=False):
"""Initializes Comment.
Args:
text: String text of comment.
line_number: Line number (int) comment was found on.
multiline: Boolean whether this comment was a multiline comment.
"""
self._text = text
self._line_number = line_number
self._multiline = multiline
def text(self):
"""Returns the comment's text.
Returns:
String
"""
return self._text
def line_number(self):
"""Returns the line number the comment was found on.
Returns:
Int
"""
return self._line_number
def is_multiline(self):
"""Returns whether this comment was a multiline comment.
Returns:
True if comment was a multiline comment, False if not.
"""
return self._multiline
def __str__(self):
return self._text
def __repr__(self):
return 'Comment(%s, %d, %s)' % (
self._text, self._line_number, self._multiline)
def __eq__(self, other):
if isinstance(other, self.__class__):
if self.__dict__ == other.__dict__:
return True
return False
|
Add __repr__ to Comment class.
|
comment_parser: Add __repr__ to Comment class.
|
Python
|
mit
|
jeanralphaviles/comment_parser
|
#!/usr/bin/python
"""This module provides constructs common to all comment parsers."""
class Error(Exception):
"""Base Error class for all comment parsers."""
pass
class FileError(Error):
"""Raised if there is an issue reading a given file."""
pass
class UnterminatedCommentError(Error):
"""Raised if an Unterminated multi-line comment is encountered."""
pass
class Comment(object):
"""Represents comments found in source files."""
def __init__(self, text, line_number, multiline=False):
"""Initializes Comment.
Args:
text: String text of comment.
line_number: Line number (int) comment was found on.
multiline: Boolean whether this comment was a multiline comment.
"""
self._text = text
self._line_number = line_number
self._multiline = multiline
def text(self):
"""Returns the comment's text.
Returns:
String
"""
return self._text
def line_number(self):
"""Returns the line number the comment was found on.
Returns:
Int
"""
return self._line_number
def is_multiline(self):
"""Returns whether this comment was a multiline comment.
Returns:
True if comment was a multiline comment, False if not.
"""
return self._multiline
def __str__(self):
return self._text
def __eq__(self, other):
if isinstance(other, self.__class__):
if self.__dict__ == other.__dict__:
return True
return False
comment_parser: Add __repr__ to Comment class.
|
#!/usr/bin/python
"""This module provides constructs common to all comment parsers."""
class Error(Exception):
"""Base Error class for all comment parsers."""
pass
class FileError(Error):
"""Raised if there is an issue reading a given file."""
pass
class UnterminatedCommentError(Error):
"""Raised if an Unterminated multi-line comment is encountered."""
pass
class Comment(object):
"""Represents comments found in source files."""
def __init__(self, text, line_number, multiline=False):
"""Initializes Comment.
Args:
text: String text of comment.
line_number: Line number (int) comment was found on.
multiline: Boolean whether this comment was a multiline comment.
"""
self._text = text
self._line_number = line_number
self._multiline = multiline
def text(self):
"""Returns the comment's text.
Returns:
String
"""
return self._text
def line_number(self):
"""Returns the line number the comment was found on.
Returns:
Int
"""
return self._line_number
def is_multiline(self):
"""Returns whether this comment was a multiline comment.
Returns:
True if comment was a multiline comment, False if not.
"""
return self._multiline
def __str__(self):
return self._text
def __repr__(self):
return 'Comment(%s, %d, %s)' % (
self._text, self._line_number, self._multiline)
def __eq__(self, other):
if isinstance(other, self.__class__):
if self.__dict__ == other.__dict__:
return True
return False
|
<commit_before>#!/usr/bin/python
"""This module provides constructs common to all comment parsers."""
class Error(Exception):
"""Base Error class for all comment parsers."""
pass
class FileError(Error):
"""Raised if there is an issue reading a given file."""
pass
class UnterminatedCommentError(Error):
"""Raised if an Unterminated multi-line comment is encountered."""
pass
class Comment(object):
"""Represents comments found in source files."""
def __init__(self, text, line_number, multiline=False):
"""Initializes Comment.
Args:
text: String text of comment.
line_number: Line number (int) comment was found on.
multiline: Boolean whether this comment was a multiline comment.
"""
self._text = text
self._line_number = line_number
self._multiline = multiline
def text(self):
"""Returns the comment's text.
Returns:
String
"""
return self._text
def line_number(self):
"""Returns the line number the comment was found on.
Returns:
Int
"""
return self._line_number
def is_multiline(self):
"""Returns whether this comment was a multiline comment.
Returns:
True if comment was a multiline comment, False if not.
"""
return self._multiline
def __str__(self):
return self._text
def __eq__(self, other):
if isinstance(other, self.__class__):
if self.__dict__ == other.__dict__:
return True
return False
<commit_msg>comment_parser: Add __repr__ to Comment class.<commit_after>
|
#!/usr/bin/python
"""This module provides constructs common to all comment parsers."""
class Error(Exception):
"""Base Error class for all comment parsers."""
pass
class FileError(Error):
"""Raised if there is an issue reading a given file."""
pass
class UnterminatedCommentError(Error):
"""Raised if an Unterminated multi-line comment is encountered."""
pass
class Comment(object):
"""Represents comments found in source files."""
def __init__(self, text, line_number, multiline=False):
"""Initializes Comment.
Args:
text: String text of comment.
line_number: Line number (int) comment was found on.
multiline: Boolean whether this comment was a multiline comment.
"""
self._text = text
self._line_number = line_number
self._multiline = multiline
def text(self):
"""Returns the comment's text.
Returns:
String
"""
return self._text
def line_number(self):
"""Returns the line number the comment was found on.
Returns:
Int
"""
return self._line_number
def is_multiline(self):
"""Returns whether this comment was a multiline comment.
Returns:
True if comment was a multiline comment, False if not.
"""
return self._multiline
def __str__(self):
return self._text
def __repr__(self):
return 'Comment(%s, %d, %s)' % (
self._text, self._line_number, self._multiline)
def __eq__(self, other):
if isinstance(other, self.__class__):
if self.__dict__ == other.__dict__:
return True
return False
|
#!/usr/bin/python
"""This module provides constructs common to all comment parsers."""
class Error(Exception):
"""Base Error class for all comment parsers."""
pass
class FileError(Error):
"""Raised if there is an issue reading a given file."""
pass
class UnterminatedCommentError(Error):
"""Raised if an Unterminated multi-line comment is encountered."""
pass
class Comment(object):
"""Represents comments found in source files."""
def __init__(self, text, line_number, multiline=False):
"""Initializes Comment.
Args:
text: String text of comment.
line_number: Line number (int) comment was found on.
multiline: Boolean whether this comment was a multiline comment.
"""
self._text = text
self._line_number = line_number
self._multiline = multiline
def text(self):
"""Returns the comment's text.
Returns:
String
"""
return self._text
def line_number(self):
"""Returns the line number the comment was found on.
Returns:
Int
"""
return self._line_number
def is_multiline(self):
"""Returns whether this comment was a multiline comment.
Returns:
True if comment was a multiline comment, False if not.
"""
return self._multiline
def __str__(self):
return self._text
def __eq__(self, other):
if isinstance(other, self.__class__):
if self.__dict__ == other.__dict__:
return True
return False
comment_parser: Add __repr__ to Comment class.
#!/usr/bin/python
"""This module provides constructs common to all comment parsers."""
class Error(Exception):
"""Base Error class for all comment parsers."""
pass
class FileError(Error):
"""Raised if there is an issue reading a given file."""
pass
class UnterminatedCommentError(Error):
"""Raised if an Unterminated multi-line comment is encountered."""
pass
class Comment(object):
"""Represents comments found in source files."""
def __init__(self, text, line_number, multiline=False):
"""Initializes Comment.
Args:
text: String text of comment.
line_number: Line number (int) comment was found on.
multiline: Boolean whether this comment was a multiline comment.
"""
self._text = text
self._line_number = line_number
self._multiline = multiline
def text(self):
"""Returns the comment's text.
Returns:
String
"""
return self._text
def line_number(self):
"""Returns the line number the comment was found on.
Returns:
Int
"""
return self._line_number
def is_multiline(self):
"""Returns whether this comment was a multiline comment.
Returns:
True if comment was a multiline comment, False if not.
"""
return self._multiline
def __str__(self):
return self._text
def __repr__(self):
return 'Comment(%s, %d, %s)' % (
self._text, self._line_number, self._multiline)
def __eq__(self, other):
if isinstance(other, self.__class__):
if self.__dict__ == other.__dict__:
return True
return False
|
<commit_before>#!/usr/bin/python
"""This module provides constructs common to all comment parsers."""
class Error(Exception):
"""Base Error class for all comment parsers."""
pass
class FileError(Error):
"""Raised if there is an issue reading a given file."""
pass
class UnterminatedCommentError(Error):
"""Raised if an Unterminated multi-line comment is encountered."""
pass
class Comment(object):
"""Represents comments found in source files."""
def __init__(self, text, line_number, multiline=False):
"""Initializes Comment.
Args:
text: String text of comment.
line_number: Line number (int) comment was found on.
multiline: Boolean whether this comment was a multiline comment.
"""
self._text = text
self._line_number = line_number
self._multiline = multiline
def text(self):
"""Returns the comment's text.
Returns:
String
"""
return self._text
def line_number(self):
"""Returns the line number the comment was found on.
Returns:
Int
"""
return self._line_number
def is_multiline(self):
"""Returns whether this comment was a multiline comment.
Returns:
True if comment was a multiline comment, False if not.
"""
return self._multiline
def __str__(self):
return self._text
def __eq__(self, other):
if isinstance(other, self.__class__):
if self.__dict__ == other.__dict__:
return True
return False
<commit_msg>comment_parser: Add __repr__ to Comment class.<commit_after>#!/usr/bin/python
"""This module provides constructs common to all comment parsers."""
class Error(Exception):
"""Base Error class for all comment parsers."""
pass
class FileError(Error):
"""Raised if there is an issue reading a given file."""
pass
class UnterminatedCommentError(Error):
"""Raised if an Unterminated multi-line comment is encountered."""
pass
class Comment(object):
"""Represents comments found in source files."""
def __init__(self, text, line_number, multiline=False):
"""Initializes Comment.
Args:
text: String text of comment.
line_number: Line number (int) comment was found on.
multiline: Boolean whether this comment was a multiline comment.
"""
self._text = text
self._line_number = line_number
self._multiline = multiline
def text(self):
"""Returns the comment's text.
Returns:
String
"""
return self._text
def line_number(self):
"""Returns the line number the comment was found on.
Returns:
Int
"""
return self._line_number
def is_multiline(self):
"""Returns whether this comment was a multiline comment.
Returns:
True if comment was a multiline comment, False if not.
"""
return self._multiline
def __str__(self):
return self._text
def __repr__(self):
return 'Comment(%s, %d, %s)' % (
self._text, self._line_number, self._multiline)
def __eq__(self, other):
if isinstance(other, self.__class__):
if self.__dict__ == other.__dict__:
return True
return False
|
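A minimal sketch of the behaviour the commit above adds, assuming the Comment class exactly as defined in the record; the sample text and line number are hypothetical.

# Assumes the Comment class from the record above; the values are made up.
c = Comment('TODO: remove this hack', 3, multiline=False)
print(str(c))    # TODO: remove this hack
print(repr(c))   # Comment(TODO: remove this hack, 3, False)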
1290bc59774aac7756658c3480d6a5293c7a3467
|
planner/models.py
|
planner/models.py
|
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
origin = models.CharField(max_length=63)
destination = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.origin,
self.destination
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
|
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
start = models.CharField(max_length=63)
end = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.start,
self.end
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
|
Rename Route model's start and end fields to be consistent with front end identification
|
Rename Route model's start and end fields to be consistent with front end identification
|
Python
|
apache-2.0
|
jwarren116/RoadTrip,jwarren116/RoadTrip,jwarren116/RoadTrip
|
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
origin = models.CharField(max_length=63)
destination = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.origin,
self.destination
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
Rename Route model's start and end fields to be consistent with front end identification
|
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
start = models.CharField(max_length=63)
end = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.start,
self.end
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
|
<commit_before>from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
origin = models.CharField(max_length=63)
destination = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.origin,
self.destination
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
<commit_msg>Rename Route model's start and end fields to be consistent with front end identification<commit_after>
|
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
start = models.CharField(max_length=63)
end = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.start,
self.end
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
|
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
origin = models.CharField(max_length=63)
destination = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.origin,
self.destination
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
Rename Route model's start and end fields to be consistent with front end identification
from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
start = models.CharField(max_length=63)
end = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.start,
self.end
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
|
<commit_before>from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
origin = models.CharField(max_length=63)
destination = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.origin,
self.destination
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
<commit_msg>Rename Route model's start and end fields to be consistent with front end identification<commit_after>from django.db import models
# Route model
# Start and end locations with additional stop-overs
class Route(models.Model):
start = models.CharField(max_length=63)
end = models.CharField(max_length=63)
def __unicode__(self):
return "{} to {}".format(
self.start,
self.end
)
class Waypoint(models.Model):
waypoint = models.CharField(max_length=63)
route = models.ForeignKey(Route, related_name="waypoints")
def __unicode__(self):
return str(self.waypoint)
def __repr__(self):
return str(self.waypoint)
# TripDetail model
# Additional trip details, such as traveling with children or pets
class TripDetail(models.Model):
description = models.CharField(max_length=127)
def __unicode__(self):
return str(self.description)
# RoadTrip model
# Start and end dates, Route and TripDetails
class RoadTrip(models.Model):
start_date = models.DateField()
end_date = models.DateField()
route = models.OneToOneField(Route)
details = models.ManyToManyField(TripDetail)
def __unicode__(self):
return "{} from {} to {}".format(
self.route,
self.start_date,
self.end_date
)
|
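A quick sketch of the renamed fields in use, assuming the Route model as defined in the record (old-style Django with __unicode__); the city names are hypothetical. Renaming model fields would normally also require a database migration, which the record does not show.

# Assumes the renamed Route model from the record above (Python 2 era Django).
route = Route(start='Seattle', end='Portland')
print(route.__unicode__())  # Seattle to Portland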
42402aa72fdaf3bd5430505a1ceb86631aea97b8
|
scripts/slave/chromium/dart_buildbot_run.py
|
scripts/slave/chromium/dart_buildbot_run.py
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for the dartium buildbots.
This script is called from buildbot and reports results using the buildbot
annotation scheme.
"""
import os
import sys
from common import chromium_utils
def main():
builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='')
is_release_bot = builder_name.startswith('release')
script = ''
if is_release_bot:
script = 'src/dartium_tools/buildbot_release_annotated_steps.py'
else:
script = 'src/dartium_tools/buildbot_annotated_steps.py'
return chromium_utils.RunCommand([sys.executable, script])
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for the dartium buildbots.
This script is called from buildbot and reports results using the buildbot
annotation scheme.
"""
import os
import sys
from common import chromium_utils
def main():
builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='')
script = 'src/dartium_tools/buildbot_annotated_steps.py'
chromium_utils.RunCommand([sys.executable, script])
# BIG HACK
# Normal ninja clobbering does not work due to symlinks/python on windows
# Full clobbering before building does not work since it will destroy
# the ninja build files
# So we basically clobber at the end here
if chromium_utils.IsWindows() and 'full' in builder_name:
chromium_utils.RemoveDirectory('src/out')
return 0
if __name__ == '__main__':
sys.exit(main())
|
Move hackish clobbering to the script that calls the dartium annotated steps
|
Move hackish clobbering to the script that calls the dartium annotated steps
Also clean it up; we don't have any builders that start with release
I will remove this functionality from the dartium annotated step since it does not work correctly. This change allows us to use the normal chromium_utils function, which we know works
TBR=whesse
Review URL: https://codereview.chromium.org/230683002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@262656 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
eunchong/build,eunchong/build,eunchong/build,eunchong/build
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for the dartium buildbots.
This script is called from buildbot and reports results using the buildbot
annotation scheme.
"""
import os
import sys
from common import chromium_utils
def main():
builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='')
is_release_bot = builder_name.startswith('release')
script = ''
if is_release_bot:
script = 'src/dartium_tools/buildbot_release_annotated_steps.py'
else:
script = 'src/dartium_tools/buildbot_annotated_steps.py'
return chromium_utils.RunCommand([sys.executable, script])
if __name__ == '__main__':
sys.exit(main())
Move hackish clobbering to the script that calls the dartium annotated steps
Also clean it up; we don't have any builders that start with release
I will remove this functionality from the dartium annotated step since it does not work correctly. This change allows us to use the normal chromium_utils function, which we know works
TBR=whesse
Review URL: https://codereview.chromium.org/230683002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@262656 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for the dartium buildbots.
This script is called from buildbot and reports results using the buildbot
annotation scheme.
"""
import os
import sys
from common import chromium_utils
def main():
builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='')
script = 'src/dartium_tools/buildbot_annotated_steps.py'
chromium_utils.RunCommand([sys.executable, script])
# BIG HACK
# Normal ninja clobbering does not work due to symlinks/python on windows
# Full clobbering before building does not work since it will destroy
# the ninja build files
# So we basically clobber at the end here
if chromium_utils.IsWindows() and 'full' in builder_name:
chromium_utils.RemoveDirectory('src/out')
return 0
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for the dartium buildbots.
This script is called from buildbot and reports results using the buildbot
annotation scheme.
"""
import os
import sys
from common import chromium_utils
def main():
builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='')
is_release_bot = builder_name.startswith('release')
script = ''
if is_release_bot:
script = 'src/dartium_tools/buildbot_release_annotated_steps.py'
else:
script = 'src/dartium_tools/buildbot_annotated_steps.py'
return chromium_utils.RunCommand([sys.executable, script])
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Move hackish clobbering to the script that calls the dartium annotated steps
Also clean it up; we don't have any builders that start with release
I will remove this functionality from the dartium annotated step since it does not work correctly. This change allows us to use the normal chromium_utils function, which we know works
TBR=whesse
Review URL: https://codereview.chromium.org/230683002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@262656 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for the dartium buildbots.
This script is called from buildbot and reports results using the buildbot
annotation scheme.
"""
import os
import sys
from common import chromium_utils
def main():
builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='')
script = 'src/dartium_tools/buildbot_annotated_steps.py'
chromium_utils.RunCommand([sys.executable, script])
# BIG HACK
# Normal ninja clobbering does not work due to symlinks/python on windows
# Full clobbering before building does not work since it will destroy
# the ninja build files
# So we basically clobber at the end here
if chromium_utils.IsWindows() and 'full' in builder_name:
chromium_utils.RemoveDirectory('src/out')
return 0
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for the dartium buildbots.
This script is called from buildbot and reports results using the buildbot
annotation scheme.
"""
import os
import sys
from common import chromium_utils
def main():
builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='')
is_release_bot = builder_name.startswith('release')
script = ''
if is_release_bot:
script = 'src/dartium_tools/buildbot_release_annotated_steps.py'
else:
script = 'src/dartium_tools/buildbot_annotated_steps.py'
return chromium_utils.RunCommand([sys.executable, script])
if __name__ == '__main__':
sys.exit(main())
Move hackish clobbering to the script that calls the dartium annotated steps
Also clean it up; we don't have any builders that start with release
I will remove this functionality from the dartium annotated step since it does not work correctly. This change allows us to use the normal chromium_utils function, which we know works
TBR=whesse
Review URL: https://codereview.chromium.org/230683002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@262656 0039d316-1c4b-4281-b951-d872f2087c98
#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for the dartium buildbots.
This script is called from buildbot and reports results using the buildbot
annotation scheme.
"""
import os
import sys
from common import chromium_utils
def main():
builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='')
script = 'src/dartium_tools/buildbot_annotated_steps.py'
chromium_utils.RunCommand([sys.executable, script])
# BIG HACK
# Normal ninja clobbering does not work due to symlinks/python on windows
# Full clobbering before building does not work since it will destroy
# the ninja build files
# So we basically clobber at the end here
if chromium_utils.IsWindows() and 'full' in builder_name:
chromium_utils.RemoveDirectory('src/out')
return 0
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for the dartium buildbots.
This script is called from buildbot and reports results using the buildbot
annotation scheme.
"""
import os
import sys
from common import chromium_utils
def main():
builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='')
is_release_bot = builder_name.startswith('release')
script = ''
if is_release_bot:
script = 'src/dartium_tools/buildbot_release_annotated_steps.py'
else:
script = 'src/dartium_tools/buildbot_annotated_steps.py'
return chromium_utils.RunCommand([sys.executable, script])
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Move hackish clobbering to the script that calls the dartium annotated steps
Also clean it up; we don't have any builders that start with release
I will remove this functionality from the dartium annotated step since it does not work correctly. This change allows us to use the normal chromium_utils function, which we know works
TBR=whesse
Review URL: https://codereview.chromium.org/230683002
git-svn-id: 239fca9b83025a0b6f823aeeca02ba5be3d9fd76@262656 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/env python
# Copyright (c) 2012 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Entry point for the dartium buildbots.
This script is called from buildbot and reports results using the buildbot
annotation scheme.
"""
import os
import sys
from common import chromium_utils
def main():
builder_name = os.getenv('BUILDBOT_BUILDERNAME', default='')
script = 'src/dartium_tools/buildbot_annotated_steps.py'
chromium_utils.RunCommand([sys.executable, script])
# BIG HACK
# Normal ninja clobbering does not work due to symlinks/python on windows
# Full clobbering before building does not work since it will destroy
# the ninja build files
# So we basically clobber at the end here
if chromium_utils.IsWindows() and 'full' in builder_name:
chromium_utils.RemoveDirectory('src/out')
return 0
if __name__ == '__main__':
sys.exit(main())
|
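A rough illustration of the end-of-build clobber condition introduced above, with the platform check and directory removal stubbed out so the sketch is safe to run; the builder names are hypothetical.

# Mirrors the new condition: clobber only on Windows 'full' builders.
def should_clobber(builder_name, is_windows):
    return is_windows and 'full' in builder_name

print(should_clobber('dartium-win-full', True))   # True
print(should_clobber('dartium-lin-full', False))  # False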
9d4647dca6f5e356f807d6885019d41a4b6d4847
|
skimage/measure/__init__.py
|
skimage/measure/__init__.py
|
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
|
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon']
|
Add __all__ to measure package
|
Add __all__ to measure package
|
Python
|
bsd-3-clause
|
robintw/scikit-image,ClinicalGraphics/scikit-image,vighneshbirodkar/scikit-image,newville/scikit-image,bennlich/scikit-image,paalge/scikit-image,chriscrosscutler/scikit-image,michaelaye/scikit-image,rjeli/scikit-image,rjeli/scikit-image,keflavich/scikit-image,pratapvardhan/scikit-image,oew1v07/scikit-image,bennlich/scikit-image,michaelpacer/scikit-image,jwiggins/scikit-image,ofgulban/scikit-image,ajaybhat/scikit-image,warmspringwinds/scikit-image,SamHames/scikit-image,keflavich/scikit-image,paalge/scikit-image,blink1073/scikit-image,Britefury/scikit-image,GaZ3ll3/scikit-image,oew1v07/scikit-image,almarklein/scikit-image,ClinicalGraphics/scikit-image,robintw/scikit-image,newville/scikit-image,emon10005/scikit-image,youprofit/scikit-image,vighneshbirodkar/scikit-image,ofgulban/scikit-image,michaelaye/scikit-image,Hiyorimi/scikit-image,dpshelio/scikit-image,jwiggins/scikit-image,chintak/scikit-image,WarrenWeckesser/scikits-image,bsipocz/scikit-image,ofgulban/scikit-image,bsipocz/scikit-image,Hiyorimi/scikit-image,Midafi/scikit-image,ajaybhat/scikit-image,chintak/scikit-image,chriscrosscutler/scikit-image,emon10005/scikit-image,pratapvardhan/scikit-image,Midafi/scikit-image,SamHames/scikit-image,chintak/scikit-image,WarrenWeckesser/scikits-image,chintak/scikit-image,rjeli/scikit-image,paalge/scikit-image,almarklein/scikit-image,Britefury/scikit-image,juliusbierk/scikit-image,vighneshbirodkar/scikit-image,warmspringwinds/scikit-image,blink1073/scikit-image,almarklein/scikit-image,GaZ3ll3/scikit-image,juliusbierk/scikit-image,SamHames/scikit-image,almarklein/scikit-image,michaelpacer/scikit-image,dpshelio/scikit-image,SamHames/scikit-image,youprofit/scikit-image
|
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
Add __all__ to measure package
|
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon']
|
<commit_before>from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon<commit_msg>Add __all__ to measure package<commit_after>
|
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon']
|
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
Add __all__ to measure package
from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon']
|
<commit_before>from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon<commit_msg>Add __all__ to measure package<commit_after>from .find_contours import find_contours
from ._regionprops import regionprops, perimeter
from ._structural_similarity import structural_similarity
from ._polygon import approximate_polygon, subdivide_polygon
__all__ = ['find_contours',
'regionprops',
'perimeter',
'structural_similarity',
'approximate_polygon',
'subdivide_polygon']
|
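A self-contained sketch of what adding __all__ changes: star-imports bind only the listed names. The module here is hypothetical; it just stands in for a package laid out like the record above.

# Demo of how __all__ limits 'from ... import *'; 'demo_mod' is hypothetical.
import sys
import types

mod = types.ModuleType('demo_mod')
mod.kept, mod.dropped = 1, 2
mod.__all__ = ['kept']
sys.modules['demo_mod'] = mod

ns = {}
exec('from demo_mod import *', ns)
print('kept' in ns, 'dropped' in ns)  # True False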
42d0edb5fcd71634dccf030cf3daa54e606de0f8
|
pombola/south_africa/urls.py
|
pombola/south_africa/urls.py
|
from django.conf.urls import patterns, include, url
from pombola.south_africa.views import LatLonDetailView, SAPlaceDetailSub, SAOrganisationDetailView
from pombola.core.urls import organisation_patterns
# Override the organisation url so we can vary it depending on the organisation type.
for index, pattern in enumerate(organisation_patterns):
if pattern.name == 'organisation':
organisation_patterns[index] = url(r'^(?P<slug>[-\w]+)/$', SAOrganisationDetailView.as_view(), name='organisation')
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
)
|
from django.conf.urls import patterns, include, url
from pombola.south_africa.views import LatLonDetailView, SAPlaceDetailSub, SAOrganisationDetailView
from pombola.core.urls import organisation_patterns
# Override the organisation url so we can vary it depending on the organisation type.
for index, pattern in enumerate(organisation_patterns):
if pattern.name == 'organisation':
organisation_patterns[index] = url(r'^(?P<slug>[-\w+]+)/$', SAOrganisationDetailView.as_view(), name='organisation')
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
)
|
Handle a '+' in organisation slugs
|
Handle a '+' in organisation slugs
|
Python
|
agpl-3.0
|
hzj123/56th,patricmutwiri/pombola,ken-muturi/pombola,ken-muturi/pombola,ken-muturi/pombola,patricmutwiri/pombola,hzj123/56th,patricmutwiri/pombola,geoffkilpin/pombola,mysociety/pombola,mysociety/pombola,mysociety/pombola,patricmutwiri/pombola,geoffkilpin/pombola,geoffkilpin/pombola,hzj123/56th,mysociety/pombola,patricmutwiri/pombola,patricmutwiri/pombola,hzj123/56th,hzj123/56th,geoffkilpin/pombola,mysociety/pombola,ken-muturi/pombola,ken-muturi/pombola,hzj123/56th,mysociety/pombola,geoffkilpin/pombola,geoffkilpin/pombola,ken-muturi/pombola
|
from django.conf.urls import patterns, include, url
from pombola.south_africa.views import LatLonDetailView, SAPlaceDetailSub, SAOrganisationDetailView
from pombola.core.urls import organisation_patterns
# Override the organisation url so we can vary it depending on the organisation type.
for index, pattern in enumerate(organisation_patterns):
if pattern.name == 'organisation':
organisation_patterns[index] = url(r'^(?P<slug>[-\w]+)/$', SAOrganisationDetailView.as_view(), name='organisation')
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
)
Handle a '+' in organisation slugs
|
from django.conf.urls import patterns, include, url
from pombola.south_africa.views import LatLonDetailView, SAPlaceDetailSub, SAOrganisationDetailView
from pombola.core.urls import organisation_patterns
# Override the organisation url so we can vary it depending on the organisation type.
for index, pattern in enumerate(organisation_patterns):
if pattern.name == 'organisation':
organisation_patterns[index] = url(r'^(?P<slug>[-\w+]+)/$', SAOrganisationDetailView.as_view(), name='organisation')
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from pombola.south_africa.views import LatLonDetailView, SAPlaceDetailSub, SAOrganisationDetailView
from pombola.core.urls import organisation_patterns
# Override the organisation url so we can vary it depending on the organisation type.
for index, pattern in enumerate(organisation_patterns):
if pattern.name == 'organisation':
organisation_patterns[index] = url(r'^(?P<slug>[-\w]+)/$', SAOrganisationDetailView.as_view(), name='organisation')
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
)
<commit_msg>Handle a '+' in organisation slugs<commit_after>
|
from django.conf.urls import patterns, include, url
from pombola.south_africa.views import LatLonDetailView, SAPlaceDetailSub, SAOrganisationDetailView
from pombola.core.urls import organisation_patterns
# Override the organisation url so we can vary it depending on the organisation type.
for index, pattern in enumerate(organisation_patterns):
if pattern.name == 'organisation':
organisation_patterns[index] = url(r'^(?P<slug>[-\w+]+)/$', SAOrganisationDetailView.as_view(), name='organisation')
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
)
|
from django.conf.urls import patterns, include, url
from pombola.south_africa.views import LatLonDetailView, SAPlaceDetailSub, SAOrganisationDetailView
from pombola.core.urls import organisation_patterns
# Override the organisation url so we can vary it depending on the organisation type.
for index, pattern in enumerate(organisation_patterns):
if pattern.name == 'organisation':
organisation_patterns[index] = url(r'^(?P<slug>[-\w]+)/$', SAOrganisationDetailView.as_view(), name='organisation')
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
)
Handle a '+' in organisation slugs
from django.conf.urls import patterns, include, url
from pombola.south_africa.views import LatLonDetailView, SAPlaceDetailSub, SAOrganisationDetailView
from pombola.core.urls import organisation_patterns
# Override the organisation url so we can vary it depending on the organisation type.
for index, pattern in enumerate(organisation_patterns):
if pattern.name == 'organisation':
organisation_patterns[index] = url(r'^(?P<slug>[-\w+]+)/$', SAOrganisationDetailView.as_view(), name='organisation')
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
)
|
<commit_before>from django.conf.urls import patterns, include, url
from pombola.south_africa.views import LatLonDetailView, SAPlaceDetailSub, SAOrganisationDetailView
from pombola.core.urls import organisation_patterns
# Override the organisation url so we can vary it depending on the organisation type.
for index, pattern in enumerate(organisation_patterns):
if pattern.name == 'organisation':
organisation_patterns[index] = url(r'^(?P<slug>[-\w]+)/$', SAOrganisationDetailView.as_view(), name='organisation')
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
)
<commit_msg>Handle a '+' in organisation slugs<commit_after>from django.conf.urls import patterns, include, url
from pombola.south_africa.views import LatLonDetailView, SAPlaceDetailSub, SAOrganisationDetailView
from pombola.core.urls import organisation_patterns
# Override the organisation url so we can vary it depending on the organisation type.
for index, pattern in enumerate(organisation_patterns):
if pattern.name == 'organisation':
organisation_patterns[index] = url(r'^(?P<slug>[-\w+]+)/$', SAOrganisationDetailView.as_view(), name='organisation')
urlpatterns = patterns('pombola.south_africa.views',
url(r'^place/latlon/(?P<lat>[0-9\.-]+),(?P<lon>[0-9\.-]+)/', LatLonDetailView.as_view(), name='latlon'),
url(r'^place/(?P<slug>[-\w]+)/places/', SAPlaceDetailSub.as_view(), {'sub_page': 'places'}, name='place_places'),
)
|
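A minimal check of the widened character class in the commit above; the slug value is hypothetical.

import re

# The new class [-\w+]+ also accepts '+', which the old [-\w]+ rejected.
new_pat = re.compile(r'^(?P<slug>[-\w+]+)/$')
old_pat = re.compile(r'^(?P<slug>[-\w]+)/$')
slug = 'ncop+national-assembly/'
print(bool(new_pat.match(slug)), bool(old_pat.match(slug)))  # True False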
26bb10e96072fd901cb13b326f525bdcd7045337
|
byceps/blueprints/news_admin/forms.py
|
byceps/blueprints/news_admin/forms.py
|
"""
byceps.blueprints.news_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ChannelCreateForm(LocalizedForm):
channel_id = StringField('ID', validators=[Length(min=1, max=40)])
url_prefix = StringField('URL-Präfix', [InputRequired(), Length(max=80)])
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=80)])
body = TextAreaField('Text', [InputRequired()])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)])
class ItemUpdateForm(ItemCreateForm):
pass
|
"""
byceps.blueprints.news_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ChannelCreateForm(LocalizedForm):
channel_id = StringField('ID', validators=[Length(min=1, max=40)])
url_prefix = StringField('URL-Präfix', [InputRequired(), Length(max=80)])
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=100), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=100)])
body = TextAreaField('Text', [InputRequired()])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=100)])
class ItemUpdateForm(ItemCreateForm):
pass
|
Increase form length limits for news item's slug, title, and image URL path
|
Increase form length limits for news item's slug, title, and image URL path
|
Python
|
bsd-3-clause
|
homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps
|
"""
byceps.blueprints.news_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ChannelCreateForm(LocalizedForm):
channel_id = StringField('ID', validators=[Length(min=1, max=40)])
url_prefix = StringField('URL-Präfix', [InputRequired(), Length(max=80)])
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=80)])
body = TextAreaField('Text', [InputRequired()])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)])
class ItemUpdateForm(ItemCreateForm):
pass
Increase form length limits for news item's slug, title, and image URL path
|
"""
byceps.blueprints.news_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ChannelCreateForm(LocalizedForm):
channel_id = StringField('ID', validators=[Length(min=1, max=40)])
url_prefix = StringField('URL-Präfix', [InputRequired(), Length(max=80)])
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=100), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=100)])
body = TextAreaField('Text', [InputRequired()])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=100)])
class ItemUpdateForm(ItemCreateForm):
pass
|
<commit_before>"""
byceps.blueprints.news_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ChannelCreateForm(LocalizedForm):
channel_id = StringField('ID', validators=[Length(min=1, max=40)])
url_prefix = StringField('URL-Präfix', [InputRequired(), Length(max=80)])
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=80)])
body = TextAreaField('Text', [InputRequired()])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)])
class ItemUpdateForm(ItemCreateForm):
pass
<commit_msg>Increase form length limits for news item's slug, title, and image URL path<commit_after>
|
"""
byceps.blueprints.news_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ChannelCreateForm(LocalizedForm):
channel_id = StringField('ID', validators=[Length(min=1, max=40)])
url_prefix = StringField('URL-Präfix', [InputRequired(), Length(max=80)])
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=100), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=100)])
body = TextAreaField('Text', [InputRequired()])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=100)])
class ItemUpdateForm(ItemCreateForm):
pass
|
"""
byceps.blueprints.news_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ChannelCreateForm(LocalizedForm):
channel_id = StringField('ID', validators=[Length(min=1, max=40)])
url_prefix = StringField('URL-Präfix', [InputRequired(), Length(max=80)])
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=80)])
body = TextAreaField('Text', [InputRequired()])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)])
class ItemUpdateForm(ItemCreateForm):
pass
Increase form length limits for news item's slug, title, and image URL path
"""
byceps.blueprints.news_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ChannelCreateForm(LocalizedForm):
channel_id = StringField('ID', validators=[Length(min=1, max=40)])
url_prefix = StringField('URL-Präfix', [InputRequired(), Length(max=80)])
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=100), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=100)])
body = TextAreaField('Text', [InputRequired()])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=100)])
class ItemUpdateForm(ItemCreateForm):
pass
|
<commit_before>"""
byceps.blueprints.news_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ChannelCreateForm(LocalizedForm):
channel_id = StringField('ID', validators=[Length(min=1, max=40)])
url_prefix = StringField('URL-Präfix', [InputRequired(), Length(max=80)])
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=80), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=80)])
body = TextAreaField('Text', [InputRequired()])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=80)])
class ItemUpdateForm(ItemCreateForm):
pass
<commit_msg>Increase form length limits for news item's slug, title, and image URL path<commit_after>"""
byceps.blueprints.news_admin.forms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
import re
from wtforms import StringField, TextAreaField
from wtforms.validators import InputRequired, Length, Optional, Regexp
from ...util.l10n import LocalizedForm
SLUG_REGEX = re.compile('^[a-z0-9-]+$')
class ChannelCreateForm(LocalizedForm):
channel_id = StringField('ID', validators=[Length(min=1, max=40)])
url_prefix = StringField('URL-Präfix', [InputRequired(), Length(max=80)])
class ItemCreateForm(LocalizedForm):
slug = StringField('Slug', [InputRequired(), Length(max=100), Regexp(SLUG_REGEX, message='Nur Kleinbuchstaben, Ziffern und Bindestrich sind erlaubt.')])
title = StringField('Titel', [InputRequired(), Length(max=100)])
body = TextAreaField('Text', [InputRequired()])
image_url_path = StringField('Bild-URL-Pfad', [Optional(), Length(max=100)])
class ItemUpdateForm(ItemCreateForm):
pass
|
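Illustration only: a 90-character slug now fits under the raised max=100 limit where it would have exceeded the old max=80; the length comparison needs no WTForms machinery.

# Hypothetical 90-character slug against the old and new limits.
slug = 'a' * 90
print(len(slug) <= 100)  # True  (new limit)
print(len(slug) <= 80)   # False (old limit)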
2dfd70a064162fe9a1392e5870dd45dac001bca4
|
varify/conf/settings.py
|
varify/conf/settings.py
|
import os
from global_settings import *
try:
from local_settings import *
except ImportError:
import warnings
warnings.warn('Local settings have not been found (src.conf.local_settings)')
# FORCE_SCRIPT_NAME overrides the interpreted 'SCRIPT_NAME' provided by the
# web server. since the URLs below are used for various purposes outside of
# the WSGI application (static and media files), these need to be updated to
# reflect this alteration
if FORCE_SCRIPT_NAME:
ADMIN_MEDIA_PREFIX = os.path.join(FORCE_SCRIPT_NAME, ADMIN_MEDIA_PREFIX[1:])
STATIC_URL = os.path.join(FORCE_SCRIPT_NAME, STATIC_URL[1:])
MEDIA_URL = os.path.join(FORCE_SCRIPT_NAME, MEDIA_URL[1:])
LOGIN_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_URL[1:])
LOGOUT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGOUT_URL[1:])
LOGIN_REDIRECT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_REDIRECT_URL[1:])
|
import os
from global_settings import *
try:
from local_settings import *
except ImportError:
import warnings
warnings.warn('Local settings have not been found (varify.conf.local_settings)')
# FORCE_SCRIPT_NAME overrides the interpreted 'SCRIPT_NAME' provided by the
# web server. since the URLs below are used for various purposes outside of
# the WSGI application (static and media files), these need to be updated to
# reflect this alteration
if FORCE_SCRIPT_NAME:
ADMIN_MEDIA_PREFIX = os.path.join(FORCE_SCRIPT_NAME, ADMIN_MEDIA_PREFIX[1:])
STATIC_URL = os.path.join(FORCE_SCRIPT_NAME, STATIC_URL[1:])
MEDIA_URL = os.path.join(FORCE_SCRIPT_NAME, MEDIA_URL[1:])
LOGIN_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_URL[1:])
LOGOUT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGOUT_URL[1:])
LOGIN_REDIRECT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_REDIRECT_URL[1:])
|
Fix warning message to use 'varify' package instead of 'src'
|
Fix warning message to use 'varify' package instead of 'src'
|
Python
|
bsd-2-clause
|
chop-dbhi/varify,chop-dbhi/varify,chop-dbhi/varify,chop-dbhi/varify
|
import os
from global_settings import *
try:
from local_settings import *
except ImportError:
import warnings
warnings.warn('Local settings have not been found (src.conf.local_settings)')
# FORCE_SCRIPT_NAME overrides the interpreted 'SCRIPT_NAME' provided by the
# web server. since the URLs below are used for various purposes outside of
# the WSGI application (static and media files), these need to be updated to
# reflect this alteration
if FORCE_SCRIPT_NAME:
ADMIN_MEDIA_PREFIX = os.path.join(FORCE_SCRIPT_NAME, ADMIN_MEDIA_PREFIX[1:])
STATIC_URL = os.path.join(FORCE_SCRIPT_NAME, STATIC_URL[1:])
MEDIA_URL = os.path.join(FORCE_SCRIPT_NAME, MEDIA_URL[1:])
LOGIN_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_URL[1:])
LOGOUT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGOUT_URL[1:])
LOGIN_REDIRECT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_REDIRECT_URL[1:])
Fix warning message to use 'varify' package instead of 'src'
|
import os
from global_settings import *
try:
from local_settings import *
except ImportError:
import warnings
warnings.warn('Local settings have not been found (varify.conf.local_settings)')
# FORCE_SCRIPT_NAME overrides the interpreted 'SCRIPT_NAME' provided by the
# web server. since the URLs below are used for various purposes outside of
# the WSGI application (static and media files), these need to be updated to
# reflect this alteration
if FORCE_SCRIPT_NAME:
ADMIN_MEDIA_PREFIX = os.path.join(FORCE_SCRIPT_NAME, ADMIN_MEDIA_PREFIX[1:])
STATIC_URL = os.path.join(FORCE_SCRIPT_NAME, STATIC_URL[1:])
MEDIA_URL = os.path.join(FORCE_SCRIPT_NAME, MEDIA_URL[1:])
LOGIN_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_URL[1:])
LOGOUT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGOUT_URL[1:])
LOGIN_REDIRECT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_REDIRECT_URL[1:])
|
<commit_before>import os
from global_settings import *
try:
from local_settings import *
except ImportError:
import warnings
warnings.warn('Local settings have not been found (src.conf.local_settings)')
# FORCE_SCRIPT_NAME overrides the interpreted 'SCRIPT_NAME' provided by the
# web server. since the URLs below are used for various purposes outside of
# the WSGI application (static and media files), these need to be updated to
# reflect this alteration
if FORCE_SCRIPT_NAME:
ADMIN_MEDIA_PREFIX = os.path.join(FORCE_SCRIPT_NAME, ADMIN_MEDIA_PREFIX[1:])
STATIC_URL = os.path.join(FORCE_SCRIPT_NAME, STATIC_URL[1:])
MEDIA_URL = os.path.join(FORCE_SCRIPT_NAME, MEDIA_URL[1:])
LOGIN_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_URL[1:])
LOGOUT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGOUT_URL[1:])
LOGIN_REDIRECT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_REDIRECT_URL[1:])
<commit_msg>Fix warning message to use 'varify' package instead of 'src'<commit_after>
|
import os
from global_settings import *
try:
from local_settings import *
except ImportError:
import warnings
warnings.warn('Local settings have not been found (varify.conf.local_settings)')
# FORCE_SCRIPT_NAME overrides the interpreted 'SCRIPT_NAME' provided by the
# web server. since the URLs below are used for various purposes outside of
# the WSGI application (static and media files), these need to be updated to
# reflect this alteration
if FORCE_SCRIPT_NAME:
ADMIN_MEDIA_PREFIX = os.path.join(FORCE_SCRIPT_NAME, ADMIN_MEDIA_PREFIX[1:])
STATIC_URL = os.path.join(FORCE_SCRIPT_NAME, STATIC_URL[1:])
MEDIA_URL = os.path.join(FORCE_SCRIPT_NAME, MEDIA_URL[1:])
LOGIN_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_URL[1:])
LOGOUT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGOUT_URL[1:])
LOGIN_REDIRECT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_REDIRECT_URL[1:])
|
import os
from global_settings import *
try:
from local_settings import *
except ImportError:
import warnings
warnings.warn('Local settings have not been found (src.conf.local_settings)')
# FORCE_SCRIPT_NAME overrides the interpreted 'SCRIPT_NAME' provided by the
# web server. since the URLs below are used for various purposes outside of
# the WSGI application (static and media files), these need to be updated to
# reflect this alteration
if FORCE_SCRIPT_NAME:
ADMIN_MEDIA_PREFIX = os.path.join(FORCE_SCRIPT_NAME, ADMIN_MEDIA_PREFIX[1:])
STATIC_URL = os.path.join(FORCE_SCRIPT_NAME, STATIC_URL[1:])
MEDIA_URL = os.path.join(FORCE_SCRIPT_NAME, MEDIA_URL[1:])
LOGIN_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_URL[1:])
LOGOUT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGOUT_URL[1:])
LOGIN_REDIRECT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_REDIRECT_URL[1:])
Fix warning message to use 'varify' package instead of 'src'
import os
from global_settings import *
try:
from local_settings import *
except ImportError:
import warnings
warnings.warn('Local settings have not been found (varify.conf.local_settings)')
# FORCE_SCRIPT_NAME overrides the interpreted 'SCRIPT_NAME' provided by the
# web server. since the URLs below are used for various purposes outside of
# the WSGI application (static and media files), these need to be updated to
# reflect this alteration
if FORCE_SCRIPT_NAME:
ADMIN_MEDIA_PREFIX = os.path.join(FORCE_SCRIPT_NAME, ADMIN_MEDIA_PREFIX[1:])
STATIC_URL = os.path.join(FORCE_SCRIPT_NAME, STATIC_URL[1:])
MEDIA_URL = os.path.join(FORCE_SCRIPT_NAME, MEDIA_URL[1:])
LOGIN_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_URL[1:])
LOGOUT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGOUT_URL[1:])
LOGIN_REDIRECT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_REDIRECT_URL[1:])
|
<commit_before>import os
from global_settings import *
try:
from local_settings import *
except ImportError:
import warnings
warnings.warn('Local settings have not been found (src.conf.local_settings)')
# FORCE_SCRIPT_NAME overrides the interpreted 'SCRIPT_NAME' provided by the
# web server. since the URLs below are used for various purposes outside of
# the WSGI application (static and media files), these need to be updated to
# reflect this alteration
if FORCE_SCRIPT_NAME:
ADMIN_MEDIA_PREFIX = os.path.join(FORCE_SCRIPT_NAME, ADMIN_MEDIA_PREFIX[1:])
STATIC_URL = os.path.join(FORCE_SCRIPT_NAME, STATIC_URL[1:])
MEDIA_URL = os.path.join(FORCE_SCRIPT_NAME, MEDIA_URL[1:])
LOGIN_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_URL[1:])
LOGOUT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGOUT_URL[1:])
LOGIN_REDIRECT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_REDIRECT_URL[1:])
<commit_msg>Fix warning message to use 'varify' package instead of 'src'<commit_after>import os
from global_settings import *
try:
from local_settings import *
except ImportError:
import warnings
warnings.warn('Local settings have not been found (varify.conf.local_settings)')
# FORCE_SCRIPT_NAME overrides the interpreted 'SCRIPT_NAME' provided by the
# web server. since the URLs below are used for various purposes outside of
# the WSGI application (static and media files), these need to be updated to
# reflect this alteration
if FORCE_SCRIPT_NAME:
ADMIN_MEDIA_PREFIX = os.path.join(FORCE_SCRIPT_NAME, ADMIN_MEDIA_PREFIX[1:])
STATIC_URL = os.path.join(FORCE_SCRIPT_NAME, STATIC_URL[1:])
MEDIA_URL = os.path.join(FORCE_SCRIPT_NAME, MEDIA_URL[1:])
LOGIN_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_URL[1:])
LOGOUT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGOUT_URL[1:])
LOGIN_REDIRECT_URL = os.path.join(FORCE_SCRIPT_NAME, LOGIN_REDIRECT_URL[1:])
|
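A note on the [1:] slices in the settings record above: os.path.join discards all earlier components when a later component is absolute, so each URL's leading slash must be stripped before prefixing it with FORCE_SCRIPT_NAME. A minimal standalone sketch (POSIX paths; the values are hypothetical, not from the record):

import os

FORCE_SCRIPT_NAME = '/varify'
STATIC_URL = '/static/'

# An absolute second argument makes os.path.join discard the prefix entirely.
assert os.path.join(FORCE_SCRIPT_NAME, STATIC_URL) == '/static/'
# Stripping the leading '/' keeps the prefix, which is what the settings do.
assert os.path.join(FORCE_SCRIPT_NAME, STATIC_URL[1:]) == '/varify/static/'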
1aa75af659daac62fdef423beac16aef1f057afb
|
test/testCore.py
|
test/testCore.py
|
import pyfits
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
|
import pyfits
import numpy as np
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
def test_byteswap():
p = pyfits.PrimaryHDU()
l = pyfits.HDUList()
n = np.zeros(3, dtype='i2')
n[0] = 1
n[1] = 60000
n[2] = 2
c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n)
t = pyfits.new_table([c])
l.append(p)
l.append(t)
l.writeto('test.fits', clobber=True)
p = pyfits.open('test.fits')
assert p[1].data[1]['foo'] == 60000.0
|
Add test for byteswapping bug resolved in r514.
|
Add test for byteswapping bug resolved in r514.
git-svn-id: 5305e2c1a78737cf7dd5f8f44e9bbbd00348fde7@543 ed100bfc-0583-0410-97f2-c26b58777a21
|
Python
|
bsd-3-clause
|
embray/PyFITS,spacetelescope/PyFITS,embray/PyFITS,embray/PyFITS,spacetelescope/PyFITS,embray/PyFITS
|
import pyfits
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
Add test for byteswapping bug resolved in r514.
git-svn-id: 5305e2c1a78737cf7dd5f8f44e9bbbd00348fde7@543 ed100bfc-0583-0410-97f2-c26b58777a21
|
import pyfits
import numpy as np
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
def test_byteswap():
p = pyfits.PrimaryHDU()
l = pyfits.HDUList()
n = np.zeros(3, dtype='i2')
n[0] = 1
n[1] = 60000
n[2] = 2
c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n)
t = pyfits.new_table([c])
l.append(p)
l.append(t)
l.writeto('test.fits', clobber=True)
p = pyfits.open('test.fits')
assert p[1].data[1]['foo'] == 60000.0
|
<commit_before>import pyfits
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
<commit_msg>Add test for byteswapping bug resolved in r514.
git-svn-id: 5305e2c1a78737cf7dd5f8f44e9bbbd00348fde7@543 ed100bfc-0583-0410-97f2-c26b58777a21<commit_after>
|
import pyfits
import numpy as np
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
def test_byteswap():
p = pyfits.PrimaryHDU()
l = pyfits.HDUList()
n = np.zeros(3, dtype='i2')
n[0] = 1
n[1] = 60000
n[2] = 2
c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n)
t = pyfits.new_table([c])
l.append(p)
l.append(t)
l.writeto('test.fits', clobber=True)
p = pyfits.open('test.fits')
assert p[1].data[1]['foo'] == 60000.0
|
import pyfits
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
Add test for byteswapping bug resolved in r514.
git-svn-id: 5305e2c1a78737cf7dd5f8f44e9bbbd00348fde7@543 ed100bfc-0583-0410-97f2-c26b58777a21import pyfits
import numpy as np
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
def test_byteswap():
p = pyfits.PrimaryHDU()
l = pyfits.HDUList()
n = np.zeros(3, dtype='i2')
n[0] = 1
n[1] = 60000
n[2] = 2
c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n)
t = pyfits.new_table([c])
l.append(p)
l.append(t)
l.writeto('test.fits', clobber=True)
p = pyfits.open('test.fits')
assert p[1].data[1]['foo'] == 60000.0
|
<commit_before>import pyfits
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
<commit_msg>Add test for byteswapping bug resolved in r514.
git-svn-id: 5305e2c1a78737cf7dd5f8f44e9bbbd00348fde7@543 ed100bfc-0583-0410-97f2-c26b58777a21<commit_after>import pyfits
import numpy as np
import sys
def test_with_statement():
if sys.hexversion >= 0x02050000:
exec("""from __future__ import with_statement
with pyfits.open("ascii.fits") as f: pass""")
def test_naxisj_check():
hdulist = pyfits.open("o4sp040b0_raw.fits")
hdulist[1].header.update("NAXIS3", 500)
assert 'NAXIS3' in hdulist[1].header
hdulist.verify('fix')
assert 'NAXIS3' not in hdulist[1].header
def test_byteswap():
p = pyfits.PrimaryHDU()
l = pyfits.HDUList()
n = np.zeros(3, dtype='i2')
n[0] = 1
n[1] = 60000
n[2] = 2
c = pyfits.Column(name='foo', format='i2', bscale=1, bzero=32768, array=n)
t = pyfits.new_table([c])
l.append(p)
l.append(t)
l.writeto('test.fits', clobber=True)
p = pyfits.open('test.fits')
assert p[1].data[1]['foo'] == 60000.0
|
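For context on the byteswap test above: FITS has no unsigned 16-bit integer type, so unsigned data is conventionally stored as signed int16 with BZERO=32768 and rescaled on read. A numpy-only sketch of that convention, independent of pyfits itself:

import numpy as np

physical = np.array([1, 60000, 2], dtype='u2')          # 60000 exceeds the int16 range
stored = (physical.astype('i4') - 32768).astype('i2')   # what lands on disk
restored = stored.astype('i4') + 32768                  # what a reader hands back
assert (restored == physical).all()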
fbc42057c647e4e42825b0b4e33d69e5967901f0
|
cid/locals/thread_local.py
|
cid/locals/thread_local.py
|
from threading import local
from django.conf import settings
from .base import build_cid
_thread_locals = local()
def set_cid(cid):
"""Set the correlation id for the current request."""
setattr(_thread_locals, 'CID', cid)
def get_cid():
"""Return the currently set correlation id (if any).
If no correlation id has been set and ``CID_GENERATE`` is enabled
in the settings, a new correlation id is set and returned.
FIXME (dbaty): in version 2, just `return getattr(_thread_locals, 'CID', None)`
We want the simplest thing here and let `generate_new_cid` do the job.
"""
cid = getattr(_thread_locals, 'CID', None)
if cid is None and getattr(settings, 'CID_GENERATE', False):
cid = build_cid()
set_cid(cid)
return cid
|
from threading import local
from django.conf import settings
from .base import build_cid
_thread_locals = local()
def set_cid(cid):
"""Set the correlation id for the current request."""
setattr(_thread_locals, 'CID', cid)
def get_cid():
"""Return the currently set correlation id (if any).
If no correlation id has been set and ``CID_GENERATE`` is enabled
in the settings, a new correlation id is set and returned.
"""
cid = getattr(_thread_locals, 'CID', None)
if cid is None and getattr(settings, 'CID_GENERATE', False):
cid = build_cid()
set_cid(cid)
return cid
|
Remove ancient FIXME in `get_cid()`
|
Remove ancient FIXME in `get_cid()`
Maybe I had a great idea in mind when I wrote the comment. Or maybe it
was just a vague thought. I guess we'll never know.
|
Python
|
bsd-3-clause
|
snowball-one/cid
|
from threading import local
from django.conf import settings
from .base import build_cid
_thread_locals = local()
def set_cid(cid):
"""Set the correlation id for the current request."""
setattr(_thread_locals, 'CID', cid)
def get_cid():
"""Return the currently set correlation id (if any).
If no correlation id has been set and ``CID_GENERATE`` is enabled
in the settings, a new correlation id is set and returned.
FIXME (dbaty): in version 2, just `return getattr(_thread_locals, 'CID', None)`
We want the simplest thing here and let `generate_new_cid` do the job.
"""
cid = getattr(_thread_locals, 'CID', None)
if cid is None and getattr(settings, 'CID_GENERATE', False):
cid = build_cid()
set_cid(cid)
return cid
Remove ancient FIXME in `get_cid()`
Maybe I had a great idea in mind when I wrote the comment. Or maybe it
was just a vague thought. I guess we'll never know.
|
from threading import local
from django.conf import settings
from .base import build_cid
_thread_locals = local()
def set_cid(cid):
"""Set the correlation id for the current request."""
setattr(_thread_locals, 'CID', cid)
def get_cid():
"""Return the currently set correlation id (if any).
If no correlation id has been set and ``CID_GENERATE`` is enabled
in the settings, a new correlation id is set and returned.
"""
cid = getattr(_thread_locals, 'CID', None)
if cid is None and getattr(settings, 'CID_GENERATE', False):
cid = build_cid()
set_cid(cid)
return cid
|
<commit_before>from threading import local
from django.conf import settings
from .base import build_cid
_thread_locals = local()
def set_cid(cid):
"""Set the correlation id for the current request."""
setattr(_thread_locals, 'CID', cid)
def get_cid():
"""Return the currently set correlation id (if any).
If no correlation id has been set and ``CID_GENERATE`` is enabled
in the settings, a new correlation id is set and returned.
FIXME (dbaty): in version 2, just `return getattr(_thread_locals, 'CID', None)`
We want the simplest thing here and let `generate_new_cid` do the job.
"""
cid = getattr(_thread_locals, 'CID', None)
if cid is None and getattr(settings, 'CID_GENERATE', False):
cid = build_cid()
set_cid(cid)
return cid
<commit_msg>Remove ancient FIXME in `get_cid()`
Maybe I had a great idea in mind when I wrote the comment. Or maybe it
was just a vague thought. I guess we'll never know.<commit_after>
|
from threading import local
from django.conf import settings
from .base import build_cid
_thread_locals = local()
def set_cid(cid):
"""Set the correlation id for the current request."""
setattr(_thread_locals, 'CID', cid)
def get_cid():
"""Return the currently set correlation id (if any).
If no correlation id has been set and ``CID_GENERATE`` is enabled
in the settings, a new correlation id is set and returned.
"""
cid = getattr(_thread_locals, 'CID', None)
if cid is None and getattr(settings, 'CID_GENERATE', False):
cid = build_cid()
set_cid(cid)
return cid
|
from threading import local
from django.conf import settings
from .base import build_cid
_thread_locals = local()
def set_cid(cid):
"""Set the correlation id for the current request."""
setattr(_thread_locals, 'CID', cid)
def get_cid():
"""Return the currently set correlation id (if any).
If no correlation id has been set and ``CID_GENERATE`` is enabled
in the settings, a new correlation id is set and returned.
FIXME (dbaty): in version 2, just `return getattr(_thread_locals, 'CID', None)`
We want the simplest thing here and let `generate_new_cid` do the job.
"""
cid = getattr(_thread_locals, 'CID', None)
if cid is None and getattr(settings, 'CID_GENERATE', False):
cid = build_cid()
set_cid(cid)
return cid
Remove ancient FIXME in `get_cid()`
Maybe I had a great idea in mind when I wrote the comment. Or maybe it
was just a vague thought. I guess we'll never know.from threading import local
from django.conf import settings
from .base import build_cid
_thread_locals = local()
def set_cid(cid):
"""Set the correlation id for the current request."""
setattr(_thread_locals, 'CID', cid)
def get_cid():
"""Return the currently set correlation id (if any).
If no correlation id has been set and ``CID_GENERATE`` is enabled
in the settings, a new correlation id is set and returned.
"""
cid = getattr(_thread_locals, 'CID', None)
if cid is None and getattr(settings, 'CID_GENERATE', False):
cid = build_cid()
set_cid(cid)
return cid
|
<commit_before>from threading import local
from django.conf import settings
from .base import build_cid
_thread_locals = local()
def set_cid(cid):
"""Set the correlation id for the current request."""
setattr(_thread_locals, 'CID', cid)
def get_cid():
"""Return the currently set correlation id (if any).
If no correlation id has been set and ``CID_GENERATE`` is enabled
in the settings, a new correlation id is set and returned.
FIXME (dbaty): in version 2, just `return getattr(_thread_locals, 'CID', None)`
We want the simplest thing here and let `generate_new_cid` do the job.
"""
cid = getattr(_thread_locals, 'CID', None)
if cid is None and getattr(settings, 'CID_GENERATE', False):
cid = build_cid()
set_cid(cid)
return cid
<commit_msg>Remove ancient FIXME in `get_cid()`
Maybe I had a great idea in mind when I wrote the comment. Or maybe it
was just a vague thought. I guess we'll never know.<commit_after>from threading import local
from django.conf import settings
from .base import build_cid
_thread_locals = local()
def set_cid(cid):
"""Set the correlation id for the current request."""
setattr(_thread_locals, 'CID', cid)
def get_cid():
"""Return the currently set correlation id (if any).
If no correlation id has been set and ``CID_GENERATE`` is enabled
in the settings, a new correlation id is set and returned.
"""
cid = getattr(_thread_locals, 'CID', None)
if cid is None and getattr(settings, 'CID_GENERATE', False):
cid = build_cid()
set_cid(cid)
return cid
|
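The module above relies on threading.local so each request-handling thread keeps its own correlation id. A standalone sketch of that isolation in plain Python, with no Django settings involved:

import threading

_locals = threading.local()

def set_cid(cid):
    _locals.CID = cid

def get_cid():
    return getattr(_locals, 'CID', None)

def worker(n):
    set_cid('req-%d' % n)
    assert get_cid() == 'req-%d' % n  # other threads' ids are invisible here

threads = [threading.Thread(target=worker, args=(i,)) for i in range(3)]
for t in threads:
    t.start()
for t in threads:
    t.join()
assert get_cid() is None  # the main thread never set one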
370507fc48636417a10e4075917783169f3653c3
|
test_edelbaum.py
|
test_edelbaum.py
|
from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # deg
i_0 = 28.5 # deg
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
    expected_t_f = 191.26295 # days
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=0)
assert_almost_equal(delta_V, expected_delta_V, decimal=1)
|
from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # rad
i_0 = (28.5 * u.deg).to(u.rad).value # rad
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
    expected_t_f = 191.26295 # days
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=2)
assert_almost_equal(delta_V, expected_delta_V, decimal=4)
|
Fix unit error, improve precision
|
Fix unit error, improve precision
|
Python
|
mit
|
Juanlu001/pfc-uc3m
|
from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # deg
i_0 = 28.5 # deg
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
    expected_t_f = 191.26295 # days
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=0)
assert_almost_equal(delta_V, expected_delta_V, decimal=1)
Fix unit error, improve precision
|
from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # rad
i_0 = (28.5 * u.deg).to(u.rad).value # rad
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
expected_t_f = 191.26295 # s
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=2)
assert_almost_equal(delta_V, expected_delta_V, decimal=4)
|
<commit_before>from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # deg
i_0 = 28.5 # deg
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
    expected_t_f = 191.26295 # days
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=0)
assert_almost_equal(delta_V, expected_delta_V, decimal=1)
<commit_msg>Fix unit error, improve precision<commit_after>
|
from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # rad
i_0 = (28.5 * u.deg).to(u.rad).value # rad
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
    expected_t_f = 191.26295 # days
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=2)
assert_almost_equal(delta_V, expected_delta_V, decimal=4)
|
from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # deg
i_0 = 28.5 # deg
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
    expected_t_f = 191.26295 # days
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=0)
assert_almost_equal(delta_V, expected_delta_V, decimal=1)
Fix unit error, improve precisionfrom astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # rad
i_0 = (28.5 * u.deg).to(u.rad).value # rad
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
    expected_t_f = 191.26295 # days
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=2)
assert_almost_equal(delta_V, expected_delta_V, decimal=4)
|
<commit_before>from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # deg
i_0 = 28.5 # deg
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
    expected_t_f = 191.26295 # days
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=0)
assert_almost_equal(delta_V, expected_delta_V, decimal=1)
<commit_msg>Fix unit error, improve precision<commit_after>from astropy import units as u
from numpy.testing import assert_almost_equal
from poliastro.bodies import Earth
from edelbaum import extra_quantities
def test_leo_geo_time_and_delta_v():
a_0 = 7000.0 # km
a_f = 42166.0 # km
i_f = 0.0 # rad
i_0 = (28.5 * u.deg).to(u.rad).value # rad
f = 3.5e-7 # km / s2
k = Earth.k.decompose([u.km, u.s]).value
    expected_t_f = 191.26295 # days
expected_delta_V = 5.78378 # km / s
delta_V, t_f = extra_quantities(k, a_0, a_f, i_f - i_0, f)
assert_almost_equal(t_f / 86400, expected_t_f, decimal=2)
assert_almost_equal(delta_V, expected_delta_V, decimal=4)
|
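The unit fix above matters because Edelbaum's analytical low-thrust formulas take the inclination change in radians; feeding 28.5 as if it were radians badly skews both delta-V and transfer time. A quick standalone check of the conversion, plus the seconds-to-days relation the assertions rely on:

import numpy as np
from astropy import units as u

i_0 = (28.5 * u.deg).to(u.rad).value
assert np.isclose(i_0, np.deg2rad(28.5))  # about 0.4974 rad

# t_f comes back in seconds; dividing by 86400 compares it in days,
# consistent with delta_V / f: 5.78378 / 3.5e-7 s is about 191.26 days.
assert np.isclose(5.78378 / 3.5e-7 / 86400, 191.26, atol=0.01)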
2b70b4d2ca40cfbf36265a650ca04855999c5a03
|
elm_open_in_browser.py
|
elm_open_in_browser.py
|
import sublime
import os.path as fs
if int(sublime.version()) < 3000:
from elm_project import ElmProject
from ViewInBrowserCommand import ViewInBrowserCommand
else:
from .elm_project import ElmProject
ViewInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand
class ElmOpenInBrowserCommand(ViewInBrowserCommand):
def run(self, edit):
super(ElmOpenInBrowserCommand, self).run(edit)
def is_enabled(self):
self.project = ElmProject(self.view.file_name())
return self.project.exists
def normalizePath(self, fileToOpen): # ViewInBrowserCommand
norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path))
return super(ElmOpenInBrowserCommand, self).normalizePath(fs.abspath(norm_path))
|
import sublime
import os.path as fs
if int(sublime.version()) < 3000:
from elm_project import ElmProject
from ViewInBrowserCommand import ViewInBrowserCommand as OpenInBrowserCommand
else:
from .elm_project import ElmProject
try:
from SideBarEnhancements.SideBar import SideBarOpenInBrowserCommand as OpenInBrowserCommand
except:
OpenInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand
class ElmOpenInBrowserCommand(OpenInBrowserCommand):
def run(self, edit=None):
if edit: # ViewInBrowserCommand
super(ElmOpenInBrowserCommand, self).run(edit)
else: # SideBarOpenInBrowserCommand
super(ElmOpenInBrowserCommand, self).run([self.html_path()])
def is_enabled(self):
try: # ViewInBrowserCommand
self.project = ElmProject(self.view.file_name())
except: # SideBarOpenInBrowserCommand
self.project = ElmProject(self.window.active_view().file_name())
return self.project.exists
def normalizePath(self, fileToOpen): # ViewInBrowserCommand
return super(ElmOpenInBrowserCommand, self).normalizePath(self.html_path())
def html_path(self):
norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path))
return fs.abspath(norm_path)
|
Add alternative support for open in browser
|
Add alternative support for open in browser
Integrate SideBarEnhancements for ST3 for popularity and browser detection
|
Python
|
mit
|
deadfoxygrandpa/Elm.tmLanguage,deadfoxygrandpa/Elm.tmLanguage,rtfeldman/Elm.tmLanguage,rtfeldman/Elm.tmLanguage,sekjun9878/Elm.tmLanguage,sekjun9878/Elm.tmLanguage
|
import sublime
import os.path as fs
if int(sublime.version()) < 3000:
from elm_project import ElmProject
from ViewInBrowserCommand import ViewInBrowserCommand
else:
from .elm_project import ElmProject
ViewInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand
class ElmOpenInBrowserCommand(ViewInBrowserCommand):
def run(self, edit):
super(ElmOpenInBrowserCommand, self).run(edit)
def is_enabled(self):
self.project = ElmProject(self.view.file_name())
return self.project.exists
def normalizePath(self, fileToOpen): # ViewInBrowserCommand
norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path))
return super(ElmOpenInBrowserCommand, self).normalizePath(fs.abspath(norm_path))
Add alternative support for open in browser
Integrate SideBarEnhancements for ST3 for popularity and browser detection
|
import sublime
import os.path as fs
if int(sublime.version()) < 3000:
from elm_project import ElmProject
from ViewInBrowserCommand import ViewInBrowserCommand as OpenInBrowserCommand
else:
from .elm_project import ElmProject
try:
from SideBarEnhancements.SideBar import SideBarOpenInBrowserCommand as OpenInBrowserCommand
except:
OpenInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand
class ElmOpenInBrowserCommand(OpenInBrowserCommand):
def run(self, edit=None):
if edit: # ViewInBrowserCommand
super(ElmOpenInBrowserCommand, self).run(edit)
else: # SideBarOpenInBrowserCommand
super(ElmOpenInBrowserCommand, self).run([self.html_path()])
def is_enabled(self):
try: # ViewInBrowserCommand
self.project = ElmProject(self.view.file_name())
except: # SideBarOpenInBrowserCommand
self.project = ElmProject(self.window.active_view().file_name())
return self.project.exists
def normalizePath(self, fileToOpen): # ViewInBrowserCommand
return super(ElmOpenInBrowserCommand, self).normalizePath(self.html_path())
def html_path(self):
norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path))
return fs.abspath(norm_path)
|
<commit_before>import sublime
import os.path as fs
if int(sublime.version()) < 3000:
from elm_project import ElmProject
from ViewInBrowserCommand import ViewInBrowserCommand
else:
from .elm_project import ElmProject
ViewInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand
class ElmOpenInBrowserCommand(ViewInBrowserCommand):
def run(self, edit):
super(ElmOpenInBrowserCommand, self).run(edit)
def is_enabled(self):
self.project = ElmProject(self.view.file_name())
return self.project.exists
def normalizePath(self, fileToOpen): # ViewInBrowserCommand
norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path))
return super(ElmOpenInBrowserCommand, self).normalizePath(fs.abspath(norm_path))
<commit_msg>Add alternative support for open in browser
Integrate SideBarEnhancements for ST3 for popularity and browser detection<commit_after>
|
import sublime
import os.path as fs
if int(sublime.version()) < 3000:
from elm_project import ElmProject
from ViewInBrowserCommand import ViewInBrowserCommand as OpenInBrowserCommand
else:
from .elm_project import ElmProject
try:
from SideBarEnhancements.SideBar import SideBarOpenInBrowserCommand as OpenInBrowserCommand
except:
OpenInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand
class ElmOpenInBrowserCommand(OpenInBrowserCommand):
def run(self, edit=None):
if edit: # ViewInBrowserCommand
super(ElmOpenInBrowserCommand, self).run(edit)
else: # SideBarOpenInBrowserCommand
super(ElmOpenInBrowserCommand, self).run([self.html_path()])
def is_enabled(self):
try: # ViewInBrowserCommand
self.project = ElmProject(self.view.file_name())
except: # SideBarOpenInBrowserCommand
self.project = ElmProject(self.window.active_view().file_name())
return self.project.exists
def normalizePath(self, fileToOpen): # ViewInBrowserCommand
return super(ElmOpenInBrowserCommand, self).normalizePath(self.html_path())
def html_path(self):
norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path))
return fs.abspath(norm_path)
|
import sublime
import os.path as fs
if int(sublime.version()) < 3000:
from elm_project import ElmProject
from ViewInBrowserCommand import ViewInBrowserCommand
else:
from .elm_project import ElmProject
ViewInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand
class ElmOpenInBrowserCommand(ViewInBrowserCommand):
def run(self, edit):
super(ElmOpenInBrowserCommand, self).run(edit)
def is_enabled(self):
self.project = ElmProject(self.view.file_name())
return self.project.exists
def normalizePath(self, fileToOpen): # ViewInBrowserCommand
norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path))
return super(ElmOpenInBrowserCommand, self).normalizePath(fs.abspath(norm_path))
Add alternative support for open in browser
Integrate SideBarEnhancements for ST3 for popularity and browser detectionimport sublime
import os.path as fs
if int(sublime.version()) < 3000:
from elm_project import ElmProject
from ViewInBrowserCommand import ViewInBrowserCommand as OpenInBrowserCommand
else:
from .elm_project import ElmProject
try:
from SideBarEnhancements.SideBar import SideBarOpenInBrowserCommand as OpenInBrowserCommand
except:
OpenInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand
class ElmOpenInBrowserCommand(OpenInBrowserCommand):
def run(self, edit=None):
if edit: # ViewInBrowserCommand
super(ElmOpenInBrowserCommand, self).run(edit)
else: # SideBarOpenInBrowserCommand
super(ElmOpenInBrowserCommand, self).run([self.html_path()])
def is_enabled(self):
try: # ViewInBrowserCommand
self.project = ElmProject(self.view.file_name())
except: # SideBarOpenInBrowserCommand
self.project = ElmProject(self.window.active_view().file_name())
return self.project.exists
def normalizePath(self, fileToOpen): # ViewInBrowserCommand
return super(ElmOpenInBrowserCommand, self).normalizePath(self.html_path())
def html_path(self):
norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path))
return fs.abspath(norm_path)
|
<commit_before>import sublime
import os.path as fs
if int(sublime.version()) < 3000:
from elm_project import ElmProject
from ViewInBrowserCommand import ViewInBrowserCommand
else:
from .elm_project import ElmProject
ViewInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand
class ElmOpenInBrowserCommand(ViewInBrowserCommand):
def run(self, edit):
super(ElmOpenInBrowserCommand, self).run(edit)
def is_enabled(self):
self.project = ElmProject(self.view.file_name())
return self.project.exists
def normalizePath(self, fileToOpen): # ViewInBrowserCommand
norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path))
return super(ElmOpenInBrowserCommand, self).normalizePath(fs.abspath(norm_path))
<commit_msg>Add alternative support for open in browser
Integrate SideBarEnhancements for ST3 for popularity and browser detection<commit_after>import sublime
import os.path as fs
if int(sublime.version()) < 3000:
from elm_project import ElmProject
from ViewInBrowserCommand import ViewInBrowserCommand as OpenInBrowserCommand
else:
from .elm_project import ElmProject
try:
from SideBarEnhancements.SideBar import SideBarOpenInBrowserCommand as OpenInBrowserCommand
except:
OpenInBrowserCommand = __import__('View In Browser').ViewInBrowserCommand.ViewInBrowserCommand
class ElmOpenInBrowserCommand(OpenInBrowserCommand):
def run(self, edit=None):
if edit: # ViewInBrowserCommand
super(ElmOpenInBrowserCommand, self).run(edit)
else: # SideBarOpenInBrowserCommand
super(ElmOpenInBrowserCommand, self).run([self.html_path()])
def is_enabled(self):
try: # ViewInBrowserCommand
self.project = ElmProject(self.view.file_name())
except: # SideBarOpenInBrowserCommand
self.project = ElmProject(self.window.active_view().file_name())
return self.project.exists
def normalizePath(self, fileToOpen): # ViewInBrowserCommand
return super(ElmOpenInBrowserCommand, self).normalizePath(self.html_path())
def html_path(self):
norm_path = fs.join(self.project.working_dir, fs.expanduser(self.project.html_path))
return fs.abspath(norm_path)
|
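The import dance above is the usual prefer-one-package, fall-back-to-another pattern (note that the record's bare except: clauses are broader than the ImportError and AttributeError they are really guarding against). A runnable sketch of the same pattern with a stdlib fallback; the fast path named here is hypothetical:

try:
    import ujson as json  # hypothetical fast path; often not installed
except ImportError:
    import json           # stdlib fallback with the same interface

print(json.dumps({'fallback_works': True}))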
65529690d8fecbf81087c6f43316f054288785ec
|
twenty3.py
|
twenty3.py
|
from pync import Notifier
from time import sleep
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--min', type=int, help="Minutes before break", default="20")
args = parser.parse_args()
if not args.min:
raise ValueError("Invalid minutes")
while True:
sleep(args.min*60)
Notifier.notify('Time for a break.', title="Reminder")
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
|
from pync import Notifier
from time import sleep
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--min', type=int, help="Timeout before sending alert (minutes)", default="20")
parser.add_argument('--duration', type=int, help="Duration of break (seconds)", default="20")
args = parser.parse_args()
if not (args.min and args.duration):
raise ValueError("Invalid arguments")
while True:
# sleep for n minutes
sleep(args.min*60)
# break time
Notifier.notify(
'Take a break for %d secs' % args.duration,
title="Break reminder"
)
# on break
sleep(args.duration)
# back to work
        Notifier.notify("Back to work", title="Break reminder")
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
|
Add break duration argument and sleep timeout. Add notification when it is time to get back to work
|
Add break duration argument and sleep timeout. Add notification when it is time to get back to work
|
Python
|
mit
|
mgalang/twenty3
|
from pync import Notifier
from time import sleep
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--min', type=int, help="Minutes before break", default="20")
args = parser.parse_args()
if not args.min:
raise ValueError("Invalid minutes")
while True:
sleep(args.min*60)
Notifier.notify('Time for a break.', title="Reminder")
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
Add break duration argument and sleep timeout. Add notification when it is time to get back to work
|
from pync import Notifier
from time import sleep
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--min', type=int, help="Timeout before sending alert (minutes)", default="20")
parser.add_argument('--duration', type=int, help="Duration of break (seconds)", default="20")
args = parser.parse_args()
if not (args.min and args.duration):
raise ValueError("Invalid arguments")
while True:
# sleep for n minutes
sleep(args.min*60)
# break time
Notifier.notify(
'Take a break for %d secs' % args.duration,
title="Break reminder"
)
# on break
sleep(args.duration)
# back to work
        Notifier.notify("Back to work", title="Break reminder")
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
|
<commit_before>from pync import Notifier
from time import sleep
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--min', type=int, help="Minutes before break", default="20")
args = parser.parse_args()
if not args.min:
raise ValueError("Invalid minutes")
while True:
sleep(args.min*60)
Notifier.notify('Time for a break.', title="Reminder")
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
<commit_msg>Add break duration argument and sleep timeout. Add notification when it is time to get back to work<commit_after>
|
from pync import Notifier
from time import sleep
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--min', type=int, help="Timeout before sending alert (minutes)", default="20")
parser.add_argument('--duration', type=int, help="Duration of break (seconds)", default="20")
args = parser.parse_args()
if not (args.min and args.duration):
raise ValueError("Invalid arguments")
while True:
# sleep for n minutes
sleep(args.min*60)
# break time
Notifier.notify(
'Take a break for %d secs' % args.duration,
title="Break reminder"
)
# on break
sleep(args.duration)
# back to work
        Notifier.notify("Back to work", title="Break reminder")
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
|
from pync import Notifier
from time import sleep
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--min', type=int, help="Minutes before break", default="20")
args = parser.parse_args()
if not args.min:
raise ValueError("Invalid minutes")
while True:
sleep(args.min*60)
Notifier.notify('Time for a break.', title="Reminder")
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
Add break duration argument and sleep timeout. Add notification when it is time to get back to workfrom pync import Notifier
from time import sleep
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--min', type=int, help="Timeout before sending alert (minutes)", default="20")
parser.add_argument('--duration', type=int, help="Duration of break (seconds)", default="20")
args = parser.parse_args()
if not (args.min and args.duration):
raise ValueError("Invalid arguments")
while True:
# sleep for n minutes
sleep(args.min*60)
# break time
Notifier.notify(
'Take a break for %d secs' % args.duration,
title="Break reminder"
)
# on break
sleep(args.duration)
# back to work
        Notifier.notify("Back to work", title="Break reminder")
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
|
<commit_before>from pync import Notifier
from time import sleep
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--min', type=int, help="Minutes before break", default="20")
args = parser.parse_args()
if not args.min:
raise ValueError("Invalid minutes")
while True:
sleep(args.min*60)
Notifier.notify('Time for a break.', title="Reminder")
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
<commit_msg>Add break duration argument and sleep timeout. Add notification when it is time to get back to work<commit_after>from pync import Notifier
from time import sleep
import argparse
def main():
parser = argparse.ArgumentParser()
parser.add_argument('--min', type=int, help="Timeout before sending alert (minutes)", default="20")
parser.add_argument('--duration', type=int, help="Duration of break (seconds)", default="20")
args = parser.parse_args()
if not (args.min and args.duration):
raise ValueError("Invalid arguments")
while True:
# sleep for n minutes
sleep(args.min*60)
# break time
Notifier.notify(
'Take a break for %d secs' % args.duration,
title="Break reminder"
)
# on break
sleep(args.duration)
# back to work
        Notifier.notify("Back to work", title="Break reminder")
if __name__ == '__main__':
try:
main()
except KeyboardInterrupt:
pass
|
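A side note on the argparse usage in the record above: the defaults are the strings "20" while type=int is set, and argparse applies the type conversion to string defaults, so args.min and args.duration come back as ints even when the flags are omitted. A standalone check:

import argparse

parser = argparse.ArgumentParser()
parser.add_argument('--min', type=int, default="20")
args = parser.parse_args([])
assert args.min == 20 and isinstance(args.min, int)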
2ad4dd2fe877248b33aefa4465352710f95d953a
|
djlotrek/decorators.py
|
djlotrek/decorators.py
|
from functools import wraps
from django.conf import settings
import requests
def check_recaptcha(view_func):
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
request.recaptcha_is_valid = None
if request.method == 'POST':
recaptcha_response = request.POST.get('g-recaptcha-response')
data = {
'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY,
'response': recaptcha_response
}
r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data)
result = r.json()
if result['success']:
request.recaptcha_is_valid = True
else:
request.recaptcha_is_valid = False
print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes']))
return view_func(request, *args, **kwargs)
return _wrapped_view
|
from functools import wraps
from django.conf import settings
import requests
def check_recaptcha(view_func):
"""Chech that the entered recaptcha data is correct"""
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
request.recaptcha_is_valid = None
if request.method == 'POST':
recaptcha_response = request.POST.get('g-recaptcha-response')
data = {
'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY,
'response': recaptcha_response
}
r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data)
result = r.json()
if result['success']:
request.recaptcha_is_valid = True
else:
request.recaptcha_is_valid = False
print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes']))
return view_func(request, *args, **kwargs)
return _wrapped_view
|
Add docstring to recaptcha check
|
Add docstring to recaptcha check
|
Python
|
mit
|
lotrekagency/djlotrek,lotrekagency/djlotrek
|
from functools import wraps
from django.conf import settings
import requests
def check_recaptcha(view_func):
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
request.recaptcha_is_valid = None
if request.method == 'POST':
recaptcha_response = request.POST.get('g-recaptcha-response')
data = {
'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY,
'response': recaptcha_response
}
r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data)
result = r.json()
if result['success']:
request.recaptcha_is_valid = True
else:
request.recaptcha_is_valid = False
print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes']))
return view_func(request, *args, **kwargs)
return _wrapped_view
Add docstring to recaptcha check
|
from functools import wraps
from django.conf import settings
import requests
def check_recaptcha(view_func):
"""Chech that the entered recaptcha data is correct"""
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
request.recaptcha_is_valid = None
if request.method == 'POST':
recaptcha_response = request.POST.get('g-recaptcha-response')
data = {
'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY,
'response': recaptcha_response
}
r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data)
result = r.json()
if result['success']:
request.recaptcha_is_valid = True
else:
request.recaptcha_is_valid = False
print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes']))
return view_func(request, *args, **kwargs)
return _wrapped_view
|
<commit_before>from functools import wraps
from django.conf import settings
import requests
def check_recaptcha(view_func):
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
request.recaptcha_is_valid = None
if request.method == 'POST':
recaptcha_response = request.POST.get('g-recaptcha-response')
data = {
'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY,
'response': recaptcha_response
}
r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data)
result = r.json()
if result['success']:
request.recaptcha_is_valid = True
else:
request.recaptcha_is_valid = False
print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes']))
return view_func(request, *args, **kwargs)
return _wrapped_view
<commit_msg>Add docstring to recaptcha check<commit_after>
|
from functools import wraps
from django.conf import settings
import requests
def check_recaptcha(view_func):
"""Chech that the entered recaptcha data is correct"""
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
request.recaptcha_is_valid = None
if request.method == 'POST':
recaptcha_response = request.POST.get('g-recaptcha-response')
data = {
'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY,
'response': recaptcha_response
}
r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data)
result = r.json()
if result['success']:
request.recaptcha_is_valid = True
else:
request.recaptcha_is_valid = False
print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes']))
return view_func(request, *args, **kwargs)
return _wrapped_view
|
from functools import wraps
from django.conf import settings
import requests
def check_recaptcha(view_func):
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
request.recaptcha_is_valid = None
if request.method == 'POST':
recaptcha_response = request.POST.get('g-recaptcha-response')
data = {
'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY,
'response': recaptcha_response
}
r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data)
result = r.json()
if result['success']:
request.recaptcha_is_valid = True
else:
request.recaptcha_is_valid = False
print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes']))
return view_func(request, *args, **kwargs)
return _wrapped_view
Add docstring to recaptcha checkfrom functools import wraps
from django.conf import settings
import requests
def check_recaptcha(view_func):
"""Chech that the entered recaptcha data is correct"""
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
request.recaptcha_is_valid = None
if request.method == 'POST':
recaptcha_response = request.POST.get('g-recaptcha-response')
data = {
'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY,
'response': recaptcha_response
}
r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data)
result = r.json()
if result['success']:
request.recaptcha_is_valid = True
else:
request.recaptcha_is_valid = False
print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes']))
return view_func(request, *args, **kwargs)
return _wrapped_view
|
<commit_before>from functools import wraps
from django.conf import settings
import requests
def check_recaptcha(view_func):
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
request.recaptcha_is_valid = None
if request.method == 'POST':
recaptcha_response = request.POST.get('g-recaptcha-response')
data = {
'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY,
'response': recaptcha_response
}
r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data)
result = r.json()
if result['success']:
request.recaptcha_is_valid = True
else:
request.recaptcha_is_valid = False
print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes']))
return view_func(request, *args, **kwargs)
return _wrapped_view
<commit_msg>Add docstring to recaptcha check<commit_after>from functools import wraps
from django.conf import settings
import requests
def check_recaptcha(view_func):
"""Chech that the entered recaptcha data is correct"""
@wraps(view_func)
def _wrapped_view(request, *args, **kwargs):
request.recaptcha_is_valid = None
if request.method == 'POST':
recaptcha_response = request.POST.get('g-recaptcha-response')
data = {
'secret': settings.GOOGLE_RECAPTCHA_SECRET_KEY,
'response': recaptcha_response
}
r = requests.post('https://www.google.com/recaptcha/api/siteverify', data=data)
result = r.json()
if result['success']:
request.recaptcha_is_valid = True
else:
request.recaptcha_is_valid = False
print('Invalid reCAPTCHA. Please try again. '+str(result['error-codes']))
return view_func(request, *args, **kwargs)
return _wrapped_view
|
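The decorator above depends on functools.wraps to keep the wrapped view's identity. A minimal standalone sketch of why that matters; the view name and docstring here are hypothetical:

from functools import wraps

def passthrough(view_func):
    @wraps(view_func)
    def _wrapped(request, *args, **kwargs):
        return view_func(request, *args, **kwargs)
    return _wrapped

@passthrough
def contact_view(request):
    """Render the contact page."""

# Without @wraps these would report '_wrapped' and None instead.
assert contact_view.__name__ == 'contact_view'
assert contact_view.__doc__ == 'Render the contact page.'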
d3734c7d8d006ba91c04f6cf03e6725bb966c439
|
version.py
|
version.py
|
major = 0
minor=0
patch=28
branch="master"
timestamp=1376705489.59
|
major = 0
minor=0
patch=29
branch="master"
timestamp=1376800912.72
|
Tag commit for v0.0.29-master generated by gitmake.py
|
Tag commit for v0.0.29-master generated by gitmake.py
|
Python
|
mit
|
ryansturmer/gitmake
|
major = 0
minor=0
patch=28
branch="master"
timestamp=1376705489.59Tag commit for v0.0.29-master generated by gitmake.py
|
major = 0
minor=0
patch=29
branch="master"
timestamp=1376800912.72
|
<commit_before>major = 0
minor=0
patch=28
branch="master"
timestamp=1376705489.59<commit_msg>Tag commit for v0.0.29-master generated by gitmake.py<commit_after>
|
major = 0
minor=0
patch=29
branch="master"
timestamp=1376800912.72
|
major = 0
minor=0
patch=28
branch="master"
timestamp=1376705489.59Tag commit for v0.0.29-master generated by gitmake.pymajor = 0
minor=0
patch=29
branch="master"
timestamp=1376800912.72
|
<commit_before>major = 0
minor=0
patch=28
branch="master"
timestamp=1376705489.59<commit_msg>Tag commit for v0.0.29-master generated by gitmake.py<commit_after>major = 0
minor=0
patch=29
branch="master"
timestamp=1376800912.72
|
ad9ad98b27c1640c5c5a336e62b9e8c3c805259f
|
api/serializers.py
|
api/serializers.py
|
from django.contrib.auth.models import User
from rest_framework import serializers
from api.models import UserPreferences, HelpLink
class HelpLinkSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HelpLink
fields = (
'link_key',
'topic',
'href',
'created_date',
'modified_date'
)
class UserPreferencesSummarySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = UserPreferences
fields = (
'id',
'url'
)
class UserRelatedField(serializers.PrimaryKeyRelatedField):
def use_pk_only_optimization(self):
return False
def to_representation(self, value):
serializer = UserSerializer(value, context=self.context)
return serializer.data
class UserSerializer(serializers.HyperlinkedModelSerializer):
user_pref = UserPreferencesSummarySerializer(
source='userpreferences_set',
many=True)
class Meta:
model = User
fields = (
'id',
'url',
'username',
'first_name',
'last_name',
'email',
'is_staff',
'is_superuser',
'date_joined',
'user_pref'
)
class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer):
user = UserRelatedField(read_only=True)
class Meta:
model = UserPreferences
fields = (
'id',
'url',
'user',
'show_beta_interface',
'airport_ui',
'created_date',
'modified_date'
)
|
from django.contrib.auth.models import User
from rest_framework import serializers
from api.models import UserPreferences, HelpLink
class HelpLinkSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HelpLink
fields = (
'link_key',
'href'
)
class UserPreferencesSummarySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = UserPreferences
fields = (
'id',
'url'
)
class UserRelatedField(serializers.PrimaryKeyRelatedField):
def use_pk_only_optimization(self):
return False
def to_representation(self, value):
serializer = UserSerializer(value, context=self.context)
return serializer.data
class UserSerializer(serializers.HyperlinkedModelSerializer):
user_pref = UserPreferencesSummarySerializer(
source='userpreferences_set',
many=True)
class Meta:
model = User
fields = (
'id',
'url',
'username',
'first_name',
'last_name',
'email',
'is_staff',
'is_superuser',
'date_joined',
'user_pref'
)
class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer):
user = UserRelatedField(read_only=True)
class Meta:
model = UserPreferences
fields = (
'id',
'url',
'user',
'show_beta_interface',
'airport_ui',
'created_date',
'modified_date'
)
|
Reduce response data for HelpLink
|
Reduce response data for HelpLink
|
Python
|
apache-2.0
|
CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend,CCI-MOC/GUI-Frontend
|
from django.contrib.auth.models import User
from rest_framework import serializers
from api.models import UserPreferences, HelpLink
class HelpLinkSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HelpLink
fields = (
'link_key',
'topic',
'href',
'created_date',
'modified_date'
)
class UserPreferencesSummarySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = UserPreferences
fields = (
'id',
'url'
)
class UserRelatedField(serializers.PrimaryKeyRelatedField):
def use_pk_only_optimization(self):
return False
def to_representation(self, value):
serializer = UserSerializer(value, context=self.context)
return serializer.data
class UserSerializer(serializers.HyperlinkedModelSerializer):
user_pref = UserPreferencesSummarySerializer(
source='userpreferences_set',
many=True)
class Meta:
model = User
fields = (
'id',
'url',
'username',
'first_name',
'last_name',
'email',
'is_staff',
'is_superuser',
'date_joined',
'user_pref'
)
class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer):
user = UserRelatedField(read_only=True)
class Meta:
model = UserPreferences
fields = (
'id',
'url',
'user',
'show_beta_interface',
'airport_ui',
'created_date',
'modified_date'
)
Reduce response data for HelpLink
|
from django.contrib.auth.models import User
from rest_framework import serializers
from api.models import UserPreferences, HelpLink
class HelpLinkSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HelpLink
fields = (
'link_key',
'href'
)
class UserPreferencesSummarySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = UserPreferences
fields = (
'id',
'url'
)
class UserRelatedField(serializers.PrimaryKeyRelatedField):
def use_pk_only_optimization(self):
return False
def to_representation(self, value):
serializer = UserSerializer(value, context=self.context)
return serializer.data
class UserSerializer(serializers.HyperlinkedModelSerializer):
user_pref = UserPreferencesSummarySerializer(
source='userpreferences_set',
many=True)
class Meta:
model = User
fields = (
'id',
'url',
'username',
'first_name',
'last_name',
'email',
'is_staff',
'is_superuser',
'date_joined',
'user_pref'
)
class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer):
user = UserRelatedField(read_only=True)
class Meta:
model = UserPreferences
fields = (
'id',
'url',
'user',
'show_beta_interface',
'airport_ui',
'created_date',
'modified_date'
)
|
<commit_before>from django.contrib.auth.models import User
from rest_framework import serializers
from api.models import UserPreferences, HelpLink
class HelpLinkSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HelpLink
fields = (
'link_key',
'topic',
'href',
'created_date',
'modified_date'
)
class UserPreferencesSummarySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = UserPreferences
fields = (
'id',
'url'
)
class UserRelatedField(serializers.PrimaryKeyRelatedField):
def use_pk_only_optimization(self):
return False
def to_representation(self, value):
serializer = UserSerializer(value, context=self.context)
return serializer.data
class UserSerializer(serializers.HyperlinkedModelSerializer):
user_pref = UserPreferencesSummarySerializer(
source='userpreferences_set',
many=True)
class Meta:
model = User
fields = (
'id',
'url',
'username',
'first_name',
'last_name',
'email',
'is_staff',
'is_superuser',
'date_joined',
'user_pref'
)
class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer):
user = UserRelatedField(read_only=True)
class Meta:
model = UserPreferences
fields = (
'id',
'url',
'user',
'show_beta_interface',
'airport_ui',
'created_date',
'modified_date'
)
<commit_msg>Reduce response data for HelpLink<commit_after>
|
from django.contrib.auth.models import User
from rest_framework import serializers
from api.models import UserPreferences, HelpLink
class HelpLinkSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HelpLink
fields = (
'link_key',
'href'
)
class UserPreferencesSummarySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = UserPreferences
fields = (
'id',
'url'
)
class UserRelatedField(serializers.PrimaryKeyRelatedField):
def use_pk_only_optimization(self):
return False
def to_representation(self, value):
serializer = UserSerializer(value, context=self.context)
return serializer.data
class UserSerializer(serializers.HyperlinkedModelSerializer):
user_pref = UserPreferencesSummarySerializer(
source='userpreferences_set',
many=True)
class Meta:
model = User
fields = (
'id',
'url',
'username',
'first_name',
'last_name',
'email',
'is_staff',
'is_superuser',
'date_joined',
'user_pref'
)
class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer):
user = UserRelatedField(read_only=True)
class Meta:
model = UserPreferences
fields = (
'id',
'url',
'user',
'show_beta_interface',
'airport_ui',
'created_date',
'modified_date'
)
|
from django.contrib.auth.models import User
from rest_framework import serializers
from api.models import UserPreferences, HelpLink
class HelpLinkSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HelpLink
fields = (
'link_key',
'topic',
'href',
'created_date',
'modified_date'
)
class UserPreferencesSummarySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = UserPreferences
fields = (
'id',
'url'
)
class UserRelatedField(serializers.PrimaryKeyRelatedField):
def use_pk_only_optimization(self):
return False
def to_representation(self, value):
serializer = UserSerializer(value, context=self.context)
return serializer.data
class UserSerializer(serializers.HyperlinkedModelSerializer):
user_pref = UserPreferencesSummarySerializer(
source='userpreferences_set',
many=True)
class Meta:
model = User
fields = (
'id',
'url',
'username',
'first_name',
'last_name',
'email',
'is_staff',
'is_superuser',
'date_joined',
'user_pref'
)
class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer):
user = UserRelatedField(read_only=True)
class Meta:
model = UserPreferences
fields = (
'id',
'url',
'user',
'show_beta_interface',
'airport_ui',
'created_date',
'modified_date'
)
Reduce response data for HelpLinkfrom django.contrib.auth.models import User
from rest_framework import serializers
from api.models import UserPreferences, HelpLink
class HelpLinkSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HelpLink
fields = (
'link_key',
'href'
)
class UserPreferencesSummarySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = UserPreferences
fields = (
'id',
'url'
)
class UserRelatedField(serializers.PrimaryKeyRelatedField):
def use_pk_only_optimization(self):
return False
def to_representation(self, value):
serializer = UserSerializer(value, context=self.context)
return serializer.data
class UserSerializer(serializers.HyperlinkedModelSerializer):
user_pref = UserPreferencesSummarySerializer(
source='userpreferences_set',
many=True)
class Meta:
model = User
fields = (
'id',
'url',
'username',
'first_name',
'last_name',
'email',
'is_staff',
'is_superuser',
'date_joined',
'user_pref'
)
class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer):
user = UserRelatedField(read_only=True)
class Meta:
model = UserPreferences
fields = (
'id',
'url',
'user',
'show_beta_interface',
'airport_ui',
'created_date',
'modified_date'
)
|
<commit_before>from django.contrib.auth.models import User
from rest_framework import serializers
from api.models import UserPreferences, HelpLink
class HelpLinkSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HelpLink
fields = (
'link_key',
'topic',
'href',
'created_date',
'modified_date'
)
class UserPreferencesSummarySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = UserPreferences
fields = (
'id',
'url'
)
class UserRelatedField(serializers.PrimaryKeyRelatedField):
def use_pk_only_optimization(self):
return False
def to_representation(self, value):
serializer = UserSerializer(value, context=self.context)
return serializer.data
class UserSerializer(serializers.HyperlinkedModelSerializer):
user_pref = UserPreferencesSummarySerializer(
source='userpreferences_set',
many=True)
class Meta:
model = User
fields = (
'id',
'url',
'username',
'first_name',
'last_name',
'email',
'is_staff',
'is_superuser',
'date_joined',
'user_pref'
)
class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer):
user = UserRelatedField(read_only=True)
class Meta:
model = UserPreferences
fields = (
'id',
'url',
'user',
'show_beta_interface',
'airport_ui',
'created_date',
'modified_date'
)
<commit_msg>Reduce response data for HelpLink<commit_after>from django.contrib.auth.models import User
from rest_framework import serializers
from api.models import UserPreferences, HelpLink
class HelpLinkSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = HelpLink
fields = (
'link_key',
'href'
)
class UserPreferencesSummarySerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = UserPreferences
fields = (
'id',
'url'
)
class UserRelatedField(serializers.PrimaryKeyRelatedField):
def use_pk_only_optimization(self):
return False
def to_representation(self, value):
serializer = UserSerializer(value, context=self.context)
return serializer.data
class UserSerializer(serializers.HyperlinkedModelSerializer):
user_pref = UserPreferencesSummarySerializer(
source='userpreferences_set',
many=True)
class Meta:
model = User
fields = (
'id',
'url',
'username',
'first_name',
'last_name',
'email',
'is_staff',
'is_superuser',
'date_joined',
'user_pref'
)
class UserPreferenceSerializer(serializers.HyperlinkedModelSerializer):
user = UserRelatedField(read_only=True)
class Meta:
model = UserPreferences
fields = (
'id',
'url',
'user',
'show_beta_interface',
'airport_ui',
'created_date',
'modified_date'
)
|
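A hedged sketch of the DRF mechanism the UserRelatedField in this record relies on. The class name ExpandedRelatedField and its constructor argument are illustrative assumptions, not part of the record; only use_pk_only_optimization() and to_representation() mirror the recorded code.

from rest_framework import serializers

class ExpandedRelatedField(serializers.PrimaryKeyRelatedField):
    # Hypothetical generalisation of the UserRelatedField above.
    def __init__(self, representation_serializer, **kwargs):
        self.representation_serializer = representation_serializer  # assumption
        super(ExpandedRelatedField, self).__init__(**kwargs)

    def use_pk_only_optimization(self):
        # Returning False makes DRF pass the full model instance to
        # to_representation() instead of a lightweight PKOnlyObject,
        # so a nested serializer has real fields to read.
        return False

    def to_representation(self, value):
        serializer = self.representation_serializer(value, context=self.context)
        return serializer.data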
63f42d18a2771b6057ae96c80d25f605e353fee6
|
app/main/errors.py
|
app/main/errors.py
|
# coding=utf-8
from flask import render_template
from . import main
from dmapiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
# coding=utf-8
from flask import render_template
from . import main
from ..api_client.error import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
Change app-level error handler to use api_client.error exceptions
|
Change app-level error handler to use api_client.error exceptions
|
Python
|
mit
|
AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend,AusDTO/dto-digitalmarketplace-buyer-frontend
|
# coding=utf-8
from flask import render_template
from . import main
from dmapiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
Change app-level error handler to use api_client.error exceptions
|
# coding=utf-8
from flask import render_template
from . import main
from ..api_client.error import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
<commit_before># coding=utf-8
from flask import render_template
from . import main
from dmapiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
<commit_msg>Change app-level error handler to use api_client.error exceptions<commit_after>
|
# coding=utf-8
from flask import render_template
from . import main
from ..api_client.error import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
# coding=utf-8
from flask import render_template
from . import main
from dmapiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
Change app-level error handler to use api_client.error exceptions# coding=utf-8
from flask import render_template
from . import main
from ..api_client.error import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
<commit_before># coding=utf-8
from flask import render_template
from . import main
from dmapiclient import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
<commit_msg>Change app-level error handler to use api_client.error exceptions<commit_after># coding=utf-8
from flask import render_template
from . import main
from ..api_client.error import APIError
@main.app_errorhandler(APIError)
def api_error_handler(e):
return _render_error_page(e.status_code)
@main.app_errorhandler(404)
def page_not_found(e):
return _render_error_page(404)
@main.app_errorhandler(500)
def internal_server_error(e):
return _render_error_page(500)
@main.app_errorhandler(503)
def service_unavailable(e):
return _render_error_page(503, e.response)
def _render_error_page(status_code, error_message=None):
templates = {
404: "errors/404.html",
500: "errors/500.html",
503: "errors/500.html",
}
if status_code not in templates:
status_code = 500
return render_template(
templates[status_code],
error_message=error_message
), status_code
|
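A minimal sketch of how the handler in this record is exercised, assuming an APIError class carrying the status_code and response attributes the handler reads; the real class lives in api_client.error (imported from dmapiclient before this change) and may differ.

class APIError(Exception):
    # Assumed shape only: the handler above needs status_code and response.
    def __init__(self, status_code=500, response=None):
        super(APIError, self).__init__('API responded with %s' % status_code)
        self.status_code = status_code
        self.response = response

def fetch_service(client, service_id):
    # Hypothetical view helper: raising APIError anywhere in a request
    # routes it to api_error_handler(), which maps the status code to a
    # template (unknown codes fall back to the 500 page).
    data = client.get('/services/%s' % service_id)
    if data is None:
        raise APIError(status_code=503)
    return data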
da5269713a444c8a506535cd88f21fea8f1ffc83
|
antxetamedia/multimedia/handlers.py
|
antxetamedia/multimedia/handlers.py
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
bucket.strip('-')
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
else:
raise
key = bucket.new_key(key)
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
from __future__ import unicode_literals
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
bucket = '{}-{}'.format(user, bucket.strip('-'))
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
else:
raise
key = bucket.new_key(key)
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
Prepend S3 account username to buckets
|
Prepend S3 account username to buckets
|
Python
|
agpl-3.0
|
GISAElkartea/antxetamedia,GISAElkartea/antxetamedia,GISAElkartea/antxetamedia
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
bucket.strip('-')
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
else:
raise
key = bucket.new_key(key)
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
Prepend S3 account username to buckets
|
from __future__ import unicode_literals
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
bucket = '{}-{}'.format(user, bucket.strip('-'))
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
else:
raise
key = bucket.new_key(key)
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
<commit_before>from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
bucket.strip('-')
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
else:
raise
key = bucket.new_key(key)
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
<commit_msg>Prepend S3 account username to buckets<commit_after>
|
from __future__ import unicode_literals
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
bucket = '{}-{}'.format(user, bucket.strip('-'))
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
else:
raise
key = bucket.new_key(key)
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
bucket.strip('-')
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
else:
raise
key = bucket.new_key(key)
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
Prepend S3 account username to bucketsfrom __future__ import unicode_literals
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
bucket = '{}-{}'.format(user, bucket.strip('-'))
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
else:
raise
key = bucket.new_key(key)
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
<commit_before>from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
bucket.strip('-')
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
else:
raise
key = bucket.new_key(key)
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
<commit_msg>Prepend S3 account username to buckets<commit_after>from __future__ import unicode_literals
from boto.s3.connection import S3Connection
from boto.s3.bucket import Bucket
from boto.exception import S3CreateError
from django.conf import settings
def upload(user, passwd, bucket, metadata, key, fd):
conn = S3Connection(user, passwd, host=settings.S3_HOST, is_secure=False)
bucket = '{}-{}'.format(user, bucket.strip('-'))
try:
bucket = conn.create_bucket(bucket, headers=metadata)
except S3CreateError as e:
if e.status == 409:
bucket = Bucket(conn, bucket)
else:
raise
key = bucket.new_key(key)
key.set_contents_from_file(fd)
return key.generate_url(0).split('?')[0]
|
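A hypothetical call site for the upload() helper in this record; the credentials, metadata and names below are placeholders. It also traces what the bucket-naming change does: strip('-') removes stray leading and trailing dashes, then the account user is prepended.

with open('show.ogg', 'rb') as fd:
    url = upload(
        'radio',                      # S3 account user, now also the bucket prefix
        's3cret',                     # placeholder password
        'podcasts-',                  # strip('-') -> 'podcasts', prefixed -> 'radio-podcasts'
        {'x-amz-meta-show': 'news'},  # headers passed to create_bucket
        'shows/2014/01/01.ogg',       # key inside the bucket
        fd,
    )
# generate_url(0) yields a signed URL; splitting on '?' drops the expiring
# query string so a plain URL is returned.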
ceb88623b55cd572d4ef45ec2fb7d81639e07878
|
fancypages/__init__.py
|
fancypages/__init__.py
|
__version__ = (0, 0, 1, 'alpha', 1)
|
import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
|
Add setting for fancypages base template dir
|
Add setting for fancypages base template dir
|
Python
|
bsd-3-clause
|
socradev/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages,socradev/django-fancypages,tangentlabs/django-fancypages
|
__version__ = (0, 0, 1, 'alpha', 1)
Add setting for fancypages base template dir
|
import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
|
<commit_before>__version__ = (0, 0, 1, 'alpha', 1)
<commit_msg>Add setting for fancypages base template dir<commit_after>
|
import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
|
__version__ = (0, 0, 1, 'alpha', 1)
Add setting for fancypages base template dirimport os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
|
<commit_before>__version__ = (0, 0, 1, 'alpha', 1)
<commit_msg>Add setting for fancypages base template dir<commit_after>import os
__version__ = (0, 0, 1, 'alpha', 1)
FP_MAIN_TEMPLATE_DIR = os.path.join(
os.path.dirname(os.path.abspath(__file__))
)
|
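Note that os.path.join() with a single argument returns it unchanged, so FP_MAIN_TEMPLATE_DIR resolves to the fancypages package directory itself. A hypothetical Django settings wiring for the new constant (the project's actual integration may differ):

import fancypages

TEMPLATE_DIRS = (
    fancypages.FP_MAIN_TEMPLATE_DIR,  # the fancypages package directory
)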
3036adf880473741188d2c7c4f9adc4e433b3d3e
|
webkit/tools/layout_tests/run_webkit_tests.py
|
webkit/tools/layout_tests/run_webkit_tests.py
|
#!/usr/bin/env python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper around
third_party/WebKit/Tools/Scripts/run-webkit-tests"""
import os
import subprocess
import sys
def main():
cmd = [sys.executable]
src_dir = os.path.abspath(os.path.join(sys.path[0], '..', '..', '..'))
script_dir=os.path.join(src_dir, "third_party", "WebKit", "Tools",
"Scripts")
script = os.path.join(script_dir, 'run-webkit-tests')
cmd.append(script)
if '--chromium' not in sys.argv:
cmd.append('--chromium')
cmd.extend(sys.argv[1:])
return subprocess.call(cmd)
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper around
third_party/WebKit/Tools/Scripts/new-run-webkit-tests"""
import os
import subprocess
import sys
def main():
cmd = [sys.executable]
src_dir = os.path.abspath(os.path.join(sys.path[0], '..', '..', '..'))
script_dir=os.path.join(src_dir, "third_party", "WebKit", "Tools",
"Scripts")
script = os.path.join(script_dir, 'new-run-webkit-tests')
cmd.append(script)
if '--chromium' not in sys.argv:
cmd.append('--chromium')
cmd.extend(sys.argv[1:])
return subprocess.call(cmd)
if __name__ == '__main__':
sys.exit(main())
|
Revert 193850 "Remove references to new-run-webkit-tests"
|
Revert 193850 "Remove references to new-run-webkit-tests"
Tries to execute the perl script "run-webkit-tests" using "cmd" which is python.
> Remove references to new-run-webkit-tests
>
> We are going to rename it to run-webkit-tests soon.
>
> BUG=
>
> Review URL: https://chromiumcodereview.appspot.com/13980005
TBR=jchaffraix@chromium.org
Review URL: https://codereview.chromium.org/14222003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@193860 0039d316-1c4b-4281-b951-d872f2087c98
|
Python
|
bsd-3-clause
|
M4sse/chromium.src,anirudhSK/chromium,jaruba/chromium.src,anirudhSK/chromium,anirudhSK/chromium,hujiajie/pa-chromium,ondra-novak/chromium.src,ChromiumWebApps/chromium,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,hujiajie/pa-chromium,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,dednal/chromium.src,krieger-od/nwjs_chromium.src,Fireblend/chromium-crosswalk,bright-sparks/chromium-spacewalk,M4sse/chromium.src,axinging/chromium-crosswalk,markYoungH/chromium.src,hujiajie/pa-chromium,TheTypoMaster/chromium-crosswalk,ltilve/chromium,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,ChromiumWebApps/chromium,hujiajie/pa-chromium,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,hgl888/chromium-crosswalk,dushu1203/chromium.src,pozdnyakov/chromium-crosswalk,bright-sparks/chromium-spacewalk,ltilve/chromium,hgl888/chromium-crosswalk-efl,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,chuan9/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,ltilve/chromium,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,fujunwei/chromium-crosswalk,axinging/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,Fireblend/chromium-crosswalk,fujunwei/chromium-crosswalk,anirudhSK/chromium,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,ChromiumWebApps/chromium,dushu1203/chromium.src,littlstar/chromium.src,Pluto-tv/chromium-crosswalk,littlstar/chromium.src,Chilledheart/chromium,jaruba/chromium.src,fujunwei/chromium-crosswalk,anirudhSK/chromium,M4sse/chromium.src,crosswalk-project/chromium-crosswalk-efl,Jonekee/chromium.src,hgl888/chromium-crosswalk-efl,anirudhSK/chromium,patrickm/chromium.src,axinging/chromium-crosswalk,pozdnyakov/chromium-crosswalk,M4sse/chromium.src,patrickm/chromium.src,Chilledheart/chromium,crosswalk-project/chromium-crosswalk-efl,mogoweb/chromium-crosswalk,axinging/chromium-crosswalk,axinging/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,mohamed--abdel-maksoud/chromium.src,ondra-novak/chromium.src,hgl888/chromium-crosswalk,jaruba/chromium.src,Fireblend/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,Fireblend/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,hujiajie/pa-chromium,dednal/chromium.src,hgl888/chromium-crosswalk,Jonekee/chromium.src,mohamed--abdel-maksoud/chromium.src,ChromiumWebApps/chromium,ondra-novak/chromium.src,Pluto-tv/chromium-crosswalk,hgl888/chromium-crosswalk,Chilledheart/chromium,krieger-od/nwjs_chromium.src,ltilve/chromium,Chilledheart/chromium,dednal/chromium.src,dednal/chromium.src,ChromiumWebApps/chromium,Just-D/chromium-1,TheTypoMaster/chromium-crosswalk,hgl888/chromium-crosswalk,bright-sparks/chromium-spacewalk,Pluto-tv/chromium-crosswalk,Chilledheart/chromium,jaruba/chromium.src,ltilve/chromium,jaruba/chromium.src,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,dednal/chromium.src,mogoweb/chromium-crosswalk,ondra-novak/chromium.src,Chilledheart/chromium,jaruba/chromium.src,hujiajie/pa-chromium,PeterWangIntel/chromium-crosswalk,chuan9/chromium-crosswalk,dushu1203/chromium.src,Just-D/chromium-1,mohamed--abdel-maksoud/chromium.src,Jonekee/chromium.src,patrickm/chromium.src,bright-sparks/chromium-spacewalk,PeterWangIntel/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,markYoungH/chromium.src,hujiajie/pa-chromium,fujunwei/chromium-crosswalk,Just-D/chromium-1,Fireblend/chromium-crosswalk,dushu1203/chromium.src,markYoungH/chromium.src,dednal/chromium.src,axinging/chromium-crosswalk,Fireblend/chromium-crosswalk,anirudhSK/chromium,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,hujiajie/pa-chromium,hujiajie/pa-chromium,Jonekee/chromium.src,pozdnyakov/chromium-crosswalk,Just-D/chromium-1,dednal/chromium.src,ltilve/chromium,pozdnyakov/chromium-crosswalk,mogoweb/chromium-crosswalk,fujunwei/chromium-crosswalk,patrickm/chromium.src,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,hgl888/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,markYoungH/chromium.src,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,dednal/chromium.src,hgl888/chromium-crosswalk-efl,markYoungH/chromium.src,Jonekee/chromium.src,Jonekee/chromium.src,hujiajie/pa-chromium,krieger-od/nwjs_chromium.src,dushu1203/chromium.src,pozdnyakov/chromium-crosswalk,markYoungH/chromium.src,ChromiumWebApps/chromium,patrickm/chromium.src,dednal/chromium.src,chuan9/chromium-crosswalk,markYoungH/chromium.src,ltilve/chromium,axinging/chromium-crosswalk,Jonekee/chromium.src,ChromiumWebApps/chromium,ChromiumWebApps/chromium,ltilve/chromium,littlstar/chromium.src,Jonekee/chromium.src,chuan9/chromium-crosswalk,littlstar/chromium.src,anirudhSK/chromium,Fireblend/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,markYoungH/chromium.src,chuan9/chromium-crosswalk,hujiajie/pa-chromium,dednal/chromium.src,Chilledheart/chromium,mohamed--abdel-maksoud/chromium.src,TheTypoMaster/chromium-crosswalk,mogoweb/chromium-crosswalk,hgl888/chromium-crosswalk-efl,fujunwei/chromium-crosswalk,ondra-novak/chromium.src,Chilledheart/chromium,jaruba/chromium.src,markYoungH/chromium.src,pozdnyakov/chromium-crosswalk,dushu1203/chromium.src,ondra-novak/chromium.src,M4sse/chromium.src,ltilve/chromium,hgl888/chromium-crosswalk,axinging/chromium-crosswalk,krieger-od/nwjs_chromium.src,littlstar/chromium.src,mogoweb/chromium-crosswalk,M4sse/chromium.src,PeterWangIntel/chromium-crosswalk,axinging/chromium-crosswalk,jaruba/chromium.src,M4sse/chromium.src,chuan9/chromium-crosswalk,ondra-novak/chromium.src,mogoweb/chromium-crosswalk,PeterWangIntel/chromium-crosswalk,mogoweb/chromium-crosswalk,Chilledheart/chromium,jaruba/chromium.src,markYoungH/chromium.src,krieger-od/nwjs_chromium.src,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,crosswalk-project/chromium-crosswalk-efl,bright-sparks/chromium-spacewalk,Jonekee/chromium.src,anirudhSK/chromium,pozdnyakov/chromium-crosswalk,chuan9/chromium-crosswalk,hgl888/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,krieger-od/nwjs_chromium.src,Just-D/chromium-1,jaruba/chromium.src,dushu1203/chromium.src,mogoweb/chromium-crosswalk,fujunwei/chromium-crosswalk,pozdnyakov/chromium-crosswalk,Jonekee/chromium.src,ChromiumWebApps/chromium,littlstar/chromium.src,anirudhSK/chromium,ondra-novak/chromium.src,hgl888/chromium-crosswalk-efl,Pluto-tv/chromium-crosswalk,Just-D/chromium-1,hgl888/chromium-crosswalk-efl,krieger-od/nwjs_chromium.src,bright-sparks/chromium-spacewalk,patrickm/chromium.src,PeterWangIntel/chromium-crosswalk,M4sse/chromium.src,Pluto-tv/chromium-crosswalk,TheTypoMaster/chromium-crosswalk,mohamed--abdel-maksoud/chromium.src,markYoungH/chromium.src,Fireblend/chromium-crosswalk,chuan9/chromium-crosswalk,bright-sparks/chromium-spacewalk,ChromiumWebApps/chromium,M4sse/chromium.src,anirudhSK/chromium,fujunwei/chromium-crosswalk,Pluto-tv/chromium-crosswalk,patrickm/chromium.src,axinging/chromium-crosswalk,ondra-novak/chromium.src,hgl888/chromium-crosswalk,ChromiumWebApps/chromium,Pluto-tv/chromium-crosswalk,pozdnyakov/chromium-crosswalk,anirudhSK/chromium,littlstar/chromium.src,jaruba/chromium.src,hgl888/chromium-crosswalk-efl,dushu1203/chromium.src,dushu1203/chromium.src,Fireblend/chromium-crosswalk,pozdnyakov/chromium-crosswalk
|
#!/usr/bin/env python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper around
third_party/WebKit/Tools/Scripts/run-webkit-tests"""
import os
import subprocess
import sys
def main():
cmd = [sys.executable]
src_dir = os.path.abspath(os.path.join(sys.path[0], '..', '..', '..'))
script_dir=os.path.join(src_dir, "third_party", "WebKit", "Tools",
"Scripts")
script = os.path.join(script_dir, 'run-webkit-tests')
cmd.append(script)
if '--chromium' not in sys.argv:
cmd.append('--chromium')
cmd.extend(sys.argv[1:])
return subprocess.call(cmd)
if __name__ == '__main__':
sys.exit(main())
Revert 193850 "Remove references to new-run-webkit-tests"
Tries to execute the perl script "run-webkit-tests" using "cmd" which is python.
> Remove references to new-run-webkit-tests
>
> We are going to rename it to run-webkit-tests soon.
>
> BUG=
>
> Review URL: https://chromiumcodereview.appspot.com/13980005
TBR=jchaffraix@chromium.org
Review URL: https://codereview.chromium.org/14222003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@193860 0039d316-1c4b-4281-b951-d872f2087c98
|
#!/usr/bin/env python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper around
third_party/WebKit/Tools/Scripts/new-run-webkit-tests"""
import os
import subprocess
import sys
def main():
cmd = [sys.executable]
src_dir = os.path.abspath(os.path.join(sys.path[0], '..', '..', '..'))
script_dir=os.path.join(src_dir, "third_party", "WebKit", "Tools",
"Scripts")
script = os.path.join(script_dir, 'new-run-webkit-tests')
cmd.append(script)
if '--chromium' not in sys.argv:
cmd.append('--chromium')
cmd.extend(sys.argv[1:])
return subprocess.call(cmd)
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper around
third_party/WebKit/Tools/Scripts/run-webkit-tests"""
import os
import subprocess
import sys
def main():
cmd = [sys.executable]
src_dir = os.path.abspath(os.path.join(sys.path[0], '..', '..', '..'))
script_dir=os.path.join(src_dir, "third_party", "WebKit", "Tools",
"Scripts")
script = os.path.join(script_dir, 'run-webkit-tests')
cmd.append(script)
if '--chromium' not in sys.argv:
cmd.append('--chromium')
cmd.extend(sys.argv[1:])
return subprocess.call(cmd)
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Revert 193850 "Remove references to new-run-webkit-tests"
Tries to execute the perl script "run-webkit-tests" using "cmd" which is python.
> Remove references to new-run-webkit-tests
>
> We are going to rename it to run-webkit-tests soon.
>
> BUG=
>
> Review URL: https://chromiumcodereview.appspot.com/13980005
TBR=jchaffraix@chromium.org
Review URL: https://codereview.chromium.org/14222003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@193860 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>
|
#!/usr/bin/env python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper around
third_party/WebKit/Tools/Scripts/new-run-webkit-tests"""
import os
import subprocess
import sys
def main():
cmd = [sys.executable]
src_dir = os.path.abspath(os.path.join(sys.path[0], '..', '..', '..'))
script_dir=os.path.join(src_dir, "third_party", "WebKit", "Tools",
"Scripts")
script = os.path.join(script_dir, 'new-run-webkit-tests')
cmd.append(script)
if '--chromium' not in sys.argv:
cmd.append('--chromium')
cmd.extend(sys.argv[1:])
return subprocess.call(cmd)
if __name__ == '__main__':
sys.exit(main())
|
#!/usr/bin/env python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper around
third_party/WebKit/Tools/Scripts/run-webkit-tests"""
import os
import subprocess
import sys
def main():
cmd = [sys.executable]
src_dir = os.path.abspath(os.path.join(sys.path[0], '..', '..', '..'))
script_dir=os.path.join(src_dir, "third_party", "WebKit", "Tools",
"Scripts")
script = os.path.join(script_dir, 'run-webkit-tests')
cmd.append(script)
if '--chromium' not in sys.argv:
cmd.append('--chromium')
cmd.extend(sys.argv[1:])
return subprocess.call(cmd)
if __name__ == '__main__':
sys.exit(main())
Revert 193850 "Remove references to new-run-webkit-tests"
Tries to execute the perl script "run-webkit-tests" using "cmd" which is python.
> Remove references to new-run-webkit-tests
>
> We are going to rename it to run-webkit-tests soon.
>
> BUG=
>
> Review URL: https://chromiumcodereview.appspot.com/13980005
TBR=jchaffraix@chromium.org
Review URL: https://codereview.chromium.org/14222003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@193860 0039d316-1c4b-4281-b951-d872f2087c98#!/usr/bin/env python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper around
third_party/WebKit/Tools/Scripts/new-run-webkit-tests"""
import os
import subprocess
import sys
def main():
cmd = [sys.executable]
src_dir = os.path.abspath(os.path.join(sys.path[0], '..', '..', '..'))
script_dir=os.path.join(src_dir, "third_party", "WebKit", "Tools",
"Scripts")
script = os.path.join(script_dir, 'new-run-webkit-tests')
cmd.append(script)
if '--chromium' not in sys.argv:
cmd.append('--chromium')
cmd.extend(sys.argv[1:])
return subprocess.call(cmd)
if __name__ == '__main__':
sys.exit(main())
|
<commit_before>#!/usr/bin/env python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper around
third_party/WebKit/Tools/Scripts/run-webkit-tests"""
import os
import subprocess
import sys
def main():
cmd = [sys.executable]
src_dir = os.path.abspath(os.path.join(sys.path[0], '..', '..', '..'))
script_dir=os.path.join(src_dir, "third_party", "WebKit", "Tools",
"Scripts")
script = os.path.join(script_dir, 'run-webkit-tests')
cmd.append(script)
if '--chromium' not in sys.argv:
cmd.append('--chromium')
cmd.extend(sys.argv[1:])
return subprocess.call(cmd)
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Revert 193850 "Remove references to new-run-webkit-tests"
Tries to execute the perl script "run-webkit-tests" using "cmd" which is python.
> Remove references to new-run-webkit-tests
>
> We are going to rename it to run-webkit-tests soon.
>
> BUG=
>
> Review URL: https://chromiumcodereview.appspot.com/13980005
TBR=jchaffraix@chromium.org
Review URL: https://codereview.chromium.org/14222003
git-svn-id: de016e52bd170d2d4f2344f9bf92d50478b649e0@193860 0039d316-1c4b-4281-b951-d872f2087c98<commit_after>#!/usr/bin/env python
# Copyright (c) 2010 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Wrapper around
third_party/WebKit/Tools/Scripts/new-run-webkit-tests"""
import os
import subprocess
import sys
def main():
cmd = [sys.executable]
src_dir = os.path.abspath(os.path.join(sys.path[0], '..', '..', '..'))
script_dir=os.path.join(src_dir, "third_party", "WebKit", "Tools",
"Scripts")
script = os.path.join(script_dir, 'new-run-webkit-tests')
cmd.append(script)
if '--chromium' not in sys.argv:
cmd.append('--chromium')
cmd.extend(sys.argv[1:])
return subprocess.call(cmd)
if __name__ == '__main__':
sys.exit(main())
|
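A hedged illustration of the bug behind this revert, not code from the tree: run-webkit-tests is a Perl script, so prefixing it with sys.executable (a Python interpreter) cannot work, whereas new-run-webkit-tests is Python and can be. A wrapper could instead inspect the shebang:

import subprocess
import sys

def run_script(script, args):
    # Hypothetical guard: only prepend the Python interpreter when the
    # target script is actually Python; otherwise rely on its shebang.
    with open(script) as f:
        shebang = f.readline()
    cmd = [sys.executable, script] if 'python' in shebang else [script]
    return subprocess.call(cmd + list(args))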
12a02b479daf8f3a5541e38ff13d8221480842ba
|
base/__init__.py
|
base/__init__.py
|
from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # NOQA
|
Make sure Celery is always loaded when Django is.
|
Make sure Celery is always loaded when Django is.
|
Python
|
apache-2.0
|
hello-base/web,hello-base/web,hello-base/web,hello-base/web
|
Make sure Celery is always loaded when Django is.
|
from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # NOQA
|
<commit_before><commit_msg>Make sure Celery is always loaded when Django is.<commit_after>
|
from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # NOQA
|
Make sure Celery is always loaded when Django is.from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # NOQA
|
<commit_before><commit_msg>Make sure Celery is always loaded when Django is.<commit_after>from __future__ import absolute_import
# This will make sure the app is always imported when
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # NOQA
|
|
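The import in this record assumes a sibling base/celery.py that defines app. A minimal sketch of that module under the conventional Django/Celery pattern; the project name and settings path are placeholders:

from __future__ import absolute_import

import os

from celery import Celery
from django.conf import settings

os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'base.settings')

app = Celery('base')
app.config_from_object('django.conf:settings')
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)  # register app tasks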
72655f0b0c7edfd3f51fe0ea847d45f9acd5ba42
|
hoomd/triggers.py
|
hoomd/triggers.py
|
# Copyright (c) 2009-2019 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
from hoomd import _hoomd
class Trigger(_hoomd.Trigger):
pass
class PeriodicTrigger(_hoomd.PeriodicTrigger):
def __init__(self, period, phase=0):
_hoomd.PeriodicTrigger.__init__(self, period, phase)
|
# Copyright (c) 2009-2019 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
from hoomd import _hoomd
class Trigger(_hoomd.Trigger):
pass
class PeriodicTrigger(_hoomd.PeriodicTrigger, Trigger):
def __init__(self, period, phase=0):
_hoomd.PeriodicTrigger.__init__(self, period, phase)
|
Make ``PeriodicTrigger`` inherit from ``Trigger``
|
Make ``PeriodicTrigger`` inherit from ``Trigger``
Fixes bug in checking state and preprocessing ``Triggers`` for duck
typing.
|
Python
|
bsd-3-clause
|
joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue,joaander/hoomd-blue
|
# Copyright (c) 2009-2019 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
from hoomd import _hoomd
class Trigger(_hoomd.Trigger):
pass
class PeriodicTrigger(_hoomd.PeriodicTrigger):
def __init__(self, period, phase=0):
_hoomd.PeriodicTrigger.__init__(self, period, phase)
Make ``PeriodicTrigger`` inherit from ``Trigger``
Fixes bug in checking state and preprocessing ``Triggers`` for duck
typing.
|
# Copyright (c) 2009-2019 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
from hoomd import _hoomd
class Trigger(_hoomd.Trigger):
pass
class PeriodicTrigger(_hoomd.PeriodicTrigger, Trigger):
def __init__(self, period, phase=0):
_hoomd.PeriodicTrigger.__init__(self, period, phase)
|
<commit_before># Copyright (c) 2009-2019 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
from hoomd import _hoomd
class Trigger(_hoomd.Trigger):
pass
class PeriodicTrigger(_hoomd.PeriodicTrigger):
def __init__(self, period, phase=0):
_hoomd.PeriodicTrigger.__init__(self, period, phase)
<commit_msg>Make ``PeriodicTrigger`` inherit from ``Trigger``
Fixes bug in checking state and preprocessing ``Triggers`` for duck
typing.<commit_after>
|
# Copyright (c) 2009-2019 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
from hoomd import _hoomd
class Trigger(_hoomd.Trigger):
pass
class PeriodicTrigger(_hoomd.PeriodicTrigger, Trigger):
def __init__(self, period, phase=0):
_hoomd.PeriodicTrigger.__init__(self, period, phase)
|
# Copyright (c) 2009-2019 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
from hoomd import _hoomd
class Trigger(_hoomd.Trigger):
pass
class PeriodicTrigger(_hoomd.PeriodicTrigger):
def __init__(self, period, phase=0):
_hoomd.PeriodicTrigger.__init__(self, period, phase)
Make ``PeriodicTrigger`` inherit from ``Trigger``
Fixes bug in checking state and preprocessing ``Triggers`` for duck
typing.# Copyright (c) 2009-2019 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
from hoomd import _hoomd
class Trigger(_hoomd.Trigger):
pass
class PeriodicTrigger(_hoomd.PeriodicTrigger, Trigger):
def __init__(self, period, phase=0):
_hoomd.PeriodicTrigger.__init__(self, period, phase)
|
<commit_before># Copyright (c) 2009-2019 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
from hoomd import _hoomd
class Trigger(_hoomd.Trigger):
pass
class PeriodicTrigger(_hoomd.PeriodicTrigger):
def __init__(self, period, phase=0):
_hoomd.PeriodicTrigger.__init__(self, period, phase)
<commit_msg>Make ``PeriodicTrigger`` inherit from ``Trigger``
Fixes bug in checking state and preprocessing ``Triggers`` for duck
typing.<commit_after># Copyright (c) 2009-2019 The Regents of the University of Michigan
# This file is part of the HOOMD-blue project, released under the BSD 3-Clause
# License.
from hoomd import _hoomd
class Trigger(_hoomd.Trigger):
pass
class PeriodicTrigger(_hoomd.PeriodicTrigger, Trigger):
def __init__(self, period, phase=0):
_hoomd.PeriodicTrigger.__init__(self, period, phase)
|
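A hedged illustration (not hoomd code) of the duck-typing bug the extra base class fixes: validation that accepts triggers typically does an isinstance check against Trigger, which previously failed for PeriodicTrigger.

def validate_trigger(value):
    # Hypothetical preprocessing step assumed by the commit message.
    if isinstance(value, Trigger):
        return value
    if isinstance(value, int):
        return PeriodicTrigger(period=value)  # coerce a plain period
    raise ValueError('expected a Trigger or an integer period')

# Before the fix, isinstance(PeriodicTrigger(10), Trigger) was False because
# PeriodicTrigger only subclassed the C++ binding _hoomd.PeriodicTrigger.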
990e33af851172ea3d79e591bde52af554d0eb50
|
common/util.py
|
common/util.py
|
#!/usr/bin/python
"""
common.py
"""
import sys
def log(msg, *args):
if args:
msg = msg % args
print >>sys.stderr, 'webpipe:', msg
|
"""
util.py
"""
import os
import sys
basename = os.path.basename(sys.argv[0])
prefix, _ = os.path.splitext(basename)
def log(msg, *args):
if args:
msg = msg % args
print >>sys.stderr, prefix + ': ' + msg
|
Use the program name as the prefix.
|
Use the program name as the prefix.
|
Python
|
bsd-3-clause
|
andychu/webpipe,andychu/webpipe,andychu/webpipe,andychu/webpipe,andychu/webpipe
|
#!/usr/bin/python
"""
common.py
"""
import sys
def log(msg, *args):
if args:
msg = msg % args
print >>sys.stderr, 'webpipe:', msg
Use the program name as the prefix.
|
"""
util.py
"""
import os
import sys
basename = os.path.basename(sys.argv[0])
prefix, _ = os.path.splitext(basename)
def log(msg, *args):
if args:
msg = msg % args
print >>sys.stderr, prefix + ': ' + msg
|
<commit_before>#!/usr/bin/python
"""
common.py
"""
import sys
def log(msg, *args):
if args:
msg = msg % args
print >>sys.stderr, 'webpipe:', msg
<commit_msg>Use the program name as the prefix.<commit_after>
|
"""
util.py
"""
import os
import sys
basename = os.path.basename(sys.argv[0])
prefix, _ = os.path.splitext(basename)
def log(msg, *args):
if args:
msg = msg % args
print >>sys.stderr, prefix + ': ' + msg
|
#!/usr/bin/python
"""
common.py
"""
import sys
def log(msg, *args):
if args:
msg = msg % args
print >>sys.stderr, 'webpipe:', msg
Use the program name as the prefix."""
util.py
"""
import os
import sys
basename = os.path.basename(sys.argv[0])
prefix, _ = os.path.splitext(basename)
def log(msg, *args):
if args:
msg = msg % args
print >>sys.stderr, prefix + ': ' + msg
|
<commit_before>#!/usr/bin/python
"""
common.py
"""
import sys
def log(msg, *args):
if args:
msg = msg % args
print >>sys.stderr, 'webpipe:', msg
<commit_msg>Use the program name as the prefix.<commit_after>"""
util.py
"""
import os
import sys
basename = os.path.basename(sys.argv[0])
prefix, _ = os.path.splitext(basename)
def log(msg, *args):
if args:
msg = msg % args
print >>sys.stderr, prefix + ': ' + msg
|
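The effect of the change, with a made-up program name: if sys.argv[0] is /usr/local/bin/wp-show, basename() keeps 'wp-show' and splitext() would also drop a '.py' suffix, so:

from common import util  # assumed import path for this module

util.log('rendering %s', 'foo.txt')
# previously printed:  webpipe: rendering foo.txt
# now prints:          wp-show: rendering foo.txt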
befa44a98797542fe2e50b82c0cfbed815cfc6d1
|
duralex/AddGitHubIssueVisitor.py
|
duralex/AddGitHubIssueVisitor.py
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.number
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
Set the githubIssue field to the actual GitHub URL of the issue instead of the issue number.
|
Set the githubIssue field to the actual GitHub URL of the issue instead of the issue number.
|
Python
|
mit
|
Legilibre/duralex
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.number
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
Set the githubIssue field to the actual GitHub URL of the issue instead of the issue number.
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
<commit_before># -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.number
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
<commit_msg>Set the githubIssue field to the actual GitHub URL of the issue instead of the issue number.<commit_after>
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.number
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
Set the githubIssue field to the actual GitHub URL of the issue instead of the issue number.# -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
<commit_before># -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.number
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
<commit_msg>Set the githubIssue field to the actual GitHub URL of the issue instead of the issue number.<commit_after># -*- coding: utf-8 -*-
from AbstractVisitor import AbstractVisitor
from duralex.alinea_parser import *
from github import Github
class AddGitHubIssueVisitor(AbstractVisitor):
def __init__(self, args):
self.github = Github(args.github_token)
self.repo = self.github.get_repo(args.github_repository)
self.issues = list(self.repo.get_issues())
self.current_issue = -1
super(AddGitHubIssueVisitor, self).__init__()
def visit_edit_node(self, node, post):
if post:
return
if 'commitMessage' not in node:
node['commitMessage'] = '(#' + str(self.current_issue) + ')'
else:
node['commitMessage'] = node['commitMessage'] + ' (#' + str(self.current_issue) + ')'
def visit_node(self, node):
if 'type' in node and node['type'] == 'article':
title = 'Article ' + str(node['order'])
body = node['content']
found = False
for issue in self.issues:
if issue.title == title:
found = True
node['githubIssue'] = issue.html_url
self.current_issue = issue.number
if issue.body != body:
issue.edit(title=title, body=body)
if not found:
issue = self.repo.create_issue(title=title, body=body)
self.current_issue = issue.number
super(AddGitHubIssueVisitor, self).visit_node(node)
|
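A minimal sketch of what the change buys, assuming a PyGithub Issue object; the token and repository name below are placeholders:

from github import Github

gh = Github("my_token")            # placeholder token
repo = gh.get_repo("owner/repo")   # placeholder repository
issue = repo.get_issue(1)

# The node previously stored the bare integer; it now stores a browsable link.
print(issue.number)    # e.g. 1
print(issue.html_url)  # e.g. https://github.com/owner/repo/issues/1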
5997e30e05d51996345e3154c5495683e3229410
|
app/taskqueue/celeryconfig.py
|
app/taskqueue/celeryconfig.py
|
# Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 10800,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
|
# Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 60*60*4,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
|
Increase ack on broker to 4 hours.
|
Increase ack on broker to 4 hours.
Change-Id: I4a1f0fc6d1c07014896ef6b34336396d4b30bfdd
|
Python
|
lgpl-2.1
|
kernelci/kernelci-backend,joyxu/kernelci-backend,joyxu/kernelci-backend,joyxu/kernelci-backend,kernelci/kernelci-backend
|
# Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 10800,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
Increase ack on broker to 4 hours.
Change-Id: I4a1f0fc6d1c07014896ef6b34336396d4b30bfdd
|
# Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 60*60*4,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
|
<commit_before># Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 10800,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
<commit_msg>Increase ack on broker to 4 hours.
Change-Id: I4a1f0fc6d1c07014896ef6b34336396d4b30bfdd<commit_after>
|
# Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 60*60*4,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
|
# Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 10800,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
Increase ack on broker to 4 hours.
Change-Id: I4a1f0fc6d1c07014896ef6b34336396d4b30bfdd# Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 60*60*4,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
|
<commit_before># Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 10800,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
<commit_msg>Increase ack on broker to 4 hours.
Change-Id: I4a1f0fc6d1c07014896ef6b34336396d4b30bfdd<commit_after># Copyright (C) 2014 Linaro Ltd.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Celery configuration values."""
BROKER_URL = "redis://localhost"
BROKER_POOL_LIMIT = 20
BROKER_TRANSPORT_OPTIONS = {
"visibility_timeout": 60*60*4,
"fanout_prefix": True,
"fanout_patterns": True
}
CELERY_ACCEPT_CONTENT = ["json"]
CELERY_RESULT_SERIALIZER = "json"
CELERY_TASK_SERIALIZER = "json"
CELERY_TIMEZONE = "UTC"
CELERY_ENABLE_UTC = True
CELERY_IGNORE_RESULT = True
CELERY_DISABLE_RATE_LIMITS = True
# Use a different DB than the redis default one.
CELERY_RESULT_BACKEND = "redis://localhost/1"
|
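A quick check of the arithmetic behind the change, as a plain-Python sketch (no Celery required):

old_timeout = 10800        # seconds: 10800 / 3600 = 3 hours
new_timeout = 60 * 60 * 4  # seconds: 14400 = 4 hours, spelled out for readability
assert new_timeout - old_timeout == 3600  # one extra hour before unacked tasks are redelivered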
af4c24dc7ac5b05ea509b5b6d95d22395aa2d409
|
dist/gae/standalone_main.py
|
dist/gae/standalone_main.py
|
import werkzeug.serving
import standalone_app
werkzeug.serving.run_simple('localhost', 8080, standalone_app.app, use_reloader=True)
|
import werkzeug.serving
import standalone_app
werkzeug.serving.run_simple('0.0.0.0', 8080, standalone_app.app, use_reloader=True)
|
Make the dev server advertise on your local ip as well as localhost (for easier testing on mobile)
|
Make the dev server advertise on your local ip as well as localhost (for easier testing on mobile)
|
Python
|
bsd-3-clause
|
abortz/saycbridge,eseidel/saycbridge,abortz/saycbridge,eseidel/saycbridge,abortz/saycbridge,abortz/saycbridge,eseidel/saycbridge,abortz/saycbridge
|
import werkzeug.serving
import standalone_app
werkzeug.serving.run_simple('localhost', 8080, standalone_app.app, use_reloader=True)
Make the dev server advertise on your local ip as well as localhost (for easier testing on mobile)
|
import werkzeug.serving
import standalone_app
werkzeug.serving.run_simple('0.0.0.0', 8080, standalone_app.app, use_reloader=True)
|
<commit_before>import werkzeug.serving
import standalone_app
werkzeug.serving.run_simple('localhost', 8080, standalone_app.app, use_reloader=True)
<commit_msg>Make the dev server advertise on your local ip as well as localhost (for easier testing on mobile)<commit_after>
|
import werkzeug.serving
import standalone_app
werkzeug.serving.run_simple('0.0.0.0', 8080, standalone_app.app, use_reloader=True)
|
import werkzeug.serving
import standalone_app
werkzeug.serving.run_simple('localhost', 8080, standalone_app.app, use_reloader=True)
Make the dev server advertise on your local ip as well as localhost (for easier testing on mobile)import werkzeug.serving
import standalone_app
werkzeug.serving.run_simple('0.0.0.0', 8080, standalone_app.app, use_reloader=True)
|
<commit_before>import werkzeug.serving
import standalone_app
werkzeug.serving.run_simple('localhost', 8080, standalone_app.app, use_reloader=True)
<commit_msg>Make the dev server advertise on your local ip as well as localhost (for easier testing on mobile)<commit_after>import werkzeug.serving
import standalone_app
werkzeug.serving.run_simple('0.0.0.0', 8080, standalone_app.app, use_reloader=True)
|
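Binding to 0.0.0.0 makes the server reachable from other devices on the LAN, but you still need the machine's address to type into the phone. A common sketch for discovering it (the UDP connect below only selects a route; no packets are sent, and 8.8.8.8 is just a routable placeholder):

import socket

s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))   # no traffic is sent; this only picks the outbound interface
print(s.getsockname()[0])    # e.g. 192.168.1.23 -> browse http://192.168.1.23:8080
s.close()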
0e376d987dd8d513354a840da6bee6d5a2752f89
|
django_countries/widgets.py
|
django_countries/widgets.py
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]*\.gif/, (this.value.toLowerCase() || '__') + '.gif');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
Make the regular expression not require a gif image.
|
Make the regular expression not require a gif image.
|
Python
|
mit
|
SmileyChris/django-countries,schinckel/django-countries,rahimnathwani/django-countries,jrfernandes/django-countries,velfimov/django-countries,fladi/django-countries,pimlie/django-countries
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]*\.gif/, (this.value.toLowerCase() || '__') + '.gif');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
Make the regular expression not require a gif image.
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
<commit_before>from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]*\.gif/, (this.value.toLowerCase() || '__') + '.gif');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
<commit_msg>Make the regular expression not require a gif image.<commit_after>
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]*\.gif/, (this.value.toLowerCase() || '__') + '.gif');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
Make the regular expression not require a gif image.from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
<commit_before>from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]*\.gif/, (this.value.toLowerCase() || '__') + '.gif');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
<commit_msg>Make the regular expression not require a gif image.<commit_after>from django.conf import settings
from django.forms import widgets
from django.utils.safestring import mark_safe
COUNTRY_CHANGE_HANDLER = """
this.nextSibling.src = this.nextSibling.src.replace(/[a-z_]{2}(\.[a-zA-Z]*)$/, (this.value.toLowerCase() || '__') + '$1');
"""
FLAG_IMAGE = """<img style="margin: 6px 4px; position: absolute;" src="%s" id="%%s-flag">"""
class CountrySelectWidget(widgets.Select):
def render(self, name, value, attrs=None):
attrs = attrs or {}
attrs['onchange'] = COUNTRY_CHANGE_HANDLER
data = super(CountrySelectWidget, self).render(name, value, attrs)
data += mark_safe((FLAG_IMAGE % settings.COUNTRIES_FLAG_URL) % (
settings.STATIC_URL,
unicode(value).lower() or '__',
attrs['id']
))
return data
|
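Why the old pattern broke for non-GIF flags, sketched with Python's re module on a sample URL (the in-browser JavaScript behaves the same way; Python uses \1 where JavaScript uses $1):

import re

url = "/static/flags/us.png"
# Old pattern only matched '<letters>.gif', so a .png URL was left unchanged:
print(re.sub(r"[a-z_]*\.gif", "fr.gif", url))            # /static/flags/us.png
# New pattern matches any two-letter code plus extension at the end of the URL:
print(re.sub(r"[a-z_]{2}(\.[a-zA-Z]*)$", r"fr\1", url))  # /static/flags/fr.png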
6664f77b8193343fe840b2542a84cc2bf585108a
|
check_version.py
|
check_version.py
|
import re
import sys
changes_file = open('CHANGES.txt', 'r')
changes_first_line = changes_file.readline()
changes_version = re.match(r'v(\d\.\d\.\d).*', changes_first_line).group(1)
setup_file = open('setup.py', 'r')
setup_content = setup_file.read()
setup_version = re.search(r'version=\'(\d\.\d\.\d)\'', setup_content).group(1)
if changes_version != setup_version:
print('Version numbers differ')
print('CHANGES.txt states: v' + changes_version)
print('setup.py states: v' + setup_version)
exit(1)
|
import re
import sys
changes_file = open('CHANGES.txt', 'r')
changes_first_line = changes_file.readline()
changes_version = re.match(r'v(\d\.\d\.\d).*',
changes_first_line).group(1)
setup_file = open('setup.py', 'r')
setup_content = setup_file.read()
setup_version = re.search(r'version=\'(\d\.\d\.\d)\'',
setup_content).group(1)
sphinx_file = open('sphinx/conf.py', 'r')
sphinx_content = sphinx_file.read()
sphinx_version = re.search(r'version = \'(\d\.\d)\'',
sphinx_content).group(1)
sphinx_release = re.search(r'release = \'(\d\.\d\.\d)\'',
sphinx_content).group(1)
if changes_version != setup_version or changes_version != sphinx_release:
print('Version numbers differ:')
print('CHANGES.txt states: v' + changes_version)
print('setup.py states: v' + setup_version)
print('sphinx/conf.py states: v' + sphinx_release)
exit(1)
if not sphinx_release.startswith(sphinx_version):
print('Sphinx version configuration differs:')
print('Sphinx version: ' + sphinx_version)
print('Sphinx release: ' + sphinx_release)
exit(1)
|
Update release version checking to include documentation
|
Update release version checking to include documentation
|
Python
|
unlicense
|
mmurdoch/Vengeance,mmurdoch/Vengeance
|
import re
import sys
changes_file = open('CHANGES.txt', 'r')
changes_first_line = changes_file.readline()
changes_version = re.match(r'v(\d\.\d\.\d).*', changes_first_line).group(1)
setup_file = open('setup.py', 'r')
setup_content = setup_file.read()
setup_version = re.search(r'version=\'(\d\.\d\.\d)\'', setup_content).group(1)
if changes_version != setup_version:
print('Version numbers differ')
print('CHANGES.txt states: v' + changes_version)
print('setup.py states: v' + setup_version)
exit(1)Update release version checking to include documentation
|
import re
import sys
changes_file = open('CHANGES.txt', 'r')
changes_first_line = changes_file.readline()
changes_version = re.match(r'v(\d\.\d\.\d).*',
changes_first_line).group(1)
setup_file = open('setup.py', 'r')
setup_content = setup_file.read()
setup_version = re.search(r'version=\'(\d\.\d\.\d)\'',
setup_content).group(1)
sphinx_file = open('sphinx/conf.py', 'r')
sphinx_content = sphinx_file.read()
sphinx_version = re.search(r'version = \'(\d\.\d)\'',
sphinx_content).group(1)
sphinx_release = re.search(r'release = \'(\d\.\d\.\d)\'',
sphinx_content).group(1)
if changes_version != setup_version or changes_version != sphinx_release:
print('Version numbers differ:')
print('CHANGES.txt states: v' + changes_version)
print('setup.py states: v' + setup_version)
print('sphinx/conf.py states: v' + sphinx_release)
exit(1)
if not sphinx_release.startswith(sphinx_version):
print('Sphinx version configuration differs:')
print('Sphinx version: ' + sphinx_version)
print('Sphinx release: ' + sphinx_release)
exit(1)
|
<commit_before>import re
import sys
changes_file = open('CHANGES.txt', 'r')
changes_first_line = changes_file.readline()
changes_version = re.match(r'v(\d\.\d\.\d).*', changes_first_line).group(1)
setup_file = open('setup.py', 'r')
setup_content = setup_file.read()
setup_version = re.search(r'version=\'(\d\.\d\.\d)\'', setup_content).group(1)
if changes_version != setup_version:
print('Version numbers differ')
print('CHANGES.txt states: v' + changes_version)
print('setup.py states: v' + setup_version)
exit(1)<commit_msg>Update release version checking to include documentation<commit_after>
|
import re
import sys
changes_file = open('CHANGES.txt', 'r')
changes_first_line = changes_file.readline()
changes_version = re.match(r'v(\d\.\d\.\d).*',
changes_first_line).group(1)
setup_file = open('setup.py', 'r')
setup_content = setup_file.read()
setup_version = re.search(r'version=\'(\d\.\d\.\d)\'',
setup_content).group(1)
sphinx_file = open('sphinx/conf.py', 'r')
sphinx_content = sphinx_file.read()
sphinx_version = re.search(r'version = \'(\d\.\d)\'',
sphinx_content).group(1)
sphinx_release = re.search(r'release = \'(\d\.\d\.\d)\'',
sphinx_content).group(1)
if changes_version != setup_version or changes_version != sphinx_release:
print('Version numbers differ:')
print('CHANGES.txt states: v' + changes_version)
print('setup.py states: v' + setup_version)
print('sphinx/conf.py states: v' + sphinx_release)
exit(1)
if not sphinx_release.startswith(sphinx_version):
print('Sphinx version configuration differs:')
print('Sphinx version: ' + sphinx_version)
print('Sphinx release: ' + sphinx_release)
exit(1)
|
import re
import sys
changes_file = open('CHANGES.txt', 'r')
changes_first_line = changes_file.readline()
changes_version = re.match(r'v(\d\.\d\.\d).*', changes_first_line).group(1)
setup_file = open('setup.py', 'r')
setup_content = setup_file.read()
setup_version = re.search(r'version=\'(\d\.\d\.\d)\'', setup_content).group(1)
if changes_version != setup_version:
print('Version numbers differ')
print('CHANGES.txt states: v' + changes_version)
print('setup.py states: v' + setup_version)
exit(1)Update release version checking to include documentationimport re
import sys
changes_file = open('CHANGES.txt', 'r')
changes_first_line = changes_file.readline()
changes_version = re.match(r'v(\d\.\d\.\d).*',
changes_first_line).group(1)
setup_file = open('setup.py', 'r')
setup_content = setup_file.read()
setup_version = re.search(r'version=\'(\d\.\d\.\d)\'',
setup_content).group(1)
sphinx_file = open('sphinx/conf.py', 'r')
sphinx_content = sphinx_file.read()
sphinx_version = re.search(r'version = \'(\d\.\d)\'',
sphinx_content).group(1)
sphinx_release = re.search(r'release = \'(\d\.\d\.\d)\'',
sphinx_content).group(1)
if changes_version != setup_version or changes_version != sphinx_release:
print('Version numbers differ:')
print('CHANGES.txt states: v' + changes_version)
print('setup.py states: v' + setup_version)
print('sphinx/conf.py states: v' + sphinx_release)
exit(1)
if not sphinx_release.startswith(sphinx_version):
print('Sphinx version configuration differs:')
print('Sphinx version: ' + sphinx_version)
print('Sphinx release: ' + sphinx_release)
exit(1)
|
<commit_before>import re
import sys
changes_file = open('CHANGES.txt', 'r')
changes_first_line = changes_file.readline()
changes_version = re.match(r'v(\d\.\d\.\d).*', changes_first_line).group(1)
setup_file = open('setup.py', 'r')
setup_content = setup_file.read()
setup_version = re.search(r'version=\'(\d\.\d\.\d)\'', setup_content).group(1)
if changes_version != setup_version:
print('Version numbers differ')
print('CHANGES.txt states: v' + changes_version)
print('setup.py states: v' + setup_version)
exit(1)<commit_msg>Update release version checking to include documentation<commit_after>import re
import sys
changes_file = open('CHANGES.txt', 'r')
changes_first_line = changes_file.readline()
changes_version = re.match(r'v(\d\.\d\.\d).*',
changes_first_line).group(1)
setup_file = open('setup.py', 'r')
setup_content = setup_file.read()
setup_version = re.search(r'version=\'(\d\.\d\.\d)\'',
setup_content).group(1)
sphinx_file = open('sphinx/conf.py', 'r')
sphinx_content = sphinx_file.read()
sphinx_version = re.search(r'version = \'(\d\.\d)\'',
sphinx_content).group(1)
sphinx_release = re.search(r'release = \'(\d\.\d\.\d)\'',
sphinx_content).group(1)
if changes_version != setup_version or changes_version != sphinx_release:
print('Version numbers differ:')
print('CHANGES.txt states: v' + changes_version)
print('setup.py states: v' + setup_version)
print('sphinx/conf.py states: v' + sphinx_release)
exit(1)
if not sphinx_release.startswith(sphinx_version):
print('Sphinx version configuration differs:')
print('Sphinx version: ' + sphinx_version)
print('Sphinx release: ' + sphinx_release)
exit(1)
|
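The Sphinx convention the new check enforces: version holds the short X.Y form and release the full X.Y.Z, so the former must be a prefix of the latter. A tiny illustration with made-up numbers:

sphinx_version = "1.2"    # short form, as configured in sphinx/conf.py
sphinx_release = "1.2.3"  # full form
assert sphinx_release.startswith(sphinx_version)  # holds; a release of "1.3.0" would fail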
b0bfbe3bcab7f55dd2ed742d945d0f950bca0a2b
|
ckeditor/urls.py
|
ckeditor/urls.py
|
from django.conf.urls.defaults import patterns, url
from django.contrib import admin
from ckeditor import views
urlpatterns = patterns(
'',
url(r'^upload/', admin.site.admin_view(views.upload), name='ckeditor_upload'),
url(r'^browse/', admin.site.admin_view(views.browse), name='ckeditor_browse'),
)
|
try:
from django.conf.urls import patterns, url
except ImportError: # django < 1.4
from django.conf.urls.defaults import patterns, url
from django.contrib import admin
from ckeditor import views
urlpatterns = patterns(
'',
url(r'^upload/', admin.site.admin_view(views.upload), name='ckeditor_upload'),
url(r'^browse/', admin.site.admin_view(views.browse), name='ckeditor_browse'),
)
|
Fix the file url for Django 1.6
|
Fix the file url for Django 1.6
|
Python
|
bsd-3-clause
|
gian88/django-ckeditor-amazon-s3,gian88/django-ckeditor-amazon-s3,gian88/django-ckeditor-amazon-s3,gian88/django-ckeditor-amazon-s3,gian88/django-ckeditor-amazon-s3,gian88/django-ckeditor-amazon-s3
|
from django.conf.urls.defaults import patterns, url
from django.contrib import admin
from ckeditor import views
urlpatterns = patterns(
'',
url(r'^upload/', admin.site.admin_view(views.upload), name='ckeditor_upload'),
url(r'^browse/', admin.site.admin_view(views.browse), name='ckeditor_browse'),
)
Fix the file url for Django 1.6
|
try:
from django.conf.urls import patterns, url
except ImportError: # django < 1.4
from django.conf.urls.defaults import patterns, url
from django.contrib import admin
from ckeditor import views
urlpatterns = patterns(
'',
url(r'^upload/', admin.site.admin_view(views.upload), name='ckeditor_upload'),
url(r'^browse/', admin.site.admin_view(views.browse), name='ckeditor_browse'),
)
|
<commit_before>from django.conf.urls.defaults import patterns, url
from django.contrib import admin
from ckeditor import views
urlpatterns = patterns(
'',
url(r'^upload/', admin.site.admin_view(views.upload), name='ckeditor_upload'),
url(r'^browse/', admin.site.admin_view(views.browse), name='ckeditor_browse'),
)
<commit_msg>Fix the file url for Django 1.6<commit_after>
|
try:
from django.conf.urls import patterns, url
except ImportError: # django < 1.4
from django.conf.urls.defaults import patterns, url
from django.contrib import admin
from ckeditor import views
urlpatterns = patterns(
'',
url(r'^upload/', admin.site.admin_view(views.upload), name='ckeditor_upload'),
url(r'^browse/', admin.site.admin_view(views.browse), name='ckeditor_browse'),
)
|
from django.conf.urls.defaults import patterns, url
from django.contrib import admin
from ckeditor import views
urlpatterns = patterns(
'',
url(r'^upload/', admin.site.admin_view(views.upload), name='ckeditor_upload'),
url(r'^browse/', admin.site.admin_view(views.browse), name='ckeditor_browse'),
)
Fix the file url for Django 1.6try:
from django.conf.urls import patterns, url
except ImportError: # django < 1.4
from django.conf.urls.defaults import patterns, url
from django.contrib import admin
from ckeditor import views
urlpatterns = patterns(
'',
url(r'^upload/', admin.site.admin_view(views.upload), name='ckeditor_upload'),
url(r'^browse/', admin.site.admin_view(views.browse), name='ckeditor_browse'),
)
|
<commit_before>from django.conf.urls.defaults import patterns, url
from django.contrib import admin
from ckeditor import views
urlpatterns = patterns(
'',
url(r'^upload/', admin.site.admin_view(views.upload), name='ckeditor_upload'),
url(r'^browse/', admin.site.admin_view(views.browse), name='ckeditor_browse'),
)
<commit_msg>Fix the file url for Django 1.6<commit_after>try:
from django.conf.urls import patterns, url
except ImportError: # django < 1.4
from django.conf.urls.defaults import patterns, url
from django.contrib import admin
from ckeditor import views
urlpatterns = patterns(
'',
url(r'^upload/', admin.site.admin_view(views.upload), name='ckeditor_upload'),
url(r'^browse/', admin.site.admin_view(views.browse), name='ckeditor_browse'),
)
|
2758c1086e06a77f9676d678a3d41a53a352ec01
|
testfixtures/seating.py
|
testfixtures/seating.py
|
# -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
|
# -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.category import Category
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_category(party_id, title):
    return Category(party_id, title)
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
|
Add function to create a seat category test fixture
|
Add function to create a seat category test fixture
|
Python
|
bsd-3-clause
|
m-ober/byceps,homeworkprod/byceps,homeworkprod/byceps,homeworkprod/byceps,m-ober/byceps,m-ober/byceps
|
# -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
Add function to create a seat category test fixture
|
# -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.category import Category
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_category(party_id, title):
    return Category(party_id, title)
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
|
<commit_before># -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
<commit_msg>Add function to create a seat category test fixture<commit_after>
|
# -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.category import Category
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_category(party_id, title):
    return Category(party_id, title)
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
|
# -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
Add function to create a seat category test fixture# -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.category import Category
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_category(party_id, title):
    return Category(party_id, title)
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
|
<commit_before># -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
<commit_msg>Add function to create a seat category test fixture<commit_after># -*- coding: utf-8 -*-
"""
testfixtures.seating
~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2016 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from byceps.services.seating.models.category import Category
from byceps.services.seating.models.seat_group import SeatGroup
def create_seat_category(party_id, title):
    return Category(party_id, title)
def create_seat_group(party_id, seat_category, title, *, seat_quantity=4):
return SeatGroup(party_id, seat_category, seat_quantity, title)
|
b3c2a47b049f97de0367f012fb35d247f2f1510b
|
oscar/apps/offer/managers.py
|
oscar/apps/offer/managers.py
|
from django.utils.timezone import now
from django.db import models
class ActiveOfferManager(models.Manager):
"""
For searching/creating offers within their date range
"""
def get_query_set(self):
cutoff = now()
return super(ActiveOfferManager, self).get_query_set().filter(
models.Q(end_date__gte=today) | models.Q(end_date=None),
start_date__lte=today)
|
from django.utils.timezone import now
from django.db import models
class ActiveOfferManager(models.Manager):
"""
For searching/creating offers within their date range
"""
def get_query_set(self):
cutoff = now()
return super(ActiveOfferManager, self).get_query_set().filter(
models.Q(end_datetime__gte=cutoff) | models.Q(end_datetime=None),
start_datetime__lte=cutoff)
|
Fix bug in offer manager with new datetimes
|
Fix bug in offer manager with new datetimes
|
Python
|
bsd-3-clause
|
rocopartners/django-oscar,WadeYuChen/django-oscar,sonofatailor/django-oscar,josesanch/django-oscar,mexeniz/django-oscar,faratro/django-oscar,michaelkuty/django-oscar,anentropic/django-oscar,jinnykoo/wuyisj,MatthewWilkes/django-oscar,sasha0/django-oscar,jlmadurga/django-oscar,Idematica/django-oscar,okfish/django-oscar,bschuon/django-oscar,django-oscar/django-oscar,taedori81/django-oscar,bnprk/django-oscar,ka7eh/django-oscar,pasqualguerrero/django-oscar,Bogh/django-oscar,DrOctogon/unwash_ecom,MatthewWilkes/django-oscar,QLGu/django-oscar,jinnykoo/christmas,saadatqadri/django-oscar,kapari/django-oscar,taedori81/django-oscar,ka7eh/django-oscar,spartonia/django-oscar,ka7eh/django-oscar,nickpack/django-oscar,john-parton/django-oscar,monikasulik/django-oscar,rocopartners/django-oscar,jinnykoo/wuyisj,kapt/django-oscar,pasqualguerrero/django-oscar,rocopartners/django-oscar,amirrpp/django-oscar,DrOctogon/unwash_ecom,john-parton/django-oscar,WillisXChen/django-oscar,dongguangming/django-oscar,pdonadeo/django-oscar,nfletton/django-oscar,manevant/django-oscar,nickpack/django-oscar,itbabu/django-oscar,nickpack/django-oscar,machtfit/django-oscar,nfletton/django-oscar,thechampanurag/django-oscar,adamend/django-oscar,thechampanurag/django-oscar,Jannes123/django-oscar,saadatqadri/django-oscar,WadeYuChen/django-oscar,anentropic/django-oscar,makielab/django-oscar,vovanbo/django-oscar,ademuk/django-oscar,vovanbo/django-oscar,vovanbo/django-oscar,sasha0/django-oscar,QLGu/django-oscar,makielab/django-oscar,solarissmoke/django-oscar,amirrpp/django-oscar,django-oscar/django-oscar,Jannes123/django-oscar,WadeYuChen/django-oscar,josesanch/django-oscar,mexeniz/django-oscar,josesanch/django-oscar,ademuk/django-oscar,manevant/django-oscar,itbabu/django-oscar,Idematica/django-oscar,mexeniz/django-oscar,WillisXChen/django-oscar,manevant/django-oscar,rocopartners/django-oscar,jmt4/django-oscar,kapt/django-oscar,pdonadeo/django-oscar,spartonia/django-oscar,Bogh/django-oscar,jmt4/django-oscar,taedori81/django-oscar,marcoantoniooliveira/labweb,ahmetdaglarbas/e-commerce,ahmetdaglarbas/e-commerce,MatthewWilkes/django-oscar,spartonia/django-oscar,adamend/django-oscar,makielab/django-oscar,solarissmoke/django-oscar,kapt/django-oscar,john-parton/django-oscar,bschuon/django-oscar,Bogh/django-oscar,django-oscar/django-oscar,jlmadurga/django-oscar,sasha0/django-oscar,pdonadeo/django-oscar,QLGu/django-oscar,thechampanurag/django-oscar,bnprk/django-oscar,jlmadurga/django-oscar,WillisXChen/django-oscar,bschuon/django-oscar,marcoantoniooliveira/labweb,eddiep1101/django-oscar,machtfit/django-oscar,lijoantony/django-oscar,ka7eh/django-oscar,jinnykoo/wuyisj.com,okfish/django-oscar,manevant/django-oscar,WillisXChen/django-oscar,marcoantoniooliveira/labweb,nfletton/django-oscar,elliotthill/django-oscar,spartonia/django-oscar,john-parton/django-oscar,WillisXChen/django-oscar,taedori81/django-oscar,binarydud/django-oscar,binarydud/django-oscar,jinnykoo/christmas,sonofatailor/django-oscar,nfletton/django-oscar,elliotthill/django-oscar,elliotthill/django-oscar,solarissmoke/django-oscar,Jannes123/django-oscar,bnprk/django-oscar,adamend/django-oscar,michaelkuty/django-oscar,okfish/django-oscar,saadatqadri/django-oscar,ahmetdaglarbas/e-commerce,itbabu/django-oscar,jinnykoo/wuyisj,jinnykoo/wuyisj.com,binarydud/django-oscar,marcoantoniooliveira/labweb,jinnykoo/wuyisj,pasqualguerrero/django-oscar,binarydud/django-oscar,Jannes123/django-oscar,jinnykoo/wuyisj.com,bnprk/django-oscar,eddiep1101/django-oscar,kapari/django-oscar,adamend/django-oscar,eddiep1101
/django-oscar,dongguangming/django-oscar,Idematica/django-oscar,sasha0/django-oscar,pdonadeo/django-oscar,eddiep1101/django-oscar,thechampanurag/django-oscar,nickpack/django-oscar,lijoantony/django-oscar,monikasulik/django-oscar,bschuon/django-oscar,machtfit/django-oscar,QLGu/django-oscar,amirrpp/django-oscar,jmt4/django-oscar,anentropic/django-oscar,monikasulik/django-oscar,faratro/django-oscar,jinnykoo/wuyisj.com,anentropic/django-oscar,DrOctogon/unwash_ecom,MatthewWilkes/django-oscar,saadatqadri/django-oscar,faratro/django-oscar,jmt4/django-oscar,mexeniz/django-oscar,michaelkuty/django-oscar,michaelkuty/django-oscar,itbabu/django-oscar,jinnykoo/christmas,kapari/django-oscar,monikasulik/django-oscar,ademuk/django-oscar,makielab/django-oscar,dongguangming/django-oscar,WadeYuChen/django-oscar,solarissmoke/django-oscar,faratro/django-oscar,sonofatailor/django-oscar,ahmetdaglarbas/e-commerce,vovanbo/django-oscar,kapari/django-oscar,dongguangming/django-oscar,sonofatailor/django-oscar,django-oscar/django-oscar,okfish/django-oscar,WillisXChen/django-oscar,amirrpp/django-oscar,lijoantony/django-oscar,pasqualguerrero/django-oscar,ademuk/django-oscar,Bogh/django-oscar,jlmadurga/django-oscar,lijoantony/django-oscar
|
from django.utils.timezone import now
from django.db import models
class ActiveOfferManager(models.Manager):
"""
For searching/creating offers within their date range
"""
def get_query_set(self):
cutoff = now()
return super(ActiveOfferManager, self).get_query_set().filter(
models.Q(end_date__gte=today) | models.Q(end_date=None),
start_date__lte=today)
Fix bug in offer manager with new datetimes
|
from django.utils.timezone import now
from django.db import models
class ActiveOfferManager(models.Manager):
"""
For searching/creating offers within their date range
"""
def get_query_set(self):
cutoff = now()
return super(ActiveOfferManager, self).get_query_set().filter(
models.Q(end_datetime__gte=cutoff) | models.Q(end_datetime=None),
start_datetime__lte=cutoff)
|
<commit_before>from django.utils.timezone import now
from django.db import models
class ActiveOfferManager(models.Manager):
"""
For searching/creating offers within their date range
"""
def get_query_set(self):
cutoff = now()
return super(ActiveOfferManager, self).get_query_set().filter(
models.Q(end_date__gte=today) | models.Q(end_date=None),
start_date__lte=today)
<commit_msg>Fix bug in offer manager with new datetimes<commit_after>
|
from django.utils.timezone import now
from django.db import models
class ActiveOfferManager(models.Manager):
"""
For searching/creating offers within their date range
"""
def get_query_set(self):
cutoff = now()
return super(ActiveOfferManager, self).get_query_set().filter(
models.Q(end_datetime__gte=cutoff) | models.Q(end_datetime=None),
start_datetime__lte=cutoff)
|
from django.utils.timezone import now
from django.db import models
class ActiveOfferManager(models.Manager):
"""
For searching/creating offers within their date range
"""
def get_query_set(self):
cutoff = now()
return super(ActiveOfferManager, self).get_query_set().filter(
models.Q(end_date__gte=today) | models.Q(end_date=None),
start_date__lte=today)
Fix bug in offer manager with new datetimesfrom django.utils.timezone import now
from django.db import models
class ActiveOfferManager(models.Manager):
"""
For searching/creating offers within their date range
"""
def get_query_set(self):
cutoff = now()
return super(ActiveOfferManager, self).get_query_set().filter(
models.Q(end_datetime__gte=cutoff) | models.Q(end_datetime=None),
start_datetime__lte=cutoff)
|
<commit_before>from django.utils.timezone import now
from django.db import models
class ActiveOfferManager(models.Manager):
"""
For searching/creating offers within their date range
"""
def get_query_set(self):
cutoff = now()
return super(ActiveOfferManager, self).get_query_set().filter(
models.Q(end_date__gte=today) | models.Q(end_date=None),
start_date__lte=today)
<commit_msg>Fix bug in offer manager with new datetimes<commit_after>from django.utils.timezone import now
from django.db import models
class ActiveOfferManager(models.Manager):
"""
For searching/creating offers within their date range
"""
def get_query_set(self):
cutoff = now()
return super(ActiveOfferManager, self).get_query_set().filter(
models.Q(end_datetime__gte=cutoff) | models.Q(end_datetime=None),
start_datetime__lte=cutoff)
|
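In plain Python terms, the corrected filter keeps offers whose window contains the current moment; roughly this predicate (a sketch of the semantics, not ORM code):

from datetime import datetime, timedelta

def is_active(start_datetime, end_datetime, cutoff=None):
    # Mirrors Q(end_datetime__gte=cutoff) | Q(end_datetime=None), start_datetime__lte=cutoff
    cutoff = cutoff or datetime.utcnow()
    return start_datetime <= cutoff and (end_datetime is None or end_datetime >= cutoff)

now = datetime.utcnow()
print(is_active(now - timedelta(days=1), None))                     # True: open-ended offer
print(is_active(now - timedelta(days=2), now - timedelta(days=1)))  # False: already ended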
fded6c2f393efb4f8e10afaf450664aa63d87a27
|
imbox/query.py
|
imbox/query.py
|
import datetime
# TODO - Validate query arguments
IMAP_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
def format_date(date):
return "%s-%s-%s" % (date.day, IMAP_MONTHS[date.month - 1], date.year)
def build_search_query(**kwargs):
# Parse keyword arguments
unread = kwargs.get('unread', False)
sent_from = kwargs.get('sent_from', False)
sent_to = kwargs.get('sent_to', False)
date__gt = kwargs.get('date__gt', False)
if type(date__gt) is datetime.date:
date__gt = format_date(date__gt)
date__lt = kwargs.get('date__lt', False)
if type(date__lt) is datetime.date:
date__lt = format_date(date__lt)
query = []
if unread:
query.append("(UNSEEN)")
if sent_from:
query.append('(FROM "%s")' % sent_from)
if sent_to:
query.append('(TO "%s")' % sent_to)
if date__gt:
query.append('(SINCE "%s")' % date__gt)
if date__lt:
query.append('(BEFORE "%s")' % date__lt)
if query:
return " ".join(query)
return "(ALL)"
|
import datetime
# TODO - Validate query arguments
IMAP_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
def format_date(date):
return "%s-%s-%s" % (date.day, IMAP_MONTHS[date.month - 1], date.year)
def build_search_query(**kwargs):
# Parse keyword arguments
unread = kwargs.get('unread', False)
sent_from = kwargs.get('sent_from', False)
sent_to = kwargs.get('sent_to', False)
date__gt = kwargs.get('date__gt', False)
if type(date__gt) is datetime.date:
date__gt = format_date(date__gt)
date__lt = kwargs.get('date__lt', False)
if type(date__lt) is datetime.date:
date__lt = format_date(date__lt)
subject = kwargs.get('subject')
query = []
if unread:
query.append("(UNSEEN)")
if sent_from:
query.append('(FROM "%s")' % sent_from)
if sent_to:
query.append('(TO "%s")' % sent_to)
if date__gt:
query.append('(SINCE "%s")' % date__gt)
if date__lt:
query.append('(BEFORE "%s")' % date__lt)
if subject is not None:
query.append('(SUBJECT "%s")' % subject)
if query:
return " ".join(query)
return "(ALL)"
|
Add support for searching subject
|
Add support for searching subject
|
Python
|
mit
|
martinrusev/imbox,eliangcs/imbox,doismellburning/imbox,amuzhou/imbox,johnbaldwin/imbox
|
import datetime
# TODO - Validate query arguments
IMAP_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
def format_date(date):
return "%s-%s-%s" % (date.day, IMAP_MONTHS[date.month - 1], date.year)
def build_search_query(**kwargs):
# Parse keyword arguments
unread = kwargs.get('unread', False)
sent_from = kwargs.get('sent_from', False)
sent_to = kwargs.get('sent_to', False)
date__gt = kwargs.get('date__gt', False)
if type(date__gt) is datetime.date:
date__gt = format_date(date__gt)
date__lt = kwargs.get('date__lt', False)
if type(date__lt) is datetime.date:
date__lt = format_date(date__lt)
query = []
if unread:
query.append("(UNSEEN)")
if sent_from:
query.append('(FROM "%s")' % sent_from)
if sent_to:
query.append('(TO "%s")' % sent_to)
if date__gt:
query.append('(SINCE "%s")' % date__gt)
if date__lt:
query.append('(BEFORE "%s")' % date__lt)
if query:
return " ".join(query)
return "(ALL)"
Add support for searching subject
|
import datetime
# TODO - Validate query arguments
IMAP_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
def format_date(date):
return "%s-%s-%s" % (date.day, IMAP_MONTHS[date.month - 1], date.year)
def build_search_query(**kwargs):
# Parse keyword arguments
unread = kwargs.get('unread', False)
sent_from = kwargs.get('sent_from', False)
sent_to = kwargs.get('sent_to', False)
date__gt = kwargs.get('date__gt', False)
if type(date__gt) is datetime.date:
date__gt = format_date(date__gt)
date__lt = kwargs.get('date__lt', False)
if type(date__lt) is datetime.date:
date__lt = format_date(date__lt)
subject = kwargs.get('subject')
query = []
if unread:
query.append("(UNSEEN)")
if sent_from:
query.append('(FROM "%s")' % sent_from)
if sent_to:
query.append('(TO "%s")' % sent_to)
if date__gt:
query.append('(SINCE "%s")' % date__gt)
if date__lt:
query.append('(BEFORE "%s")' % date__lt)
if subject is not None:
query.append('(SUBJECT "%s")' % subject)
if query:
return " ".join(query)
return "(ALL)"
|
<commit_before>import datetime
# TODO - Validate query arguments
IMAP_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
def format_date(date):
return "%s-%s-%s" % (date.day, IMAP_MONTHS[date.month - 1], date.year)
def build_search_query(**kwargs):
# Parse keyword arguments
unread = kwargs.get('unread', False)
sent_from = kwargs.get('sent_from', False)
sent_to = kwargs.get('sent_to', False)
date__gt = kwargs.get('date__gt', False)
if type(date__gt) is datetime.date:
date__gt = format_date(date__gt)
date__lt = kwargs.get('date__lt', False)
if type(date__lt) is datetime.date:
date__lt = format_date(date__lt)
query = []
if unread:
query.append("(UNSEEN)")
if sent_from:
query.append('(FROM "%s")' % sent_from)
if sent_to:
query.append('(TO "%s")' % sent_to)
if date__gt:
query.append('(SINCE "%s")' % date__gt)
if date__lt:
query.append('(BEFORE "%s")' % date__lt)
if query:
return " ".join(query)
return "(ALL)"
<commit_msg>Add support for searching subject<commit_after>
|
import datetime
# TODO - Validate query arguments
IMAP_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
def format_date(date):
return "%s-%s-%s" % (date.day, IMAP_MONTHS[date.month - 1], date.year)
def build_search_query(**kwargs):
# Parse keyword arguments
unread = kwargs.get('unread', False)
sent_from = kwargs.get('sent_from', False)
sent_to = kwargs.get('sent_to', False)
date__gt = kwargs.get('date__gt', False)
if type(date__gt) is datetime.date:
date__gt = format_date(date__gt)
date__lt = kwargs.get('date__lt', False)
if type(date__lt) is datetime.date:
date__lt = format_date(date__lt)
subject = kwargs.get('subject')
query = []
if unread:
query.append("(UNSEEN)")
if sent_from:
query.append('(FROM "%s")' % sent_from)
if sent_to:
query.append('(TO "%s")' % sent_to)
if date__gt:
query.append('(SINCE "%s")' % date__gt)
if date__lt:
query.append('(BEFORE "%s")' % date__lt)
if subject is not None:
query.append('(SUBJECT "%s")' % subject)
if query:
return " ".join(query)
return "(ALL)"
|
import datetime
# TODO - Validate query arguments
IMAP_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
def format_date(date):
return "%s-%s-%s" % (date.day, IMAP_MONTHS[date.month - 1], date.year)
def build_search_query(**kwargs):
# Parse keyword arguments
unread = kwargs.get('unread', False)
sent_from = kwargs.get('sent_from', False)
sent_to = kwargs.get('sent_to', False)
date__gt = kwargs.get('date__gt', False)
if type(date__gt) is datetime.date:
date__gt = format_date(date__gt)
date__lt = kwargs.get('date__lt', False)
if type(date__lt) is datetime.date:
date__lt = format_date(date__lt)
query = []
if unread:
query.append("(UNSEEN)")
if sent_from:
query.append('(FROM "%s")' % sent_from)
if sent_to:
query.append('(TO "%s")' % sent_to)
if date__gt:
query.append('(SINCE "%s")' % date__gt)
if date__lt:
query.append('(BEFORE "%s")' % date__lt)
if query:
return " ".join(query)
return "(ALL)"
Add support for searching subject
import datetime
# TODO - Validate query arguments
IMAP_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
def format_date(date):
return "%s-%s-%s" % (date.day, IMAP_MONTHS[date.month - 1], date.year)
def build_search_query(**kwargs):
# Parse keyword arguments
unread = kwargs.get('unread', False)
sent_from = kwargs.get('sent_from', False)
sent_to = kwargs.get('sent_to', False)
date__gt = kwargs.get('date__gt', False)
if type(date__gt) is datetime.date:
date__gt = format_date(date__gt)
date__lt = kwargs.get('date__lt', False)
if type(date__lt) is datetime.date:
date__lt = format_date(date__lt)
subject = kwargs.get('subject')
query = []
if unread:
query.append("(UNSEEN)")
if sent_from:
query.append('(FROM "%s")' % sent_from)
if sent_to:
query.append('(TO "%s")' % sent_to)
if date__gt:
query.append('(SINCE "%s")' % date__gt)
if date__lt:
query.append('(BEFORE "%s")' % date__lt)
if subject is not None:
query.append('(SUBJECT "%s")' % subject)
if query:
return " ".join(query)
return "(ALL)"
|
<commit_before>import datetime
# TODO - Validate query arguments
IMAP_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
def format_date(date):
return "%s-%s-%s" % (date.day, IMAP_MONTHS[date.month - 1], date.year)
def build_search_query(**kwargs):
# Parse keyword arguments
unread = kwargs.get('unread', False)
sent_from = kwargs.get('sent_from', False)
sent_to = kwargs.get('sent_to', False)
date__gt = kwargs.get('date__gt', False)
if type(date__gt) is datetime.date:
date__gt = format_date(date__gt)
date__lt = kwargs.get('date__lt', False)
if type(date__lt) is datetime.date:
date__lt = format_date(date__lt)
query = []
if unread:
query.append("(UNSEEN)")
if sent_from:
query.append('(FROM "%s")' % sent_from)
if sent_to:
query.append('(TO "%s")' % sent_to)
if date__gt:
query.append('(SINCE "%s")' % date__gt)
if date__lt:
query.append('(BEFORE "%s")' % date__lt)
if query:
return " ".join(query)
return "(ALL)"
<commit_msg>Add support for searching subject<commit_after>import datetime
# TODO - Validate query arguments
IMAP_MONTHS = ["Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"]
def format_date(date):
return "%s-%s-%s" % (date.day, IMAP_MONTHS[date.month - 1], date.year)
def build_search_query(**kwargs):
# Parse keyword arguments
unread = kwargs.get('unread', False)
sent_from = kwargs.get('sent_from', False)
sent_to = kwargs.get('sent_to', False)
date__gt = kwargs.get('date__gt', False)
if type(date__gt) is datetime.date:
date__gt = format_date(date__gt)
date__lt = kwargs.get('date__lt', False)
if type(date__lt) is datetime.date:
date__lt = format_date(date__lt)
subject = kwargs.get('subject')
query = []
if unread:
query.append("(UNSEEN)")
if sent_from:
query.append('(FROM "%s")' % sent_from)
if sent_to:
query.append('(TO "%s")' % sent_to)
if date__gt:
query.append('(SINCE "%s")' % date__gt)
if date__lt:
query.append('(BEFORE "%s")' % date__lt)
if subject is not None:
query.append('(SUBJECT "%s")' % subject)
if query:
return " ".join(query)
return "(ALL)"
|
74816d4af07808009b89163060f97014b1a20ceb
|
tests/test_arguments.py
|
tests/test_arguments.py
|
import unittest
from mock import MagicMock, Mock
from nose.tools import *
from gargoyle.inputs.arguments import *
class BaseArgument(object):
def setUp(self):
self.argument = self.klass(self.valid_comparison_value)
@property
def interface_functions(self):
return ['__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__cmp__', '__hash__']
@property
def interface_methods(self):
return [getattr(self.argument, f) for f in self.interface_functions]
def test_implements_comparison_methods(self):
map(ok_, self.interface_methods)
class DelegateToValue(object):
def test_delegates_all_interface_function_to_the_value_passed_in(self):
value_passed_in = MagicMock()
value_passed_in.__cmp__ = Mock()
argument = self.klass(value_passed_in)
for function in self.interface_functions:
values_function = getattr(value_passed_in, function)
arguments_function = getattr(argument, function)
arguments_function(self.valid_comparison_value)
values_function.assert_called_once_with(self.valid_comparison_value)
class ValueTest(BaseArgument, DelegateToValue, unittest.TestCase):
klass = Value
@property
def valid_comparison_value(self):
return 'marv'
|
import unittest
from mock import MagicMock, Mock
from nose.tools import *
from gargoyle.inputs.arguments import *
class BaseArgument(object):
def setUp(self):
self.argument = self.klass(self.valid_comparison_value)
@property
def interface_functions(self):
return ['__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__cmp__', '__hash__', '__nonzero__']
@property
def interface_methods(self):
return [getattr(self.argument, f) for f in self.interface_functions]
def test_implements_comparison_methods(self):
map(ok_, self.interface_methods)
class DelegateToValue(object):
def test_delegates_all_interface_function_to_the_value_passed_in(self):
value_passed_in = MagicMock()
value_passed_in.__cmp__ = Mock()
argument = self.klass(value_passed_in)
for function in self.interface_functions:
values_function = getattr(value_passed_in, function)
arguments_function = getattr(argument, function)
arguments_function(self.valid_comparison_value)
values_function.assert_called_once_with(self.valid_comparison_value)
class ValueTest(BaseArgument, DelegateToValue, unittest.TestCase):
klass = Value
@property
def valid_comparison_value(self):
return 'marv'
|
Enforce that arguments must implement non-zero methods.
|
Enforce that arguments must implement non-zero methods.
|
Python
|
apache-2.0
|
disqus/gutter,disqus/gutter,kalail/gutter,kalail/gutter,kalail/gutter
|
import unittest
from mock import MagicMock, Mock
from nose.tools import *
from gargoyle.inputs.arguments import *
class BaseArgument(object):
def setUp(self):
self.argument = self.klass(self.valid_comparison_value)
@property
def interface_functions(self):
return ['__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__cmp__', '__hash__']
@property
def interface_methods(self):
return [getattr(self.argument, f) for f in self.interface_functions]
def test_implements_comparison_methods(self):
map(ok_, self.interface_methods)
class DelegateToValue(object):
def test_delegates_all_interface_function_to_the_value_passed_in(self):
value_passed_in = MagicMock()
value_passed_in.__cmp__ = Mock()
argument = self.klass(value_passed_in)
for function in self.interface_functions:
values_function = getattr(value_passed_in, function)
arguments_function = getattr(argument, function)
arguments_function(self.valid_comparison_value)
values_function.assert_called_once_with(self.valid_comparison_value)
class ValueTest(BaseArgument, DelegateToValue, unittest.TestCase):
klass = Value
@property
def valid_comparison_value(self):
return 'marv'
Enforce that arguments must implement non-zero methods.
|
import unittest
from mock import MagicMock, Mock
from nose.tools import *
from gargoyle.inputs.arguments import *
class BaseArgument(object):
def setUp(self):
self.argument = self.klass(self.valid_comparison_value)
@property
def interface_functions(self):
return ['__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__cmp__', '__hash__', '__nonzero__']
@property
def interface_methods(self):
return [getattr(self.argument, f) for f in self.interface_functions]
def test_implements_comparison_methods(self):
map(ok_, self.interface_methods)
class DelegateToValue(object):
def test_delegates_all_interface_function_to_the_value_passed_in(self):
value_passed_in = MagicMock()
value_passed_in.__cmp__ = Mock()
argument = self.klass(value_passed_in)
for function in self.interface_functions:
values_function = getattr(value_passed_in, function)
arguments_function = getattr(argument, function)
arguments_function(self.valid_comparison_value)
values_function.assert_called_once_with(self.valid_comparison_value)
class ValueTest(BaseArgument, DelegateToValue, unittest.TestCase):
klass = Value
@property
def valid_comparison_value(self):
return 'marv'
|
<commit_before>import unittest
from mock import MagicMock, Mock
from nose.tools import *
from gargoyle.inputs.arguments import *
class BaseArgument(object):
def setUp(self):
self.argument = self.klass(self.valid_comparison_value)
@property
def interface_functions(self):
return ['__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__cmp__', '__hash__']
@property
def interface_methods(self):
return [getattr(self.argument, f) for f in self.interface_functions]
def test_implements_comparison_methods(self):
map(ok_, self.interface_methods)
class DelegateToValue(object):
def test_delegates_all_interface_function_to_the_value_passed_in(self):
value_passed_in = MagicMock()
value_passed_in.__cmp__ = Mock()
argument = self.klass(value_passed_in)
for function in self.interface_functions:
values_function = getattr(value_passed_in, function)
arguments_function = getattr(argument, function)
arguments_function(self.valid_comparison_value)
values_function.assert_called_once_with(self.valid_comparison_value)
class ValueTest(BaseArgument, DelegateToValue, unittest.TestCase):
klass = Value
@property
def valid_comparison_value(self):
return 'marv'
<commit_msg>Enforce that arguments must implement non-zero methods.<commit_after>
|
import unittest
from mock import MagicMock, Mock
from nose.tools import *
from gargoyle.inputs.arguments import *
class BaseArgument(object):
def setUp(self):
self.argument = self.klass(self.valid_comparison_value)
@property
def interface_functions(self):
return ['__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__cmp__', '__hash__', '__nonzero__']
@property
def interface_methods(self):
return [getattr(self.argument, f) for f in self.interface_functions]
def test_implements_comparison_methods(self):
map(ok_, self.interface_methods)
class DelegateToValue(object):
def test_delegates_all_interface_function_to_the_value_passed_in(self):
value_passed_in = MagicMock()
value_passed_in.__cmp__ = Mock()
argument = self.klass(value_passed_in)
for function in self.interface_functions:
values_function = getattr(value_passed_in, function)
arguments_function = getattr(argument, function)
arguments_function(self.valid_comparison_value)
values_function.assert_called_once_with(self.valid_comparison_value)
class ValueTest(BaseArgument, DelegateToValue, unittest.TestCase):
klass = Value
@property
def valid_comparison_value(self):
return 'marv'
|
import unittest
from mock import MagicMock, Mock
from nose.tools import *
from gargoyle.inputs.arguments import *
class BaseArgument(object):
def setUp(self):
self.argument = self.klass(self.valid_comparison_value)
@property
def interface_functions(self):
return ['__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__cmp__', '__hash__']
@property
def interface_methods(self):
return [getattr(self.argument, f) for f in self.interface_functions]
def test_implements_comparison_methods(self):
map(ok_, self.interface_methods)
class DelegateToValue(object):
def test_delegates_all_interface_function_to_the_value_passed_in(self):
value_passed_in = MagicMock()
value_passed_in.__cmp__ = Mock()
argument = self.klass(value_passed_in)
for function in self.interface_functions:
values_function = getattr(value_passed_in, function)
arguments_function = getattr(argument, function)
arguments_function(self.valid_comparison_value)
values_function.assert_called_once_with(self.valid_comparison_value)
class ValueTest(BaseArgument, DelegateToValue, unittest.TestCase):
klass = Value
@property
def valid_comparison_value(self):
return 'marv'
Enforce that arguments must implement non-zero methods.
import unittest
from mock import MagicMock, Mock
from nose.tools import *
from gargoyle.inputs.arguments import *
class BaseArgument(object):
def setUp(self):
self.argument = self.klass(self.valid_comparison_value)
@property
def interface_functions(self):
return ['__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__cmp__', '__hash__', '__nonzero__']
@property
def interface_methods(self):
return [getattr(self.argument, f) for f in self.interface_functions]
def test_implements_comparison_methods(self):
map(ok_, self.interface_methods)
class DelegateToValue(object):
def test_delegates_all_interface_function_to_the_value_passed_in(self):
value_passed_in = MagicMock()
value_passed_in.__cmp__ = Mock()
argument = self.klass(value_passed_in)
for function in self.interface_functions:
values_function = getattr(value_passed_in, function)
arguments_function = getattr(argument, function)
arguments_function(self.valid_comparison_value)
values_function.assert_called_once_with(self.valid_comparison_value)
class ValueTest(BaseArgument, DelegateToValue, unittest.TestCase):
klass = Value
@property
def valid_comparison_value(self):
return 'marv'
|
<commit_before>import unittest
from mock import MagicMock, Mock
from nose.tools import *
from gargoyle.inputs.arguments import *
class BaseArgument(object):
def setUp(self):
self.argument = self.klass(self.valid_comparison_value)
@property
def interface_functions(self):
return ['__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__cmp__', '__hash__']
@property
def interface_methods(self):
return [getattr(self.argument, f) for f in self.interface_functions]
def test_implements_comparison_methods(self):
map(ok_, self.interface_methods)
class DelegateToValue(object):
def test_delegates_all_interface_function_to_the_value_passed_in(self):
value_passed_in = MagicMock()
value_passed_in.__cmp__ = Mock()
argument = self.klass(value_passed_in)
for function in self.interface_functions:
values_function = getattr(value_passed_in, function)
arguments_function = getattr(argument, function)
arguments_function(self.valid_comparison_value)
values_function.assert_called_once_with(self.valid_comparison_value)
class ValueTest(BaseArgument, DelegateToValue, unittest.TestCase):
klass = Value
@property
def valid_comparison_value(self):
return 'marv'
<commit_msg>Enforce that arguments must implement non-zero methods.<commit_after>import unittest
from mock import MagicMock, Mock
from nose.tools import *
from gargoyle.inputs.arguments import *
class BaseArgument(object):
def setUp(self):
self.argument = self.klass(self.valid_comparison_value)
@property
def interface_functions(self):
return ['__lt__', '__le__', '__eq__', '__ne__', '__gt__', '__ge__',
'__cmp__', '__hash__', '__nonzero__']
@property
def interface_methods(self):
return [getattr(self.argument, f) for f in self.interface_functions]
def test_implements_comparison_methods(self):
map(ok_, self.interface_methods)
class DelegateToValue(object):
def test_delegates_all_interface_function_to_the_value_passed_in(self):
value_passed_in = MagicMock()
value_passed_in.__cmp__ = Mock()
argument = self.klass(value_passed_in)
for function in self.interface_functions:
values_function = getattr(value_passed_in, function)
arguments_function = getattr(argument, function)
arguments_function(self.valid_comparison_value)
values_function.assert_called_once_with(self.valid_comparison_value)
class ValueTest(BaseArgument, DelegateToValue, unittest.TestCase):
klass = Value
@property
def valid_comparison_value(self):
return 'marv'
|
15013c51f602786265b59c1d4a7e894eae090d90
|
tests/test_normalize.py
|
tests/test_normalize.py
|
from hypothesis import assume, given
from utils import isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
|
from hypothesis import assume, given
from utils import angle_isclose, isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
@given(v=vectors())
def test_normalize_angle(v):
"""Normalization preserves direction."""
assume(v)
assert angle_isclose(v.normalize().angle(v), 0)
|
Test that direction is preserved
|
tests/normalize: Test that direction is preserved
|
Python
|
artistic-2.0
|
ppb/ppb-vector,ppb/ppb-vector
|
from hypothesis import assume, given
from utils import isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
tests/normalize: Test that direction is preserved
|
from hypothesis import assume, given
from utils import angle_isclose, isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
@given(v=vectors())
def test_normalize_angle(v):
"""Normalization preserves direction."""
assume(v)
assert angle_isclose(v.normalize().angle(v), 0)
|
<commit_before>from hypothesis import assume, given
from utils import isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
<commit_msg>tests/normalize: Test that direction is preserved<commit_after>
|
from hypothesis import assume, given
from utils import angle_isclose, isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
@given(v=vectors())
def test_normalize_angle(v):
"""Normalization preserves direction."""
assume(v)
assert angle_isclose(v.normalize().angle(v), 0)
|
from hypothesis import assume, given
from utils import isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
tests/normalize: Test that direction is preserved
from hypothesis import assume, given
from utils import angle_isclose, isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
@given(v=vectors())
def test_normalize_angle(v):
"""Normalization preserves direction."""
assume(v)
assert angle_isclose(v.normalize().angle(v), 0)
|
<commit_before>from hypothesis import assume, given
from utils import isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
<commit_msg>tests/normalize: Test that direction is preserved<commit_after>from hypothesis import assume, given
from utils import angle_isclose, isclose, vectors
@given(v=vectors())
def test_normalize_length(v):
"""v.normalize().length == 1 and v == v.length * v.normalize()"""
assume(v)
assert isclose(v.normalize().length, 1)
assert v.isclose(v.length * v.normalize())
@given(v=vectors())
def test_normalize_angle(v):
"""Normalization preserves direction."""
assume(v)
assert angle_isclose(v.normalize().angle(v), 0)
|
311b32f3c324d026181aa1718a7dd8c099d2e4b4
|
tests/test_resultset.py
|
tests/test_resultset.py
|
from .config import TweepyTestCase
from tweepy.models import ResultSet
class NoIdItem: pass
class IdItem:
def __init__(self, id):
self.id = id
ids_fixture = [1, 10, 8, 50, 2, 100, 5]
class TweepyResultSetTests(TweepyTestCase):
def setUp(self):
self.results = ResultSet()
for i in ids_fixture:
self.results.append(IdItem(i))
self.results.append(NoIdItem())
def testids(self):
ids = self.results.ids()
self.assertListEqual(ids, ids_fixture)
def testmaxid(self):
self.assertEqual(self.results.max_id, 0)
def testsinceid(self):
self.assertEqual(self.results.since_id, 100)
|
from .config import TweepyTestCase
from tweepy.models import ResultSet
class NoIdItem:
pass
class IdItem:
def __init__(self, id):
self.id = id
ids_fixture = [1, 10, 8, 50, 2, 100, 5]
class TweepyResultSetTests(TweepyTestCase):
def setUp(self):
self.results = ResultSet()
for i in ids_fixture:
self.results.append(IdItem(i))
self.results.append(NoIdItem())
def testids(self):
ids = self.results.ids()
self.assertListEqual(ids, ids_fixture)
def testmaxid(self):
self.assertEqual(self.results.max_id, 0)
def testsinceid(self):
self.assertEqual(self.results.since_id, 100)
|
Improve formatting for NoIdItem in ResultSet tests
|
Improve formatting for NoIdItem in ResultSet tests
|
Python
|
mit
|
tweepy/tweepy,svven/tweepy
|
from .config import TweepyTestCase
from tweepy.models import ResultSet
class NoIdItem: pass
class IdItem:
def __init__(self, id):
self.id = id
ids_fixture = [1, 10, 8, 50, 2, 100, 5]
class TweepyResultSetTests(TweepyTestCase):
def setUp(self):
self.results = ResultSet()
for i in ids_fixture:
self.results.append(IdItem(i))
self.results.append(NoIdItem())
def testids(self):
ids = self.results.ids()
self.assertListEqual(ids, ids_fixture)
def testmaxid(self):
self.assertEqual(self.results.max_id, 0)
def testsinceid(self):
self.assertEqual(self.results.since_id, 100)
Improve formatting for NoIdItem in ResultSet tests
|
from .config import TweepyTestCase
from tweepy.models import ResultSet
class NoIdItem:
pass
class IdItem:
def __init__(self, id):
self.id = id
ids_fixture = [1, 10, 8, 50, 2, 100, 5]
class TweepyResultSetTests(TweepyTestCase):
def setUp(self):
self.results = ResultSet()
for i in ids_fixture:
self.results.append(IdItem(i))
self.results.append(NoIdItem())
def testids(self):
ids = self.results.ids()
self.assertListEqual(ids, ids_fixture)
def testmaxid(self):
self.assertEqual(self.results.max_id, 0)
def testsinceid(self):
self.assertEqual(self.results.since_id, 100)
|
<commit_before>from .config import TweepyTestCase
from tweepy.models import ResultSet
class NoIdItem: pass
class IdItem:
def __init__(self, id):
self.id = id
ids_fixture = [1, 10, 8, 50, 2, 100, 5]
class TweepyResultSetTests(TweepyTestCase):
def setUp(self):
self.results = ResultSet()
for i in ids_fixture:
self.results.append(IdItem(i))
self.results.append(NoIdItem())
def testids(self):
ids = self.results.ids()
self.assertListEqual(ids, ids_fixture)
def testmaxid(self):
self.assertEqual(self.results.max_id, 0)
def testsinceid(self):
self.assertEqual(self.results.since_id, 100)
<commit_msg>Improve formatting for NoIdItem in ResultSet tests<commit_after>
|
from .config import TweepyTestCase
from tweepy.models import ResultSet
class NoIdItem:
pass
class IdItem:
def __init__(self, id):
self.id = id
ids_fixture = [1, 10, 8, 50, 2, 100, 5]
class TweepyResultSetTests(TweepyTestCase):
def setUp(self):
self.results = ResultSet()
for i in ids_fixture:
self.results.append(IdItem(i))
self.results.append(NoIdItem())
def testids(self):
ids = self.results.ids()
self.assertListEqual(ids, ids_fixture)
def testmaxid(self):
self.assertEqual(self.results.max_id, 0)
def testsinceid(self):
self.assertEqual(self.results.since_id, 100)
|
from .config import TweepyTestCase
from tweepy.models import ResultSet
class NoIdItem: pass
class IdItem:
def __init__(self, id):
self.id = id
ids_fixture = [1, 10, 8, 50, 2, 100, 5]
class TweepyResultSetTests(TweepyTestCase):
def setUp(self):
self.results = ResultSet()
for i in ids_fixture:
self.results.append(IdItem(i))
self.results.append(NoIdItem())
def testids(self):
ids = self.results.ids()
self.assertListEqual(ids, ids_fixture)
def testmaxid(self):
self.assertEqual(self.results.max_id, 0)
def testsinceid(self):
self.assertEqual(self.results.since_id, 100)
Improve formatting for NoIdItem in ResultSet tests
from .config import TweepyTestCase
from tweepy.models import ResultSet
class NoIdItem:
pass
class IdItem:
def __init__(self, id):
self.id = id
ids_fixture = [1, 10, 8, 50, 2, 100, 5]
class TweepyResultSetTests(TweepyTestCase):
def setUp(self):
self.results = ResultSet()
for i in ids_fixture:
self.results.append(IdItem(i))
self.results.append(NoIdItem())
def testids(self):
ids = self.results.ids()
self.assertListEqual(ids, ids_fixture)
def testmaxid(self):
self.assertEqual(self.results.max_id, 0)
def testsinceid(self):
self.assertEqual(self.results.since_id, 100)
|
<commit_before>from .config import TweepyTestCase
from tweepy.models import ResultSet
class NoIdItem: pass
class IdItem:
def __init__(self, id):
self.id = id
ids_fixture = [1, 10, 8, 50, 2, 100, 5]
class TweepyResultSetTests(TweepyTestCase):
def setUp(self):
self.results = ResultSet()
for i in ids_fixture:
self.results.append(IdItem(i))
self.results.append(NoIdItem())
def testids(self):
ids = self.results.ids()
self.assertListEqual(ids, ids_fixture)
def testmaxid(self):
self.assertEqual(self.results.max_id, 0)
def testsinceid(self):
self.assertEqual(self.results.since_id, 100)
<commit_msg>Improve formatting for NoIdItem in ResultSet tests<commit_after>from .config import TweepyTestCase
from tweepy.models import ResultSet
class NoIdItem:
pass
class IdItem:
def __init__(self, id):
self.id = id
ids_fixture = [1, 10, 8, 50, 2, 100, 5]
class TweepyResultSetTests(TweepyTestCase):
def setUp(self):
self.results = ResultSet()
for i in ids_fixture:
self.results.append(IdItem(i))
self.results.append(NoIdItem())
def testids(self):
ids = self.results.ids()
self.assertListEqual(ids, ids_fixture)
def testmaxid(self):
self.assertEqual(self.results.max_id, 0)
def testsinceid(self):
self.assertEqual(self.results.since_id, 100)
|