commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
a6dfc5c5f256acd78d806cc8d4ddac9bd1ac34b5 | barbicanclient/osc_plugin.py | barbicanclient/osc_plugin.py | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from barbicanclient import client
DEFAULT_API_VERSION = '1'
API_VERSION_OPTION = 'os_key_manager_api_version'
API_NAME = 'key_manager'
API_VERSIONS = {
'1': 'barbicanclient.client.Client',
}
def make_client(instance):
"""Returns a Barbican service client."""
return client.Client(session=instance.session,
region_name=instance._region_name)
def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument('--os-key-manager-api-version',
metavar='<key-manager-api-version>',
default=client.env(
'OS_KEY_MANAGER_API_VERSION',
default=DEFAULT_API_VERSION),
help=('Barbican API version, default=' +
DEFAULT_API_VERSION +
' (Env: OS_KEY_MANAGER_API_VERSION)'))
return parser
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStackClient plugin for Key Manager service."""
from barbicanclient import client
DEFAULT_API_VERSION = '1'
API_VERSION_OPTION = 'os_key_manager_api_version'
API_NAME = 'key_manager'
API_VERSIONS = {
'1': 'barbicanclient.client.Client',
}
def make_client(instance):
"""Returns a Barbican service client."""
return client.Client(session=instance.session,
region_name=instance._region_name)
def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument('--os-key-manager-api-version',
metavar='<key-manager-api-version>',
default=client.env(
'OS_KEY_MANAGER_API_VERSION',
default=DEFAULT_API_VERSION),
help=('Barbican API version, default=' +
DEFAULT_API_VERSION +
' (Env: OS_KEY_MANAGER_API_VERSION)'))
return parser
| Add plug-in summary for osc doc | Add plug-in summary for osc doc
Stevedore Sphinx extension handles this comment.
http://docs.openstack.org/developer/python-openstackclient/plugin-commands.html
Change-Id: Id6339d11b900a644647c8c25bbd630ef52a60aab
| Python | apache-2.0 | openstack/python-barbicanclient | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from barbicanclient import client
DEFAULT_API_VERSION = '1'
API_VERSION_OPTION = 'os_key_manager_api_version'
API_NAME = 'key_manager'
API_VERSIONS = {
'1': 'barbicanclient.client.Client',
}
def make_client(instance):
"""Returns a Barbican service client."""
return client.Client(session=instance.session,
region_name=instance._region_name)
def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument('--os-key-manager-api-version',
metavar='<key-manager-api-version>',
default=client.env(
'OS_KEY_MANAGER_API_VERSION',
default=DEFAULT_API_VERSION),
help=('Barbican API version, default=' +
DEFAULT_API_VERSION +
' (Env: OS_KEY_MANAGER_API_VERSION)'))
return parser
Add plug-in summary for osc doc
Stevedore Sphinx extension handles this comment.
http://docs.openstack.org/developer/python-openstackclient/plugin-commands.html
Change-Id: Id6339d11b900a644647c8c25bbd630ef52a60aab | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStackClient plugin for Key Manager service."""
from barbicanclient import client
DEFAULT_API_VERSION = '1'
API_VERSION_OPTION = 'os_key_manager_api_version'
API_NAME = 'key_manager'
API_VERSIONS = {
'1': 'barbicanclient.client.Client',
}
def make_client(instance):
"""Returns a Barbican service client."""
return client.Client(session=instance.session,
region_name=instance._region_name)
def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument('--os-key-manager-api-version',
metavar='<key-manager-api-version>',
default=client.env(
'OS_KEY_MANAGER_API_VERSION',
default=DEFAULT_API_VERSION),
help=('Barbican API version, default=' +
DEFAULT_API_VERSION +
' (Env: OS_KEY_MANAGER_API_VERSION)'))
return parser
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from barbicanclient import client
DEFAULT_API_VERSION = '1'
API_VERSION_OPTION = 'os_key_manager_api_version'
API_NAME = 'key_manager'
API_VERSIONS = {
'1': 'barbicanclient.client.Client',
}
def make_client(instance):
"""Returns a Barbican service client."""
return client.Client(session=instance.session,
region_name=instance._region_name)
def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument('--os-key-manager-api-version',
metavar='<key-manager-api-version>',
default=client.env(
'OS_KEY_MANAGER_API_VERSION',
default=DEFAULT_API_VERSION),
help=('Barbican API version, default=' +
DEFAULT_API_VERSION +
' (Env: OS_KEY_MANAGER_API_VERSION)'))
return parser
<commit_msg>Add plug-in summary for osc doc
Stevedore Sphinx extension handles this comment.
http://docs.openstack.org/developer/python-openstackclient/plugin-commands.html
Change-Id: Id6339d11b900a644647c8c25bbd630ef52a60aab<commit_after> | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStackClient plugin for Key Manager service."""
from barbicanclient import client
DEFAULT_API_VERSION = '1'
API_VERSION_OPTION = 'os_key_manager_api_version'
API_NAME = 'key_manager'
API_VERSIONS = {
'1': 'barbicanclient.client.Client',
}
def make_client(instance):
"""Returns a Barbican service client."""
return client.Client(session=instance.session,
region_name=instance._region_name)
def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument('--os-key-manager-api-version',
metavar='<key-manager-api-version>',
default=client.env(
'OS_KEY_MANAGER_API_VERSION',
default=DEFAULT_API_VERSION),
help=('Barbican API version, default=' +
DEFAULT_API_VERSION +
' (Env: OS_KEY_MANAGER_API_VERSION)'))
return parser
| # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from barbicanclient import client
DEFAULT_API_VERSION = '1'
API_VERSION_OPTION = 'os_key_manager_api_version'
API_NAME = 'key_manager'
API_VERSIONS = {
'1': 'barbicanclient.client.Client',
}
def make_client(instance):
"""Returns a Barbican service client."""
return client.Client(session=instance.session,
region_name=instance._region_name)
def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument('--os-key-manager-api-version',
metavar='<key-manager-api-version>',
default=client.env(
'OS_KEY_MANAGER_API_VERSION',
default=DEFAULT_API_VERSION),
help=('Barbican API version, default=' +
DEFAULT_API_VERSION +
' (Env: OS_KEY_MANAGER_API_VERSION)'))
return parser
Add plug-in summary for osc doc
Stevedore Sphinx extension handles this comment.
http://docs.openstack.org/developer/python-openstackclient/plugin-commands.html
Change-Id: Id6339d11b900a644647c8c25bbd630ef52a60aab# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStackClient plugin for Key Manager service."""
from barbicanclient import client
DEFAULT_API_VERSION = '1'
API_VERSION_OPTION = 'os_key_manager_api_version'
API_NAME = 'key_manager'
API_VERSIONS = {
'1': 'barbicanclient.client.Client',
}
def make_client(instance):
"""Returns a Barbican service client."""
return client.Client(session=instance.session,
region_name=instance._region_name)
def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument('--os-key-manager-api-version',
metavar='<key-manager-api-version>',
default=client.env(
'OS_KEY_MANAGER_API_VERSION',
default=DEFAULT_API_VERSION),
help=('Barbican API version, default=' +
DEFAULT_API_VERSION +
' (Env: OS_KEY_MANAGER_API_VERSION)'))
return parser
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from barbicanclient import client
DEFAULT_API_VERSION = '1'
API_VERSION_OPTION = 'os_key_manager_api_version'
API_NAME = 'key_manager'
API_VERSIONS = {
'1': 'barbicanclient.client.Client',
}
def make_client(instance):
"""Returns a Barbican service client."""
return client.Client(session=instance.session,
region_name=instance._region_name)
def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument('--os-key-manager-api-version',
metavar='<key-manager-api-version>',
default=client.env(
'OS_KEY_MANAGER_API_VERSION',
default=DEFAULT_API_VERSION),
help=('Barbican API version, default=' +
DEFAULT_API_VERSION +
' (Env: OS_KEY_MANAGER_API_VERSION)'))
return parser
<commit_msg>Add plug-in summary for osc doc
Stevedore Sphinx extension handles this comment.
http://docs.openstack.org/developer/python-openstackclient/plugin-commands.html
Change-Id: Id6339d11b900a644647c8c25bbd630ef52a60aab<commit_after># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""OpenStackClient plugin for Key Manager service."""
from barbicanclient import client
DEFAULT_API_VERSION = '1'
API_VERSION_OPTION = 'os_key_manager_api_version'
API_NAME = 'key_manager'
API_VERSIONS = {
'1': 'barbicanclient.client.Client',
}
def make_client(instance):
"""Returns a Barbican service client."""
return client.Client(session=instance.session,
region_name=instance._region_name)
def build_option_parser(parser):
"""Hook to add global options."""
parser.add_argument('--os-key-manager-api-version',
metavar='<key-manager-api-version>',
default=client.env(
'OS_KEY_MANAGER_API_VERSION',
default=DEFAULT_API_VERSION),
help=('Barbican API version, default=' +
DEFAULT_API_VERSION +
' (Env: OS_KEY_MANAGER_API_VERSION)'))
return parser
|
086e54f0b89670027272e5485d9eb832adecc7b9 | constants/base.py | constants/base.py | from collections import namedtuple
class Constants(object):
Constant = namedtuple('Constant', ['codename', 'value', 'description'])
def __init__(self, **kwargs):
self._constants = []
try:
for codename, (value, description) in kwargs.items():
if hasattr(self, codename):
msg = "'{0}' conflicts with an existing attribute."
raise Exception(msg.format(codename))
setattr(self, codename, value)
c = self.Constant(codename, value, description)
self._constants.append(c)
except (ValueError, TypeError):
raise Exception("Must pass in kwargs in format: "
"**{'codename': (value, description)}")
def choices(self):
"""Django-style choices list to pass to a model or form field."""
return [(c.value, c.description) for c in self._constants]
def get_list(self, *codenames):
"""Returns a list of values corresponding with the codenames."""
return [getattr(self, codename) for codename in codenames]
| from collections import namedtuple
class Constants(object):
Constant = namedtuple('Constant', ['codename', 'value', 'description'])
def __init__(self, **kwargs):
self._constants = []
for codename in kwargs:
try:
value, description = kwargs.get(codename)
except (ValueError, TypeError):
raise ValueError("Must pass in kwargs in format: "
"**{'codename': (value, description)}")
if hasattr(self, codename):
raise AttributeError("'{0}' conflicts with an existing "
"attribute.".format(codename))
setattr(self, codename, value)
c = self.Constant(codename, value, description)
self._constants.append(c)
def choices(self):
"""Django-style choices list to pass to a model or form field."""
return [(c.value, c.description) for c in self._constants]
def get_list(self, *codenames):
"""Returns a list of values corresponding with the codenames."""
return [getattr(self, codename) for codename in codenames]
| Refactor Constants initialization to throw more specific exceptions | Refactor Constants initialization to throw more specific exceptions
| Python | bsd-3-clause | caktus/django-dry-choices | from collections import namedtuple
class Constants(object):
Constant = namedtuple('Constant', ['codename', 'value', 'description'])
def __init__(self, **kwargs):
self._constants = []
try:
for codename, (value, description) in kwargs.items():
if hasattr(self, codename):
msg = "'{0}' conflicts with an existing attribute."
raise Exception(msg.format(codename))
setattr(self, codename, value)
c = self.Constant(codename, value, description)
self._constants.append(c)
except (ValueError, TypeError):
raise Exception("Must pass in kwargs in format: "
"**{'codename': (value, description)}")
def choices(self):
"""Django-style choices list to pass to a model or form field."""
return [(c.value, c.description) for c in self._constants]
def get_list(self, *codenames):
"""Returns a list of values corresponding with the codenames."""
return [getattr(self, codename) for codename in codenames]
Refactor Constants initialization to throw more specific exceptions | from collections import namedtuple
class Constants(object):
Constant = namedtuple('Constant', ['codename', 'value', 'description'])
def __init__(self, **kwargs):
self._constants = []
for codename in kwargs:
try:
value, description = kwargs.get(codename)
except (ValueError, TypeError):
raise ValueError("Must pass in kwargs in format: "
"**{'codename': (value, description)}")
if hasattr(self, codename):
raise AttributeError("'{0}' conflicts with an existing "
"attribute.".format(codename))
setattr(self, codename, value)
c = self.Constant(codename, value, description)
self._constants.append(c)
def choices(self):
"""Django-style choices list to pass to a model or form field."""
return [(c.value, c.description) for c in self._constants]
def get_list(self, *codenames):
"""Returns a list of values corresponding with the codenames."""
return [getattr(self, codename) for codename in codenames]
| <commit_before>from collections import namedtuple
class Constants(object):
Constant = namedtuple('Constant', ['codename', 'value', 'description'])
def __init__(self, **kwargs):
self._constants = []
try:
for codename, (value, description) in kwargs.items():
if hasattr(self, codename):
msg = "'{0}' conflicts with an existing attribute."
raise Exception(msg.format(codename))
setattr(self, codename, value)
c = self.Constant(codename, value, description)
self._constants.append(c)
except (ValueError, TypeError):
raise Exception("Must pass in kwargs in format: "
"**{'codename': (value, description)}")
def choices(self):
"""Django-style choices list to pass to a model or form field."""
return [(c.value, c.description) for c in self._constants]
def get_list(self, *codenames):
"""Returns a list of values corresponding with the codenames."""
return [getattr(self, codename) for codename in codenames]
<commit_msg>Refactor Constants initialization to throw more specific exceptions<commit_after> | from collections import namedtuple
class Constants(object):
Constant = namedtuple('Constant', ['codename', 'value', 'description'])
def __init__(self, **kwargs):
self._constants = []
for codename in kwargs:
try:
value, description = kwargs.get(codename)
except (ValueError, TypeError):
raise ValueError("Must pass in kwargs in format: "
"**{'codename': (value, description)}")
if hasattr(self, codename):
raise AttributeError("'{0}' conflicts with an existing "
"attribute.".format(codename))
setattr(self, codename, value)
c = self.Constant(codename, value, description)
self._constants.append(c)
def choices(self):
"""Django-style choices list to pass to a model or form field."""
return [(c.value, c.description) for c in self._constants]
def get_list(self, *codenames):
"""Returns a list of values corresponding with the codenames."""
return [getattr(self, codename) for codename in codenames]
| from collections import namedtuple
class Constants(object):
Constant = namedtuple('Constant', ['codename', 'value', 'description'])
def __init__(self, **kwargs):
self._constants = []
try:
for codename, (value, description) in kwargs.items():
if hasattr(self, codename):
msg = "'{0}' conflicts with an existing attribute."
raise Exception(msg.format(codename))
setattr(self, codename, value)
c = self.Constant(codename, value, description)
self._constants.append(c)
except (ValueError, TypeError):
raise Exception("Must pass in kwargs in format: "
"**{'codename': (value, description)}")
def choices(self):
"""Django-style choices list to pass to a model or form field."""
return [(c.value, c.description) for c in self._constants]
def get_list(self, *codenames):
"""Returns a list of values corresponding with the codenames."""
return [getattr(self, codename) for codename in codenames]
Refactor Constants initialization to throw more specific exceptionsfrom collections import namedtuple
class Constants(object):
Constant = namedtuple('Constant', ['codename', 'value', 'description'])
def __init__(self, **kwargs):
self._constants = []
for codename in kwargs:
try:
value, description = kwargs.get(codename)
except (ValueError, TypeError):
raise ValueError("Must pass in kwargs in format: "
"**{'codename': (value, description)}")
if hasattr(self, codename):
raise AttributeError("'{0}' conflicts with an existing "
"attribute.".format(codename))
setattr(self, codename, value)
c = self.Constant(codename, value, description)
self._constants.append(c)
def choices(self):
"""Django-style choices list to pass to a model or form field."""
return [(c.value, c.description) for c in self._constants]
def get_list(self, *codenames):
"""Returns a list of values corresponding with the codenames."""
return [getattr(self, codename) for codename in codenames]
| <commit_before>from collections import namedtuple
class Constants(object):
Constant = namedtuple('Constant', ['codename', 'value', 'description'])
def __init__(self, **kwargs):
self._constants = []
try:
for codename, (value, description) in kwargs.items():
if hasattr(self, codename):
msg = "'{0}' conflicts with an existing attribute."
raise Exception(msg.format(codename))
setattr(self, codename, value)
c = self.Constant(codename, value, description)
self._constants.append(c)
except (ValueError, TypeError):
raise Exception("Must pass in kwargs in format: "
"**{'codename': (value, description)}")
def choices(self):
"""Django-style choices list to pass to a model or form field."""
return [(c.value, c.description) for c in self._constants]
def get_list(self, *codenames):
"""Returns a list of values corresponding with the codenames."""
return [getattr(self, codename) for codename in codenames]
<commit_msg>Refactor Constants initialization to throw more specific exceptions<commit_after>from collections import namedtuple
class Constants(object):
Constant = namedtuple('Constant', ['codename', 'value', 'description'])
def __init__(self, **kwargs):
self._constants = []
for codename in kwargs:
try:
value, description = kwargs.get(codename)
except (ValueError, TypeError):
raise ValueError("Must pass in kwargs in format: "
"**{'codename': (value, description)}")
if hasattr(self, codename):
raise AttributeError("'{0}' conflicts with an existing "
"attribute.".format(codename))
setattr(self, codename, value)
c = self.Constant(codename, value, description)
self._constants.append(c)
def choices(self):
"""Django-style choices list to pass to a model or form field."""
return [(c.value, c.description) for c in self._constants]
def get_list(self, *codenames):
"""Returns a list of values corresponding with the codenames."""
return [getattr(self, codename) for codename in codenames]
|
d9ce6cc440019ecfc73f1c82e41da4e9ce02a234 | smart_open/__init__.py | smart_open/__init__.py | # -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
logger = logging.getLogger(__name__)
if len(logger.handlers) == 0:
logger.addHandler(logging.NullHandler())
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
| Configure logging handlers before submodule imports | Configure logging handlers before submodule imports
- Fix #474
- Fix #475
| Python | mit | RaRe-Technologies/smart_open,RaRe-Technologies/smart_open,piskvorky/smart_open | # -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
Configure logging handlers before submodule imports
- Fix #474
- Fix #475 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
logger = logging.getLogger(__name__)
if len(logger.handlers) == 0:
logger.addHandler(logging.NullHandler())
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
<commit_msg>Configure logging handlers before submodule imports
- Fix #474
- Fix #475<commit_after> | # -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
logger = logging.getLogger(__name__)
if len(logger.handlers) == 0:
logger.addHandler(logging.NullHandler())
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
| # -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
Configure logging handlers before submodule imports
- Fix #474
- Fix #475# -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
logger = logging.getLogger(__name__)
if len(logger.handlers) == 0:
logger.addHandler(logging.NullHandler())
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
| <commit_before># -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
logger = logging.getLogger(__name__)
logger.addHandler(logging.NullHandler())
<commit_msg>Configure logging handlers before submodule imports
- Fix #474
- Fix #475<commit_after># -*- coding: utf-8 -*-
#
# Copyright (C) 2019 Radim Rehurek <me@radimrehurek.com>
#
# This code is distributed under the terms and conditions
# from the MIT License (MIT).
#
"""
Utilities for streaming to/from several file-like data storages: S3 / HDFS / local
filesystem / compressed files, and many more, using a simple, Pythonic API.
The streaming makes heavy use of generators and pipes, to avoid loading
full file contents into memory, allowing work with arbitrarily large files.
The main functions are:
* `open()`, which opens the given file for reading/writing
* `parse_uri()`
* `s3_iter_bucket()`, which goes over all keys in an S3 bucket in parallel
* `register_compressor()`, which registers callbacks for transparent compressor handling
"""
import logging
from smart_open import version
logger = logging.getLogger(__name__)
if len(logger.handlers) == 0:
logger.addHandler(logging.NullHandler())
from .smart_open_lib import open, parse_uri, smart_open, register_compressor
from .s3 import iter_bucket as s3_iter_bucket
__all__ = [
'open',
'parse_uri',
'register_compressor',
's3_iter_bucket',
'smart_open',
]
__version__ = version.__version__
|
1d90e26b3f4d84252b98f4ea80709beb96d4007e | wafer/registration/apps.py | wafer/registration/apps.py | # Needed due to django 1.7 changed app name restrictions
from django.apps import AppConfig
class RegistrationConfig(AppConfig):
label = 'wafer.registration'
name = 'wafer.registration'
| # Needed due to django 1.7 changed app name restrictions
from django.apps import AppConfig
class RegistrationConfig(AppConfig):
label = 'wafer_registration'
name = 'wafer.registration'
| Fix syntax to work with Django 3.2 | Fix syntax to work with Django 3.2
| Python | isc | CTPUG/wafer,CTPUG/wafer,CTPUG/wafer,CTPUG/wafer | # Needed due to django 1.7 changed app name restrictions
from django.apps import AppConfig
class RegistrationConfig(AppConfig):
label = 'wafer.registration'
name = 'wafer.registration'
Fix syntax to work with Django 3.2 | # Needed due to django 1.7 changed app name restrictions
from django.apps import AppConfig
class RegistrationConfig(AppConfig):
label = 'wafer_registration'
name = 'wafer.registration'
| <commit_before># Needed due to django 1.7 changed app name restrictions
from django.apps import AppConfig
class RegistrationConfig(AppConfig):
label = 'wafer.registration'
name = 'wafer.registration'
<commit_msg>Fix syntax to work with Django 3.2<commit_after> | # Needed due to django 1.7 changed app name restrictions
from django.apps import AppConfig
class RegistrationConfig(AppConfig):
label = 'wafer_registration'
name = 'wafer.registration'
| # Needed due to django 1.7 changed app name restrictions
from django.apps import AppConfig
class RegistrationConfig(AppConfig):
label = 'wafer.registration'
name = 'wafer.registration'
Fix syntax to work with Django 3.2# Needed due to django 1.7 changed app name restrictions
from django.apps import AppConfig
class RegistrationConfig(AppConfig):
label = 'wafer_registration'
name = 'wafer.registration'
| <commit_before># Needed due to django 1.7 changed app name restrictions
from django.apps import AppConfig
class RegistrationConfig(AppConfig):
label = 'wafer.registration'
name = 'wafer.registration'
<commit_msg>Fix syntax to work with Django 3.2<commit_after># Needed due to django 1.7 changed app name restrictions
from django.apps import AppConfig
class RegistrationConfig(AppConfig):
label = 'wafer_registration'
name = 'wafer.registration'
|
ac9123c7926c04af7ac68949e2636a81f771fd7d | ncdc_download/download_mapper2.py | ncdc_download/download_mapper2.py | #!/usr/bin/env python3
import ftplib
import gzip
import sys
host = 'ftp.ncdc.noaa.gov'
base = '/pub/data/noaa'
retries = 3
ftp = ftplib.FTP(host)
ftp.login()
for line in sys.stdin:
(year, filename) = line.strip().split()
for i in range(retries):
sys.stderr.write('reporter:status:Processing file %s/%s (FTP attempt %d of %d)\n' % (year, filename, i + 1, retries))
data = bytearray()
try:
ftp.retrbinary('RETR %s/%s/%s' % (base, year, filename), data.extend)
except ftplib.all_errors as error:
sys.stderr.write('%s\n' % error)
continue
records = gzip.decompress(data).decode('ISO-8859-1').split('\n')[0:-1]
for record in records:
print('%s\t%s' % (year, record))
sys.stderr.write('reporter:counter:NCDC Download,%s,%d\n' % (year, len(records)))
break
else:
ftp.quit()
sys.exit(1)
ftp.quit()
| #!/usr/bin/env python3
import ftplib
import gzip
import os
import sys
host = 'ftp.ncdc.noaa.gov'
base = '/pub/data/noaa'
retries = 3
ftp = ftplib.FTP(host)
ftp.login()
for line in sys.stdin:
(year, filename) = line.strip().split()
for i in range(retries):
sys.stderr.write('reporter:status:Processing file %s/%s (FTP attempt %d of %d)\n' % (year,
filename, i + 1, retries))
try:
ftp.retrbinary('RETR %s/%s/%s' % (base, year, filename), open(filename, 'wb').write)
except ftplib.all_errors as error:
sys.stderr.write('%s\n' % error)
continue
count = 0
for record in gzip.open(filename, 'rb'):
print('%s\t%s' % (year, record.decode('ISO-8859-1').strip()))
count += 1
sys.stderr.write('reporter:counter:NCDC Download,%s,%d\n' % (year, count))
os.remove(filename)
break
else:
ftp.quit()
sys.exit(1)
ftp.quit()
| Decompress downloaded files from disk, not in memory | Decompress downloaded files from disk, not in memory
| Python | mit | simonbrady/cat,simonbrady/cat | #!/usr/bin/env python3
import ftplib
import gzip
import sys
host = 'ftp.ncdc.noaa.gov'
base = '/pub/data/noaa'
retries = 3
ftp = ftplib.FTP(host)
ftp.login()
for line in sys.stdin:
(year, filename) = line.strip().split()
for i in range(retries):
sys.stderr.write('reporter:status:Processing file %s/%s (FTP attempt %d of %d)\n' % (year, filename, i + 1, retries))
data = bytearray()
try:
ftp.retrbinary('RETR %s/%s/%s' % (base, year, filename), data.extend)
except ftplib.all_errors as error:
sys.stderr.write('%s\n' % error)
continue
records = gzip.decompress(data).decode('ISO-8859-1').split('\n')[0:-1]
for record in records:
print('%s\t%s' % (year, record))
sys.stderr.write('reporter:counter:NCDC Download,%s,%d\n' % (year, len(records)))
break
else:
ftp.quit()
sys.exit(1)
ftp.quit()
Decompress downloaded files from disk, not in memory | #!/usr/bin/env python3
import ftplib
import gzip
import os
import sys
host = 'ftp.ncdc.noaa.gov'
base = '/pub/data/noaa'
retries = 3
ftp = ftplib.FTP(host)
ftp.login()
for line in sys.stdin:
(year, filename) = line.strip().split()
for i in range(retries):
sys.stderr.write('reporter:status:Processing file %s/%s (FTP attempt %d of %d)\n' % (year,
filename, i + 1, retries))
try:
ftp.retrbinary('RETR %s/%s/%s' % (base, year, filename), open(filename, 'wb').write)
except ftplib.all_errors as error:
sys.stderr.write('%s\n' % error)
continue
count = 0
for record in gzip.open(filename, 'rb'):
print('%s\t%s' % (year, record.decode('ISO-8859-1').strip()))
count += 1
sys.stderr.write('reporter:counter:NCDC Download,%s,%d\n' % (year, count))
os.remove(filename)
break
else:
ftp.quit()
sys.exit(1)
ftp.quit()
| <commit_before>#!/usr/bin/env python3
import ftplib
import gzip
import sys
host = 'ftp.ncdc.noaa.gov'
base = '/pub/data/noaa'
retries = 3
ftp = ftplib.FTP(host)
ftp.login()
for line in sys.stdin:
(year, filename) = line.strip().split()
for i in range(retries):
sys.stderr.write('reporter:status:Processing file %s/%s (FTP attempt %d of %d)\n' % (year, filename, i + 1, retries))
data = bytearray()
try:
ftp.retrbinary('RETR %s/%s/%s' % (base, year, filename), data.extend)
except ftplib.all_errors as error:
sys.stderr.write('%s\n' % error)
continue
records = gzip.decompress(data).decode('ISO-8859-1').split('\n')[0:-1]
for record in records:
print('%s\t%s' % (year, record))
sys.stderr.write('reporter:counter:NCDC Download,%s,%d\n' % (year, len(records)))
break
else:
ftp.quit()
sys.exit(1)
ftp.quit()
<commit_msg>Decompress downloaded files from disk, not in memory<commit_after> | #!/usr/bin/env python3
import ftplib
import gzip
import os
import sys
host = 'ftp.ncdc.noaa.gov'
base = '/pub/data/noaa'
retries = 3
ftp = ftplib.FTP(host)
ftp.login()
for line in sys.stdin:
(year, filename) = line.strip().split()
for i in range(retries):
sys.stderr.write('reporter:status:Processing file %s/%s (FTP attempt %d of %d)\n' % (year,
filename, i + 1, retries))
try:
ftp.retrbinary('RETR %s/%s/%s' % (base, year, filename), open(filename, 'wb').write)
except ftplib.all_errors as error:
sys.stderr.write('%s\n' % error)
continue
count = 0
for record in gzip.open(filename, 'rb'):
print('%s\t%s' % (year, record.decode('ISO-8859-1').strip()))
count += 1
sys.stderr.write('reporter:counter:NCDC Download,%s,%d\n' % (year, count))
os.remove(filename)
break
else:
ftp.quit()
sys.exit(1)
ftp.quit()
| #!/usr/bin/env python3
import ftplib
import gzip
import sys
host = 'ftp.ncdc.noaa.gov'
base = '/pub/data/noaa'
retries = 3
ftp = ftplib.FTP(host)
ftp.login()
for line in sys.stdin:
(year, filename) = line.strip().split()
for i in range(retries):
sys.stderr.write('reporter:status:Processing file %s/%s (FTP attempt %d of %d)\n' % (year, filename, i + 1, retries))
data = bytearray()
try:
ftp.retrbinary('RETR %s/%s/%s' % (base, year, filename), data.extend)
except ftplib.all_errors as error:
sys.stderr.write('%s\n' % error)
continue
records = gzip.decompress(data).decode('ISO-8859-1').split('\n')[0:-1]
for record in records:
print('%s\t%s' % (year, record))
sys.stderr.write('reporter:counter:NCDC Download,%s,%d\n' % (year, len(records)))
break
else:
ftp.quit()
sys.exit(1)
ftp.quit()
Decompress downloaded files from disk, not in memory#!/usr/bin/env python3
import ftplib
import gzip
import os
import sys
host = 'ftp.ncdc.noaa.gov'
base = '/pub/data/noaa'
retries = 3
ftp = ftplib.FTP(host)
ftp.login()
for line in sys.stdin:
(year, filename) = line.strip().split()
for i in range(retries):
sys.stderr.write('reporter:status:Processing file %s/%s (FTP attempt %d of %d)\n' % (year,
filename, i + 1, retries))
try:
ftp.retrbinary('RETR %s/%s/%s' % (base, year, filename), open(filename, 'wb').write)
except ftplib.all_errors as error:
sys.stderr.write('%s\n' % error)
continue
count = 0
for record in gzip.open(filename, 'rb'):
print('%s\t%s' % (year, record.decode('ISO-8859-1').strip()))
count += 1
sys.stderr.write('reporter:counter:NCDC Download,%s,%d\n' % (year, count))
os.remove(filename)
break
else:
ftp.quit()
sys.exit(1)
ftp.quit()
| <commit_before>#!/usr/bin/env python3
import ftplib
import gzip
import sys
host = 'ftp.ncdc.noaa.gov'
base = '/pub/data/noaa'
retries = 3
ftp = ftplib.FTP(host)
ftp.login()
for line in sys.stdin:
(year, filename) = line.strip().split()
for i in range(retries):
sys.stderr.write('reporter:status:Processing file %s/%s (FTP attempt %d of %d)\n' % (year, filename, i + 1, retries))
data = bytearray()
try:
ftp.retrbinary('RETR %s/%s/%s' % (base, year, filename), data.extend)
except ftplib.all_errors as error:
sys.stderr.write('%s\n' % error)
continue
records = gzip.decompress(data).decode('ISO-8859-1').split('\n')[0:-1]
for record in records:
print('%s\t%s' % (year, record))
sys.stderr.write('reporter:counter:NCDC Download,%s,%d\n' % (year, len(records)))
break
else:
ftp.quit()
sys.exit(1)
ftp.quit()
<commit_msg>Decompress downloaded files from disk, not in memory<commit_after>#!/usr/bin/env python3
import ftplib
import gzip
import os
import sys
host = 'ftp.ncdc.noaa.gov'
base = '/pub/data/noaa'
retries = 3
ftp = ftplib.FTP(host)
ftp.login()
for line in sys.stdin:
(year, filename) = line.strip().split()
for i in range(retries):
sys.stderr.write('reporter:status:Processing file %s/%s (FTP attempt %d of %d)\n' % (year,
filename, i + 1, retries))
try:
ftp.retrbinary('RETR %s/%s/%s' % (base, year, filename), open(filename, 'wb').write)
except ftplib.all_errors as error:
sys.stderr.write('%s\n' % error)
continue
count = 0
for record in gzip.open(filename, 'rb'):
print('%s\t%s' % (year, record.decode('ISO-8859-1').strip()))
count += 1
sys.stderr.write('reporter:counter:NCDC Download,%s,%d\n' % (year, count))
os.remove(filename)
break
else:
ftp.quit()
sys.exit(1)
ftp.quit()
|
db13de154fa44f3ef0bf1e365d2ee0d7a6951700 | cellcounter/accounts/urls.py | cellcounter/accounts/urls.py | from django.conf.urls import patterns, url
from cellcounter.accounts import views
urlpatterns = patterns('',
url('^new/$', views.RegistrationView.as_view(), name='register'),
url('^(?P<pk>[0-9]+)/$', views.UserDetailView.as_view(), name='user-detail'),
url('^(?P<pk>[0-9]+)/delete/$', views.UserDeleteView.as_view(), name='user-delete'),
url('^(?P<pk>[0-9]+)/edit/$', views.UserUpdateView.as_view(), name='user-update'),
url('^password/reset/$', views.PasswordResetView.as_view(),
name='password-reset'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[\d\w\-]+)/$',
views.PasswordResetConfirmView.as_view(),
name='password-reset-confirm'),
url('^password/change/$', views.PasswordChangeView.as_view(), name='change-password'),
) | from django.conf.urls import patterns, url
from cellcounter.accounts import views
urlpatterns = patterns('',
url('^new/$', views.RegistrationView.as_view(), name='register'),
url('^(?P<pk>[0-9]+)/$', views.UserDetailView.as_view(), name='user-detail'),
url('^(?P<pk>[0-9]+)/delete/$', views.UserDeleteView.as_view(), name='user-delete'),
url('^(?P<pk>[0-9]+)/edit/$', views.UserUpdateView.as_view(), name='user-update'),
url('^password/reset/$', views.PasswordResetView.as_view(),
name='password-reset'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.PasswordResetConfirmView.as_view(),
name='password-reset-confirm'),
url('^password/change/$', views.PasswordChangeView.as_view(), name='change-password'),
) | Use URL regex as per main Django project | Use URL regex as per main Django project
| Python | mit | cellcounter/cellcounter,haematologic/cellcounter,haematologic/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter,cellcounter/cellcounter,haematologic/cellcounter | from django.conf.urls import patterns, url
from cellcounter.accounts import views
urlpatterns = patterns('',
url('^new/$', views.RegistrationView.as_view(), name='register'),
url('^(?P<pk>[0-9]+)/$', views.UserDetailView.as_view(), name='user-detail'),
url('^(?P<pk>[0-9]+)/delete/$', views.UserDeleteView.as_view(), name='user-delete'),
url('^(?P<pk>[0-9]+)/edit/$', views.UserUpdateView.as_view(), name='user-update'),
url('^password/reset/$', views.PasswordResetView.as_view(),
name='password-reset'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[\d\w\-]+)/$',
views.PasswordResetConfirmView.as_view(),
name='password-reset-confirm'),
url('^password/change/$', views.PasswordChangeView.as_view(), name='change-password'),
)Use URL regex as per main Django project | from django.conf.urls import patterns, url
from cellcounter.accounts import views
urlpatterns = patterns('',
url('^new/$', views.RegistrationView.as_view(), name='register'),
url('^(?P<pk>[0-9]+)/$', views.UserDetailView.as_view(), name='user-detail'),
url('^(?P<pk>[0-9]+)/delete/$', views.UserDeleteView.as_view(), name='user-delete'),
url('^(?P<pk>[0-9]+)/edit/$', views.UserUpdateView.as_view(), name='user-update'),
url('^password/reset/$', views.PasswordResetView.as_view(),
name='password-reset'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.PasswordResetConfirmView.as_view(),
name='password-reset-confirm'),
url('^password/change/$', views.PasswordChangeView.as_view(), name='change-password'),
) | <commit_before>from django.conf.urls import patterns, url
from cellcounter.accounts import views
urlpatterns = patterns('',
url('^new/$', views.RegistrationView.as_view(), name='register'),
url('^(?P<pk>[0-9]+)/$', views.UserDetailView.as_view(), name='user-detail'),
url('^(?P<pk>[0-9]+)/delete/$', views.UserDeleteView.as_view(), name='user-delete'),
url('^(?P<pk>[0-9]+)/edit/$', views.UserUpdateView.as_view(), name='user-update'),
url('^password/reset/$', views.PasswordResetView.as_view(),
name='password-reset'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[\d\w\-]+)/$',
views.PasswordResetConfirmView.as_view(),
name='password-reset-confirm'),
url('^password/change/$', views.PasswordChangeView.as_view(), name='change-password'),
)<commit_msg>Use URL regex as per main Django project<commit_after> | from django.conf.urls import patterns, url
from cellcounter.accounts import views
urlpatterns = patterns('',
url('^new/$', views.RegistrationView.as_view(), name='register'),
url('^(?P<pk>[0-9]+)/$', views.UserDetailView.as_view(), name='user-detail'),
url('^(?P<pk>[0-9]+)/delete/$', views.UserDeleteView.as_view(), name='user-delete'),
url('^(?P<pk>[0-9]+)/edit/$', views.UserUpdateView.as_view(), name='user-update'),
url('^password/reset/$', views.PasswordResetView.as_view(),
name='password-reset'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.PasswordResetConfirmView.as_view(),
name='password-reset-confirm'),
url('^password/change/$', views.PasswordChangeView.as_view(), name='change-password'),
) | from django.conf.urls import patterns, url
from cellcounter.accounts import views
urlpatterns = patterns('',
url('^new/$', views.RegistrationView.as_view(), name='register'),
url('^(?P<pk>[0-9]+)/$', views.UserDetailView.as_view(), name='user-detail'),
url('^(?P<pk>[0-9]+)/delete/$', views.UserDeleteView.as_view(), name='user-delete'),
url('^(?P<pk>[0-9]+)/edit/$', views.UserUpdateView.as_view(), name='user-update'),
url('^password/reset/$', views.PasswordResetView.as_view(),
name='password-reset'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[\d\w\-]+)/$',
views.PasswordResetConfirmView.as_view(),
name='password-reset-confirm'),
url('^password/change/$', views.PasswordChangeView.as_view(), name='change-password'),
)Use URL regex as per main Django projectfrom django.conf.urls import patterns, url
from cellcounter.accounts import views
urlpatterns = patterns('',
url('^new/$', views.RegistrationView.as_view(), name='register'),
url('^(?P<pk>[0-9]+)/$', views.UserDetailView.as_view(), name='user-detail'),
url('^(?P<pk>[0-9]+)/delete/$', views.UserDeleteView.as_view(), name='user-delete'),
url('^(?P<pk>[0-9]+)/edit/$', views.UserUpdateView.as_view(), name='user-update'),
url('^password/reset/$', views.PasswordResetView.as_view(),
name='password-reset'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.PasswordResetConfirmView.as_view(),
name='password-reset-confirm'),
url('^password/change/$', views.PasswordChangeView.as_view(), name='change-password'),
) | <commit_before>from django.conf.urls import patterns, url
from cellcounter.accounts import views
urlpatterns = patterns('',
url('^new/$', views.RegistrationView.as_view(), name='register'),
url('^(?P<pk>[0-9]+)/$', views.UserDetailView.as_view(), name='user-detail'),
url('^(?P<pk>[0-9]+)/delete/$', views.UserDeleteView.as_view(), name='user-delete'),
url('^(?P<pk>[0-9]+)/edit/$', views.UserUpdateView.as_view(), name='user-update'),
url('^password/reset/$', views.PasswordResetView.as_view(),
name='password-reset'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[\d\w\-]+)/$',
views.PasswordResetConfirmView.as_view(),
name='password-reset-confirm'),
url('^password/change/$', views.PasswordChangeView.as_view(), name='change-password'),
)<commit_msg>Use URL regex as per main Django project<commit_after>from django.conf.urls import patterns, url
from cellcounter.accounts import views
urlpatterns = patterns('',
url('^new/$', views.RegistrationView.as_view(), name='register'),
url('^(?P<pk>[0-9]+)/$', views.UserDetailView.as_view(), name='user-detail'),
url('^(?P<pk>[0-9]+)/delete/$', views.UserDeleteView.as_view(), name='user-delete'),
url('^(?P<pk>[0-9]+)/edit/$', views.UserUpdateView.as_view(), name='user-update'),
url('^password/reset/$', views.PasswordResetView.as_view(),
name='password-reset'),
url('^password/reset/confirm/(?P<uidb64>[0-9A-Za-z_\-]+)/(?P<token>[0-9A-Za-z]{1,13}-[0-9A-Za-z]{1,20})/$',
views.PasswordResetConfirmView.as_view(),
name='password-reset-confirm'),
url('^password/change/$', views.PasswordChangeView.as_view(), name='change-password'),
) |
bea43337d9caa4e9a5271b66d951ae6547a23c80 | DjangoLibrary/middleware.py | DjangoLibrary/middleware.py | from django.contrib import auth
from django.contrib.auth.middleware import AuthenticationMiddleware
import base64
class AutologinAuthenticationMiddleware(AuthenticationMiddleware):
def process_request(self, request):
if 'autologin' not in request.COOKIES:
return
if request.COOKIES['autologin'] == '':
auth.logout(request)
return
autologin_cookie_value = base64.b64decode(request.COOKIES['autologin'])
autologin_cookie_value = autologin_cookie_value.decode('utf8')
username = autologin_cookie_value.split(':')[0]
password = autologin_cookie_value.split(':')[1]
user = auth.authenticate(username=username, password=password)
if user is not None:
if user.is_active:
auth.login(request, user)
| from django.contrib import auth
from django.contrib.auth.middleware import AuthenticationMiddleware
import base64
class AutologinAuthenticationMiddleware(AuthenticationMiddleware):
def process_request(self, request):
if 'autologin' not in request.COOKIES:
return
if request.COOKIES['autologin'] == '':
auth.logout(request)
return
autologin_cookie_value = base64.b64decode(request.COOKIES['autologin'])
# Py3 uses a bytes string here, so we need to decode to utf-8
autologin_cookie_value = autologin_cookie_value.decode('utf-8')
username = autologin_cookie_value.split(':')[0]
password = autologin_cookie_value.split(':')[1]
user = auth.authenticate(username=username, password=password)
if user is not None:
if user.is_active:
auth.login(request, user)
| Add a comment to py3 byte string decode. | Add a comment to py3 byte string decode.
| Python | apache-2.0 | kitconcept/robotframework-djangolibrary | from django.contrib import auth
from django.contrib.auth.middleware import AuthenticationMiddleware
import base64
class AutologinAuthenticationMiddleware(AuthenticationMiddleware):
def process_request(self, request):
if 'autologin' not in request.COOKIES:
return
if request.COOKIES['autologin'] == '':
auth.logout(request)
return
autologin_cookie_value = base64.b64decode(request.COOKIES['autologin'])
autologin_cookie_value = autologin_cookie_value.decode('utf8')
username = autologin_cookie_value.split(':')[0]
password = autologin_cookie_value.split(':')[1]
user = auth.authenticate(username=username, password=password)
if user is not None:
if user.is_active:
auth.login(request, user)
Add a comment to py3 byte string decode. | from django.contrib import auth
from django.contrib.auth.middleware import AuthenticationMiddleware
import base64
class AutologinAuthenticationMiddleware(AuthenticationMiddleware):
def process_request(self, request):
if 'autologin' not in request.COOKIES:
return
if request.COOKIES['autologin'] == '':
auth.logout(request)
return
autologin_cookie_value = base64.b64decode(request.COOKIES['autologin'])
# Py3 uses a bytes string here, so we need to decode to utf-8
autologin_cookie_value = autologin_cookie_value.decode('utf-8')
username = autologin_cookie_value.split(':')[0]
password = autologin_cookie_value.split(':')[1]
user = auth.authenticate(username=username, password=password)
if user is not None:
if user.is_active:
auth.login(request, user)
| <commit_before>from django.contrib import auth
from django.contrib.auth.middleware import AuthenticationMiddleware
import base64
class AutologinAuthenticationMiddleware(AuthenticationMiddleware):
def process_request(self, request):
if 'autologin' not in request.COOKIES:
return
if request.COOKIES['autologin'] == '':
auth.logout(request)
return
autologin_cookie_value = base64.b64decode(request.COOKIES['autologin'])
autologin_cookie_value = autologin_cookie_value.decode('utf8')
username = autologin_cookie_value.split(':')[0]
password = autologin_cookie_value.split(':')[1]
user = auth.authenticate(username=username, password=password)
if user is not None:
if user.is_active:
auth.login(request, user)
<commit_msg>Add a comment to py3 byte string decode.<commit_after> | from django.contrib import auth
from django.contrib.auth.middleware import AuthenticationMiddleware
import base64
class AutologinAuthenticationMiddleware(AuthenticationMiddleware):
def process_request(self, request):
if 'autologin' not in request.COOKIES:
return
if request.COOKIES['autologin'] == '':
auth.logout(request)
return
autologin_cookie_value = base64.b64decode(request.COOKIES['autologin'])
# Py3 uses a bytes string here, so we need to decode to utf-8
autologin_cookie_value = autologin_cookie_value.decode('utf-8')
username = autologin_cookie_value.split(':')[0]
password = autologin_cookie_value.split(':')[1]
user = auth.authenticate(username=username, password=password)
if user is not None:
if user.is_active:
auth.login(request, user)
| from django.contrib import auth
from django.contrib.auth.middleware import AuthenticationMiddleware
import base64
class AutologinAuthenticationMiddleware(AuthenticationMiddleware):
def process_request(self, request):
if 'autologin' not in request.COOKIES:
return
if request.COOKIES['autologin'] == '':
auth.logout(request)
return
autologin_cookie_value = base64.b64decode(request.COOKIES['autologin'])
autologin_cookie_value = autologin_cookie_value.decode('utf8')
username = autologin_cookie_value.split(':')[0]
password = autologin_cookie_value.split(':')[1]
user = auth.authenticate(username=username, password=password)
if user is not None:
if user.is_active:
auth.login(request, user)
Add a comment to py3 byte string decode.from django.contrib import auth
from django.contrib.auth.middleware import AuthenticationMiddleware
import base64
class AutologinAuthenticationMiddleware(AuthenticationMiddleware):
def process_request(self, request):
if 'autologin' not in request.COOKIES:
return
if request.COOKIES['autologin'] == '':
auth.logout(request)
return
autologin_cookie_value = base64.b64decode(request.COOKIES['autologin'])
# Py3 uses a bytes string here, so we need to decode to utf-8
autologin_cookie_value = autologin_cookie_value.decode('utf-8')
username = autologin_cookie_value.split(':')[0]
password = autologin_cookie_value.split(':')[1]
user = auth.authenticate(username=username, password=password)
if user is not None:
if user.is_active:
auth.login(request, user)
| <commit_before>from django.contrib import auth
from django.contrib.auth.middleware import AuthenticationMiddleware
import base64
class AutologinAuthenticationMiddleware(AuthenticationMiddleware):
def process_request(self, request):
if 'autologin' not in request.COOKIES:
return
if request.COOKIES['autologin'] == '':
auth.logout(request)
return
autologin_cookie_value = base64.b64decode(request.COOKIES['autologin'])
autologin_cookie_value = autologin_cookie_value.decode('utf8')
username = autologin_cookie_value.split(':')[0]
password = autologin_cookie_value.split(':')[1]
user = auth.authenticate(username=username, password=password)
if user is not None:
if user.is_active:
auth.login(request, user)
<commit_msg>Add a comment to py3 byte string decode.<commit_after>from django.contrib import auth
from django.contrib.auth.middleware import AuthenticationMiddleware
import base64
class AutologinAuthenticationMiddleware(AuthenticationMiddleware):
def process_request(self, request):
if 'autologin' not in request.COOKIES:
return
if request.COOKIES['autologin'] == '':
auth.logout(request)
return
autologin_cookie_value = base64.b64decode(request.COOKIES['autologin'])
# Py3 uses a bytes string here, so we need to decode to utf-8
autologin_cookie_value = autologin_cookie_value.decode('utf-8')
username = autologin_cookie_value.split(':')[0]
password = autologin_cookie_value.split(':')[1]
user = auth.authenticate(username=username, password=password)
if user is not None:
if user.is_active:
auth.login(request, user)
|
df5594e3da75ecd7f5ab6d112d22e5da628a3ccf | onepercentclub/settings/travis.py | onepercentclub/settings/travis.py | # TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
| # TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
| Use FF as test browser in Travis | Use FF as test browser in Travis
| Python | bsd-3-clause | onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site,onepercentclub/onepercentclub-site | # TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
Use FF as test browser in Travis | # TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
| <commit_before># TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
<commit_msg>Use FF as test browser in Travis<commit_after> | # TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
| # TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
Use FF as test browser in Travis# TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
| <commit_before># TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'remote'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
<commit_msg>Use FF as test browser in Travis<commit_after># TODO: not sure why but we need to include the SECRET_KEY here - importing from the test_runner file doesn't work
SECRET_KEY = 'hbqnTEq+m7Tk61bvRV/TLANr3i0WZ6hgBXDh3aYpSU8m+E1iCtlU3Q=='
from .test_runner import *
# Use firefox for running tests on Travis
SELENIUM_WEBDRIVER = 'firefox'
SELENIUM_TESTS = False
ROOT_URLCONF = 'onepercentclub.urls'
|
7e5777c7b09780d7cde1a94e58dd022f98051168 | scrapple/utils/config.py | scrapple/utils/config.py | """
scrapple.utils.config
~~~~~~~~~~~~~~~~~~~~~
Functions related to traversing the configuration file
"""
from __future__ import print_function
def traverse_next(page, next, results):
"""
Recursive generator to traverse through the next attribute and \
crawl through the links to be followed
"""
for link in page.extract_links(next['follow_link']):
print("Loading page", link.url)
r = results.copy()
for attribute in next['scraping'].get('data'):
if attribute['field'] != "":
print("\nExtracting", attribute['field'], "attribute", sep=' ')
r[attribute['field']] = link.extract_content(attribute['selector'], attribute['attr'], attribute['default'])
if not next['scraping'].get('next'):
yield r
else:
for next2 in next['scraping'].get('next'):
for result in traverse_next(link, next2, r):
yield result
| """
scrapple.utils.config
~~~~~~~~~~~~~~~~~~~~~
Functions related to traversing the configuration file
"""
from __future__ import print_function
from colorama import init, Fore, Back
init()
def traverse_next(page, next, results):
"""
Recursive generator to traverse through the next attribute and \
crawl through the links to be followed
"""
for link in page.extract_links(next['follow_link']):
print(Back.YELLOW + Fore.BLUE + "Loading page ", link.url + Back.RESET + Fore.RESET)
r = results.copy()
for attribute in next['scraping'].get('data'):
if attribute['field'] != "":
print("\nExtracting", attribute['field'], "attribute", sep=' ')
r[attribute['field']] = link.extract_content(attribute['selector'], attribute['attr'], attribute['default'])
if not next['scraping'].get('next'):
yield r
else:
for next2 in next['scraping'].get('next'):
for result in traverse_next(link, next2, r):
yield result
| Modify stdout logging in crawler run | Modify stdout logging in crawler run
| Python | mit | AlexMathew/scrapple,AlexMathew/scrapple,scrappleapp/scrapple,scrappleapp/scrapple,AlexMathew/scrapple | """
scrapple.utils.config
~~~~~~~~~~~~~~~~~~~~~
Functions related to traversing the configuration file
"""
from __future__ import print_function
def traverse_next(page, next, results):
"""
Recursive generator to traverse through the next attribute and \
crawl through the links to be followed
"""
for link in page.extract_links(next['follow_link']):
print("Loading page", link.url)
r = results.copy()
for attribute in next['scraping'].get('data'):
if attribute['field'] != "":
print("\nExtracting", attribute['field'], "attribute", sep=' ')
r[attribute['field']] = link.extract_content(attribute['selector'], attribute['attr'], attribute['default'])
if not next['scraping'].get('next'):
yield r
else:
for next2 in next['scraping'].get('next'):
for result in traverse_next(link, next2, r):
yield result
Modify stdout logging in crawler run | """
scrapple.utils.config
~~~~~~~~~~~~~~~~~~~~~
Functions related to traversing the configuration file
"""
from __future__ import print_function
from colorama import init, Fore, Back
init()
def traverse_next(page, next, results):
"""
Recursive generator to traverse through the next attribute and \
crawl through the links to be followed
"""
for link in page.extract_links(next['follow_link']):
print(Back.YELLOW + Fore.BLUE + "Loading page ", link.url + Back.RESET + Fore.RESET)
r = results.copy()
for attribute in next['scraping'].get('data'):
if attribute['field'] != "":
print("\nExtracting", attribute['field'], "attribute", sep=' ')
r[attribute['field']] = link.extract_content(attribute['selector'], attribute['attr'], attribute['default'])
if not next['scraping'].get('next'):
yield r
else:
for next2 in next['scraping'].get('next'):
for result in traverse_next(link, next2, r):
yield result
| <commit_before>"""
scrapple.utils.config
~~~~~~~~~~~~~~~~~~~~~
Functions related to traversing the configuration file
"""
from __future__ import print_function
def traverse_next(page, next, results):
"""
Recursive generator to traverse through the next attribute and \
crawl through the links to be followed
"""
for link in page.extract_links(next['follow_link']):
print("Loading page", link.url)
r = results.copy()
for attribute in next['scraping'].get('data'):
if attribute['field'] != "":
print("\nExtracting", attribute['field'], "attribute", sep=' ')
r[attribute['field']] = link.extract_content(attribute['selector'], attribute['attr'], attribute['default'])
if not next['scraping'].get('next'):
yield r
else:
for next2 in next['scraping'].get('next'):
for result in traverse_next(link, next2, r):
yield result
<commit_msg>Modify stdout logging in crawler run<commit_after> | """
scrapple.utils.config
~~~~~~~~~~~~~~~~~~~~~
Functions related to traversing the configuration file
"""
from __future__ import print_function
from colorama import init, Fore, Back
init()
def traverse_next(page, next, results):
"""
Recursive generator to traverse through the next attribute and \
crawl through the links to be followed
"""
for link in page.extract_links(next['follow_link']):
print(Back.YELLOW + Fore.BLUE + "Loading page ", link.url + Back.RESET + Fore.RESET)
r = results.copy()
for attribute in next['scraping'].get('data'):
if attribute['field'] != "":
print("\nExtracting", attribute['field'], "attribute", sep=' ')
r[attribute['field']] = link.extract_content(attribute['selector'], attribute['attr'], attribute['default'])
if not next['scraping'].get('next'):
yield r
else:
for next2 in next['scraping'].get('next'):
for result in traverse_next(link, next2, r):
yield result
| """
scrapple.utils.config
~~~~~~~~~~~~~~~~~~~~~
Functions related to traversing the configuration file
"""
from __future__ import print_function
def traverse_next(page, next, results):
"""
Recursive generator to traverse through the next attribute and \
crawl through the links to be followed
"""
for link in page.extract_links(next['follow_link']):
print("Loading page", link.url)
r = results.copy()
for attribute in next['scraping'].get('data'):
if attribute['field'] != "":
print("\nExtracting", attribute['field'], "attribute", sep=' ')
r[attribute['field']] = link.extract_content(attribute['selector'], attribute['attr'], attribute['default'])
if not next['scraping'].get('next'):
yield r
else:
for next2 in next['scraping'].get('next'):
for result in traverse_next(link, next2, r):
yield result
Modify stdout logging in crawler run"""
scrapple.utils.config
~~~~~~~~~~~~~~~~~~~~~
Functions related to traversing the configuration file
"""
from __future__ import print_function
from colorama import init, Fore, Back
init()
def traverse_next(page, next, results):
"""
Recursive generator to traverse through the next attribute and \
crawl through the links to be followed
"""
for link in page.extract_links(next['follow_link']):
print(Back.YELLOW + Fore.BLUE + "Loading page ", link.url + Back.RESET + Fore.RESET)
r = results.copy()
for attribute in next['scraping'].get('data'):
if attribute['field'] != "":
print("\nExtracting", attribute['field'], "attribute", sep=' ')
r[attribute['field']] = link.extract_content(attribute['selector'], attribute['attr'], attribute['default'])
if not next['scraping'].get('next'):
yield r
else:
for next2 in next['scraping'].get('next'):
for result in traverse_next(link, next2, r):
yield result
| <commit_before>"""
scrapple.utils.config
~~~~~~~~~~~~~~~~~~~~~
Functions related to traversing the configuration file
"""
from __future__ import print_function
def traverse_next(page, next, results):
"""
Recursive generator to traverse through the next attribute and \
crawl through the links to be followed
"""
for link in page.extract_links(next['follow_link']):
print("Loading page", link.url)
r = results.copy()
for attribute in next['scraping'].get('data'):
if attribute['field'] != "":
print("\nExtracting", attribute['field'], "attribute", sep=' ')
r[attribute['field']] = link.extract_content(attribute['selector'], attribute['attr'], attribute['default'])
if not next['scraping'].get('next'):
yield r
else:
for next2 in next['scraping'].get('next'):
for result in traverse_next(link, next2, r):
yield result
<commit_msg>Modify stdout logging in crawler run<commit_after>"""
scrapple.utils.config
~~~~~~~~~~~~~~~~~~~~~
Functions related to traversing the configuration file
"""
from __future__ import print_function
from colorama import init, Fore, Back
init()
def traverse_next(page, next, results):
"""
Recursive generator to traverse through the next attribute and \
crawl through the links to be followed
"""
for link in page.extract_links(next['follow_link']):
print(Back.YELLOW + Fore.BLUE + "Loading page ", link.url + Back.RESET + Fore.RESET)
r = results.copy()
for attribute in next['scraping'].get('data'):
if attribute['field'] != "":
print("\nExtracting", attribute['field'], "attribute", sep=' ')
r[attribute['field']] = link.extract_content(attribute['selector'], attribute['attr'], attribute['default'])
if not next['scraping'].get('next'):
yield r
else:
for next2 in next['scraping'].get('next'):
for result in traverse_next(link, next2, r):
yield result
|
cc8f1507c90261947d9520859922bff44ef9c6b4 | observatory/lib/InheritanceQuerySet.py | observatory/lib/InheritanceQuerySet.py | from django.db.models.query import QuerySet
from django.db.models.fields.related import SingleRelatedObjectDescriptor
class InheritanceQuerySet(QuerySet):
def select_subclasses(self, *subclasses):
if not subclasses:
subclasses = [o for o in dir(self.model)
if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\
and issubclass(getattr(self.model,o).related.model, self.model)]
new_qs = self.select_related(*subclasses)
new_qs.subclasses = subclasses
return new_qs
def _clone(self, klass=None, setup=False, **kwargs):
try:
kwargs.update({'subclasses': self.subclasses})
except AttributeError:
pass
return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)
def iterator(self):
iter = super(InheritanceQuerySet, self).iterator()
if getattr(self, 'subclasses', False):
for obj in iter:
obj = [getattr(obj, s) for s in self.subclasses if getattr(obj, s)] or [obj]
yield obj[0]
else:
for obj in iter:
yield obj
| from django.db.models.query import QuerySet
from django.db.models.fields.related import SingleRelatedObjectDescriptor
from django.core.exceptions import ObjectDoesNotExist
class InheritanceQuerySet(QuerySet):
def select_subclasses(self, *subclasses):
if not subclasses:
subclasses = [o for o in dir(self.model)
if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\
and issubclass(getattr(self.model,o).related.model, self.model)]
new_qs = self.select_related(*subclasses)
new_qs.subclasses = subclasses
return new_qs
def _clone(self, klass=None, setup=False, **kwargs):
try:
kwargs.update({'subclasses': self.subclasses})
except AttributeError:
pass
return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)
def _get_subclasses(self, obj):
result = []
for s in getattr(self, 'subclassses', []):
try:
if getattr(obj, s):
result += getattr(obj, s)
except ObjectDoesNotExist:
continue
return result or [obj]
def iterator(self):
iter = super(InheritanceQuerySet, self).iterator()
if getattr(self, 'subclasses', False):
for obj in iter:
yield self._get_subclasses(obj)[0]
else:
for obj in iter:
yield obj
| Fix the feed to work with new versions of django | Fix the feed to work with new versions of django
| Python | isc | rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory,rcos/Observatory | from django.db.models.query import QuerySet
from django.db.models.fields.related import SingleRelatedObjectDescriptor
class InheritanceQuerySet(QuerySet):
def select_subclasses(self, *subclasses):
if not subclasses:
subclasses = [o for o in dir(self.model)
if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\
and issubclass(getattr(self.model,o).related.model, self.model)]
new_qs = self.select_related(*subclasses)
new_qs.subclasses = subclasses
return new_qs
def _clone(self, klass=None, setup=False, **kwargs):
try:
kwargs.update({'subclasses': self.subclasses})
except AttributeError:
pass
return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)
def iterator(self):
iter = super(InheritanceQuerySet, self).iterator()
if getattr(self, 'subclasses', False):
for obj in iter:
obj = [getattr(obj, s) for s in self.subclasses if getattr(obj, s)] or [obj]
yield obj[0]
else:
for obj in iter:
yield obj
Fix the feed to work with new versions of django | from django.db.models.query import QuerySet
from django.db.models.fields.related import SingleRelatedObjectDescriptor
from django.core.exceptions import ObjectDoesNotExist
class InheritanceQuerySet(QuerySet):
def select_subclasses(self, *subclasses):
if not subclasses:
subclasses = [o for o in dir(self.model)
if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\
and issubclass(getattr(self.model,o).related.model, self.model)]
new_qs = self.select_related(*subclasses)
new_qs.subclasses = subclasses
return new_qs
def _clone(self, klass=None, setup=False, **kwargs):
try:
kwargs.update({'subclasses': self.subclasses})
except AttributeError:
pass
return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)
def _get_subclasses(self, obj):
result = []
for s in getattr(self, 'subclassses', []):
try:
if getattr(obj, s):
result += getattr(obj, s)
except ObjectDoesNotExist:
continue
return result or [obj]
def iterator(self):
iter = super(InheritanceQuerySet, self).iterator()
if getattr(self, 'subclasses', False):
for obj in iter:
yield self._get_subclasses(obj)[0]
else:
for obj in iter:
yield obj
| <commit_before>from django.db.models.query import QuerySet
from django.db.models.fields.related import SingleRelatedObjectDescriptor
class InheritanceQuerySet(QuerySet):
def select_subclasses(self, *subclasses):
if not subclasses:
subclasses = [o for o in dir(self.model)
if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\
and issubclass(getattr(self.model,o).related.model, self.model)]
new_qs = self.select_related(*subclasses)
new_qs.subclasses = subclasses
return new_qs
def _clone(self, klass=None, setup=False, **kwargs):
try:
kwargs.update({'subclasses': self.subclasses})
except AttributeError:
pass
return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)
def iterator(self):
iter = super(InheritanceQuerySet, self).iterator()
if getattr(self, 'subclasses', False):
for obj in iter:
obj = [getattr(obj, s) for s in self.subclasses if getattr(obj, s)] or [obj]
yield obj[0]
else:
for obj in iter:
yield obj
<commit_msg>Fix the feed to work with new versions of django<commit_after> | from django.db.models.query import QuerySet
from django.db.models.fields.related import SingleRelatedObjectDescriptor
from django.core.exceptions import ObjectDoesNotExist
class InheritanceQuerySet(QuerySet):
def select_subclasses(self, *subclasses):
if not subclasses:
subclasses = [o for o in dir(self.model)
if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\
and issubclass(getattr(self.model,o).related.model, self.model)]
new_qs = self.select_related(*subclasses)
new_qs.subclasses = subclasses
return new_qs
def _clone(self, klass=None, setup=False, **kwargs):
try:
kwargs.update({'subclasses': self.subclasses})
except AttributeError:
pass
return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)
def _get_subclasses(self, obj):
result = []
for s in getattr(self, 'subclassses', []):
try:
if getattr(obj, s):
result += getattr(obj, s)
except ObjectDoesNotExist:
continue
return result or [obj]
def iterator(self):
iter = super(InheritanceQuerySet, self).iterator()
if getattr(self, 'subclasses', False):
for obj in iter:
yield self._get_subclasses(obj)[0]
else:
for obj in iter:
yield obj
| from django.db.models.query import QuerySet
from django.db.models.fields.related import SingleRelatedObjectDescriptor
class InheritanceQuerySet(QuerySet):
def select_subclasses(self, *subclasses):
if not subclasses:
subclasses = [o for o in dir(self.model)
if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\
and issubclass(getattr(self.model,o).related.model, self.model)]
new_qs = self.select_related(*subclasses)
new_qs.subclasses = subclasses
return new_qs
def _clone(self, klass=None, setup=False, **kwargs):
try:
kwargs.update({'subclasses': self.subclasses})
except AttributeError:
pass
return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)
def iterator(self):
iter = super(InheritanceQuerySet, self).iterator()
if getattr(self, 'subclasses', False):
for obj in iter:
obj = [getattr(obj, s) for s in self.subclasses if getattr(obj, s)] or [obj]
yield obj[0]
else:
for obj in iter:
yield obj
Fix the feed to work with new versions of djangofrom django.db.models.query import QuerySet
from django.db.models.fields.related import SingleRelatedObjectDescriptor
from django.core.exceptions import ObjectDoesNotExist
class InheritanceQuerySet(QuerySet):
def select_subclasses(self, *subclasses):
if not subclasses:
subclasses = [o for o in dir(self.model)
if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\
and issubclass(getattr(self.model,o).related.model, self.model)]
new_qs = self.select_related(*subclasses)
new_qs.subclasses = subclasses
return new_qs
def _clone(self, klass=None, setup=False, **kwargs):
try:
kwargs.update({'subclasses': self.subclasses})
except AttributeError:
pass
return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)
def _get_subclasses(self, obj):
result = []
for s in getattr(self, 'subclassses', []):
try:
if getattr(obj, s):
result += getattr(obj, s)
except ObjectDoesNotExist:
continue
return result or [obj]
def iterator(self):
iter = super(InheritanceQuerySet, self).iterator()
if getattr(self, 'subclasses', False):
for obj in iter:
yield self._get_subclasses(obj)[0]
else:
for obj in iter:
yield obj
| <commit_before>from django.db.models.query import QuerySet
from django.db.models.fields.related import SingleRelatedObjectDescriptor
class InheritanceQuerySet(QuerySet):
def select_subclasses(self, *subclasses):
if not subclasses:
subclasses = [o for o in dir(self.model)
if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\
and issubclass(getattr(self.model,o).related.model, self.model)]
new_qs = self.select_related(*subclasses)
new_qs.subclasses = subclasses
return new_qs
def _clone(self, klass=None, setup=False, **kwargs):
try:
kwargs.update({'subclasses': self.subclasses})
except AttributeError:
pass
return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)
def iterator(self):
iter = super(InheritanceQuerySet, self).iterator()
if getattr(self, 'subclasses', False):
for obj in iter:
obj = [getattr(obj, s) for s in self.subclasses if getattr(obj, s)] or [obj]
yield obj[0]
else:
for obj in iter:
yield obj
<commit_msg>Fix the feed to work with new versions of django<commit_after>from django.db.models.query import QuerySet
from django.db.models.fields.related import SingleRelatedObjectDescriptor
from django.core.exceptions import ObjectDoesNotExist
class InheritanceQuerySet(QuerySet):
def select_subclasses(self, *subclasses):
if not subclasses:
subclasses = [o for o in dir(self.model)
if isinstance(getattr(self.model, o), SingleRelatedObjectDescriptor)\
and issubclass(getattr(self.model,o).related.model, self.model)]
new_qs = self.select_related(*subclasses)
new_qs.subclasses = subclasses
return new_qs
def _clone(self, klass=None, setup=False, **kwargs):
try:
kwargs.update({'subclasses': self.subclasses})
except AttributeError:
pass
return super(InheritanceQuerySet, self)._clone(klass, setup, **kwargs)
def _get_subclasses(self, obj):
result = []
for s in getattr(self, 'subclassses', []):
try:
if getattr(obj, s):
result += getattr(obj, s)
except ObjectDoesNotExist:
continue
return result or [obj]
def iterator(self):
iter = super(InheritanceQuerySet, self).iterator()
if getattr(self, 'subclasses', False):
for obj in iter:
yield self._get_subclasses(obj)[0]
else:
for obj in iter:
yield obj
|
096496a04ae728103e1bd1460cf654854c7e5527 | src/core/settings/production.py | src/core/settings/production.py | from core.settings.base import * # noqa
DEBUG = False
ALLOWED_HOSTS = ['*.miximum.fr']
| from core.settings.base import * # noqa
DEBUG = False
ALLOWED_HOSTS = ['*.miximum.fr']
SECRET_KEY = '7cqu2zqu3lt)uw@@cx9_i7xo-d688zi3!q(r0zt37$rdqo1=lj'
| Add a dummy secret key to settings | Add a dummy secret key to settings
(bad, i know)
| Python | mit | thibault/pomodoro_api | from core.settings.base import * # noqa
DEBUG = False
ALLOWED_HOSTS = ['*.miximum.fr']
Add a dummy secret key to settings
(bad, i know) | from core.settings.base import * # noqa
DEBUG = False
ALLOWED_HOSTS = ['*.miximum.fr']
SECRET_KEY = '7cqu2zqu3lt)uw@@cx9_i7xo-d688zi3!q(r0zt37$rdqo1=lj'
| <commit_before>from core.settings.base import * # noqa
DEBUG = False
ALLOWED_HOSTS = ['*.miximum.fr']
<commit_msg>Add a dummy secret key to settings
(bad, i know)<commit_after> | from core.settings.base import * # noqa
DEBUG = False
ALLOWED_HOSTS = ['*.miximum.fr']
SECRET_KEY = '7cqu2zqu3lt)uw@@cx9_i7xo-d688zi3!q(r0zt37$rdqo1=lj'
| from core.settings.base import * # noqa
DEBUG = False
ALLOWED_HOSTS = ['*.miximum.fr']
Add a dummy secret key to settings
(bad, i know)from core.settings.base import * # noqa
DEBUG = False
ALLOWED_HOSTS = ['*.miximum.fr']
SECRET_KEY = '7cqu2zqu3lt)uw@@cx9_i7xo-d688zi3!q(r0zt37$rdqo1=lj'
| <commit_before>from core.settings.base import * # noqa
DEBUG = False
ALLOWED_HOSTS = ['*.miximum.fr']
<commit_msg>Add a dummy secret key to settings
(bad, i know)<commit_after>from core.settings.base import * # noqa
DEBUG = False
ALLOWED_HOSTS = ['*.miximum.fr']
SECRET_KEY = '7cqu2zqu3lt)uw@@cx9_i7xo-d688zi3!q(r0zt37$rdqo1=lj'
|
48acf57692a2a0eb03a3d616507cae2d5f619ded | yunity/models/relations.py | yunity/models/relations.py | from django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
participants = ManyToManyField(User)
messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
mappable = ForeignKey(Mappable)
location = ForeignKey(Location)
startTime = DateTimeField(null=True)
endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
@classproperty
def TYPE(cls):
return cls.create_constants('type', 'OWNER')
@classproperty
def STATUS(cls):
return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')
responsible = ForeignKey(User)
mappable = ForeignKey(Mappable)
status = MaxLengthCharField()
date = DateTimeField(null=True)
type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
| from django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
participants = ManyToManyField(User)
messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
mappable = ForeignKey(Mappable)
location = ForeignKey(Location)
startTime = DateTimeField(null=True)
endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
@classproperty
def TYPE(cls):
return cls.create_constants('type', 'OWNER')
@classproperty
def STATUS(cls):
return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')
responsible = ForeignKey(User)
mappable = ForeignKey(Mappable)
status = MaxLengthCharField()
date = DateTimeField(null=True, auto_now=True)
type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
| Add default value for date | Add default value for date
with @NerdyProjects
| Python | agpl-3.0 | yunity/yunity-core,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/foodsaving-backend,yunity/yunity-core | from django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
participants = ManyToManyField(User)
messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
mappable = ForeignKey(Mappable)
location = ForeignKey(Location)
startTime = DateTimeField(null=True)
endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
@classproperty
def TYPE(cls):
return cls.create_constants('type', 'OWNER')
@classproperty
def STATUS(cls):
return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')
responsible = ForeignKey(User)
mappable = ForeignKey(Mappable)
status = MaxLengthCharField()
date = DateTimeField(null=True)
type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
Add default value for date
with @NerdyProjects | from django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
participants = ManyToManyField(User)
messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
mappable = ForeignKey(Mappable)
location = ForeignKey(Location)
startTime = DateTimeField(null=True)
endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
@classproperty
def TYPE(cls):
return cls.create_constants('type', 'OWNER')
@classproperty
def STATUS(cls):
return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')
responsible = ForeignKey(User)
mappable = ForeignKey(Mappable)
status = MaxLengthCharField()
date = DateTimeField(null=True, auto_now=True)
type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
| <commit_before>from django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
participants = ManyToManyField(User)
messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
mappable = ForeignKey(Mappable)
location = ForeignKey(Location)
startTime = DateTimeField(null=True)
endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
@classproperty
def TYPE(cls):
return cls.create_constants('type', 'OWNER')
@classproperty
def STATUS(cls):
return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')
responsible = ForeignKey(User)
mappable = ForeignKey(Mappable)
status = MaxLengthCharField()
date = DateTimeField(null=True)
type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
<commit_msg>Add default value for date
with @NerdyProjects<commit_after> | from django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
participants = ManyToManyField(User)
messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
mappable = ForeignKey(Mappable)
location = ForeignKey(Location)
startTime = DateTimeField(null=True)
endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
@classproperty
def TYPE(cls):
return cls.create_constants('type', 'OWNER')
@classproperty
def STATUS(cls):
return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')
responsible = ForeignKey(User)
mappable = ForeignKey(Mappable)
status = MaxLengthCharField()
date = DateTimeField(null=True, auto_now=True)
type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
| from django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
participants = ManyToManyField(User)
messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
mappable = ForeignKey(Mappable)
location = ForeignKey(Location)
startTime = DateTimeField(null=True)
endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
@classproperty
def TYPE(cls):
return cls.create_constants('type', 'OWNER')
@classproperty
def STATUS(cls):
return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')
responsible = ForeignKey(User)
mappable = ForeignKey(Mappable)
status = MaxLengthCharField()
date = DateTimeField(null=True)
type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
Add default value for date
with @NerdyProjectsfrom django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
participants = ManyToManyField(User)
messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
mappable = ForeignKey(Mappable)
location = ForeignKey(Location)
startTime = DateTimeField(null=True)
endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
@classproperty
def TYPE(cls):
return cls.create_constants('type', 'OWNER')
@classproperty
def STATUS(cls):
return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')
responsible = ForeignKey(User)
mappable = ForeignKey(Mappable)
status = MaxLengthCharField()
date = DateTimeField(null=True, auto_now=True)
type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
| <commit_before>from django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
participants = ManyToManyField(User)
messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
mappable = ForeignKey(Mappable)
location = ForeignKey(Location)
startTime = DateTimeField(null=True)
endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
@classproperty
def TYPE(cls):
return cls.create_constants('type', 'OWNER')
@classproperty
def STATUS(cls):
return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')
responsible = ForeignKey(User)
mappable = ForeignKey(Mappable)
status = MaxLengthCharField()
date = DateTimeField(null=True)
type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
<commit_msg>Add default value for date
with @NerdyProjects<commit_after>from django.db.models import ForeignKey, DateTimeField, ManyToManyField
from yunity.models.entities import User, Location, Mappable, Message
from yunity.models.utils import BaseModel, MaxLengthCharField
from yunity.utils.decorators import classproperty
class Chat(BaseModel):
participants = ManyToManyField(User)
messages = ManyToManyField(Message)
class MappableLocation(BaseModel):
mappable = ForeignKey(Mappable)
location = ForeignKey(Location)
startTime = DateTimeField(null=True)
endTime = DateTimeField(null=True)
class MappableResponsibility(BaseModel):
@classproperty
def TYPE(cls):
return cls.create_constants('type', 'OWNER')
@classproperty
def STATUS(cls):
return cls.create_constants('status', 'GRANTED', 'PENDING', 'REQUESTED')
responsible = ForeignKey(User)
mappable = ForeignKey(Mappable)
status = MaxLengthCharField()
date = DateTimeField(null=True, auto_now=True)
type = MaxLengthCharField()
class UserLocation(BaseModel):
user = ForeignKey(User)
location = ForeignKey(Location)
type = MaxLengthCharField()
class ItemRequest(BaseModel):
requester = ForeignKey(User)
requested = ForeignKey(Mappable)
feedback = MaxLengthCharField(null=True, default=None)
|
074520d7de93db9b83d8d20dd03640146609eeb2 | critical_critiques/submission/forms.py | critical_critiques/submission/forms.py | from django.forms import ModelForm
from .models import Submission
class SubmissionForm(ModelForm):
class Meta:
model = Submission
fields = ('url',)
| from urlparse import urlparse
from django import forms
from .models import Submission
class SubmissionForm(forms.ModelForm):
class Meta:
model = Submission
fields = ('url',)
def clean_url(self):
url = self.cleaned_data['url']
parsed_url = urlparse(url)
if not (parsed_url.scheme == 'https'):
raise forms.ValidationError("Must be a HTTPS URL")
if parsed_url.params or parsed_url.query or parsed_url.fragment:
self._raise_url_error()
domain = parsed_url.netloc
path = parsed_url.path.split('/')
if domain == "github.com":
return self._clean_pull_request_url(url, path)
if domain == "gist.github.com":
return self._clean_gist_url(url, path)
else:
self._raise_url_error()
# Valid Gist: https://gist.github.com/rmeritz/2863145
def _clean_gist_url(self, url, path):
if not (self._is_valid_url_length(3, path)
and self._path_has_id(path, 2)):
self._raise_url_error()
return url
# Valid Pull Request: https://github.com/basho/webmachine/pull/143
def _clean_pull_request_url(self, url, path):
if not (self._is_valid_url_length(5, path) and
self._path_has_id(path, 4) and
(path[3] == 'pull')):
self._raise_url_error()
return url
def _is_valid_url_length(self, length, path):
return (((len(path) == length) or
(len(path) == (length + 1) and path[length] == '')) and
(path[0] == ''))
def _path_has_id(self, path, index):
return path[index].isdigit()
def _raise_url_error(self):
raise forms.ValidationError("Must be a valid Github Pull Request URL")
| Validate that the submission URLs are for GitHub | Validate that the submission URLs are for GitHub
| Python | mit | team-stroller/critical_critiques,team-stroller/critical_critiques,team-stroller/critical_critiques | from django.forms import ModelForm
from .models import Submission
class SubmissionForm(ModelForm):
class Meta:
model = Submission
fields = ('url',)
Validate that the submission URLs are for GitHub | from urlparse import urlparse
from django import forms
from .models import Submission
class SubmissionForm(forms.ModelForm):
class Meta:
model = Submission
fields = ('url',)
def clean_url(self):
url = self.cleaned_data['url']
parsed_url = urlparse(url)
if not (parsed_url.scheme == 'https'):
raise forms.ValidationError("Must be a HTTPS URL")
if parsed_url.params or parsed_url.query or parsed_url.fragment:
self._raise_url_error()
domain = parsed_url.netloc
path = parsed_url.path.split('/')
if domain == "github.com":
return self._clean_pull_request_url(url, path)
if domain == "gist.github.com":
return self._clean_gist_url(url, path)
else:
self._raise_url_error()
# Valid Gist: https://gist.github.com/rmeritz/2863145
def _clean_gist_url(self, url, path):
if not (self._is_valid_url_length(3, path)
and self._path_has_id(path, 2)):
self._raise_url_error()
return url
# Valid Pull Request: https://github.com/basho/webmachine/pull/143
def _clean_pull_request_url(self, url, path):
if not (self._is_valid_url_length(5, path) and
self._path_has_id(path, 4) and
(path[3] == 'pull')):
self._raise_url_error()
return url
def _is_valid_url_length(self, length, path):
return (((len(path) == length) or
(len(path) == (length + 1) and path[length] == '')) and
(path[0] == ''))
def _path_has_id(self, path, index):
return path[index].isdigit()
def _raise_url_error(self):
raise forms.ValidationError("Must be a valid Github Pull Request URL")
| <commit_before>from django.forms import ModelForm
from .models import Submission
class SubmissionForm(ModelForm):
class Meta:
model = Submission
fields = ('url',)
<commit_msg>Validate that the submission URLs are for GitHub<commit_after> | from urlparse import urlparse
from django import forms
from .models import Submission
class SubmissionForm(forms.ModelForm):
class Meta:
model = Submission
fields = ('url',)
def clean_url(self):
url = self.cleaned_data['url']
parsed_url = urlparse(url)
if not (parsed_url.scheme == 'https'):
raise forms.ValidationError("Must be a HTTPS URL")
if parsed_url.params or parsed_url.query or parsed_url.fragment:
self._raise_url_error()
domain = parsed_url.netloc
path = parsed_url.path.split('/')
if domain == "github.com":
return self._clean_pull_request_url(url, path)
if domain == "gist.github.com":
return self._clean_gist_url(url, path)
else:
self._raise_url_error()
# Valid Gist: https://gist.github.com/rmeritz/2863145
def _clean_gist_url(self, url, path):
if not (self._is_valid_url_length(3, path)
and self._path_has_id(path, 2)):
self._raise_url_error()
return url
# Valid Pull Request: https://github.com/basho/webmachine/pull/143
def _clean_pull_request_url(self, url, path):
if not (self._is_valid_url_length(5, path) and
self._path_has_id(path, 4) and
(path[3] == 'pull')):
self._raise_url_error()
return url
def _is_valid_url_length(self, length, path):
return (((len(path) == length) or
(len(path) == (length + 1) and path[length] == '')) and
(path[0] == ''))
def _path_has_id(self, path, index):
return path[index].isdigit()
def _raise_url_error(self):
raise forms.ValidationError("Must be a valid Github Pull Request URL")
| from django.forms import ModelForm
from .models import Submission
class SubmissionForm(ModelForm):
class Meta:
model = Submission
fields = ('url',)
Validate that the submission URLs are for GitHubfrom urlparse import urlparse
from django import forms
from .models import Submission
class SubmissionForm(forms.ModelForm):
class Meta:
model = Submission
fields = ('url',)
def clean_url(self):
url = self.cleaned_data['url']
parsed_url = urlparse(url)
if not (parsed_url.scheme == 'https'):
raise forms.ValidationError("Must be a HTTPS URL")
if parsed_url.params or parsed_url.query or parsed_url.fragment:
self._raise_url_error()
domain = parsed_url.netloc
path = parsed_url.path.split('/')
if domain == "github.com":
return self._clean_pull_request_url(url, path)
if domain == "gist.github.com":
return self._clean_gist_url(url, path)
else:
self._raise_url_error()
# Valid Gist: https://gist.github.com/rmeritz/2863145
def _clean_gist_url(self, url, path):
if not (self._is_valid_url_length(3, path)
and self._path_has_id(path, 2)):
self._raise_url_error()
return url
# Valid Pull Request: https://github.com/basho/webmachine/pull/143
def _clean_pull_request_url(self, url, path):
if not (self._is_valid_url_length(5, path) and
self._path_has_id(path, 4) and
(path[3] == 'pull')):
self._raise_url_error()
return url
def _is_valid_url_length(self, length, path):
return (((len(path) == length) or
(len(path) == (length + 1) and path[length] == '')) and
(path[0] == ''))
def _path_has_id(self, path, index):
return path[index].isdigit()
def _raise_url_error(self):
raise forms.ValidationError("Must be a valid Github Pull Request URL")
| <commit_before>from django.forms import ModelForm
from .models import Submission
class SubmissionForm(ModelForm):
class Meta:
model = Submission
fields = ('url',)
<commit_msg>Validate that the submission URLs are for GitHub<commit_after>from urlparse import urlparse
from django import forms
from .models import Submission
class SubmissionForm(forms.ModelForm):
class Meta:
model = Submission
fields = ('url',)
def clean_url(self):
url = self.cleaned_data['url']
parsed_url = urlparse(url)
if not (parsed_url.scheme == 'https'):
raise forms.ValidationError("Must be a HTTPS URL")
if parsed_url.params or parsed_url.query or parsed_url.fragment:
self._raise_url_error()
domain = parsed_url.netloc
path = parsed_url.path.split('/')
if domain == "github.com":
return self._clean_pull_request_url(url, path)
if domain == "gist.github.com":
return self._clean_gist_url(url, path)
else:
self._raise_url_error()
# Valid Gist: https://gist.github.com/rmeritz/2863145
def _clean_gist_url(self, url, path):
if not (self._is_valid_url_length(3, path)
and self._path_has_id(path, 2)):
self._raise_url_error()
return url
# Valid Pull Request: https://github.com/basho/webmachine/pull/143
def _clean_pull_request_url(self, url, path):
if not (self._is_valid_url_length(5, path) and
self._path_has_id(path, 4) and
(path[3] == 'pull')):
self._raise_url_error()
return url
def _is_valid_url_length(self, length, path):
return (((len(path) == length) or
(len(path) == (length + 1) and path[length] == '')) and
(path[0] == ''))
def _path_has_id(self, path, index):
return path[index].isdigit()
def _raise_url_error(self):
raise forms.ValidationError("Must be a valid Github Pull Request URL")
|
4b8b4a295e5b9f9674d351e60ffac74f85eae0d3 | WebIntegration/MatchData.py | WebIntegration/MatchData.py | '''
Created on Mar 28, 2017
@author: Jack Rausch
'''
import http.client
import json
import ssl
import tbapy
#import requests
URL = "http://www.thebluealliance.com/api/v2/"
HEADER_KEY = "X-TBA-App-Id"
HEADER_VAL = 'frc4215:data-analysis:.1'
#I was thinking that we should turn this into a class so that we can have an instance for each regional
def api_is_up():
conn = http.client.HTTPConnection(URL,80)
conn.request('GET',"/status",{HEADER_KEY : HEADER_VAL})
response = conn.getresponse()
return response.read()
def get_event_teams(event_key):
tba = tbapy.TBA(HEADER_VAL)
jsonified = tba.event_teams(event_key)
teams = []
for team in jsonified:
teams.append(team["key"])
return teams
teams = get_event_teams('2010sc')
print(teams)
#up = api_is_up()
#print(up)
| #!/usr/bin/python3
# -*- encoding: utf8 -*-
'''
Created on Mar 28, 2017
@author: Jack Rausch
'''
import http.client
import json
import ssl
import tbapy
#import requests
URL = "http://www.thebluealliance.com/api/v2/"
HEADER_KEY = "X-TBA-App-Id"
HEADER_VAL = 'frc4215:data-analysis:.1'
#I was thinking that we should turn this into a class so that we can have an instance for each regional
def api_is_up():
conn = http.client.HTTPConnection(URL,80)
conn.request('GET',"/status",{HEADER_KEY : HEADER_VAL})
response = conn.getresponse()
return response.read()
def get_event_teams(event_key):
tba = tbapy.TBA(HEADER_VAL)
jsonified = tba.event_teams(event_key)
teams = []
for team in jsonified:
teams.append(team["key"])
return teams
teams = get_event_teams('2010sc')
print(teams)
#up = api_is_up()
#print(up)
| Add sufficient leading lines to indicate the launching program of this script. | Add sufficient leading lines to indicate the launching program of this script.
| Python | mit | TrinityTrihawks/BluePython | '''
Created on Mar 28, 2017
@author: Jack Rausch
'''
import http.client
import json
import ssl
import tbapy
#import requests
URL = "http://www.thebluealliance.com/api/v2/"
HEADER_KEY = "X-TBA-App-Id"
HEADER_VAL = 'frc4215:data-analysis:.1'
#I was thinking that we should turn this into a class so that we can have an instance for each regional
def api_is_up():
conn = http.client.HTTPConnection(URL,80)
conn.request('GET',"/status",{HEADER_KEY : HEADER_VAL})
response = conn.getresponse()
return response.read()
def get_event_teams(event_key):
tba = tbapy.TBA(HEADER_VAL)
jsonified = tba.event_teams(event_key)
teams = []
for team in jsonified:
teams.append(team["key"])
return teams
teams = get_event_teams('2010sc')
print(teams)
#up = api_is_up()
#print(up)
Add sufficient leading lines to indicate the launching program of this script. | #!/usr/bin/python3
# -*- encoding: utf8 -*-
'''
Created on Mar 28, 2017
@author: Jack Rausch
'''
import http.client
import json
import ssl
import tbapy
#import requests
URL = "http://www.thebluealliance.com/api/v2/"
HEADER_KEY = "X-TBA-App-Id"
HEADER_VAL = 'frc4215:data-analysis:.1'
#I was thinking that we should turn this into a class so that we can have an instance for each regional
def api_is_up():
conn = http.client.HTTPConnection(URL,80)
conn.request('GET',"/status",{HEADER_KEY : HEADER_VAL})
response = conn.getresponse()
return response.read()
def get_event_teams(event_key):
tba = tbapy.TBA(HEADER_VAL)
jsonified = tba.event_teams(event_key)
teams = []
for team in jsonified:
teams.append(team["key"])
return teams
teams = get_event_teams('2010sc')
print(teams)
#up = api_is_up()
#print(up)
| <commit_before>'''
Created on Mar 28, 2017
@author: Jack Rausch
'''
import http.client
import json
import ssl
import tbapy
#import requests
URL = "http://www.thebluealliance.com/api/v2/"
HEADER_KEY = "X-TBA-App-Id"
HEADER_VAL = 'frc4215:data-analysis:.1'
#I was thinking that we should turn this into a class so that we can have an instance for each regional
def api_is_up():
conn = http.client.HTTPConnection(URL,80)
conn.request('GET',"/status",{HEADER_KEY : HEADER_VAL})
response = conn.getresponse()
return response.read()
def get_event_teams(event_key):
tba = tbapy.TBA(HEADER_VAL)
jsonified = tba.event_teams(event_key)
teams = []
for team in jsonified:
teams.append(team["key"])
return teams
teams = get_event_teams('2010sc')
print(teams)
#up = api_is_up()
#print(up)
<commit_msg>Add sufficient leading lines to indicate the launching program of this script.<commit_after> | #!/usr/bin/python3
# -*- encoding: utf8 -*-
'''
Created on Mar 28, 2017
@author: Jack Rausch
'''
import http.client
import json
import ssl
import tbapy
#import requests
URL = "http://www.thebluealliance.com/api/v2/"
HEADER_KEY = "X-TBA-App-Id"
HEADER_VAL = 'frc4215:data-analysis:.1'
#I was thinking that we should turn this into a class so that we can have an instance for each regional
def api_is_up():
conn = http.client.HTTPConnection(URL,80)
conn.request('GET',"/status",{HEADER_KEY : HEADER_VAL})
response = conn.getresponse()
return response.read()
def get_event_teams(event_key):
tba = tbapy.TBA(HEADER_VAL)
jsonified = tba.event_teams(event_key)
teams = []
for team in jsonified:
teams.append(team["key"])
return teams
teams = get_event_teams('2010sc')
print(teams)
#up = api_is_up()
#print(up)
| '''
Created on Mar 28, 2017
@author: Jack Rausch
'''
import http.client
import json
import ssl
import tbapy
#import requests
URL = "http://www.thebluealliance.com/api/v2/"
HEADER_KEY = "X-TBA-App-Id"
HEADER_VAL = 'frc4215:data-analysis:.1'
#I was thinking that we should turn this into a class so that we can have an instance for each regional
def api_is_up():
conn = http.client.HTTPConnection(URL,80)
conn.request('GET',"/status",{HEADER_KEY : HEADER_VAL})
response = conn.getresponse()
return response.read()
def get_event_teams(event_key):
tba = tbapy.TBA(HEADER_VAL)
jsonified = tba.event_teams(event_key)
teams = []
for team in jsonified:
teams.append(team["key"])
return teams
teams = get_event_teams('2010sc')
print(teams)
#up = api_is_up()
#print(up)
Add sufficient leading lines to indicate the launching program of this script.#!/usr/bin/python3
# -*- encoding: utf8 -*-
'''
Created on Mar 28, 2017
@author: Jack Rausch
'''
import http.client
import json
import ssl
import tbapy
#import requests
URL = "http://www.thebluealliance.com/api/v2/"
HEADER_KEY = "X-TBA-App-Id"
HEADER_VAL = 'frc4215:data-analysis:.1'
#I was thinking that we should turn this into a class so that we can have an instance for each regional
def api_is_up():
conn = http.client.HTTPConnection(URL,80)
conn.request('GET',"/status",{HEADER_KEY : HEADER_VAL})
response = conn.getresponse()
return response.read()
def get_event_teams(event_key):
tba = tbapy.TBA(HEADER_VAL)
jsonified = tba.event_teams(event_key)
teams = []
for team in jsonified:
teams.append(team["key"])
return teams
teams = get_event_teams('2010sc')
print(teams)
#up = api_is_up()
#print(up)
| <commit_before>'''
Created on Mar 28, 2017
@author: Jack Rausch
'''
import http.client
import json
import ssl
import tbapy
#import requests
URL = "http://www.thebluealliance.com/api/v2/"
HEADER_KEY = "X-TBA-App-Id"
HEADER_VAL = 'frc4215:data-analysis:.1'
#I was thinking that we should turn this into a class so that we can have an instance for each regional
def api_is_up():
conn = http.client.HTTPConnection(URL,80)
conn.request('GET',"/status",{HEADER_KEY : HEADER_VAL})
response = conn.getresponse()
return response.read()
def get_event_teams(event_key):
tba = tbapy.TBA(HEADER_VAL)
jsonified = tba.event_teams(event_key)
teams = []
for team in jsonified:
teams.append(team["key"])
return teams
teams = get_event_teams('2010sc')
print(teams)
#up = api_is_up()
#print(up)
<commit_msg>Add sufficient leading lines to indicate the launching program of this script.<commit_after>#!/usr/bin/python3
# -*- encoding: utf8 -*-
'''
Created on Mar 28, 2017
@author: Jack Rausch
'''
import http.client
import json
import ssl
import tbapy
#import requests
URL = "http://www.thebluealliance.com/api/v2/"
HEADER_KEY = "X-TBA-App-Id"
HEADER_VAL = 'frc4215:data-analysis:.1'
#I was thinking that we should turn this into a class so that we can have an instance for each regional
def api_is_up():
conn = http.client.HTTPConnection(URL,80)
conn.request('GET',"/status",{HEADER_KEY : HEADER_VAL})
response = conn.getresponse()
return response.read()
def get_event_teams(event_key):
tba = tbapy.TBA(HEADER_VAL)
jsonified = tba.event_teams(event_key)
teams = []
for team in jsonified:
teams.append(team["key"])
return teams
teams = get_event_teams('2010sc')
print(teams)
#up = api_is_up()
#print(up)
|
1b080248b2f90879ac43957eafa0d7adf5590a9a | flattener/tests/test_solidity-flattener.py | flattener/tests/test_solidity-flattener.py | import pytest
from .. import flattener
def test_thingy():
assert 1 == 1
| import pytest
from .. import core
def test_thingy():
assert 1 == 1
| Fix test execution. Doesn't actually do anything. | Fix test execution. Doesn't actually do anything.
| Python | mit | BlockCatIO/solidity-flattener,BlockCatIO/solidity-flattener | import pytest
from .. import flattener
def test_thingy():
assert 1 == 1
Fix test execution. Doesn't actually do anything. | import pytest
from .. import core
def test_thingy():
assert 1 == 1
| <commit_before>import pytest
from .. import flattener
def test_thingy():
assert 1 == 1
<commit_msg>Fix test execution. Doesn't actually do anything.<commit_after> | import pytest
from .. import core
def test_thingy():
assert 1 == 1
| import pytest
from .. import flattener
def test_thingy():
assert 1 == 1
Fix test execution. Doesn't actually do anything.import pytest
from .. import core
def test_thingy():
assert 1 == 1
| <commit_before>import pytest
from .. import flattener
def test_thingy():
assert 1 == 1
<commit_msg>Fix test execution. Doesn't actually do anything.<commit_after>import pytest
from .. import core
def test_thingy():
assert 1 == 1
|
0fcb48d98423b1b1e64beaff30a84910920786a2 | acme/_metadata.py | acme/_metadata.py | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package metadata for acme.
This is kept in a separate module so that it can be imported from setup.py, at
a time when acme's dependencies may not have been installed yet.
"""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '2'
_PATCH_VERSION = '1'
# Example: '0.4.2'
__version__ = '.'.join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION])
| # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package metadata for acme.
This is kept in a separate module so that it can be imported from setup.py, at
a time when acme's dependencies may not have been installed yet.
"""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '2'
_PATCH_VERSION = '2'
# Example: '0.4.2'
__version__ = '.'.join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION])
| Update patch version ahead of a minor release. | Update patch version ahead of a minor release.
PiperOrigin-RevId: 393332838
Change-Id: I70845f5a679a29f6bd9f497896c5820fd2880df2
| Python | apache-2.0 | deepmind/acme,deepmind/acme | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package metadata for acme.
This is kept in a separate module so that it can be imported from setup.py, at
a time when acme's dependencies may not have been installed yet.
"""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '2'
_PATCH_VERSION = '1'
# Example: '0.4.2'
__version__ = '.'.join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION])
Update patch version ahead of a minor release.
PiperOrigin-RevId: 393332838
Change-Id: I70845f5a679a29f6bd9f497896c5820fd2880df2 | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package metadata for acme.
This is kept in a separate module so that it can be imported from setup.py, at
a time when acme's dependencies may not have been installed yet.
"""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '2'
_PATCH_VERSION = '2'
# Example: '0.4.2'
__version__ = '.'.join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION])
| <commit_before># python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package metadata for acme.
This is kept in a separate module so that it can be imported from setup.py, at
a time when acme's dependencies may not have been installed yet.
"""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '2'
_PATCH_VERSION = '1'
# Example: '0.4.2'
__version__ = '.'.join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION])
<commit_msg>Update patch version ahead of a minor release.
PiperOrigin-RevId: 393332838
Change-Id: I70845f5a679a29f6bd9f497896c5820fd2880df2<commit_after> | # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package metadata for acme.
This is kept in a separate module so that it can be imported from setup.py, at
a time when acme's dependencies may not have been installed yet.
"""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '2'
_PATCH_VERSION = '2'
# Example: '0.4.2'
__version__ = '.'.join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION])
| # python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package metadata for acme.
This is kept in a separate module so that it can be imported from setup.py, at
a time when acme's dependencies may not have been installed yet.
"""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '2'
_PATCH_VERSION = '1'
# Example: '0.4.2'
__version__ = '.'.join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION])
Update patch version ahead of a minor release.
PiperOrigin-RevId: 393332838
Change-Id: I70845f5a679a29f6bd9f497896c5820fd2880df2# python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package metadata for acme.
This is kept in a separate module so that it can be imported from setup.py, at
a time when acme's dependencies may not have been installed yet.
"""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '2'
_PATCH_VERSION = '2'
# Example: '0.4.2'
__version__ = '.'.join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION])
| <commit_before># python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package metadata for acme.
This is kept in a separate module so that it can be imported from setup.py, at
a time when acme's dependencies may not have been installed yet.
"""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '2'
_PATCH_VERSION = '1'
# Example: '0.4.2'
__version__ = '.'.join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION])
<commit_msg>Update patch version ahead of a minor release.
PiperOrigin-RevId: 393332838
Change-Id: I70845f5a679a29f6bd9f497896c5820fd2880df2<commit_after># python3
# Copyright 2018 DeepMind Technologies Limited. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Package metadata for acme.
This is kept in a separate module so that it can be imported from setup.py, at
a time when acme's dependencies may not have been installed yet.
"""
# We follow Semantic Versioning (https://semver.org/)
_MAJOR_VERSION = '0'
_MINOR_VERSION = '2'
_PATCH_VERSION = '2'
# Example: '0.4.2'
__version__ = '.'.join([_MAJOR_VERSION, _MINOR_VERSION, _PATCH_VERSION])
|
a600543515c286ed7bcba2bad5a0746588b62f9a | app/views.py | app/views.py | import logging
import hashlib
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
logger.info(req_signature)
exp_signature = 'sha1=' + hashlib.sha1('sha1='+unicode(request.body)+fb_app.app_secret).hexdigest()
logger.info(exp_signature)
req_json = json.loads(request.body)
if req_signature == exp_signature:
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
| import logging
import hashlib
import hmac
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
def _valid_request(app_secret, req_signature, payload):
exp_signature = 'sha1=' + hmac.new(app_secret, msg=unicode(payload), digestmod=hashlib.sha1).hexdigest()
return exp_signature == req_signature
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
if _valid_request(fb_app.app_secret,req_signature,request.body):
req_json = json.loads(request.body)
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
| Modify function that calculate the expected signature | Modify function that calculate the expected signature
| Python | mit | rebearteta/social-ideation,joausaga/social-ideation,rebearteta/social-ideation,joausaga/social-ideation,joausaga/social-ideation,rebearteta/social-ideation,rebearteta/social-ideation,joausaga/social-ideation | import logging
import hashlib
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
logger.info(req_signature)
exp_signature = 'sha1=' + hashlib.sha1('sha1='+unicode(request.body)+fb_app.app_secret).hexdigest()
logger.info(exp_signature)
req_json = json.loads(request.body)
if req_signature == exp_signature:
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
Modify function that calculate the expected signature | import logging
import hashlib
import hmac
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
def _valid_request(app_secret, req_signature, payload):
exp_signature = 'sha1=' + hmac.new(app_secret, msg=unicode(payload), digestmod=hashlib.sha1).hexdigest()
return exp_signature == req_signature
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
if _valid_request(fb_app.app_secret,req_signature,request.body):
req_json = json.loads(request.body)
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
| <commit_before>import logging
import hashlib
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
logger.info(req_signature)
exp_signature = 'sha1=' + hashlib.sha1('sha1='+unicode(request.body)+fb_app.app_secret).hexdigest()
logger.info(exp_signature)
req_json = json.loads(request.body)
if req_signature == exp_signature:
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
<commit_msg>Modify function that calculate the expected signature<commit_after> | import logging
import hashlib
import hmac
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
def _valid_request(app_secret, req_signature, payload):
exp_signature = 'sha1=' + hmac.new(app_secret, msg=unicode(payload), digestmod=hashlib.sha1).hexdigest()
return exp_signature == req_signature
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
if _valid_request(fb_app.app_secret,req_signature,request.body):
req_json = json.loads(request.body)
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
| import logging
import hashlib
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
logger.info(req_signature)
exp_signature = 'sha1=' + hashlib.sha1('sha1='+unicode(request.body)+fb_app.app_secret).hexdigest()
logger.info(exp_signature)
req_json = json.loads(request.body)
if req_signature == exp_signature:
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
Modify function that calculate the expected signatureimport logging
import hashlib
import hmac
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
def _valid_request(app_secret, req_signature, payload):
exp_signature = 'sha1=' + hmac.new(app_secret, msg=unicode(payload), digestmod=hashlib.sha1).hexdigest()
return exp_signature == req_signature
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
if _valid_request(fb_app.app_secret,req_signature,request.body):
req_json = json.loads(request.body)
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
| <commit_before>import logging
import hashlib
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
logger.info(req_signature)
exp_signature = 'sha1=' + hashlib.sha1('sha1='+unicode(request.body)+fb_app.app_secret).hexdigest()
logger.info(exp_signature)
req_json = json.loads(request.body)
if req_signature == exp_signature:
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
<commit_msg>Modify function that calculate the expected signature<commit_after>import logging
import hashlib
import hmac
import json
from django.http import HttpResponse, HttpResponseForbidden
from django.views.decorators.csrf import csrf_exempt
from app.models import SocialNetworkApp
logger = logging.getLogger(__name__)
def _get_facebook_app():
apps = SocialNetworkApp.objects.all()
for app in apps:
if app.connector.name.lower() == 'facebook':
return app
return None
def _valid_request(app_secret, req_signature, payload):
exp_signature = 'sha1=' + hmac.new(app_secret, msg=unicode(payload), digestmod=hashlib.sha1).hexdigest()
return exp_signature == req_signature
@csrf_exempt
def fb_real_time_updates(request):
fb_app = _get_facebook_app()
if fb_app:
if request.method == 'GET':
challenge = request.GET.get('hub.challenge')
token = request.GET.get('hub.verify_token')
if fb_app.token_real_time_updates == token:
logger.info('Token received!')
return HttpResponse(challenge)
elif request.method == 'POST':
logger.info(request.body)
req_signature = request.META.get('HTTP_X_HUB_SIGNATURE')
if _valid_request(fb_app.app_secret,req_signature,request.body):
req_json = json.loads(request.body)
logger.info(req_json)
return HttpResponse()
else:
logger.info('The received signature does not correspond to the expected one!')
return HttpResponseForbidden()
|
2fdf0366819c2d1cbc9ff6987797eca5acd8de5a | config/freetype2/__init__.py | config/freetype2/__init__.py | import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('freetype-config --cflags')
except OSError: pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
| import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('pkg-config freetype2 --cflags')
except OSError:
pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
| Use pkg-config for freetype2 include discovery. | Use pkg-config for freetype2 include discovery.
| Python | lgpl-2.1 | CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang,CauldronDevelopmentLLC/cbang | import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('freetype-config --cflags')
except OSError: pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
Use pkg-config for freetype2 include discovery. | import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('pkg-config freetype2 --cflags')
except OSError:
pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
| <commit_before>import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('freetype-config --cflags')
except OSError: pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
<commit_msg>Use pkg-config for freetype2 include discovery.<commit_after> | import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('pkg-config freetype2 --cflags')
except OSError:
pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
| import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('freetype-config --cflags')
except OSError: pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
Use pkg-config for freetype2 include discovery.import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('pkg-config freetype2 --cflags')
except OSError:
pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
| <commit_before>import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('freetype-config --cflags')
except OSError: pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
<commit_msg>Use pkg-config for freetype2 include discovery.<commit_after>import os
from SCons.Script import *
def configure(conf):
env = conf.env
conf.CBCheckHome('freetype2',
inc_suffix=['/include', '/include/freetype2'])
if not 'FREETYPE2_INCLUDE' in os.environ:
try:
env.ParseConfig('pkg-config freetype2 --cflags')
except OSError:
pass
if env['PLATFORM'] == 'darwin' or int(env.get('cross_osx', 0)):
if not conf.CheckOSXFramework('CoreServices'):
raise Exception('Need CoreServices framework')
conf.CBRequireCHeader('ft2build.h')
conf.CBRequireLib('freetype')
conf.CBConfig('zlib')
conf.CBCheckLib('png')
return True
def generate(env):
env.CBAddConfigTest('freetype2', configure)
env.CBLoadTools('osx zlib')
def exists():
return 1
|
f9b1418c7ea46d3d69fac027f097c5c1ace62f74 | django_cradmin/viewhelpers/__init__.py | django_cradmin/viewhelpers/__init__.py | from . import mixins # noqa
from . import generic # noqa
from . import formbase # noqa
from . import crudbase # noqa
from . import create # noqa
from . import update # noqa
from . import delete # noqa
from . import detail # noqa
from . import listbuilderview # noqa
from . import multiselect # noqa
from . import objecttable # noqa
| from . import mixins # noqa
from . import generic # noqa
from . import formbase # noqa
from . import crudbase # noqa
from . import create # noqa
from . import update # noqa
from . import delete # noqa
from . import detail # noqa
from . import listbuilderview # noqa
| Remove import for multiselect and objecttable. | viewhelpers: Remove import for multiselect and objecttable.
| Python | bsd-3-clause | appressoas/django_cradmin,appressoas/django_cradmin,appressoas/django_cradmin | from . import mixins # noqa
from . import generic # noqa
from . import formbase # noqa
from . import crudbase # noqa
from . import create # noqa
from . import update # noqa
from . import delete # noqa
from . import detail # noqa
from . import listbuilderview # noqa
from . import multiselect # noqa
from . import objecttable # noqa
viewhelpers: Remove import for multiselect and objecttable. | from . import mixins # noqa
from . import generic # noqa
from . import formbase # noqa
from . import crudbase # noqa
from . import create # noqa
from . import update # noqa
from . import delete # noqa
from . import detail # noqa
from . import listbuilderview # noqa
| <commit_before>from . import mixins # noqa
from . import generic # noqa
from . import formbase # noqa
from . import crudbase # noqa
from . import create # noqa
from . import update # noqa
from . import delete # noqa
from . import detail # noqa
from . import listbuilderview # noqa
from . import multiselect # noqa
from . import objecttable # noqa
<commit_msg>viewhelpers: Remove import for multiselect and objecttable.<commit_after> | from . import mixins # noqa
from . import generic # noqa
from . import formbase # noqa
from . import crudbase # noqa
from . import create # noqa
from . import update # noqa
from . import delete # noqa
from . import detail # noqa
from . import listbuilderview # noqa
| from . import mixins # noqa
from . import generic # noqa
from . import formbase # noqa
from . import crudbase # noqa
from . import create # noqa
from . import update # noqa
from . import delete # noqa
from . import detail # noqa
from . import listbuilderview # noqa
from . import multiselect # noqa
from . import objecttable # noqa
viewhelpers: Remove import for multiselect and objecttable.from . import mixins # noqa
from . import generic # noqa
from . import formbase # noqa
from . import crudbase # noqa
from . import create # noqa
from . import update # noqa
from . import delete # noqa
from . import detail # noqa
from . import listbuilderview # noqa
| <commit_before>from . import mixins # noqa
from . import generic # noqa
from . import formbase # noqa
from . import crudbase # noqa
from . import create # noqa
from . import update # noqa
from . import delete # noqa
from . import detail # noqa
from . import listbuilderview # noqa
from . import multiselect # noqa
from . import objecttable # noqa
<commit_msg>viewhelpers: Remove import for multiselect and objecttable.<commit_after>from . import mixins # noqa
from . import generic # noqa
from . import formbase # noqa
from . import crudbase # noqa
from . import create # noqa
from . import update # noqa
from . import delete # noqa
from . import detail # noqa
from . import listbuilderview # noqa
|
9deeb98a05483bfa262db59319c55eec78e900db | tests/test_stock.py | tests/test_stock.py | import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
if __name__ == "__main__":
unittest.main()
| import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
try:
stock.update(datetime(2014, 2, 12), price=-10)
except ValueError:
return
self.fail("ValueError was not raised")
if __name__ == "__main__":
unittest.main()
| Add negative price exception test. | Add negative price exception test.
| Python | mit | bsmukasa/stock_alerter | import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
if __name__ == "__main__":
unittest.main()
Add negative price exception test. | import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
try:
stock.update(datetime(2014, 2, 12), price=-10)
except ValueError:
return
self.fail("ValueError was not raised")
if __name__ == "__main__":
unittest.main()
| <commit_before>import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
if __name__ == "__main__":
unittest.main()
<commit_msg>Add negative price exception test.<commit_after> | import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
try:
stock.update(datetime(2014, 2, 12), price=-10)
except ValueError:
return
self.fail("ValueError was not raised")
if __name__ == "__main__":
unittest.main()
| import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
if __name__ == "__main__":
unittest.main()
Add negative price exception test.import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
try:
stock.update(datetime(2014, 2, 12), price=-10)
except ValueError:
return
self.fail("ValueError was not raised")
if __name__ == "__main__":
unittest.main()
| <commit_before>import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
if __name__ == "__main__":
unittest.main()
<commit_msg>Add negative price exception test.<commit_after>import unittest
from datetime import datetime
from stock import Stock
class StockTest(unittest.TestCase):
def test_new_stock_price(self):
"""A new stock should have a price that is None.
"""
stock = Stock("GOOG")
self.assertIsNone(stock.price)
def test_stock_update(self):
"""An update should set the price on the stock object.
Notes:
We will be using the `datetime` module for the timestamp.
"""
stock = Stock("GOOG")
stock.update(datetime(2014, 2, 12), price=10)
self.assertEqual(10, stock.price)
def test_negative_price_exception(self):
"""An update with a negative price should return a value error.
"""
stock = Stock("GOOG")
try:
stock.update(datetime(2014, 2, 12), price=-10)
except ValueError:
return
self.fail("ValueError was not raised")
if __name__ == "__main__":
unittest.main()
|
de88382029a01c036a3601c40cacc342f5212080 | api/base/views.py | api/base/views.py | from rest_framework.decorators import api_view
from rest_framework.response import Response
from .utils import absolute_reverse
from api.users.serializers import UserSerializer
@api_view(('GET',))
def root(request, format=None):
if request.user and not request.user.is_anonymous():
user = request.user
current_user = UserSerializer(user).data
else:
current_user = None
return Response({
'meta': {
'message': 'Welcome to the OSF API v2',
'current_user': current_user,
},
'links': {
'nodes': absolute_reverse('nodes:node-list'),
'users': absolute_reverse('users:user-list'),
}
})
| from rest_framework.decorators import api_view
from rest_framework.response import Response
from .utils import absolute_reverse
from api.users.serializers import UserSerializer
@api_view(('GET',))
def root(request, format=None):
if request.user and not request.user.is_anonymous():
user = request.user
current_user = UserSerializer(user).data
else:
current_user = None
return Response({
'meta': {
'message': 'Welcome to the OSF API.',
'version': request.version,
'current_user': current_user,
},
'links': {
'nodes': absolute_reverse('nodes:node-list'),
'users': absolute_reverse('users:user-list'),
}
})
| Use request.version instead of hardcoding | Use request.version instead of hardcoding
| Python | apache-2.0 | KAsante95/osf.io,Johnetordoff/osf.io,petermalcolm/osf.io,mluo613/osf.io,saradbowman/osf.io,emetsger/osf.io,jmcarp/osf.io,Ghalko/osf.io,jinluyuan/osf.io,mluke93/osf.io,monikagrabowska/osf.io,hmoco/osf.io,chennan47/osf.io,zachjanicki/osf.io,acshi/osf.io,rdhyee/osf.io,jeffreyliu3230/osf.io,acshi/osf.io,mfraezz/osf.io,sloria/osf.io,billyhunt/osf.io,mluo613/osf.io,brianjgeiger/osf.io,cosenal/osf.io,sbt9uc/osf.io,pattisdr/osf.io,sbt9uc/osf.io,Nesiehr/osf.io,chennan47/osf.io,GageGaskins/osf.io,HarryRybacki/osf.io,baylee-d/osf.io,petermalcolm/osf.io,HalcyonChimera/osf.io,ckc6cz/osf.io,ZobairAlijan/osf.io,abought/osf.io,icereval/osf.io,jeffreyliu3230/osf.io,laurenrevere/osf.io,petermalcolm/osf.io,Johnetordoff/osf.io,njantrania/osf.io,kch8qx/osf.io,zamattiac/osf.io,emetsger/osf.io,TomBaxter/osf.io,brandonPurvis/osf.io,leb2dg/osf.io,caseyrygt/osf.io,hmoco/osf.io,njantrania/osf.io,felliott/osf.io,jinluyuan/osf.io,felliott/osf.io,jnayak1/osf.io,jeffreyliu3230/osf.io,barbour-em/osf.io,KAsante95/osf.io,GageGaskins/osf.io,crcresearch/osf.io,adlius/osf.io,aaxelb/osf.io,jnayak1/osf.io,bdyetton/prettychart,lyndsysimon/osf.io,dplorimer/osf,ticklemepierce/osf.io,alexschiller/osf.io,RomanZWang/osf.io,KAsante95/osf.io,kwierman/osf.io,cldershem/osf.io,cslzchen/osf.io,Nesiehr/osf.io,bdyetton/prettychart,kch8qx/osf.io,amyshi188/osf.io,cwisecarver/osf.io,billyhunt/osf.io,cslzchen/osf.io,chennan47/osf.io,pattisdr/osf.io,amyshi188/osf.io,leb2dg/osf.io,arpitar/osf.io,jinluyuan/osf.io,RomanZWang/osf.io,monikagrabowska/osf.io,HarryRybacki/osf.io,DanielSBrown/osf.io,brandonPurvis/osf.io,sloria/osf.io,monikagrabowska/osf.io,HarryRybacki/osf.io,bdyetton/prettychart,TomBaxter/osf.io,arpitar/osf.io,wearpants/osf.io,dplorimer/osf,kwierman/osf.io,saradbowman/osf.io,fabianvf/osf.io,amyshi188/osf.io,arpitar/osf.io,CenterForOpenScience/osf.io,bdyetton/prettychart,jeffreyliu3230/osf.io,rdhyee/osf.io,brandonPurvis/osf.io,monikagrabowska/osf.io,samchrisinger/osf.io,jolene-esposito/osf.i
o,ZobairAlijan/osf.io,caneruguz/osf.io,TomHeatwole/osf.io,wearpants/osf.io,mfraezz/osf.io,danielneis/osf.io,danielneis/osf.io,felliott/osf.io,mluo613/osf.io,GageGaskins/osf.io,caneruguz/osf.io,CenterForOpenScience/osf.io,pattisdr/osf.io,haoyuchen1992/osf.io,baylee-d/osf.io,haoyuchen1992/osf.io,danielneis/osf.io,abought/osf.io,brandonPurvis/osf.io,cosenal/osf.io,samanehsan/osf.io,zamattiac/osf.io,asanfilippo7/osf.io,jnayak1/osf.io,erinspace/osf.io,MerlinZhang/osf.io,cldershem/osf.io,crcresearch/osf.io,leb2dg/osf.io,lyndsysimon/osf.io,chrisseto/osf.io,MerlinZhang/osf.io,reinaH/osf.io,abought/osf.io,emetsger/osf.io,acshi/osf.io,alexschiller/osf.io,adlius/osf.io,cwisecarver/osf.io,rdhyee/osf.io,reinaH/osf.io,mfraezz/osf.io,samchrisinger/osf.io,doublebits/osf.io,Ghalko/osf.io,RomanZWang/osf.io,leb2dg/osf.io,aaxelb/osf.io,sbt9uc/osf.io,jmcarp/osf.io,caseyrollins/osf.io,samchrisinger/osf.io,cldershem/osf.io,samanehsan/osf.io,jnayak1/osf.io,ticklemepierce/osf.io,njantrania/osf.io,caseyrygt/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,CenterForOpenScience/osf.io,mluke93/osf.io,billyhunt/osf.io,mattclark/osf.io,mattclark/osf.io,KAsante95/osf.io,cslzchen/osf.io,hmoco/osf.io,alexschiller/osf.io,Johnetordoff/osf.io,sbt9uc/osf.io,adlius/osf.io,fabianvf/osf.io,caseyrollins/osf.io,baylee-d/osf.io,kch8qx/osf.io,adlius/osf.io,RomanZWang/osf.io,samchrisinger/osf.io,CenterForOpenScience/osf.io,crcresearch/osf.io,MerlinZhang/osf.io,wearpants/osf.io,mfraezz/osf.io,asanfilippo7/osf.io,mluo613/osf.io,wearpants/osf.io,asanfilippo7/osf.io,brianjgeiger/osf.io,dplorimer/osf,TomHeatwole/osf.io,acshi/osf.io,binoculars/osf.io,petermalcolm/osf.io,zachjanicki/osf.io,acshi/osf.io,samanehsan/osf.io,njantrania/osf.io,lyndsysimon/osf.io,SSJohns/osf.io,zachjanicki/osf.io,rdhyee/osf.io,doublebits/osf.io,MerlinZhang/osf.io,ckc6cz/osf.io,HalcyonChimera/osf.io,caseyrollins/osf.io,mluke93/osf.io,GageGaskins/osf.io,monikagrabowska/osf.io,jolene-esposito/osf.io,fabianvf/osf.io,cosenal/osf.io,ticklemepierce/osf.i
o,alexschiller/osf.io,abought/osf.io,laurenrevere/osf.io,dplorimer/osf,icereval/osf.io,barbour-em/osf.io,kwierman/osf.io,jolene-esposito/osf.io,haoyuchen1992/osf.io,HalcyonChimera/osf.io,jmcarp/osf.io,danielneis/osf.io,caseyrygt/osf.io,emetsger/osf.io,mluo613/osf.io,caneruguz/osf.io,SSJohns/osf.io,KAsante95/osf.io,brandonPurvis/osf.io,felliott/osf.io,cwisecarver/osf.io,doublebits/osf.io,icereval/osf.io,zachjanicki/osf.io,ticklemepierce/osf.io,Johnetordoff/osf.io,chrisseto/osf.io,billyhunt/osf.io,HarryRybacki/osf.io,erinspace/osf.io,lyndsysimon/osf.io,Nesiehr/osf.io,jolene-esposito/osf.io,cosenal/osf.io,kwierman/osf.io,alexschiller/osf.io,TomHeatwole/osf.io,zamattiac/osf.io,asanfilippo7/osf.io,SSJohns/osf.io,ckc6cz/osf.io,cslzchen/osf.io,brianjgeiger/osf.io,billyhunt/osf.io,chrisseto/osf.io,DanielSBrown/osf.io,RomanZWang/osf.io,ZobairAlijan/osf.io,kch8qx/osf.io,amyshi188/osf.io,samanehsan/osf.io,fabianvf/osf.io,binoculars/osf.io,cwisecarver/osf.io,reinaH/osf.io,sloria/osf.io,TomBaxter/osf.io,Nesiehr/osf.io,caseyrygt/osf.io,chrisseto/osf.io,aaxelb/osf.io,doublebits/osf.io,ZobairAlijan/osf.io,HalcyonChimera/osf.io,haoyuchen1992/osf.io,jmcarp/osf.io,mluke93/osf.io,hmoco/osf.io,doublebits/osf.io,mattclark/osf.io,DanielSBrown/osf.io,aaxelb/osf.io,GageGaskins/osf.io,laurenrevere/osf.io,barbour-em/osf.io,ckc6cz/osf.io,brianjgeiger/osf.io,DanielSBrown/osf.io,Ghalko/osf.io,binoculars/osf.io,arpitar/osf.io,SSJohns/osf.io,jinluyuan/osf.io,barbour-em/osf.io,reinaH/osf.io,caneruguz/osf.io,erinspace/osf.io,cldershem/osf.io,Ghalko/osf.io,kch8qx/osf.io | from rest_framework.decorators import api_view
from rest_framework.response import Response
from .utils import absolute_reverse
from api.users.serializers import UserSerializer
@api_view(('GET',))
def root(request, format=None):
if request.user and not request.user.is_anonymous():
user = request.user
current_user = UserSerializer(user).data
else:
current_user = None
return Response({
'meta': {
'message': 'Welcome to the OSF API v2',
'current_user': current_user,
},
'links': {
'nodes': absolute_reverse('nodes:node-list'),
'users': absolute_reverse('users:user-list'),
}
})
Use request.version instead of hardcoding | from rest_framework.decorators import api_view
from rest_framework.response import Response
from .utils import absolute_reverse
from api.users.serializers import UserSerializer
@api_view(('GET',))
def root(request, format=None):
if request.user and not request.user.is_anonymous():
user = request.user
current_user = UserSerializer(user).data
else:
current_user = None
return Response({
'meta': {
'message': 'Welcome to the OSF API.',
'version': request.version,
'current_user': current_user,
},
'links': {
'nodes': absolute_reverse('nodes:node-list'),
'users': absolute_reverse('users:user-list'),
}
})
| <commit_before>from rest_framework.decorators import api_view
from rest_framework.response import Response
from .utils import absolute_reverse
from api.users.serializers import UserSerializer
@api_view(('GET',))
def root(request, format=None):
if request.user and not request.user.is_anonymous():
user = request.user
current_user = UserSerializer(user).data
else:
current_user = None
return Response({
'meta': {
'message': 'Welcome to the OSF API v2',
'current_user': current_user,
},
'links': {
'nodes': absolute_reverse('nodes:node-list'),
'users': absolute_reverse('users:user-list'),
}
})
<commit_msg>Use request.version instead of hardcoding<commit_after> | from rest_framework.decorators import api_view
from rest_framework.response import Response
from .utils import absolute_reverse
from api.users.serializers import UserSerializer
@api_view(('GET',))
def root(request, format=None):
if request.user and not request.user.is_anonymous():
user = request.user
current_user = UserSerializer(user).data
else:
current_user = None
return Response({
'meta': {
'message': 'Welcome to the OSF API.',
'version': request.version,
'current_user': current_user,
},
'links': {
'nodes': absolute_reverse('nodes:node-list'),
'users': absolute_reverse('users:user-list'),
}
})
| from rest_framework.decorators import api_view
from rest_framework.response import Response
from .utils import absolute_reverse
from api.users.serializers import UserSerializer
@api_view(('GET',))
def root(request, format=None):
if request.user and not request.user.is_anonymous():
user = request.user
current_user = UserSerializer(user).data
else:
current_user = None
return Response({
'meta': {
'message': 'Welcome to the OSF API v2',
'current_user': current_user,
},
'links': {
'nodes': absolute_reverse('nodes:node-list'),
'users': absolute_reverse('users:user-list'),
}
})
Use request.version instead of hardcodingfrom rest_framework.decorators import api_view
from rest_framework.response import Response
from .utils import absolute_reverse
from api.users.serializers import UserSerializer
@api_view(('GET',))
def root(request, format=None):
if request.user and not request.user.is_anonymous():
user = request.user
current_user = UserSerializer(user).data
else:
current_user = None
return Response({
'meta': {
'message': 'Welcome to the OSF API.',
'version': request.version,
'current_user': current_user,
},
'links': {
'nodes': absolute_reverse('nodes:node-list'),
'users': absolute_reverse('users:user-list'),
}
})
| <commit_before>from rest_framework.decorators import api_view
from rest_framework.response import Response
from .utils import absolute_reverse
from api.users.serializers import UserSerializer
@api_view(('GET',))
def root(request, format=None):
if request.user and not request.user.is_anonymous():
user = request.user
current_user = UserSerializer(user).data
else:
current_user = None
return Response({
'meta': {
'message': 'Welcome to the OSF API v2',
'current_user': current_user,
},
'links': {
'nodes': absolute_reverse('nodes:node-list'),
'users': absolute_reverse('users:user-list'),
}
})
<commit_msg>Use request.version instead of hardcoding<commit_after>from rest_framework.decorators import api_view
from rest_framework.response import Response
from .utils import absolute_reverse
from api.users.serializers import UserSerializer
@api_view(('GET',))
def root(request, format=None):
if request.user and not request.user.is_anonymous():
user = request.user
current_user = UserSerializer(user).data
else:
current_user = None
return Response({
'meta': {
'message': 'Welcome to the OSF API.',
'version': request.version,
'current_user': current_user,
},
'links': {
'nodes': absolute_reverse('nodes:node-list'),
'users': absolute_reverse('users:user-list'),
}
})
|
e11166ed27b49250ee914c1227b3022ef7659e15 | curator/script.py | curator/script.py | import hashlib
from redis.exceptions import NoScriptError
class LuaScript(object):
def __init__(self, redis, template, cache):
self.redis = redis
self.template = template
self.cache = cache
self.script = self._render_template(template)
def _render_template(self, template):
if template.filename in self.cache:
script = self.cache[template.filename]
else:
script = template.render()
self.cache[template.filename] = script
return script
def _get_script_sha(self):
return hashlib.sha1(self.script).hexdigest()
def __call__(self, *args, **kwargs):
script_sha = self._get_script_sha()
keys = kwargs.get('keys', [])
arguments = kwargs.get('args', [])
num_keys = len(keys)
keys_and_args = keys + arguments
try:
response = self.redis.evalsha(script_sha, num_keys, *keys_and_args)
except NoScriptError:
response = self.redis.eval(self.script, num_keys, *keys_and_args)
return response
| import hashlib
from redis.exceptions import ResponseError
class LuaScript(object):
def __init__(self, redis, template, cache):
self.redis = redis
self.template = template
self.cache = cache
self.script = self._render_template(template)
def _render_template(self, template):
if template.filename in self.cache:
script = self.cache[template.filename]
else:
script = template.render()
self.cache[template.filename] = script
return script
def _get_script_sha(self):
return hashlib.sha1(self.script).hexdigest()
def __call__(self, *args, **kwargs):
script_sha = self._get_script_sha()
keys = kwargs.get('keys', [])
arguments = kwargs.get('args', [])
num_keys = len(keys)
keys_and_args = keys + arguments
try:
response = self.redis.evalsha(script_sha, num_keys, *keys_and_args)
except ResponseError:
response = self.redis.eval(self.script, num_keys, *keys_and_args)
return response
| Use ResponseError instead of NoScriptError to be compatible with earlier versions of the redis client | Use ResponseError instead of NoScriptError to be compatible with earlier versions of the redis client
| Python | mit | eventbrite/curator | import hashlib
from redis.exceptions import NoScriptError
class LuaScript(object):
def __init__(self, redis, template, cache):
self.redis = redis
self.template = template
self.cache = cache
self.script = self._render_template(template)
def _render_template(self, template):
if template.filename in self.cache:
script = self.cache[template.filename]
else:
script = template.render()
self.cache[template.filename] = script
return script
def _get_script_sha(self):
return hashlib.sha1(self.script).hexdigest()
def __call__(self, *args, **kwargs):
script_sha = self._get_script_sha()
keys = kwargs.get('keys', [])
arguments = kwargs.get('args', [])
num_keys = len(keys)
keys_and_args = keys + arguments
try:
response = self.redis.evalsha(script_sha, num_keys, *keys_and_args)
except NoScriptError:
response = self.redis.eval(self.script, num_keys, *keys_and_args)
return response
Use ResponseError instead of NoScriptError to be compatible with earlier versions of the redis client | import hashlib
from redis.exceptions import ResponseError
class LuaScript(object):
def __init__(self, redis, template, cache):
self.redis = redis
self.template = template
self.cache = cache
self.script = self._render_template(template)
def _render_template(self, template):
if template.filename in self.cache:
script = self.cache[template.filename]
else:
script = template.render()
self.cache[template.filename] = script
return script
def _get_script_sha(self):
return hashlib.sha1(self.script).hexdigest()
def __call__(self, *args, **kwargs):
script_sha = self._get_script_sha()
keys = kwargs.get('keys', [])
arguments = kwargs.get('args', [])
num_keys = len(keys)
keys_and_args = keys + arguments
try:
response = self.redis.evalsha(script_sha, num_keys, *keys_and_args)
except ResponseError:
response = self.redis.eval(self.script, num_keys, *keys_and_args)
return response
| <commit_before>import hashlib
from redis.exceptions import NoScriptError
class LuaScript(object):
def __init__(self, redis, template, cache):
self.redis = redis
self.template = template
self.cache = cache
self.script = self._render_template(template)
def _render_template(self, template):
if template.filename in self.cache:
script = self.cache[template.filename]
else:
script = template.render()
self.cache[template.filename] = script
return script
def _get_script_sha(self):
return hashlib.sha1(self.script).hexdigest()
def __call__(self, *args, **kwargs):
script_sha = self._get_script_sha()
keys = kwargs.get('keys', [])
arguments = kwargs.get('args', [])
num_keys = len(keys)
keys_and_args = keys + arguments
try:
response = self.redis.evalsha(script_sha, num_keys, *keys_and_args)
except NoScriptError:
response = self.redis.eval(self.script, num_keys, *keys_and_args)
return response
<commit_msg>Use ResponseError instead of NoScriptError to be compatible with earlier versions of the redis client<commit_after> | import hashlib
from redis.exceptions import ResponseError
class LuaScript(object):
def __init__(self, redis, template, cache):
self.redis = redis
self.template = template
self.cache = cache
self.script = self._render_template(template)
def _render_template(self, template):
if template.filename in self.cache:
script = self.cache[template.filename]
else:
script = template.render()
self.cache[template.filename] = script
return script
def _get_script_sha(self):
return hashlib.sha1(self.script).hexdigest()
def __call__(self, *args, **kwargs):
script_sha = self._get_script_sha()
keys = kwargs.get('keys', [])
arguments = kwargs.get('args', [])
num_keys = len(keys)
keys_and_args = keys + arguments
try:
response = self.redis.evalsha(script_sha, num_keys, *keys_and_args)
except ResponseError:
response = self.redis.eval(self.script, num_keys, *keys_and_args)
return response
| import hashlib
from redis.exceptions import NoScriptError
class LuaScript(object):
def __init__(self, redis, template, cache):
self.redis = redis
self.template = template
self.cache = cache
self.script = self._render_template(template)
def _render_template(self, template):
if template.filename in self.cache:
script = self.cache[template.filename]
else:
script = template.render()
self.cache[template.filename] = script
return script
def _get_script_sha(self):
return hashlib.sha1(self.script).hexdigest()
def __call__(self, *args, **kwargs):
script_sha = self._get_script_sha()
keys = kwargs.get('keys', [])
arguments = kwargs.get('args', [])
num_keys = len(keys)
keys_and_args = keys + arguments
try:
response = self.redis.evalsha(script_sha, num_keys, *keys_and_args)
except NoScriptError:
response = self.redis.eval(self.script, num_keys, *keys_and_args)
return response
Use ResponseError instead of NoScriptError to be compatible with earlier versions of the redis clientimport hashlib
from redis.exceptions import ResponseError
class LuaScript(object):
def __init__(self, redis, template, cache):
self.redis = redis
self.template = template
self.cache = cache
self.script = self._render_template(template)
def _render_template(self, template):
if template.filename in self.cache:
script = self.cache[template.filename]
else:
script = template.render()
self.cache[template.filename] = script
return script
def _get_script_sha(self):
return hashlib.sha1(self.script).hexdigest()
def __call__(self, *args, **kwargs):
script_sha = self._get_script_sha()
keys = kwargs.get('keys', [])
arguments = kwargs.get('args', [])
num_keys = len(keys)
keys_and_args = keys + arguments
try:
response = self.redis.evalsha(script_sha, num_keys, *keys_and_args)
except ResponseError:
response = self.redis.eval(self.script, num_keys, *keys_and_args)
return response
| <commit_before>import hashlib
from redis.exceptions import NoScriptError
class LuaScript(object):
def __init__(self, redis, template, cache):
self.redis = redis
self.template = template
self.cache = cache
self.script = self._render_template(template)
def _render_template(self, template):
if template.filename in self.cache:
script = self.cache[template.filename]
else:
script = template.render()
self.cache[template.filename] = script
return script
def _get_script_sha(self):
return hashlib.sha1(self.script).hexdigest()
def __call__(self, *args, **kwargs):
script_sha = self._get_script_sha()
keys = kwargs.get('keys', [])
arguments = kwargs.get('args', [])
num_keys = len(keys)
keys_and_args = keys + arguments
try:
response = self.redis.evalsha(script_sha, num_keys, *keys_and_args)
except NoScriptError:
response = self.redis.eval(self.script, num_keys, *keys_and_args)
return response
<commit_msg>Use ResponseError instead of NoScriptError to be compatible with earlier versions of the redis client<commit_after>import hashlib
from redis.exceptions import ResponseError
class LuaScript(object):
def __init__(self, redis, template, cache):
self.redis = redis
self.template = template
self.cache = cache
self.script = self._render_template(template)
def _render_template(self, template):
if template.filename in self.cache:
script = self.cache[template.filename]
else:
script = template.render()
self.cache[template.filename] = script
return script
def _get_script_sha(self):
return hashlib.sha1(self.script).hexdigest()
def __call__(self, *args, **kwargs):
script_sha = self._get_script_sha()
keys = kwargs.get('keys', [])
arguments = kwargs.get('args', [])
num_keys = len(keys)
keys_and_args = keys + arguments
try:
response = self.redis.evalsha(script_sha, num_keys, *keys_and_args)
except ResponseError:
response = self.redis.eval(self.script, num_keys, *keys_and_args)
return response
|
4b459a367c67b40561b170b86c2df8882880d2be | test/test_examples.py | test/test_examples.py | # coding=utf-8
import os
import pytest
from os.path import join, dirname
from glob import glob
# tests to exclude
excludes = ['authorization_v1.py', 'alchemy_data_news_v1.py', 'alchemy_language_v1.py']
# examples path. /examples
examples_path = join(dirname(__file__), '../', 'examples', '*.py')
# environment variables
try:
from dotenv import load_dotenv
except:
print('warning: dotenv module could not be imported')
try:
dotenv_path = join(dirname(__file__), '../', '.env')
load_dotenv(dotenv_path)
except:
print('warning: no .env file loaded')
@pytest.mark.skipif(os.getenv('VCAP_SERVICES') is None, reason='requires VCAP_SERVICES')
def test_examples():
examples = glob(examples_path)
for example in examples:
name = example.split('/')[-1]
# exclude some tests cases like authorization
if name in excludes:
continue
try:
exec(open(example).read(), globals())
except Exception as e:
assert False, 'example in file ' + name + ' failed with error: ' + str(e)
| # coding=utf-8
import os
import pytest
import json as json_import
from os.path import join, dirname
from glob import glob
# tests to exclude
excludes = ['authorization_v1.py', 'alchemy_data_news_v1.py', 'alchemy_language_v1.py', 'discovery_v1.ipynb', '__init__.py']
# examples path. /examples
examples_path = join(dirname(__file__), '../', 'examples', '*.py')
# environment variables
try:
from dotenv import load_dotenv
except:
print('warning: dotenv module could not be imported')
try:
dotenv_path = join(dirname(__file__), '../', '.env')
load_dotenv(dotenv_path)
except:
print('warning: no .env file loaded')
@pytest.mark.skipif(os.getenv('VCAP_SERVICES') is None, reason='requires VCAP_SERVICES')
def test_examples():
vcapServices = json_import.loads(os.getenv('VCAP_SERVICES'))
examples = glob(examples_path)
for example in examples:
name = example.split('/')[-1]
# exclude some tests cases like authorization
if name in excludes:
continue
# exclude tests if there are no credentials for that service
serviceName = name[:-6] if not name.startswith('visual_recognition') else 'watson_vision_combined'
if serviceName not in vcapServices:
print('%s does not have credentials in VCAP_SERVICES', serviceName)
continue
try:
exec(open(example).read(), globals())
except Exception as e:
assert False, 'example in file ' + name + ' failed with error: ' + str(e)
| Exclude tests if there are no credentials in VCAP_SERVICES | Exclude tests if there are no credentials in VCAP_SERVICES
| Python | apache-2.0 | ehdsouza/python-sdk,ehdsouza/python-sdk,ehdsouza/python-sdk | # coding=utf-8
import os
import pytest
from os.path import join, dirname
from glob import glob
# tests to exclude
excludes = ['authorization_v1.py', 'alchemy_data_news_v1.py', 'alchemy_language_v1.py']
# examples path. /examples
examples_path = join(dirname(__file__), '../', 'examples', '*.py')
# environment variables
try:
from dotenv import load_dotenv
except:
print('warning: dotenv module could not be imported')
try:
dotenv_path = join(dirname(__file__), '../', '.env')
load_dotenv(dotenv_path)
except:
print('warning: no .env file loaded')
@pytest.mark.skipif(os.getenv('VCAP_SERVICES') is None, reason='requires VCAP_SERVICES')
def test_examples():
examples = glob(examples_path)
for example in examples:
name = example.split('/')[-1]
# exclude some tests cases like authorization
if name in excludes:
continue
try:
exec(open(example).read(), globals())
except Exception as e:
assert False, 'example in file ' + name + ' failed with error: ' + str(e)
Exclude tests if there are no credentials in VCAP_SERVICES | # coding=utf-8
import os
import pytest
import json as json_import
from os.path import join, dirname
from glob import glob
# tests to exclude
excludes = ['authorization_v1.py', 'alchemy_data_news_v1.py', 'alchemy_language_v1.py', 'discovery_v1.ipynb', '__init__.py']
# examples path. /examples
examples_path = join(dirname(__file__), '../', 'examples', '*.py')
# environment variables
try:
from dotenv import load_dotenv
except:
print('warning: dotenv module could not be imported')
try:
dotenv_path = join(dirname(__file__), '../', '.env')
load_dotenv(dotenv_path)
except:
print('warning: no .env file loaded')
@pytest.mark.skipif(os.getenv('VCAP_SERVICES') is None, reason='requires VCAP_SERVICES')
def test_examples():
vcapServices = json_import.loads(os.getenv('VCAP_SERVICES'))
examples = glob(examples_path)
for example in examples:
name = example.split('/')[-1]
# exclude some tests cases like authorization
if name in excludes:
continue
# exclude tests if there are no credentials for that service
serviceName = name[:-6] if not name.startswith('visual_recognition') else 'watson_vision_combined'
if serviceName not in vcapServices:
print('%s does not have credentials in VCAP_SERVICES', serviceName)
continue
try:
exec(open(example).read(), globals())
except Exception as e:
assert False, 'example in file ' + name + ' failed with error: ' + str(e)
| <commit_before># coding=utf-8
import os
import pytest
from os.path import join, dirname
from glob import glob
# tests to exclude
excludes = ['authorization_v1.py', 'alchemy_data_news_v1.py', 'alchemy_language_v1.py']
# examples path. /examples
examples_path = join(dirname(__file__), '../', 'examples', '*.py')
# environment variables
try:
from dotenv import load_dotenv
except:
print('warning: dotenv module could not be imported')
try:
dotenv_path = join(dirname(__file__), '../', '.env')
load_dotenv(dotenv_path)
except:
print('warning: no .env file loaded')
@pytest.mark.skipif(os.getenv('VCAP_SERVICES') is None, reason='requires VCAP_SERVICES')
def test_examples():
examples = glob(examples_path)
for example in examples:
name = example.split('/')[-1]
# exclude some tests cases like authorization
if name in excludes:
continue
try:
exec(open(example).read(), globals())
except Exception as e:
assert False, 'example in file ' + name + ' failed with error: ' + str(e)
<commit_msg>Exclude tests if there are no credentials in VCAP_SERVICES<commit_after> | # coding=utf-8
import os
import pytest
import json as json_import
from os.path import join, dirname
from glob import glob
# tests to exclude
excludes = ['authorization_v1.py', 'alchemy_data_news_v1.py', 'alchemy_language_v1.py', 'discovery_v1.ipynb', '__init__.py']
# examples path. /examples
examples_path = join(dirname(__file__), '../', 'examples', '*.py')
# environment variables
try:
from dotenv import load_dotenv
except:
print('warning: dotenv module could not be imported')
try:
dotenv_path = join(dirname(__file__), '../', '.env')
load_dotenv(dotenv_path)
except:
print('warning: no .env file loaded')
@pytest.mark.skipif(os.getenv('VCAP_SERVICES') is None, reason='requires VCAP_SERVICES')
def test_examples():
vcapServices = json_import.loads(os.getenv('VCAP_SERVICES'))
examples = glob(examples_path)
for example in examples:
name = example.split('/')[-1]
# exclude some tests cases like authorization
if name in excludes:
continue
# exclude tests if there are no credentials for that service
serviceName = name[:-6] if not name.startswith('visual_recognition') else 'watson_vision_combined'
if serviceName not in vcapServices:
print('%s does not have credentials in VCAP_SERVICES', serviceName)
continue
try:
exec(open(example).read(), globals())
except Exception as e:
assert False, 'example in file ' + name + ' failed with error: ' + str(e)
| # coding=utf-8
import os
import pytest
from os.path import join, dirname
from glob import glob
# tests to exclude
excludes = ['authorization_v1.py', 'alchemy_data_news_v1.py', 'alchemy_language_v1.py']
# examples path. /examples
examples_path = join(dirname(__file__), '../', 'examples', '*.py')
# environment variables
try:
from dotenv import load_dotenv
except:
print('warning: dotenv module could not be imported')
try:
dotenv_path = join(dirname(__file__), '../', '.env')
load_dotenv(dotenv_path)
except:
print('warning: no .env file loaded')
@pytest.mark.skipif(os.getenv('VCAP_SERVICES') is None, reason='requires VCAP_SERVICES')
def test_examples():
examples = glob(examples_path)
for example in examples:
name = example.split('/')[-1]
# exclude some tests cases like authorization
if name in excludes:
continue
try:
exec(open(example).read(), globals())
except Exception as e:
assert False, 'example in file ' + name + ' failed with error: ' + str(e)
Exclude tests if there are no credentials in VCAP_SERVICES# coding=utf-8
import os
import pytest
import json as json_import
from os.path import join, dirname
from glob import glob
# tests to exclude
excludes = ['authorization_v1.py', 'alchemy_data_news_v1.py', 'alchemy_language_v1.py', 'discovery_v1.ipynb', '__init__.py']
# examples path. /examples
examples_path = join(dirname(__file__), '../', 'examples', '*.py')
# environment variables
try:
from dotenv import load_dotenv
except:
print('warning: dotenv module could not be imported')
try:
dotenv_path = join(dirname(__file__), '../', '.env')
load_dotenv(dotenv_path)
except:
print('warning: no .env file loaded')
@pytest.mark.skipif(os.getenv('VCAP_SERVICES') is None, reason='requires VCAP_SERVICES')
def test_examples():
vcapServices = json_import.loads(os.getenv('VCAP_SERVICES'))
examples = glob(examples_path)
for example in examples:
name = example.split('/')[-1]
# exclude some tests cases like authorization
if name in excludes:
continue
# exclude tests if there are no credentials for that service
serviceName = name[:-6] if not name.startswith('visual_recognition') else 'watson_vision_combined'
if serviceName not in vcapServices:
print('%s does not have credentials in VCAP_SERVICES', serviceName)
continue
try:
exec(open(example).read(), globals())
except Exception as e:
assert False, 'example in file ' + name + ' failed with error: ' + str(e)
| <commit_before># coding=utf-8
import os
import pytest
from os.path import join, dirname
from glob import glob
# tests to exclude
excludes = ['authorization_v1.py', 'alchemy_data_news_v1.py', 'alchemy_language_v1.py']
# examples path. /examples
examples_path = join(dirname(__file__), '../', 'examples', '*.py')
# environment variables
try:
from dotenv import load_dotenv
except:
print('warning: dotenv module could not be imported')
try:
dotenv_path = join(dirname(__file__), '../', '.env')
load_dotenv(dotenv_path)
except:
print('warning: no .env file loaded')
@pytest.mark.skipif(os.getenv('VCAP_SERVICES') is None, reason='requires VCAP_SERVICES')
def test_examples():
examples = glob(examples_path)
for example in examples:
name = example.split('/')[-1]
# exclude some tests cases like authorization
if name in excludes:
continue
try:
exec(open(example).read(), globals())
except Exception as e:
assert False, 'example in file ' + name + ' failed with error: ' + str(e)
<commit_msg>Exclude tests if there are no credentials in VCAP_SERVICES<commit_after># coding=utf-8
import os
import pytest
import json as json_import
from os.path import join, dirname
from glob import glob
# tests to exclude
excludes = ['authorization_v1.py', 'alchemy_data_news_v1.py', 'alchemy_language_v1.py', 'discovery_v1.ipynb', '__init__.py']
# examples path. /examples
examples_path = join(dirname(__file__), '../', 'examples', '*.py')
# environment variables
try:
from dotenv import load_dotenv
except:
print('warning: dotenv module could not be imported')
try:
dotenv_path = join(dirname(__file__), '../', '.env')
load_dotenv(dotenv_path)
except:
print('warning: no .env file loaded')
@pytest.mark.skipif(os.getenv('VCAP_SERVICES') is None, reason='requires VCAP_SERVICES')
def test_examples():
vcapServices = json_import.loads(os.getenv('VCAP_SERVICES'))
examples = glob(examples_path)
for example in examples:
name = example.split('/')[-1]
# exclude some tests cases like authorization
if name in excludes:
continue
# exclude tests if there are no credentials for that service
serviceName = name[:-6] if not name.startswith('visual_recognition') else 'watson_vision_combined'
if serviceName not in vcapServices:
print('%s does not have credentials in VCAP_SERVICES', serviceName)
continue
try:
exec(open(example).read(), globals())
except Exception as e:
assert False, 'example in file ' + name + ' failed with error: ' + str(e)
|
c88171bef919dc02bf5796b1bc9318d60a680a8f | test/test_scraping.py | test/test_scraping.py | from datetime import datetime
import sys
import unittest
import btceapi
class TestScraping(unittest.TestCase):
def test_scrape_main_page(self):
mainPage = btceapi.scrapeMainPage()
for message in mainPage.messages:
msgId, user, time, text = message
self.assertIs(type(time), datetime)
if sys.version_info[0] == 2:
# python2.x
self.assertIn(type(msgId), (str, unicode))
self.assertIn(type(user), (str, unicode))
self.assertIn(type(text), (str, unicode))
else:
# python3.x
self.assertIs(type(msgId), str)
self.assertIs(type(user), str)
self.assertIs(type(text), str)
if __name__ == '__main__':
unittest.main()
| from datetime import datetime
import sys
import unittest
import btceapi
class TestScraping(unittest.TestCase):
def test_scrape_main_page(self):
mainPage = btceapi.scrapeMainPage()
for message in mainPage.messages:
msgId, user, time, text = message
assert type(time) is datetime
if sys.version_info[0] == 2:
# python2.x
self.assertIn(type(msgId), (str, unicode))
self.assertIn(type(user), (str, unicode))
self.assertIn(type(text), (str, unicode))
else:
# python3.x
self.assertIs(type(msgId), str)
self.assertIs(type(user), str)
self.assertIs(type(text), str)
if __name__ == '__main__':
unittest.main()
| Fix for assertIs method not being present in Python 2.6. | Fix for assertIs method not being present in Python 2.6.
| Python | mit | alanmcintyre/btce-api,CodeReclaimers/btce-api,lromanov/tidex-api | from datetime import datetime
import sys
import unittest
import btceapi
class TestScraping(unittest.TestCase):
def test_scrape_main_page(self):
mainPage = btceapi.scrapeMainPage()
for message in mainPage.messages:
msgId, user, time, text = message
self.assertIs(type(time), datetime)
if sys.version_info[0] == 2:
# python2.x
self.assertIn(type(msgId), (str, unicode))
self.assertIn(type(user), (str, unicode))
self.assertIn(type(text), (str, unicode))
else:
# python3.x
self.assertIs(type(msgId), str)
self.assertIs(type(user), str)
self.assertIs(type(text), str)
if __name__ == '__main__':
unittest.main()
Fix for assertIs method not being present in Python 2.6. | from datetime import datetime
import sys
import unittest
import btceapi
class TestScraping(unittest.TestCase):
def test_scrape_main_page(self):
mainPage = btceapi.scrapeMainPage()
for message in mainPage.messages:
msgId, user, time, text = message
assert type(time) is datetime
if sys.version_info[0] == 2:
# python2.x
self.assertIn(type(msgId), (str, unicode))
self.assertIn(type(user), (str, unicode))
self.assertIn(type(text), (str, unicode))
else:
# python3.x
self.assertIs(type(msgId), str)
self.assertIs(type(user), str)
self.assertIs(type(text), str)
if __name__ == '__main__':
unittest.main()
| <commit_before>from datetime import datetime
import sys
import unittest
import btceapi
class TestScraping(unittest.TestCase):
def test_scrape_main_page(self):
mainPage = btceapi.scrapeMainPage()
for message in mainPage.messages:
msgId, user, time, text = message
self.assertIs(type(time), datetime)
if sys.version_info[0] == 2:
# python2.x
self.assertIn(type(msgId), (str, unicode))
self.assertIn(type(user), (str, unicode))
self.assertIn(type(text), (str, unicode))
else:
# python3.x
self.assertIs(type(msgId), str)
self.assertIs(type(user), str)
self.assertIs(type(text), str)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix for assertIs method not being present in Python 2.6.<commit_after> | from datetime import datetime
import sys
import unittest
import btceapi
class TestScraping(unittest.TestCase):
def test_scrape_main_page(self):
mainPage = btceapi.scrapeMainPage()
for message in mainPage.messages:
msgId, user, time, text = message
assert type(time) is datetime
if sys.version_info[0] == 2:
# python2.x
self.assertIn(type(msgId), (str, unicode))
self.assertIn(type(user), (str, unicode))
self.assertIn(type(text), (str, unicode))
else:
# python3.x
self.assertIs(type(msgId), str)
self.assertIs(type(user), str)
self.assertIs(type(text), str)
if __name__ == '__main__':
unittest.main()
| from datetime import datetime
import sys
import unittest
import btceapi
class TestScraping(unittest.TestCase):
def test_scrape_main_page(self):
mainPage = btceapi.scrapeMainPage()
for message in mainPage.messages:
msgId, user, time, text = message
self.assertIs(type(time), datetime)
if sys.version_info[0] == 2:
# python2.x
self.assertIn(type(msgId), (str, unicode))
self.assertIn(type(user), (str, unicode))
self.assertIn(type(text), (str, unicode))
else:
# python3.x
self.assertIs(type(msgId), str)
self.assertIs(type(user), str)
self.assertIs(type(text), str)
if __name__ == '__main__':
unittest.main()
Fix for assertIs method not being present in Python 2.6.from datetime import datetime
import sys
import unittest
import btceapi
class TestScraping(unittest.TestCase):
def test_scrape_main_page(self):
mainPage = btceapi.scrapeMainPage()
for message in mainPage.messages:
msgId, user, time, text = message
assert type(time) is datetime
if sys.version_info[0] == 2:
# python2.x
self.assertIn(type(msgId), (str, unicode))
self.assertIn(type(user), (str, unicode))
self.assertIn(type(text), (str, unicode))
else:
# python3.x
self.assertIs(type(msgId), str)
self.assertIs(type(user), str)
self.assertIs(type(text), str)
if __name__ == '__main__':
unittest.main()
| <commit_before>from datetime import datetime
import sys
import unittest
import btceapi
class TestScraping(unittest.TestCase):
def test_scrape_main_page(self):
mainPage = btceapi.scrapeMainPage()
for message in mainPage.messages:
msgId, user, time, text = message
self.assertIs(type(time), datetime)
if sys.version_info[0] == 2:
# python2.x
self.assertIn(type(msgId), (str, unicode))
self.assertIn(type(user), (str, unicode))
self.assertIn(type(text), (str, unicode))
else:
# python3.x
self.assertIs(type(msgId), str)
self.assertIs(type(user), str)
self.assertIs(type(text), str)
if __name__ == '__main__':
unittest.main()
<commit_msg>Fix for assertIs method not being present in Python 2.6.<commit_after>from datetime import datetime
import sys
import unittest
import btceapi
class TestScraping(unittest.TestCase):
def test_scrape_main_page(self):
mainPage = btceapi.scrapeMainPage()
for message in mainPage.messages:
msgId, user, time, text = message
assert type(time) is datetime
if sys.version_info[0] == 2:
# python2.x
self.assertIn(type(msgId), (str, unicode))
self.assertIn(type(user), (str, unicode))
self.assertIn(type(text), (str, unicode))
else:
# python3.x
self.assertIs(type(msgId), str)
self.assertIs(type(user), str)
self.assertIs(type(text), str)
if __name__ == '__main__':
unittest.main()
|
61d71b27111f255c3dad3f974e6c7e0ace0c2ce9 | karld/iter_utils.py | karld/iter_utils.py | from functools import partial
from itertools import imap
from itertools import islice
from itertools import izip_longest
from itertools import ifilter
from operator import itemgetter
from operator import is_not
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return izip_longest(fillvalue=fillvalue, *args)
fo = object()
is_not_fo = partial(is_not, fo)
def batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield filter(is_not_fo, batch)
def i_batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield ifilter(is_not_fo, batch)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
| from functools import partial
from itertools import imap
from itertools import islice
from operator import itemgetter
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
| Remove grouper and grouper based batchers | Remove grouper and grouper based batchers
I prefer to not use the filter fill value method to
batch. I don't like the need to allocate room for
the fill value object.
| Python | apache-2.0 | johnwlockwood/stream_tap,johnwlockwood/karl_data,johnwlockwood/iter_karld_tools,johnwlockwood/stream_tap | from functools import partial
from itertools import imap
from itertools import islice
from itertools import izip_longest
from itertools import ifilter
from operator import itemgetter
from operator import is_not
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return izip_longest(fillvalue=fillvalue, *args)
fo = object()
is_not_fo = partial(is_not, fo)
def batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield filter(is_not_fo, batch)
def i_batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield ifilter(is_not_fo, batch)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
Remove grouper and grouper based batchers
I prefer to not use the filter fill value method to
batch. I don't like the need to allocate room for
the fill value object. | from functools import partial
from itertools import imap
from itertools import islice
from operator import itemgetter
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
| <commit_before>from functools import partial
from itertools import imap
from itertools import islice
from itertools import izip_longest
from itertools import ifilter
from operator import itemgetter
from operator import is_not
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return izip_longest(fillvalue=fillvalue, *args)
fo = object()
is_not_fo = partial(is_not, fo)
def batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield filter(is_not_fo, batch)
def i_batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield ifilter(is_not_fo, batch)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
<commit_msg>Remove grouper and grouper based batchers
I prefer to not use the filter fill value method to
batch. I don't like the need to allocate room for
the fill value object.<commit_after> | from functools import partial
from itertools import imap
from itertools import islice
from operator import itemgetter
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
| from functools import partial
from itertools import imap
from itertools import islice
from itertools import izip_longest
from itertools import ifilter
from operator import itemgetter
from operator import is_not
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return izip_longest(fillvalue=fillvalue, *args)
fo = object()
is_not_fo = partial(is_not, fo)
def batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield filter(is_not_fo, batch)
def i_batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield ifilter(is_not_fo, batch)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
Remove grouper and grouper based batchers
I prefer to not use the filter fill value method to
batch. I don't like the need to allocate room for
the fill value object.from functools import partial
from itertools import imap
from itertools import islice
from operator import itemgetter
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
| <commit_before>from functools import partial
from itertools import imap
from itertools import islice
from itertools import izip_longest
from itertools import ifilter
from operator import itemgetter
from operator import is_not
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
# grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return izip_longest(fillvalue=fillvalue, *args)
fo = object()
is_not_fo = partial(is_not, fo)
def batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield filter(is_not_fo, batch)
def i_batcher(n, iterable):
for batch in grouper(iterable, n, fillvalue=fo):
yield ifilter(is_not_fo, batch)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
<commit_msg>Remove grouper and grouper based batchers
I prefer to not use the filter fill value method to
batch. I don't like the need to allocate room for
the fill value object.<commit_after>from functools import partial
from itertools import imap
from itertools import islice
from operator import itemgetter
def yield_getter_of(getter_maker, iterator):
"""
Iteratively map iterator over the result of getter_maker.
:param getter_maker: function that returns a getter function.
:param iterator: An iterator.
"""
return imap(getter_maker(), iterator)
def yield_nth_of(nth, iterator):
"""
For an iterator that returns sequences,
yield the nth value of each.
:param nth: Index desired column of each sequence.
:type nth: int
:param iterator: iterator of sequences.
"""
return yield_getter_of(partial(itemgetter, nth), iterator)
def i_batch(max_size, iterable):
"""
Generator that iteratively batches items
to a max size and consumes the items iterable
as each batch is yielded.
:param max_size: Max size of each batch.
:type max_size: int
:param iterable: An iterable
:type iterable: iter
"""
iterable_items = iter(iterable)
while True:
items_batch = tuple(islice(iterable_items, max_size))
if not items_batch:
break
yield items_batch
|
6aad52505450b481d7b47e7ffe5c08cb7774e84a | python/testData/skeletons/BinaryStandardModule.py | python/testData/skeletons/BinaryStandardModule.py | import binascii
import datetime
import <error descr="No module named nonexistent">nonexistent</error>
print(binascii)
print(datetime)
print(nonexistent)
| import binascii
import datetime
import <error descr="No module named 'nonexistent'">nonexistent</error>
print(binascii)
print(datetime)
print(nonexistent)
| Fix test data in an env test on skeleton generation | i18n: Fix test data in an env test on skeleton generation
GitOrigin-RevId: 7be12c5b3b3a333e7e3afebe45fe32770bbdfa81 | Python | apache-2.0 | allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community,allotria/intellij-community | import binascii
import datetime
import <error descr="No module named nonexistent">nonexistent</error>
print(binascii)
print(datetime)
print(nonexistent)
i18n: Fix test data in an env test on skeleton generation
GitOrigin-RevId: 7be12c5b3b3a333e7e3afebe45fe32770bbdfa81 | import binascii
import datetime
import <error descr="No module named 'nonexistent'">nonexistent</error>
print(binascii)
print(datetime)
print(nonexistent)
| <commit_before>import binascii
import datetime
import <error descr="No module named nonexistent">nonexistent</error>
print(binascii)
print(datetime)
print(nonexistent)
<commit_msg>i18n: Fix test data in an env test on skeleton generation
GitOrigin-RevId: 7be12c5b3b3a333e7e3afebe45fe32770bbdfa81<commit_after> | import binascii
import datetime
import <error descr="No module named 'nonexistent'">nonexistent</error>
print(binascii)
print(datetime)
print(nonexistent)
| import binascii
import datetime
import <error descr="No module named nonexistent">nonexistent</error>
print(binascii)
print(datetime)
print(nonexistent)
i18n: Fix test data in an env test on skeleton generation
GitOrigin-RevId: 7be12c5b3b3a333e7e3afebe45fe32770bbdfa81import binascii
import datetime
import <error descr="No module named 'nonexistent'">nonexistent</error>
print(binascii)
print(datetime)
print(nonexistent)
| <commit_before>import binascii
import datetime
import <error descr="No module named nonexistent">nonexistent</error>
print(binascii)
print(datetime)
print(nonexistent)
<commit_msg>i18n: Fix test data in an env test on skeleton generation
GitOrigin-RevId: 7be12c5b3b3a333e7e3afebe45fe32770bbdfa81<commit_after>import binascii
import datetime
import <error descr="No module named 'nonexistent'">nonexistent</error>
print(binascii)
print(datetime)
print(nonexistent)
|
6d2255b6f44a18bae0b50fb528564c6767683c68 | src/bindings/python/test/test.py | src/bindings/python/test/test.py | #ckwg +4
# Copyright 2012 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def test_error(msg):
import sys
sys.stderr.write('Error: %s\n' % msg)
def expect_exception(action, kind, func, *args):
got_exception = False
try:
func(*args)
except kind:
got_exception = True
except:
pass
if not got_exception:
test_error('Did not get exception when %s' % action)
| #ckwg +4
# Copyright 2012 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def test_error(msg):
import sys
sys.stderr.write('Error: %s\n' % msg)
def expect_exception(action, kind, func, *args):
got_exception = False
try:
func(*args)
except kind:
got_exception = True
except BaseException as e:
test_error('Got unexpected exception: %s' % str(e))
got_exception = True
except:
test_error('Got non-standard exception')
got_exception = True
if not got_exception:
test_error('Did not get exception when %s' % action)
| Update Python expect_exception to be like C++'s | Update Python expect_exception to be like C++'s
| Python | bsd-3-clause | Kitware/sprokit,Kitware/sprokit,linus-sherrill/sprokit,Kitware/sprokit,mathstuf/sprokit,linus-sherrill/sprokit,linus-sherrill/sprokit,mathstuf/sprokit,mathstuf/sprokit,mathstuf/sprokit,linus-sherrill/sprokit,Kitware/sprokit | #ckwg +4
# Copyright 2012 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def test_error(msg):
import sys
sys.stderr.write('Error: %s\n' % msg)
def expect_exception(action, kind, func, *args):
got_exception = False
try:
func(*args)
except kind:
got_exception = True
except:
pass
if not got_exception:
test_error('Did not get exception when %s' % action)
Update Python expect_exception to be like C++'s | #ckwg +4
# Copyright 2012 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def test_error(msg):
import sys
sys.stderr.write('Error: %s\n' % msg)
def expect_exception(action, kind, func, *args):
got_exception = False
try:
func(*args)
except kind:
got_exception = True
except BaseException as e:
test_error('Got unexpected exception: %s' % str(e))
got_exception = True
except:
test_error('Got non-standard exception')
got_exception = True
if not got_exception:
test_error('Did not get exception when %s' % action)
| <commit_before>#ckwg +4
# Copyright 2012 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def test_error(msg):
import sys
sys.stderr.write('Error: %s\n' % msg)
def expect_exception(action, kind, func, *args):
got_exception = False
try:
func(*args)
except kind:
got_exception = True
except:
pass
if not got_exception:
test_error('Did not get exception when %s' % action)
<commit_msg>Update Python expect_exception to be like C++'s<commit_after> | #ckwg +4
# Copyright 2012 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def test_error(msg):
import sys
sys.stderr.write('Error: %s\n' % msg)
def expect_exception(action, kind, func, *args):
got_exception = False
try:
func(*args)
except kind:
got_exception = True
except BaseException as e:
test_error('Got unexpected exception: %s' % str(e))
got_exception = True
except:
test_error('Got non-standard exception')
got_exception = True
if not got_exception:
test_error('Did not get exception when %s' % action)
| #ckwg +4
# Copyright 2012 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def test_error(msg):
import sys
sys.stderr.write('Error: %s\n' % msg)
def expect_exception(action, kind, func, *args):
got_exception = False
try:
func(*args)
except kind:
got_exception = True
except:
pass
if not got_exception:
test_error('Did not get exception when %s' % action)
Update Python expect_exception to be like C++'s#ckwg +4
# Copyright 2012 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def test_error(msg):
import sys
sys.stderr.write('Error: %s\n' % msg)
def expect_exception(action, kind, func, *args):
got_exception = False
try:
func(*args)
except kind:
got_exception = True
except BaseException as e:
test_error('Got unexpected exception: %s' % str(e))
got_exception = True
except:
test_error('Got non-standard exception')
got_exception = True
if not got_exception:
test_error('Did not get exception when %s' % action)
| <commit_before>#ckwg +4
# Copyright 2012 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def test_error(msg):
import sys
sys.stderr.write('Error: %s\n' % msg)
def expect_exception(action, kind, func, *args):
got_exception = False
try:
func(*args)
except kind:
got_exception = True
except:
pass
if not got_exception:
test_error('Did not get exception when %s' % action)
<commit_msg>Update Python expect_exception to be like C++'s<commit_after>#ckwg +4
# Copyright 2012 by Kitware, Inc. All Rights Reserved. Please refer to
# KITWARE_LICENSE.TXT for licensing information, or contact General Counsel,
# Kitware, Inc., 28 Corporate Drive, Clifton Park, NY 12065.
def test_error(msg):
import sys
sys.stderr.write('Error: %s\n' % msg)
def expect_exception(action, kind, func, *args):
got_exception = False
try:
func(*args)
except kind:
got_exception = True
except BaseException as e:
test_error('Got unexpected exception: %s' % str(e))
got_exception = True
except:
test_error('Got non-standard exception')
got_exception = True
if not got_exception:
test_error('Did not get exception when %s' % action)
|
654d3c666fd963760445f6ae8a6ba745ba85e014 | dci/server/tests/settings.py | dci/server/tests/settings.py | # -*- encoding: utf-8 -*-
#
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
SQLALCHEMY_DATABASE_URI = "postgresql:///%s?host=%s" % (
uuid.uuid4(), os.path.abspath(os.environ['DCI_DB_DIR'])
)
# detect if we are using docker_compose
if os.environ.get('DB_PORT'):
import dci.server.settings
SQLALCHEMY_DATABASE_URI = dci.server.settings.SQLALCHEMY_DATABASE_URI
SQLALCHEMY_DATABASE_URI += "_test"
| # -*- encoding: utf-8 -*-
#
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
DEBUG = False
# Log to /dev/null during tests to avoid stdout logging overhead.
LOG_FILE = '/dev/null'
# Unique throwaway database name per test run; `host` points at the local
# PostgreSQL socket directory (DCI_DB_DIR must be set by the test harness).
SQLALCHEMY_DATABASE_URI = "postgresql:///%s?host=%s" % (
    uuid.uuid4(), os.path.abspath(os.environ['DCI_DB_DIR'])
)
# detect if we are using docker_compose
if os.environ.get('DB_PORT'):
    # Reuse the main application's database URI, suffixed with "_test" so
    # the test suite gets its own database on the compose-managed server.
    import dci.server.settings
    SQLALCHEMY_DATABASE_URI = dci.server.settings.SQLALCHEMY_DATABASE_URI
    SQLALCHEMY_DATABASE_URI += "_test"
| Remove logging to stdout when performing tests | Remove logging to stdout when performing tests
During tests it logs to /dev/null in order to avoid bottleneck
Change-Id: I4fd54cf1f2c572b9750d63450da67ef06101a8d8
| Python | apache-2.0 | enovance/dci-control-server,redhat-cip/dci-control-server,redhat-cip/dci-control-server,enovance/dci-control-server | # -*- encoding: utf-8 -*-
#
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
SQLALCHEMY_DATABASE_URI = "postgresql:///%s?host=%s" % (
uuid.uuid4(), os.path.abspath(os.environ['DCI_DB_DIR'])
)
# detect if we are using docker_compose
if os.environ.get('DB_PORT'):
import dci.server.settings
SQLALCHEMY_DATABASE_URI = dci.server.settings.SQLALCHEMY_DATABASE_URI
SQLALCHEMY_DATABASE_URI += "_test"
Remove logging to stdout when performing tests
During tests it logs to /dev/null in order to avoid bottleneck
Change-Id: I4fd54cf1f2c572b9750d63450da67ef06101a8d8 | # -*- encoding: utf-8 -*-
#
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
DEBUG = False
LOG_FILE = '/dev/null'
SQLALCHEMY_DATABASE_URI = "postgresql:///%s?host=%s" % (
uuid.uuid4(), os.path.abspath(os.environ['DCI_DB_DIR'])
)
# detect if we are using docker_compose
if os.environ.get('DB_PORT'):
import dci.server.settings
SQLALCHEMY_DATABASE_URI = dci.server.settings.SQLALCHEMY_DATABASE_URI
SQLALCHEMY_DATABASE_URI += "_test"
| <commit_before># -*- encoding: utf-8 -*-
#
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
SQLALCHEMY_DATABASE_URI = "postgresql:///%s?host=%s" % (
uuid.uuid4(), os.path.abspath(os.environ['DCI_DB_DIR'])
)
# detect if we are using docker_compose
if os.environ.get('DB_PORT'):
import dci.server.settings
SQLALCHEMY_DATABASE_URI = dci.server.settings.SQLALCHEMY_DATABASE_URI
SQLALCHEMY_DATABASE_URI += "_test"
<commit_msg>Remove logging to stdout when performing tests
During tests it logs to /dev/null in order to avoid bottleneck
Change-Id: I4fd54cf1f2c572b9750d63450da67ef06101a8d8<commit_after> | # -*- encoding: utf-8 -*-
#
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
DEBUG = False
LOG_FILE = '/dev/null'
SQLALCHEMY_DATABASE_URI = "postgresql:///%s?host=%s" % (
uuid.uuid4(), os.path.abspath(os.environ['DCI_DB_DIR'])
)
# detect if we are using docker_compose
if os.environ.get('DB_PORT'):
import dci.server.settings
SQLALCHEMY_DATABASE_URI = dci.server.settings.SQLALCHEMY_DATABASE_URI
SQLALCHEMY_DATABASE_URI += "_test"
| # -*- encoding: utf-8 -*-
#
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
SQLALCHEMY_DATABASE_URI = "postgresql:///%s?host=%s" % (
uuid.uuid4(), os.path.abspath(os.environ['DCI_DB_DIR'])
)
# detect if we are using docker_compose
if os.environ.get('DB_PORT'):
import dci.server.settings
SQLALCHEMY_DATABASE_URI = dci.server.settings.SQLALCHEMY_DATABASE_URI
SQLALCHEMY_DATABASE_URI += "_test"
Remove logging to stdout when performing tests
During tests it logs to /dev/null in order to avoid bottleneck
Change-Id: I4fd54cf1f2c572b9750d63450da67ef06101a8d8# -*- encoding: utf-8 -*-
#
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
DEBUG = False
LOG_FILE = '/dev/null'
SQLALCHEMY_DATABASE_URI = "postgresql:///%s?host=%s" % (
uuid.uuid4(), os.path.abspath(os.environ['DCI_DB_DIR'])
)
# detect if we are using docker_compose
if os.environ.get('DB_PORT'):
import dci.server.settings
SQLALCHEMY_DATABASE_URI = dci.server.settings.SQLALCHEMY_DATABASE_URI
SQLALCHEMY_DATABASE_URI += "_test"
| <commit_before># -*- encoding: utf-8 -*-
#
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
SQLALCHEMY_DATABASE_URI = "postgresql:///%s?host=%s" % (
uuid.uuid4(), os.path.abspath(os.environ['DCI_DB_DIR'])
)
# detect if we are using docker_compose
if os.environ.get('DB_PORT'):
import dci.server.settings
SQLALCHEMY_DATABASE_URI = dci.server.settings.SQLALCHEMY_DATABASE_URI
SQLALCHEMY_DATABASE_URI += "_test"
<commit_msg>Remove logging to stdout when performing tests
During tests it logs to /dev/null in order to avoid bottleneck
Change-Id: I4fd54cf1f2c572b9750d63450da67ef06101a8d8<commit_after># -*- encoding: utf-8 -*-
#
# Copyright 2015 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import uuid
DEBUG = False
LOG_FILE = '/dev/null'
SQLALCHEMY_DATABASE_URI = "postgresql:///%s?host=%s" % (
uuid.uuid4(), os.path.abspath(os.environ['DCI_DB_DIR'])
)
# detect if we are using docker_compose
if os.environ.get('DB_PORT'):
import dci.server.settings
SQLALCHEMY_DATABASE_URI = dci.server.settings.SQLALCHEMY_DATABASE_URI
SQLALCHEMY_DATABASE_URI += "_test"
|
285f4e21190d28e4e3b26dc62b9fd396de1db24e | keyring/__init__.py | keyring/__init__.py | from __future__ import absolute_import
import logging
logger = logging.getLogger('keyring')
from .core import (set_keyring, get_keyring, set_password, get_password,
delete_password)
from .getpassbackend import get_password as get_pass_get_password
try:
import pkg_resources
__version__ = pkg_resources.require('keyring')[0].version
except Exception:
__version__ = 'unknown'
| from __future__ import absolute_import
import logging
logger = logging.getLogger('keyring')
from .core import (set_keyring, get_keyring, set_password, get_password,
delete_password)
from .getpassbackend import get_password as get_pass_get_password
try:
import pkg_resources
__version__ = pkg_resources.require('keyring')[0].version
except Exception:
__version__ = 'unknown'
__all__ = (
'set_keyring', 'get_keyring', 'set_password', 'get_password',
'delete_password', 'get_pass_get_password',
)
| Define __all__ to suppress lint warnings. | Define __all__ to suppress lint warnings.
| Python | mit | jaraco/keyring | from __future__ import absolute_import
import logging
logger = logging.getLogger('keyring')
from .core import (set_keyring, get_keyring, set_password, get_password,
delete_password)
from .getpassbackend import get_password as get_pass_get_password
try:
import pkg_resources
__version__ = pkg_resources.require('keyring')[0].version
except Exception:
__version__ = 'unknown'
Define __all__ to suppress lint warnings. | from __future__ import absolute_import
import logging
logger = logging.getLogger('keyring')
from .core import (set_keyring, get_keyring, set_password, get_password,
delete_password)
from .getpassbackend import get_password as get_pass_get_password
try:
import pkg_resources
__version__ = pkg_resources.require('keyring')[0].version
except Exception:
__version__ = 'unknown'
__all__ = (
'set_keyring', 'get_keyring', 'set_password', 'get_password',
'delete_password', 'get_pass_get_password',
)
| <commit_before>from __future__ import absolute_import
import logging
logger = logging.getLogger('keyring')
from .core import (set_keyring, get_keyring, set_password, get_password,
delete_password)
from .getpassbackend import get_password as get_pass_get_password
try:
import pkg_resources
__version__ = pkg_resources.require('keyring')[0].version
except Exception:
__version__ = 'unknown'
<commit_msg>Define __all__ to suppress lint warnings.<commit_after> | from __future__ import absolute_import
import logging
logger = logging.getLogger('keyring')
from .core import (set_keyring, get_keyring, set_password, get_password,
delete_password)
from .getpassbackend import get_password as get_pass_get_password
try:
import pkg_resources
__version__ = pkg_resources.require('keyring')[0].version
except Exception:
__version__ = 'unknown'
__all__ = (
'set_keyring', 'get_keyring', 'set_password', 'get_password',
'delete_password', 'get_pass_get_password',
)
| from __future__ import absolute_import
import logging
logger = logging.getLogger('keyring')
from .core import (set_keyring, get_keyring, set_password, get_password,
delete_password)
from .getpassbackend import get_password as get_pass_get_password
try:
import pkg_resources
__version__ = pkg_resources.require('keyring')[0].version
except Exception:
__version__ = 'unknown'
Define __all__ to suppress lint warnings.from __future__ import absolute_import
import logging
logger = logging.getLogger('keyring')
from .core import (set_keyring, get_keyring, set_password, get_password,
delete_password)
from .getpassbackend import get_password as get_pass_get_password
try:
import pkg_resources
__version__ = pkg_resources.require('keyring')[0].version
except Exception:
__version__ = 'unknown'
__all__ = (
'set_keyring', 'get_keyring', 'set_password', 'get_password',
'delete_password', 'get_pass_get_password',
)
| <commit_before>from __future__ import absolute_import
import logging
logger = logging.getLogger('keyring')
from .core import (set_keyring, get_keyring, set_password, get_password,
delete_password)
from .getpassbackend import get_password as get_pass_get_password
try:
import pkg_resources
__version__ = pkg_resources.require('keyring')[0].version
except Exception:
__version__ = 'unknown'
<commit_msg>Define __all__ to suppress lint warnings.<commit_after>from __future__ import absolute_import
import logging
logger = logging.getLogger('keyring')
from .core import (set_keyring, get_keyring, set_password, get_password,
delete_password)
from .getpassbackend import get_password as get_pass_get_password
try:
import pkg_resources
__version__ = pkg_resources.require('keyring')[0].version
except Exception:
__version__ = 'unknown'
__all__ = (
'set_keyring', 'get_keyring', 'set_password', 'get_password',
'delete_password', 'get_pass_get_password',
)
|
aae0e295ef1c020371831aa9145820d8678670f7 | axes/apps.py | axes/apps.py | from logging import getLogger
from pkg_resources import get_distribution
from django import apps
log = getLogger(__name__)
class AppConfig(apps.AppConfig):
name = "axes"
initialized = False
@classmethod
def initialize(cls):
"""
Initialize Axes logging and show version information.
This method is re-entrant and can be called multiple times.
It displays version information exactly once at application startup.
"""
if cls.initialized:
return
cls.initialized = True
# Only import settings, checks, and signals one time after Django has been initialized
from axes.conf import settings
from axes import checks, signals # noqa
# Skip startup log messages if Axes is not set to verbose
if settings.AXES_VERBOSE:
log.info("AXES: BEGIN LOG")
log.info(
"AXES: Using django-axes version %s",
get_distribution("django-axes").version,
)
if settings.AXES_ONLY_USER_FAILURES:
log.info("AXES: blocking by username only.")
elif settings.AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP:
log.info("AXES: blocking by combination of username and IP.")
elif settings.AXES_LOCK_OUT_BY_USER_OR_IP:
log.info("AXES: blocking by username or IP.")
else:
log.info("AXES: blocking by IP only.")
def ready(self):
self.initialize()
| from logging import getLogger
from pkg_resources import get_distribution
from django import apps
log = getLogger(__name__)
class AppConfig(apps.AppConfig):
name = "axes"
initialized = False
@classmethod
def initialize(cls):
"""
Initialize Axes logging and show version information.
This method is re-entrant and can be called multiple times.
It displays version information exactly once at application startup.
"""
if cls.initialized:
return
cls.initialized = True
# Only import settings, checks, and signals one time after Django has been initialized
from axes.conf import settings # noqa
from axes import checks, signals # noqa
# Skip startup log messages if Axes is not set to verbose
if settings.AXES_VERBOSE:
log.info("AXES: BEGIN LOG")
log.info(
"AXES: Using django-axes version %s",
get_distribution("django-axes").version,
)
if settings.AXES_ONLY_USER_FAILURES:
log.info("AXES: blocking by username only.")
elif settings.AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP:
log.info("AXES: blocking by combination of username and IP.")
elif settings.AXES_LOCK_OUT_BY_USER_OR_IP:
log.info("AXES: blocking by username or IP.")
else:
log.info("AXES: blocking by IP only.")
def ready(self):
self.initialize()
| Add QA skip for import ordering | Add QA skip for import ordering | Python | mit | jazzband/django-axes | from logging import getLogger
from pkg_resources import get_distribution
from django import apps
log = getLogger(__name__)
class AppConfig(apps.AppConfig):
name = "axes"
initialized = False
@classmethod
def initialize(cls):
"""
Initialize Axes logging and show version information.
This method is re-entrant and can be called multiple times.
It displays version information exactly once at application startup.
"""
if cls.initialized:
return
cls.initialized = True
# Only import settings, checks, and signals one time after Django has been initialized
from axes.conf import settings
from axes import checks, signals # noqa
# Skip startup log messages if Axes is not set to verbose
if settings.AXES_VERBOSE:
log.info("AXES: BEGIN LOG")
log.info(
"AXES: Using django-axes version %s",
get_distribution("django-axes").version,
)
if settings.AXES_ONLY_USER_FAILURES:
log.info("AXES: blocking by username only.")
elif settings.AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP:
log.info("AXES: blocking by combination of username and IP.")
elif settings.AXES_LOCK_OUT_BY_USER_OR_IP:
log.info("AXES: blocking by username or IP.")
else:
log.info("AXES: blocking by IP only.")
def ready(self):
self.initialize()
Add QA skip for import ordering | from logging import getLogger
from pkg_resources import get_distribution
from django import apps
log = getLogger(__name__)
class AppConfig(apps.AppConfig):
name = "axes"
initialized = False
@classmethod
def initialize(cls):
"""
Initialize Axes logging and show version information.
This method is re-entrant and can be called multiple times.
It displays version information exactly once at application startup.
"""
if cls.initialized:
return
cls.initialized = True
# Only import settings, checks, and signals one time after Django has been initialized
from axes.conf import settings # noqa
from axes import checks, signals # noqa
# Skip startup log messages if Axes is not set to verbose
if settings.AXES_VERBOSE:
log.info("AXES: BEGIN LOG")
log.info(
"AXES: Using django-axes version %s",
get_distribution("django-axes").version,
)
if settings.AXES_ONLY_USER_FAILURES:
log.info("AXES: blocking by username only.")
elif settings.AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP:
log.info("AXES: blocking by combination of username and IP.")
elif settings.AXES_LOCK_OUT_BY_USER_OR_IP:
log.info("AXES: blocking by username or IP.")
else:
log.info("AXES: blocking by IP only.")
def ready(self):
self.initialize()
| <commit_before>from logging import getLogger
from pkg_resources import get_distribution
from django import apps
log = getLogger(__name__)
class AppConfig(apps.AppConfig):
name = "axes"
initialized = False
@classmethod
def initialize(cls):
"""
Initialize Axes logging and show version information.
This method is re-entrant and can be called multiple times.
It displays version information exactly once at application startup.
"""
if cls.initialized:
return
cls.initialized = True
# Only import settings, checks, and signals one time after Django has been initialized
from axes.conf import settings
from axes import checks, signals # noqa
# Skip startup log messages if Axes is not set to verbose
if settings.AXES_VERBOSE:
log.info("AXES: BEGIN LOG")
log.info(
"AXES: Using django-axes version %s",
get_distribution("django-axes").version,
)
if settings.AXES_ONLY_USER_FAILURES:
log.info("AXES: blocking by username only.")
elif settings.AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP:
log.info("AXES: blocking by combination of username and IP.")
elif settings.AXES_LOCK_OUT_BY_USER_OR_IP:
log.info("AXES: blocking by username or IP.")
else:
log.info("AXES: blocking by IP only.")
def ready(self):
self.initialize()
<commit_msg>Add QA skip for import ordering<commit_after> | from logging import getLogger
from pkg_resources import get_distribution
from django import apps
log = getLogger(__name__)
class AppConfig(apps.AppConfig):
    """Django AppConfig performing one-time Axes startup initialization."""

    name = "axes"
    # Class-level guard so initialize() runs its body at most once per process.
    initialized = False

    @classmethod
    def initialize(cls):
        """
        Initialize Axes logging and show version information.

        This method is re-entrant and can be called multiple times.
        It displays version information exactly once at application startup.
        """
        if cls.initialized:
            return
        cls.initialized = True

        # Only import settings, checks, and signals one time after Django has been initialized
        from axes.conf import settings  # noqa
        from axes import checks, signals  # noqa

        # Skip startup log messages if Axes is not set to verbose
        if settings.AXES_VERBOSE:
            log.info("AXES: BEGIN LOG")
            log.info(
                "AXES: Using django-axes version %s",
                get_distribution("django-axes").version,
            )

            # Announce which lockout strategy the settings select.
            if settings.AXES_ONLY_USER_FAILURES:
                log.info("AXES: blocking by username only.")
            elif settings.AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP:
                log.info("AXES: blocking by combination of username and IP.")
            elif settings.AXES_LOCK_OUT_BY_USER_OR_IP:
                log.info("AXES: blocking by username or IP.")
            else:
                log.info("AXES: blocking by IP only.")

    def ready(self):
        # Django hook invoked once the app registry is fully populated.
        self.initialize()
| from logging import getLogger
from pkg_resources import get_distribution
from django import apps
log = getLogger(__name__)
class AppConfig(apps.AppConfig):
name = "axes"
initialized = False
@classmethod
def initialize(cls):
"""
Initialize Axes logging and show version information.
This method is re-entrant and can be called multiple times.
It displays version information exactly once at application startup.
"""
if cls.initialized:
return
cls.initialized = True
# Only import settings, checks, and signals one time after Django has been initialized
from axes.conf import settings
from axes import checks, signals # noqa
# Skip startup log messages if Axes is not set to verbose
if settings.AXES_VERBOSE:
log.info("AXES: BEGIN LOG")
log.info(
"AXES: Using django-axes version %s",
get_distribution("django-axes").version,
)
if settings.AXES_ONLY_USER_FAILURES:
log.info("AXES: blocking by username only.")
elif settings.AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP:
log.info("AXES: blocking by combination of username and IP.")
elif settings.AXES_LOCK_OUT_BY_USER_OR_IP:
log.info("AXES: blocking by username or IP.")
else:
log.info("AXES: blocking by IP only.")
def ready(self):
self.initialize()
Add QA skip for import orderingfrom logging import getLogger
from pkg_resources import get_distribution
from django import apps
log = getLogger(__name__)
class AppConfig(apps.AppConfig):
name = "axes"
initialized = False
@classmethod
def initialize(cls):
"""
Initialize Axes logging and show version information.
This method is re-entrant and can be called multiple times.
It displays version information exactly once at application startup.
"""
if cls.initialized:
return
cls.initialized = True
# Only import settings, checks, and signals one time after Django has been initialized
from axes.conf import settings # noqa
from axes import checks, signals # noqa
# Skip startup log messages if Axes is not set to verbose
if settings.AXES_VERBOSE:
log.info("AXES: BEGIN LOG")
log.info(
"AXES: Using django-axes version %s",
get_distribution("django-axes").version,
)
if settings.AXES_ONLY_USER_FAILURES:
log.info("AXES: blocking by username only.")
elif settings.AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP:
log.info("AXES: blocking by combination of username and IP.")
elif settings.AXES_LOCK_OUT_BY_USER_OR_IP:
log.info("AXES: blocking by username or IP.")
else:
log.info("AXES: blocking by IP only.")
def ready(self):
self.initialize()
| <commit_before>from logging import getLogger
from pkg_resources import get_distribution
from django import apps
log = getLogger(__name__)
class AppConfig(apps.AppConfig):
name = "axes"
initialized = False
@classmethod
def initialize(cls):
"""
Initialize Axes logging and show version information.
This method is re-entrant and can be called multiple times.
It displays version information exactly once at application startup.
"""
if cls.initialized:
return
cls.initialized = True
# Only import settings, checks, and signals one time after Django has been initialized
from axes.conf import settings
from axes import checks, signals # noqa
# Skip startup log messages if Axes is not set to verbose
if settings.AXES_VERBOSE:
log.info("AXES: BEGIN LOG")
log.info(
"AXES: Using django-axes version %s",
get_distribution("django-axes").version,
)
if settings.AXES_ONLY_USER_FAILURES:
log.info("AXES: blocking by username only.")
elif settings.AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP:
log.info("AXES: blocking by combination of username and IP.")
elif settings.AXES_LOCK_OUT_BY_USER_OR_IP:
log.info("AXES: blocking by username or IP.")
else:
log.info("AXES: blocking by IP only.")
def ready(self):
self.initialize()
<commit_msg>Add QA skip for import ordering<commit_after>from logging import getLogger
from pkg_resources import get_distribution
from django import apps
log = getLogger(__name__)
class AppConfig(apps.AppConfig):
name = "axes"
initialized = False
@classmethod
def initialize(cls):
"""
Initialize Axes logging and show version information.
This method is re-entrant and can be called multiple times.
It displays version information exactly once at application startup.
"""
if cls.initialized:
return
cls.initialized = True
# Only import settings, checks, and signals one time after Django has been initialized
from axes.conf import settings # noqa
from axes import checks, signals # noqa
# Skip startup log messages if Axes is not set to verbose
if settings.AXES_VERBOSE:
log.info("AXES: BEGIN LOG")
log.info(
"AXES: Using django-axes version %s",
get_distribution("django-axes").version,
)
if settings.AXES_ONLY_USER_FAILURES:
log.info("AXES: blocking by username only.")
elif settings.AXES_LOCK_OUT_BY_COMBINATION_USER_AND_IP:
log.info("AXES: blocking by combination of username and IP.")
elif settings.AXES_LOCK_OUT_BY_USER_OR_IP:
log.info("AXES: blocking by username or IP.")
else:
log.info("AXES: blocking by IP only.")
def ready(self):
self.initialize()
|
2733cf558a7455eb017ec4690307a2ee18afbd8b | blogtrans.py | blogtrans.py | from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame=MainWindow()
app.MainLoop()
if __name__ == "__main__" :
main()
| from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
    # Decorator (Python 2 syntax): run func and, on any exception, print the
    # full traceback and wait for <Enter> so a console window stays visible.
    def f() :
        try:
            func()
        except Exception, inst :
            # NOTE(review): `type` shadows the builtin here; harmless locally.
            type, value, tb = sys.exc_info()
            print "\n".join(traceback.format_exception(type, value, tb))
            raw_input()
    return f
@trap_error
def main() :
    # Parse command-line options: "-n" suppresses the GUI window, and the
    # --import-* options load blog data through the matching importer.
    long_opts = [ "import-wretch=", "import-blogger=" ]
    opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)

    blogdata = None
    no_window = False

    for o, a in opts :
        if o=="-n" :
            no_window = True
        if o=="--import-wretch" :
            blogdata = WretchImporter(a).parse()
            print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
        if o=="--import-blogger" :
            blogdata = BloggerImporter(a).parse()
            print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )

    if not no_window :
        # Launch the wx GUI and preload any blog data imported above.
        app = wx.PySimpleApp()
        frame = MainWindow()
        if blogdata!=None:
            frame.setBlogData(blogdata)
        app.MainLoop()
if __name__ == "__main__" :
main()
| Load blog data from command line | Load blog data from command line
| Python | mit | miaout17/blogtrans,miaout17/blogtrans | from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame=MainWindow()
app.MainLoop()
if __name__ == "__main__" :
main()
Load blog data from command line | from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
blogdata = None
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame = MainWindow()
if blogdata!=None:
frame.setBlogData(blogdata)
app.MainLoop()
if __name__ == "__main__" :
main()
| <commit_before>from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame=MainWindow()
app.MainLoop()
if __name__ == "__main__" :
main()
<commit_msg>Load blog data from command line<commit_after> | from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
blogdata = None
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame = MainWindow()
if blogdata!=None:
frame.setBlogData(blogdata)
app.MainLoop()
if __name__ == "__main__" :
main()
| from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame=MainWindow()
app.MainLoop()
if __name__ == "__main__" :
main()
Load blog data from command linefrom blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
blogdata = None
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame = MainWindow()
if blogdata!=None:
frame.setBlogData(blogdata)
app.MainLoop()
if __name__ == "__main__" :
main()
| <commit_before>from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame=MainWindow()
app.MainLoop()
if __name__ == "__main__" :
main()
<commit_msg>Load blog data from command line<commit_after>from blogtrans.ui.MainWindow import *
import sys, traceback
import getopt
import wx
# Importers / Exporters
from blogtrans.wretch.WretchImporter import WretchImporter
from blogtrans.mt import *
from blogtrans.blogger.BloggerExporter import *
from blogtrans.blogger.BloggerImporter import *
def trap_error(func) :
def f() :
try:
func()
except Exception, inst :
type, value, tb = sys.exc_info()
print "\n".join(traceback.format_exception(type, value, tb))
raw_input()
return f
@trap_error
def main() :
long_opts = [ "import-wretch=", "import-blogger=" ]
opts, args = getopt.getopt(sys.argv[1:], "n", long_opts)
blogdata = None
no_window = False
for o, a in opts :
if o=="-n" :
no_window = True
if o=="--import-wretch" :
blogdata = WretchImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if o=="--import-blogger" :
blogdata = BloggerImporter(a).parse()
print "%d articles, %d comments" % ( blogdata.article_count(), blogdata.comment_count() )
if not no_window :
app = wx.PySimpleApp()
frame = MainWindow()
if blogdata!=None:
frame.setBlogData(blogdata)
app.MainLoop()
if __name__ == "__main__" :
main()
|
a8a257ef2bfb63d175f7db1cb91924adae125b5c | sympy/core/tests/test_compatibility.py | sympy/core/tests/test_compatibility.py | from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
"Issue 4111 - this was failing with python2/3 problems"
assert(list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
| from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
# Issue 4111 - this had been failing with python2/3 problems
assert(list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
| Change docstring to inline comment | Change docstring to inline comment
This is in response to comment from smichr
modified: sympy/core/tests/test_compatibility.py
| Python | bsd-3-clause | Mitchkoens/sympy,Sumith1896/sympy,kaichogami/sympy,sunny94/temp,jamesblunt/sympy,pbrady/sympy,Vishluck/sympy,Titan-C/sympy,jaimahajan1997/sympy,kevalds51/sympy,yukoba/sympy,kevalds51/sympy,abhiii5459/sympy,debugger22/sympy,kumarkrishna/sympy,lindsayad/sympy,Vishluck/sympy,souravsingh/sympy,debugger22/sympy,yashsharan/sympy,abloomston/sympy,sunny94/temp,farhaanbukhsh/sympy,aktech/sympy,mcdaniel67/sympy,wanglongqi/sympy,saurabhjn76/sympy,madan96/sympy,hargup/sympy,souravsingh/sympy,Curious72/sympy,atsao72/sympy,atreyv/sympy,Davidjohnwilson/sympy,Designist/sympy,jamesblunt/sympy,VaibhavAgarwalVA/sympy,debugger22/sympy,Gadal/sympy,diofant/diofant,jbbskinny/sympy,dqnykamp/sympy,shikil/sympy,yashsharan/sympy,toolforger/sympy,Designist/sympy,Sumith1896/sympy,saurabhjn76/sympy,shikil/sympy,shipci/sympy,vipulroxx/sympy,postvakje/sympy,sahmed95/sympy,kaushik94/sympy,sampadsaha5/sympy,dqnykamp/sympy,meghana1995/sympy,iamutkarshtiwari/sympy,chaffra/sympy,liangjiaxing/sympy,Mitchkoens/sympy,grevutiu-gabriel/sympy,kaushik94/sympy,skidzo/sympy,ga7g08/sympy,postvakje/sympy,Mitchkoens/sympy,pandeyadarsh/sympy,Gadal/sympy,postvakje/sympy,kumarkrishna/sympy,kumarkrishna/sympy,AunShiLord/sympy,MechCoder/sympy,pbrady/sympy,cccfran/sympy,pbrady/sympy,abloomston/sympy,grevutiu-gabriel/sympy,liangjiaxing/sympy,yashsharan/sympy,wyom/sympy,rahuldan/sympy,jbbskinny/sympy,rahuldan/sympy,garvitr/sympy,madan96/sympy,AkademieOlympia/sympy,pandeyadarsh/sympy,chaffra/sympy,oliverlee/sympy,ga7g08/sympy,sunny94/temp,VaibhavAgarwalVA/sympy,mafiya69/sympy,Curious72/sympy,atsao72/sympy,shikil/sympy,aktech/sympy,souravsingh/sympy,Davidjohnwilson/sympy,maniteja123/sympy,lindsayad/sympy,bukzor/sympy,beni55/sympy,Davidjohnwilson/sympy,sahilshekhawat/sympy,asm666/sympy,drufat/sympy,MridulS/sympy,Shaswat27/sympy,beni55/sympy,Arafatk/sympy,Shaswat27/sympy,jbbskinny/sympy,iamutkarshtiwari/sympy,ChristinaZografou/sympy,wanglongqi/sympy,farhaanbukhsh/sympy,ahhda/sympy,wyom/sympy,emon1000
5/sympy,abhiii5459/sympy,cswiercz/sympy,ChristinaZografou/sympy,jaimahajan1997/sympy,Gadal/sympy,dqnykamp/sympy,kevalds51/sympy,Shaswat27/sympy,chaffra/sympy,liangjiaxing/sympy,jaimahajan1997/sympy,jamesblunt/sympy,moble/sympy,Vishluck/sympy,yukoba/sympy,Designist/sympy,atreyv/sympy,pandeyadarsh/sympy,hargup/sympy,skirpichev/omg,jerli/sympy,iamutkarshtiwari/sympy,hargup/sympy,vipulroxx/sympy,moble/sympy,cccfran/sympy,mafiya69/sympy,moble/sympy,vipulroxx/sympy,sahmed95/sympy,kaushik94/sympy,AkademieOlympia/sympy,MridulS/sympy,meghana1995/sympy,atreyv/sympy,asm666/sympy,madan96/sympy,AunShiLord/sympy,bukzor/sympy,oliverlee/sympy,ga7g08/sympy,Curious72/sympy,yukoba/sympy,wanglongqi/sympy,Titan-C/sympy,emon10005/sympy,cswiercz/sympy,farhaanbukhsh/sympy,abloomston/sympy,atsao72/sympy,beni55/sympy,rahuldan/sympy,drufat/sympy,meghana1995/sympy,toolforger/sympy,MechCoder/sympy,oliverlee/sympy,ChristinaZografou/sympy,jerli/sympy,mcdaniel67/sympy,Arafatk/sympy,sahmed95/sympy,sahilshekhawat/sympy,ahhda/sympy,maniteja123/sympy,emon10005/sympy,mcdaniel67/sympy,abhiii5459/sympy,jerli/sympy,saurabhjn76/sympy,MechCoder/sympy,sahilshekhawat/sympy,drufat/sympy,cswiercz/sympy,AunShiLord/sympy,garvitr/sympy,VaibhavAgarwalVA/sympy,garvitr/sympy,ahhda/sympy,shipci/sympy,wyom/sympy,toolforger/sympy,lindsayad/sympy,cccfran/sympy,aktech/sympy,maniteja123/sympy,kaichogami/sympy,sampadsaha5/sympy,Titan-C/sympy,MridulS/sympy,sampadsaha5/sympy,kaichogami/sympy,Arafatk/sympy,mafiya69/sympy,shipci/sympy,grevutiu-gabriel/sympy,skidzo/sympy,skidzo/sympy,asm666/sympy,AkademieOlympia/sympy,bukzor/sympy,Sumith1896/sympy | from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
"Issue 4111 - this was failing with python2/3 problems"
assert(list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
Change docstring to inline comment
This is in response to comment from smichr
modified: sympy/core/tests/test_compatibility.py | from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
# Issue 4111 - this had been failing with python2/3 problems
assert(list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
| <commit_before>from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
"Issue 4111 - this was failing with python2/3 problems"
assert(list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
<commit_msg>Change docstring to inline comment
This is in response to comment from smichr
modified: sympy/core/tests/test_compatibility.py<commit_after> | from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
# Issue 4111 - this had been failing with python2/3 problems
assert(list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
| from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
"Issue 4111 - this was failing with python2/3 problems"
assert(list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
Change docstring to inline comment
This is in response to comment from smichr
modified: sympy/core/tests/test_compatibility.pyfrom sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
# Issue 4111 - this had been failing with python2/3 problems
assert(list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
| <commit_before>from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
"Issue 4111 - this was failing with python2/3 problems"
assert(list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
<commit_msg>Change docstring to inline comment
This is in response to comment from smichr
modified: sympy/core/tests/test_compatibility.py<commit_after>from sympy.core.compatibility import default_sort_key, as_int, ordered
from sympy.core.singleton import S
from sympy.utilities.pytest import raises
from sympy.abc import x
def test_default_sort_key():
func = lambda x: x
assert sorted([func, x, func], key=default_sort_key) == [func, func, x]
def test_as_int():
raises(ValueError, lambda : as_int(1.1))
raises(ValueError, lambda : as_int([]))
def test_ordered():
# Issue 4111 - this had been failing with python2/3 problems
assert(list(ordered([{1:3, 2:4, 9:10}, {1:3}])) == \
[{1: 3}, {1: 3, 2: 4, 9: 10}])
|
ce23775d3a76b0b8ecf349733454c0709cfe53d8 | opentreemap/treemap/templatetags/paging.py | opentreemap/treemap/templatetags/paging.py | from django import template
register = template.Library()
@register.filter
def four_before_page(page_range, page):
"""Returns 4 or fewer pages before the given (1-based) page number"""
return page_range[max(page-5, 0):max(page-1, 0)]
@register.filter
def four_after_page(page_range, page):
"""Returns 4 or fewer pages after the given (1-based) page number"""
return page_range[page:min(page+4, len(page_range))]
| from django import template
register = template.Library()
@register.filter
def four_before_page(page_range, page):
"""Returns 4 or fewer pages before the given (1-based) page number"""
return list(page_range)[max(page-5, 0):max(page-1, 0)]
@register.filter
def four_after_page(page_range, page):
"""Returns 4 or fewer pages after the given (1-based) page number"""
return list(page_range)[page:min(page+4, len(page_range))]
| Support Paginator.page_range returning an iterator or list | Support Paginator.page_range returning an iterator or list
Paginator.page_range will return an iterator in Django 1.9+
| Python | agpl-3.0 | maurizi/otm-core,maurizi/otm-core,maurizi/otm-core,maurizi/otm-core | from django import template
register = template.Library()
@register.filter
def four_before_page(page_range, page):
"""Returns 4 or fewer pages before the given (1-based) page number"""
return page_range[max(page-5, 0):max(page-1, 0)]
@register.filter
def four_after_page(page_range, page):
"""Returns 4 or fewer pages after the given (1-based) page number"""
return page_range[page:min(page+4, len(page_range))]
Support Paginator.page_range returning an iterator or list
Paginator.page_range will return an iterator in Django 1.9+ | from django import template
register = template.Library()
@register.filter
def four_before_page(page_range, page):
"""Returns 4 or fewer pages before the given (1-based) page number"""
return list(page_range)[max(page-5, 0):max(page-1, 0)]
@register.filter
def four_after_page(page_range, page):
"""Returns 4 or fewer pages after the given (1-based) page number"""
return list(page_range)[page:min(page+4, len(page_range))]
| <commit_before>from django import template
register = template.Library()
@register.filter
def four_before_page(page_range, page):
"""Returns 4 or fewer pages before the given (1-based) page number"""
return page_range[max(page-5, 0):max(page-1, 0)]
@register.filter
def four_after_page(page_range, page):
"""Returns 4 or fewer pages after the given (1-based) page number"""
return page_range[page:min(page+4, len(page_range))]
<commit_msg>Support Paginator.page_range returning an iterator or list
Paginator.page_range will return an iterator in Django 1.9+<commit_after> | from django import template
register = template.Library()
@register.filter
def four_before_page(page_range, page):
"""Returns 4 or fewer pages before the given (1-based) page number"""
return list(page_range)[max(page-5, 0):max(page-1, 0)]
@register.filter
def four_after_page(page_range, page):
"""Returns 4 or fewer pages after the given (1-based) page number"""
return list(page_range)[page:min(page+4, len(page_range))]
| from django import template
register = template.Library()
@register.filter
def four_before_page(page_range, page):
"""Returns 4 or fewer pages before the given (1-based) page number"""
return page_range[max(page-5, 0):max(page-1, 0)]
@register.filter
def four_after_page(page_range, page):
"""Returns 4 or fewer pages after the given (1-based) page number"""
return page_range[page:min(page+4, len(page_range))]
Support Paginator.page_range returning an iterator or list
Paginator.page_range will return an iterator in Django 1.9+from django import template
register = template.Library()
@register.filter
def four_before_page(page_range, page):
"""Returns 4 or fewer pages before the given (1-based) page number"""
return list(page_range)[max(page-5, 0):max(page-1, 0)]
@register.filter
def four_after_page(page_range, page):
"""Returns 4 or fewer pages after the given (1-based) page number"""
return list(page_range)[page:min(page+4, len(page_range))]
| <commit_before>from django import template
register = template.Library()
@register.filter
def four_before_page(page_range, page):
"""Returns 4 or fewer pages before the given (1-based) page number"""
return page_range[max(page-5, 0):max(page-1, 0)]
@register.filter
def four_after_page(page_range, page):
"""Returns 4 or fewer pages after the given (1-based) page number"""
return page_range[page:min(page+4, len(page_range))]
<commit_msg>Support Paginator.page_range returning an iterator or list
Paginator.page_range will return an iterator in Django 1.9+<commit_after>from django import template
register = template.Library()
@register.filter
def four_before_page(page_range, page):
"""Returns 4 or fewer pages before the given (1-based) page number"""
return list(page_range)[max(page-5, 0):max(page-1, 0)]
@register.filter
def four_after_page(page_range, page):
"""Returns 4 or fewer pages after the given (1-based) page number"""
return list(page_range)[page:min(page+4, len(page_range))]
|
56b85e7f995eb460a5e6fedb9f2296e430d17e96 | listener.py | listener.py | # Listener Class Override
import time
import json
from tweepy.streaming import StreamListener
import sys
class Listener(StreamListener):
def __init__(self, twitter_api, start_time=time.time()):
self.time = start_time
self.api = twitter_api
def on_data(self, data):
# uids we are currently trackin
user_ids = [
842062946,
896881849,
786673790,
1003145436,
1702767096,
2767456571,
2844888263,
19991403,
2516461339
]
try:
uid = json.loads(data)["user"]["id"]
print "UID:" + str(uid)
if (uid in user_ids):
tweet = json.loads(data)["text"] + "something"
print "tweeting " + tweet + "..."
self.api.update_status(status = tweet)
print "Done."
else:
print "Do nothing. UID:" + str(uid) + " not tracked."
time.sleep(5)
saveFile = open("raw_tweets.json", "a")
saveFile.write(data)
saveFile.write("\n")
saveFile.close()
return True
except BaseException, e:
print "failed in ondata,", str(e)
time.sleep(5)
pass
def on_error(self, status):
print status
| # Listener Class Override
import time
import json
from tweepy.streaming import StreamListener
import sys
class Listener(StreamListener):
def __init__(self, twitter_api, start_time=time.time()):
self.time = start_time
self.api = twitter_api
def on_data(self, data):
# uids we are currently trackin
user_ids = [
842062946,
896881849,
786673790,
1003145436,
1702767096,
2767456571,
2844888263,
19991403,
2516461339
]
try:
uid = json.loads(data)["user"]["id"]
print "UID:" + str(uid)
if (uid in user_ids):
tweet = json.loads(data)["text"]
print "tweeting " + tweet + "..."
self.api.update_status(status = tweet)
print "Done."
else:
print "Do nothing. UID:" + str(uid) + " not tracked."
time.sleep(5)
saveFile = open("raw_tweets.json", "a")
saveFile.write(data)
saveFile.write("\n")
saveFile.close()
return True
except BaseException, e:
print "failed in ondata,", str(e)
time.sleep(5)
pass
def on_error(self, status):
print status
| Remove text to make tweet unique for testing | Remove text to make tweet unique for testing
| Python | mit | robot-overlord/syriarightnow | # Listener Class Override
import time
import json
from tweepy.streaming import StreamListener
import sys
class Listener(StreamListener):
def __init__(self, twitter_api, start_time=time.time()):
self.time = start_time
self.api = twitter_api
def on_data(self, data):
# uids we are currently trackin
user_ids = [
842062946,
896881849,
786673790,
1003145436,
1702767096,
2767456571,
2844888263,
19991403,
2516461339
]
try:
uid = json.loads(data)["user"]["id"]
print "UID:" + str(uid)
if (uid in user_ids):
tweet = json.loads(data)["text"] + "something"
print "tweeting " + tweet + "..."
self.api.update_status(status = tweet)
print "Done."
else:
print "Do nothing. UID:" + str(uid) + " not tracked."
time.sleep(5)
saveFile = open("raw_tweets.json", "a")
saveFile.write(data)
saveFile.write("\n")
saveFile.close()
return True
except BaseException, e:
print "failed in ondata,", str(e)
time.sleep(5)
pass
def on_error(self, status):
print status
Remove text to make tweet unique for testing | # Listener Class Override
import time
import json
from tweepy.streaming import StreamListener
import sys
class Listener(StreamListener):
def __init__(self, twitter_api, start_time=time.time()):
self.time = start_time
self.api = twitter_api
def on_data(self, data):
# uids we are currently trackin
user_ids = [
842062946,
896881849,
786673790,
1003145436,
1702767096,
2767456571,
2844888263,
19991403,
2516461339
]
try:
uid = json.loads(data)["user"]["id"]
print "UID:" + str(uid)
if (uid in user_ids):
tweet = json.loads(data)["text"]
print "tweeting " + tweet + "..."
self.api.update_status(status = tweet)
print "Done."
else:
print "Do nothing. UID:" + str(uid) + " not tracked."
time.sleep(5)
saveFile = open("raw_tweets.json", "a")
saveFile.write(data)
saveFile.write("\n")
saveFile.close()
return True
except BaseException, e:
print "failed in ondata,", str(e)
time.sleep(5)
pass
def on_error(self, status):
print status
| <commit_before># Listener Class Override
import time
import json
from tweepy.streaming import StreamListener
import sys
class Listener(StreamListener):
def __init__(self, twitter_api, start_time=time.time()):
self.time = start_time
self.api = twitter_api
def on_data(self, data):
# uids we are currently trackin
user_ids = [
842062946,
896881849,
786673790,
1003145436,
1702767096,
2767456571,
2844888263,
19991403,
2516461339
]
try:
uid = json.loads(data)["user"]["id"]
print "UID:" + str(uid)
if (uid in user_ids):
tweet = json.loads(data)["text"] + "something"
print "tweeting " + tweet + "..."
self.api.update_status(status = tweet)
print "Done."
else:
print "Do nothing. UID:" + str(uid) + " not tracked."
time.sleep(5)
saveFile = open("raw_tweets.json", "a")
saveFile.write(data)
saveFile.write("\n")
saveFile.close()
return True
except BaseException, e:
print "failed in ondata,", str(e)
time.sleep(5)
pass
def on_error(self, status):
print status
<commit_msg>Remove text to make tweet unique for testing<commit_after> | # Listener Class Override
import time
import json
from tweepy.streaming import StreamListener
import sys
class Listener(StreamListener):
def __init__(self, twitter_api, start_time=time.time()):
self.time = start_time
self.api = twitter_api
def on_data(self, data):
# uids we are currently trackin
user_ids = [
842062946,
896881849,
786673790,
1003145436,
1702767096,
2767456571,
2844888263,
19991403,
2516461339
]
try:
uid = json.loads(data)["user"]["id"]
print "UID:" + str(uid)
if (uid in user_ids):
tweet = json.loads(data)["text"]
print "tweeting " + tweet + "..."
self.api.update_status(status = tweet)
print "Done."
else:
print "Do nothing. UID:" + str(uid) + " not tracked."
time.sleep(5)
saveFile = open("raw_tweets.json", "a")
saveFile.write(data)
saveFile.write("\n")
saveFile.close()
return True
except BaseException, e:
print "failed in ondata,", str(e)
time.sleep(5)
pass
def on_error(self, status):
print status
| # Listener Class Override
import time
import json
from tweepy.streaming import StreamListener
import sys
class Listener(StreamListener):
def __init__(self, twitter_api, start_time=time.time()):
self.time = start_time
self.api = twitter_api
def on_data(self, data):
# uids we are currently trackin
user_ids = [
842062946,
896881849,
786673790,
1003145436,
1702767096,
2767456571,
2844888263,
19991403,
2516461339
]
try:
uid = json.loads(data)["user"]["id"]
print "UID:" + str(uid)
if (uid in user_ids):
tweet = json.loads(data)["text"] + "something"
print "tweeting " + tweet + "..."
self.api.update_status(status = tweet)
print "Done."
else:
print "Do nothing. UID:" + str(uid) + " not tracked."
time.sleep(5)
saveFile = open("raw_tweets.json", "a")
saveFile.write(data)
saveFile.write("\n")
saveFile.close()
return True
except BaseException, e:
print "failed in ondata,", str(e)
time.sleep(5)
pass
def on_error(self, status):
print status
Remove text to make tweet unique for testing# Listener Class Override
import time
import json
from tweepy.streaming import StreamListener
import sys
class Listener(StreamListener):
def __init__(self, twitter_api, start_time=time.time()):
self.time = start_time
self.api = twitter_api
def on_data(self, data):
# uids we are currently trackin
user_ids = [
842062946,
896881849,
786673790,
1003145436,
1702767096,
2767456571,
2844888263,
19991403,
2516461339
]
try:
uid = json.loads(data)["user"]["id"]
print "UID:" + str(uid)
if (uid in user_ids):
tweet = json.loads(data)["text"]
print "tweeting " + tweet + "..."
self.api.update_status(status = tweet)
print "Done."
else:
print "Do nothing. UID:" + str(uid) + " not tracked."
time.sleep(5)
saveFile = open("raw_tweets.json", "a")
saveFile.write(data)
saveFile.write("\n")
saveFile.close()
return True
except BaseException, e:
print "failed in ondata,", str(e)
time.sleep(5)
pass
def on_error(self, status):
print status
| <commit_before># Listener Class Override
import time
import json
from tweepy.streaming import StreamListener
import sys
class Listener(StreamListener):
def __init__(self, twitter_api, start_time=time.time()):
self.time = start_time
self.api = twitter_api
def on_data(self, data):
# uids we are currently trackin
user_ids = [
842062946,
896881849,
786673790,
1003145436,
1702767096,
2767456571,
2844888263,
19991403,
2516461339
]
try:
uid = json.loads(data)["user"]["id"]
print "UID:" + str(uid)
if (uid in user_ids):
tweet = json.loads(data)["text"] + "something"
print "tweeting " + tweet + "..."
self.api.update_status(status = tweet)
print "Done."
else:
print "Do nothing. UID:" + str(uid) + " not tracked."
time.sleep(5)
saveFile = open("raw_tweets.json", "a")
saveFile.write(data)
saveFile.write("\n")
saveFile.close()
return True
except BaseException, e:
print "failed in ondata,", str(e)
time.sleep(5)
pass
def on_error(self, status):
print status
<commit_msg>Remove text to make tweet unique for testing<commit_after># Listener Class Override
import time
import json
from tweepy.streaming import StreamListener
import sys
class Listener(StreamListener):
def __init__(self, twitter_api, start_time=time.time()):
self.time = start_time
self.api = twitter_api
def on_data(self, data):
# uids we are currently trackin
user_ids = [
842062946,
896881849,
786673790,
1003145436,
1702767096,
2767456571,
2844888263,
19991403,
2516461339
]
try:
uid = json.loads(data)["user"]["id"]
print "UID:" + str(uid)
if (uid in user_ids):
tweet = json.loads(data)["text"]
print "tweeting " + tweet + "..."
self.api.update_status(status = tweet)
print "Done."
else:
print "Do nothing. UID:" + str(uid) + " not tracked."
time.sleep(5)
saveFile = open("raw_tweets.json", "a")
saveFile.write(data)
saveFile.write("\n")
saveFile.close()
return True
except BaseException, e:
print "failed in ondata,", str(e)
time.sleep(5)
pass
def on_error(self, status):
print status
|
10db07f1fc432f6f3e1e530d28cfbfd6ada0a321 | versebot/webparser.py | versebot/webparser.py | """
VerseBot for reddit
By Matthieu Grieger
webparser.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
class Parser:
""" Parser class for BibleGateway parsing methods. """
def __init__(self):
""" Initializes translations attribute and checks if there are any new translations
to add to the database. """
trans = self.find_supported_translations()
if trans is None:
self.translations = None
else:
self.translations = trans.sort(key=len, reverse=True)
def find_supported_translations(self):
""" Retrieves a list of supported translations from BibleGateway's translation
page. """
url = "http://www.biblegateway.com/versions/"
translations = list()
page = urlopen(url)
soup = BeautifulSoup(page.read())
# It seems that BibleGateway has changed the layout of their versions page. This needs
# to be redone!
translations = soup.find("select", {"class":"search-translation-select"})
trans = translations.findAll("option")
for t in trans:
if t.has_attr("value") and not t.has_attr("class"):
cur_trans = t["value"]
translations.append(cur_trans)
# Add local translations to supported translations list
translations.append("NJPS")
return translations
| """
VerseBot for reddit
By Matthieu Grieger
webparser.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
class Parser:
""" Parser class for BibleGateway parsing methods. """
def __init__(self):
""" Initializes translations attribute and checks if there are any new translations
to add to the database. """
trans = self.find_supported_translations()
if trans is None:
self.translations = None
else:
self.translations = trans.sort(key=len, reverse=True)
def find_supported_translations(self):
""" Retrieves a list of supported translations from BibleGateway's translation
page. """
url = "https://www.biblegateway.com/versions/"
translations = list()
page = urlopen(url)
soup = BeautifulSoup(page.read())
# It seems that BibleGateway has changed the layout of their versions page. This needs
# to be redone!
translations_select = soup.find("select", {"class":"search-translation-select"})
trans = translations_select.findAll("option")
for t in trans:
if t.has_attr("value") and not t.has_attr("class"):
cur_trans = t["value"]
translations.append(cur_trans)
# Add local translations to supported translations list
translations.append("NJPS")
return translations
| Rename translations to translations_select, switch to HTTPS | Rename translations to translations_select, switch to HTTPS
| Python | mit | Matthew-Arnold/slack-versebot,Matthew-Arnold/slack-versebot | """
VerseBot for reddit
By Matthieu Grieger
webparser.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
class Parser:
""" Parser class for BibleGateway parsing methods. """
def __init__(self):
""" Initializes translations attribute and checks if there are any new translations
to add to the database. """
trans = self.find_supported_translations()
if trans is None:
self.translations = None
else:
self.translations = trans.sort(key=len, reverse=True)
def find_supported_translations(self):
""" Retrieves a list of supported translations from BibleGateway's translation
page. """
url = "http://www.biblegateway.com/versions/"
translations = list()
page = urlopen(url)
soup = BeautifulSoup(page.read())
# It seems that BibleGateway has changed the layout of their versions page. This needs
# to be redone!
translations = soup.find("select", {"class":"search-translation-select"})
trans = translations.findAll("option")
for t in trans:
if t.has_attr("value") and not t.has_attr("class"):
cur_trans = t["value"]
translations.append(cur_trans)
# Add local translations to supported translations list
translations.append("NJPS")
return translations
Rename translations to translations_select, switch to HTTPS | """
VerseBot for reddit
By Matthieu Grieger
webparser.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
class Parser:
""" Parser class for BibleGateway parsing methods. """
def __init__(self):
""" Initializes translations attribute and checks if there are any new translations
to add to the database. """
trans = self.find_supported_translations()
if trans is None:
self.translations = None
else:
self.translations = trans.sort(key=len, reverse=True)
def find_supported_translations(self):
""" Retrieves a list of supported translations from BibleGateway's translation
page. """
url = "https://www.biblegateway.com/versions/"
translations = list()
page = urlopen(url)
soup = BeautifulSoup(page.read())
# It seems that BibleGateway has changed the layout of their versions page. This needs
# to be redone!
translations_select = soup.find("select", {"class":"search-translation-select"})
trans = translations_select.findAll("option")
for t in trans:
if t.has_attr("value") and not t.has_attr("class"):
cur_trans = t["value"]
translations.append(cur_trans)
# Add local translations to supported translations list
translations.append("NJPS")
return translations
| <commit_before>"""
VerseBot for reddit
By Matthieu Grieger
webparser.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
class Parser:
""" Parser class for BibleGateway parsing methods. """
def __init__(self):
""" Initializes translations attribute and checks if there are any new translations
to add to the database. """
trans = self.find_supported_translations()
if trans is None:
self.translations = None
else:
self.translations = trans.sort(key=len, reverse=True)
def find_supported_translations(self):
""" Retrieves a list of supported translations from BibleGateway's translation
page. """
url = "http://www.biblegateway.com/versions/"
translations = list()
page = urlopen(url)
soup = BeautifulSoup(page.read())
# It seems that BibleGateway has changed the layout of their versions page. This needs
# to be redone!
translations = soup.find("select", {"class":"search-translation-select"})
trans = translations.findAll("option")
for t in trans:
if t.has_attr("value") and not t.has_attr("class"):
cur_trans = t["value"]
translations.append(cur_trans)
# Add local translations to supported translations list
translations.append("NJPS")
return translations
<commit_msg>Rename translations to translations_select, switch to HTTPS<commit_after> | """
VerseBot for reddit
By Matthieu Grieger
webparser.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
class Parser:
""" Parser class for BibleGateway parsing methods. """
def __init__(self):
""" Initializes translations attribute and checks if there are any new translations
to add to the database. """
trans = self.find_supported_translations()
if trans is None:
self.translations = None
else:
self.translations = trans.sort(key=len, reverse=True)
def find_supported_translations(self):
""" Retrieves a list of supported translations from BibleGateway's translation
page. """
url = "https://www.biblegateway.com/versions/"
translations = list()
page = urlopen(url)
soup = BeautifulSoup(page.read())
# It seems that BibleGateway has changed the layout of their versions page. This needs
# to be redone!
translations_select = soup.find("select", {"class":"search-translation-select"})
trans = translations_select.findAll("option")
for t in trans:
if t.has_attr("value") and not t.has_attr("class"):
cur_trans = t["value"]
translations.append(cur_trans)
# Add local translations to supported translations list
translations.append("NJPS")
return translations
| """
VerseBot for reddit
By Matthieu Grieger
webparser.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
class Parser:
""" Parser class for BibleGateway parsing methods. """
def __init__(self):
""" Initializes translations attribute and checks if there are any new translations
to add to the database. """
trans = self.find_supported_translations()
if trans is None:
self.translations = None
else:
self.translations = trans.sort(key=len, reverse=True)
def find_supported_translations(self):
""" Retrieves a list of supported translations from BibleGateway's translation
page. """
url = "http://www.biblegateway.com/versions/"
translations = list()
page = urlopen(url)
soup = BeautifulSoup(page.read())
# It seems that BibleGateway has changed the layout of their versions page. This needs
# to be redone!
translations = soup.find("select", {"class":"search-translation-select"})
trans = translations.findAll("option")
for t in trans:
if t.has_attr("value") and not t.has_attr("class"):
cur_trans = t["value"]
translations.append(cur_trans)
# Add local translations to supported translations list
translations.append("NJPS")
return translations
Rename translations to translations_select, switch to HTTPS"""
VerseBot for reddit
By Matthieu Grieger
webparser.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
class Parser:
""" Parser class for BibleGateway parsing methods. """
def __init__(self):
""" Initializes translations attribute and checks if there are any new translations
to add to the database. """
trans = self.find_supported_translations()
if trans is None:
self.translations = None
else:
self.translations = trans.sort(key=len, reverse=True)
def find_supported_translations(self):
""" Retrieves a list of supported translations from BibleGateway's translation
page. """
url = "https://www.biblegateway.com/versions/"
translations = list()
page = urlopen(url)
soup = BeautifulSoup(page.read())
# It seems that BibleGateway has changed the layout of their versions page. This needs
# to be redone!
translations_select = soup.find("select", {"class":"search-translation-select"})
trans = translations_select.findAll("option")
for t in trans:
if t.has_attr("value") and not t.has_attr("class"):
cur_trans = t["value"]
translations.append(cur_trans)
# Add local translations to supported translations list
translations.append("NJPS")
return translations
| <commit_before>"""
VerseBot for reddit
By Matthieu Grieger
webparser.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
class Parser:
""" Parser class for BibleGateway parsing methods. """
def __init__(self):
""" Initializes translations attribute and checks if there are any new translations
to add to the database. """
trans = self.find_supported_translations()
if trans is None:
self.translations = None
else:
self.translations = trans.sort(key=len, reverse=True)
def find_supported_translations(self):
""" Retrieves a list of supported translations from BibleGateway's translation
page. """
url = "http://www.biblegateway.com/versions/"
translations = list()
page = urlopen(url)
soup = BeautifulSoup(page.read())
# It seems that BibleGateway has changed the layout of their versions page. This needs
# to be redone!
translations = soup.find("select", {"class":"search-translation-select"})
trans = translations.findAll("option")
for t in trans:
if t.has_attr("value") and not t.has_attr("class"):
cur_trans = t["value"]
translations.append(cur_trans)
# Add local translations to supported translations list
translations.append("NJPS")
return translations
<commit_msg>Rename translations to translations_select, switch to HTTPS<commit_after>"""
VerseBot for reddit
By Matthieu Grieger
webparser.py
Copyright (c) 2015 Matthieu Grieger (MIT License)
"""
from bs4 import BeautifulSoup
from urllib.request import urlopen
class Parser:
""" Parser class for BibleGateway parsing methods. """
def __init__(self):
""" Initializes translations attribute and checks if there are any new translations
to add to the database. """
trans = self.find_supported_translations()
if trans is None:
self.translations = None
else:
self.translations = trans.sort(key=len, reverse=True)
def find_supported_translations(self):
""" Retrieves a list of supported translations from BibleGateway's translation
page. """
url = "https://www.biblegateway.com/versions/"
translations = list()
page = urlopen(url)
soup = BeautifulSoup(page.read())
# It seems that BibleGateway has changed the layout of their versions page. This needs
# to be redone!
translations_select = soup.find("select", {"class":"search-translation-select"})
trans = translations_select.findAll("option")
for t in trans:
if t.has_attr("value") and not t.has_attr("class"):
cur_trans = t["value"]
translations.append(cur_trans)
# Add local translations to supported translations list
translations.append("NJPS")
return translations
|
9d95974bb35ab6e7286fe762def7f117944268f0 | examples/balance.py | examples/balance.py | #!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import messagebird
ACCESS_KEY = 'test_gshuPaZoeEG6ovbc8M79w0QyM'
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
# Fetch the Balance object.
balance = client.balance()
# Print the object information.
print('\nThe following information was returned as a Balance object:\n')
print(' amount : %d' % balance.amount)
print(' type : %s' % balance.type)
print(' payment : %s\n' % balance.payment)
except messagebird.client.ErrorException as e:
print('\nAn error occured while requesting a Balance object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
| #!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import messagebird
ACCESS_KEY = 'test_gshuPaZoeEG6ovbc8M79w0QyM'
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
# Fetch the Balance object.
balance = client.balance()
# Print the object information.
print('\nThe following information was returned as a Balance object:\n')
print(' amount : %d' % balance.amount)
print(' type : %s' % balance.type)
print(' payment : %s\n' % balance.payment)
except messagebird.client.ErrorException as e:
print('\nAn error occured while requesting a Balance object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
| Change double quotes to single quotes | Change double quotes to single quotes
| Python | bsd-2-clause | messagebird/python-rest-api | #!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import messagebird
ACCESS_KEY = 'test_gshuPaZoeEG6ovbc8M79w0QyM'
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
# Fetch the Balance object.
balance = client.balance()
# Print the object information.
print('\nThe following information was returned as a Balance object:\n')
print(' amount : %d' % balance.amount)
print(' type : %s' % balance.type)
print(' payment : %s\n' % balance.payment)
except messagebird.client.ErrorException as e:
print('\nAn error occured while requesting a Balance object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
Change double quotes to single quotes | #!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import messagebird
ACCESS_KEY = 'test_gshuPaZoeEG6ovbc8M79w0QyM'
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
# Fetch the Balance object.
balance = client.balance()
# Print the object information.
print('\nThe following information was returned as a Balance object:\n')
print(' amount : %d' % balance.amount)
print(' type : %s' % balance.type)
print(' payment : %s\n' % balance.payment)
except messagebird.client.ErrorException as e:
print('\nAn error occured while requesting a Balance object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
| <commit_before>#!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import messagebird
ACCESS_KEY = 'test_gshuPaZoeEG6ovbc8M79w0QyM'
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
# Fetch the Balance object.
balance = client.balance()
# Print the object information.
print('\nThe following information was returned as a Balance object:\n')
print(' amount : %d' % balance.amount)
print(' type : %s' % balance.type)
print(' payment : %s\n' % balance.payment)
except messagebird.client.ErrorException as e:
print('\nAn error occured while requesting a Balance object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
<commit_msg>Change double quotes to single quotes<commit_after> | #!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import messagebird
ACCESS_KEY = 'test_gshuPaZoeEG6ovbc8M79w0QyM'
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
# Fetch the Balance object.
balance = client.balance()
# Print the object information.
print('\nThe following information was returned as a Balance object:\n')
print(' amount : %d' % balance.amount)
print(' type : %s' % balance.type)
print(' payment : %s\n' % balance.payment)
except messagebird.client.ErrorException as e:
print('\nAn error occured while requesting a Balance object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
| #!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import messagebird
ACCESS_KEY = 'test_gshuPaZoeEG6ovbc8M79w0QyM'
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
# Fetch the Balance object.
balance = client.balance()
# Print the object information.
print('\nThe following information was returned as a Balance object:\n')
print(' amount : %d' % balance.amount)
print(' type : %s' % balance.type)
print(' payment : %s\n' % balance.payment)
except messagebird.client.ErrorException as e:
print('\nAn error occured while requesting a Balance object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
Change double quotes to single quotes#!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import messagebird
ACCESS_KEY = 'test_gshuPaZoeEG6ovbc8M79w0QyM'
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
# Fetch the Balance object.
balance = client.balance()
# Print the object information.
print('\nThe following information was returned as a Balance object:\n')
print(' amount : %d' % balance.amount)
print(' type : %s' % balance.type)
print(' payment : %s\n' % balance.payment)
except messagebird.client.ErrorException as e:
print('\nAn error occured while requesting a Balance object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
| <commit_before>#!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import messagebird
ACCESS_KEY = 'test_gshuPaZoeEG6ovbc8M79w0QyM'
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
# Fetch the Balance object.
balance = client.balance()
# Print the object information.
print('\nThe following information was returned as a Balance object:\n')
print(' amount : %d' % balance.amount)
print(' type : %s' % balance.type)
print(' payment : %s\n' % balance.payment)
except messagebird.client.ErrorException as e:
print('\nAn error occured while requesting a Balance object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
<commit_msg>Change double quotes to single quotes<commit_after>#!/usr/bin/env python
import sys, os
sys.path.append(os.path.join(os.path.dirname(__file__), '..'))
import messagebird
ACCESS_KEY = 'test_gshuPaZoeEG6ovbc8M79w0QyM'
try:
# Create a MessageBird client with the specified ACCESS_KEY.
client = messagebird.Client(ACCESS_KEY)
# Fetch the Balance object.
balance = client.balance()
# Print the object information.
print('\nThe following information was returned as a Balance object:\n')
print(' amount : %d' % balance.amount)
print(' type : %s' % balance.type)
print(' payment : %s\n' % balance.payment)
except messagebird.client.ErrorException as e:
print('\nAn error occured while requesting a Balance object:\n')
for error in e.errors:
print(' code : %d' % error.code)
print(' description : %s' % error.description)
print(' parameter : %s\n' % error.parameter)
|
6d660b0c27029817bc20406454ba565d09cfa31d | wagtail/core/forms.py | wagtail/core/forms.py | from django import forms
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
class PasswordViewRestrictionForm(forms.Form):
password = forms.CharField(label=gettext_lazy("Password"), widget=forms.PasswordInput)
return_url = forms.CharField(widget=forms.HiddenInput)
def __init__(self, *args, **kwargs):
self.restriction = kwargs.pop('instance')
super().__init__(*args, **kwargs)
def clean_password(self):
data = self.cleaned_data['password']
if data != self.restriction.password:
raise forms.ValidationError(_("The password you have entered is not correct. Please try again."))
return data
| from django import forms
from django.utils.crypto import constant_time_compare
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
class PasswordViewRestrictionForm(forms.Form):
password = forms.CharField(label=gettext_lazy("Password"), widget=forms.PasswordInput)
return_url = forms.CharField(widget=forms.HiddenInput)
def __init__(self, *args, **kwargs):
self.restriction = kwargs.pop('instance')
super().__init__(*args, **kwargs)
def clean_password(self):
data = self.cleaned_data['password']
if not constant_time_compare(data, self.restriction.password):
raise forms.ValidationError(_("The password you have entered is not correct. Please try again."))
return data
| Use constant_time_compare for view restriction password checks | Use constant_time_compare for view restriction password checks
| Python | bsd-3-clause | zerolab/wagtail,takeflight/wagtail,rsalmaso/wagtail,gasman/wagtail,thenewguy/wagtail,takeflight/wagtail,gasman/wagtail,mixxorz/wagtail,kaedroho/wagtail,FlipperPA/wagtail,rsalmaso/wagtail,wagtail/wagtail,thenewguy/wagtail,rsalmaso/wagtail,takeflight/wagtail,wagtail/wagtail,jnns/wagtail,torchbox/wagtail,mixxorz/wagtail,mixxorz/wagtail,thenewguy/wagtail,kaedroho/wagtail,rsalmaso/wagtail,thenewguy/wagtail,kaedroho/wagtail,gasman/wagtail,gasman/wagtail,jnns/wagtail,thenewguy/wagtail,jnns/wagtail,takeflight/wagtail,kaedroho/wagtail,FlipperPA/wagtail,zerolab/wagtail,torchbox/wagtail,wagtail/wagtail,wagtail/wagtail,FlipperPA/wagtail,zerolab/wagtail,torchbox/wagtail,wagtail/wagtail,gasman/wagtail,zerolab/wagtail,rsalmaso/wagtail,mixxorz/wagtail,mixxorz/wagtail,torchbox/wagtail,kaedroho/wagtail,FlipperPA/wagtail,jnns/wagtail,zerolab/wagtail | from django import forms
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
class PasswordViewRestrictionForm(forms.Form):
password = forms.CharField(label=gettext_lazy("Password"), widget=forms.PasswordInput)
return_url = forms.CharField(widget=forms.HiddenInput)
def __init__(self, *args, **kwargs):
self.restriction = kwargs.pop('instance')
super().__init__(*args, **kwargs)
def clean_password(self):
data = self.cleaned_data['password']
if data != self.restriction.password:
raise forms.ValidationError(_("The password you have entered is not correct. Please try again."))
return data
Use constant_time_compare for view restriction password checks | from django import forms
from django.utils.crypto import constant_time_compare
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
class PasswordViewRestrictionForm(forms.Form):
password = forms.CharField(label=gettext_lazy("Password"), widget=forms.PasswordInput)
return_url = forms.CharField(widget=forms.HiddenInput)
def __init__(self, *args, **kwargs):
self.restriction = kwargs.pop('instance')
super().__init__(*args, **kwargs)
def clean_password(self):
data = self.cleaned_data['password']
if not constant_time_compare(data, self.restriction.password):
raise forms.ValidationError(_("The password you have entered is not correct. Please try again."))
return data
| <commit_before>from django import forms
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
class PasswordViewRestrictionForm(forms.Form):
password = forms.CharField(label=gettext_lazy("Password"), widget=forms.PasswordInput)
return_url = forms.CharField(widget=forms.HiddenInput)
def __init__(self, *args, **kwargs):
self.restriction = kwargs.pop('instance')
super().__init__(*args, **kwargs)
def clean_password(self):
data = self.cleaned_data['password']
if data != self.restriction.password:
raise forms.ValidationError(_("The password you have entered is not correct. Please try again."))
return data
<commit_msg>Use constant_time_compare for view restriction password checks<commit_after> | from django import forms
from django.utils.crypto import constant_time_compare
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
class PasswordViewRestrictionForm(forms.Form):
password = forms.CharField(label=gettext_lazy("Password"), widget=forms.PasswordInput)
return_url = forms.CharField(widget=forms.HiddenInput)
def __init__(self, *args, **kwargs):
self.restriction = kwargs.pop('instance')
super().__init__(*args, **kwargs)
def clean_password(self):
data = self.cleaned_data['password']
if not constant_time_compare(data, self.restriction.password):
raise forms.ValidationError(_("The password you have entered is not correct. Please try again."))
return data
| from django import forms
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
class PasswordViewRestrictionForm(forms.Form):
password = forms.CharField(label=gettext_lazy("Password"), widget=forms.PasswordInput)
return_url = forms.CharField(widget=forms.HiddenInput)
def __init__(self, *args, **kwargs):
self.restriction = kwargs.pop('instance')
super().__init__(*args, **kwargs)
def clean_password(self):
data = self.cleaned_data['password']
if data != self.restriction.password:
raise forms.ValidationError(_("The password you have entered is not correct. Please try again."))
return data
Use constant_time_compare for view restriction password checksfrom django import forms
from django.utils.crypto import constant_time_compare
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
class PasswordViewRestrictionForm(forms.Form):
password = forms.CharField(label=gettext_lazy("Password"), widget=forms.PasswordInput)
return_url = forms.CharField(widget=forms.HiddenInput)
def __init__(self, *args, **kwargs):
self.restriction = kwargs.pop('instance')
super().__init__(*args, **kwargs)
def clean_password(self):
data = self.cleaned_data['password']
if not constant_time_compare(data, self.restriction.password):
raise forms.ValidationError(_("The password you have entered is not correct. Please try again."))
return data
| <commit_before>from django import forms
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
class PasswordViewRestrictionForm(forms.Form):
password = forms.CharField(label=gettext_lazy("Password"), widget=forms.PasswordInput)
return_url = forms.CharField(widget=forms.HiddenInput)
def __init__(self, *args, **kwargs):
self.restriction = kwargs.pop('instance')
super().__init__(*args, **kwargs)
def clean_password(self):
data = self.cleaned_data['password']
if data != self.restriction.password:
raise forms.ValidationError(_("The password you have entered is not correct. Please try again."))
return data
<commit_msg>Use constant_time_compare for view restriction password checks<commit_after>from django import forms
from django.utils.crypto import constant_time_compare
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
class PasswordViewRestrictionForm(forms.Form):
password = forms.CharField(label=gettext_lazy("Password"), widget=forms.PasswordInput)
return_url = forms.CharField(widget=forms.HiddenInput)
def __init__(self, *args, **kwargs):
self.restriction = kwargs.pop('instance')
super().__init__(*args, **kwargs)
def clean_password(self):
data = self.cleaned_data['password']
if not constant_time_compare(data, self.restriction.password):
raise forms.ValidationError(_("The password you have entered is not correct. Please try again."))
return data
|
29beebfa571164827bf22618e85cc9db9d402c1a | plugins/FileHandlers/OBJReader/__init__.py | plugins/FileHandlers/OBJReader/__init__.py | #Shoopdawoop
from . import OBJReader
def getMetaData():
return {
'type': 'mesh_reader',
'plugin': {
"name": "OBJ Reader",
},
'mesh_reader': {
'extension': 'obj',
'description': 'LightWave OBJ File'
}
}
def register(app):
return OBJReader.OBJReader()
| #Shoopdawoop
from . import OBJReader
def getMetaData():
return {
'type': 'mesh_reader',
'plugin': {
"name": "OBJ Reader",
},
'mesh_reader': {
'extension': 'obj',
'description': 'Wavefront OBJ File'
}
}
def register(app):
return OBJReader.OBJReader()
| Set the right description for the OBJ reader | Set the right description for the OBJ reader
| Python | agpl-3.0 | onitake/Uranium,onitake/Uranium | #Shoopdawoop
from . import OBJReader
def getMetaData():
return {
'type': 'mesh_reader',
'plugin': {
"name": "OBJ Reader",
},
'mesh_reader': {
'extension': 'obj',
'description': 'LightWave OBJ File'
}
}
def register(app):
return OBJReader.OBJReader()
Set the right description for the OBJ reader | #Shoopdawoop
from . import OBJReader
def getMetaData():
return {
'type': 'mesh_reader',
'plugin': {
"name": "OBJ Reader",
},
'mesh_reader': {
'extension': 'obj',
'description': 'Wavefront OBJ File'
}
}
def register(app):
return OBJReader.OBJReader()
| <commit_before>#Shoopdawoop
from . import OBJReader
def getMetaData():
return {
'type': 'mesh_reader',
'plugin': {
"name": "OBJ Reader",
},
'mesh_reader': {
'extension': 'obj',
'description': 'LightWave OBJ File'
}
}
def register(app):
return OBJReader.OBJReader()
<commit_msg>Set the right description for the OBJ reader<commit_after> | #Shoopdawoop
from . import OBJReader
def getMetaData():
return {
'type': 'mesh_reader',
'plugin': {
"name": "OBJ Reader",
},
'mesh_reader': {
'extension': 'obj',
'description': 'Wavefront OBJ File'
}
}
def register(app):
return OBJReader.OBJReader()
| #Shoopdawoop
from . import OBJReader
def getMetaData():
return {
'type': 'mesh_reader',
'plugin': {
"name": "OBJ Reader",
},
'mesh_reader': {
'extension': 'obj',
'description': 'LightWave OBJ File'
}
}
def register(app):
return OBJReader.OBJReader()
Set the right description for the OBJ reader#Shoopdawoop
from . import OBJReader
def getMetaData():
return {
'type': 'mesh_reader',
'plugin': {
"name": "OBJ Reader",
},
'mesh_reader': {
'extension': 'obj',
'description': 'Wavefront OBJ File'
}
}
def register(app):
return OBJReader.OBJReader()
| <commit_before>#Shoopdawoop
from . import OBJReader
def getMetaData():
return {
'type': 'mesh_reader',
'plugin': {
"name": "OBJ Reader",
},
'mesh_reader': {
'extension': 'obj',
'description': 'LightWave OBJ File'
}
}
def register(app):
return OBJReader.OBJReader()
<commit_msg>Set the right description for the OBJ reader<commit_after>#Shoopdawoop
from . import OBJReader
def getMetaData():
return {
'type': 'mesh_reader',
'plugin': {
"name": "OBJ Reader",
},
'mesh_reader': {
'extension': 'obj',
'description': 'Wavefront OBJ File'
}
}
def register(app):
return OBJReader.OBJReader()
|
eac6629dca5a368048b4d2eeec30f760efd867a1 | conanfile.py | conanfile.py | from conans import ConanFile
import os
class ArghConan(ConanFile):
name = "argh"
version = "1.2.0"
url = "https://github.com/adishavit/argh"
description = "Argh! A minimalist argument handler."
license = "BSD 3-Clause"
exports = ["LICENSE"]
exports_sources = "*.h"
def package(self):
self.copy(pattern="LICENSE", dst="license")
self.copy(pattern="argh.h", dst="include")
| from conans import ConanFile
import os
class ArghConan(ConanFile):
name = "argh"
version = "1.2.1"
url = "https://github.com/adishavit/argh"
description = "Argh! A minimalist argument handler."
license = "BSD 3-Clause"
exports = ["LICENSE"]
exports_sources = "argh.h"
def package(self):
self.copy(pattern="LICENSE", dst="license")
self.copy(pattern="argh.h", dst="include")
| Create release 1.2.1 with conan and bintray support. | Create release 1.2.1 with conan and bintray support.
| Python | bsd-3-clause | adishavit/argh,adishavit/argh | from conans import ConanFile
import os
class ArghConan(ConanFile):
name = "argh"
version = "1.2.0"
url = "https://github.com/adishavit/argh"
description = "Argh! A minimalist argument handler."
license = "BSD 3-Clause"
exports = ["LICENSE"]
exports_sources = "*.h"
def package(self):
self.copy(pattern="LICENSE", dst="license")
self.copy(pattern="argh.h", dst="include")
Create release 1.2.1 with conan and bintray support. | from conans import ConanFile
import os
class ArghConan(ConanFile):
name = "argh"
version = "1.2.1"
url = "https://github.com/adishavit/argh"
description = "Argh! A minimalist argument handler."
license = "BSD 3-Clause"
exports = ["LICENSE"]
exports_sources = "argh.h"
def package(self):
self.copy(pattern="LICENSE", dst="license")
self.copy(pattern="argh.h", dst="include")
| <commit_before>from conans import ConanFile
import os
class ArghConan(ConanFile):
name = "argh"
version = "1.2.0"
url = "https://github.com/adishavit/argh"
description = "Argh! A minimalist argument handler."
license = "BSD 3-Clause"
exports = ["LICENSE"]
exports_sources = "*.h"
def package(self):
self.copy(pattern="LICENSE", dst="license")
self.copy(pattern="argh.h", dst="include")
<commit_msg>Create release 1.2.1 with conan and bintray support.<commit_after> | from conans import ConanFile
import os
class ArghConan(ConanFile):
name = "argh"
version = "1.2.1"
url = "https://github.com/adishavit/argh"
description = "Argh! A minimalist argument handler."
license = "BSD 3-Clause"
exports = ["LICENSE"]
exports_sources = "argh.h"
def package(self):
self.copy(pattern="LICENSE", dst="license")
self.copy(pattern="argh.h", dst="include")
| from conans import ConanFile
import os
class ArghConan(ConanFile):
name = "argh"
version = "1.2.0"
url = "https://github.com/adishavit/argh"
description = "Argh! A minimalist argument handler."
license = "BSD 3-Clause"
exports = ["LICENSE"]
exports_sources = "*.h"
def package(self):
self.copy(pattern="LICENSE", dst="license")
self.copy(pattern="argh.h", dst="include")
Create release 1.2.1 with conan and bintray support.from conans import ConanFile
import os
class ArghConan(ConanFile):
name = "argh"
version = "1.2.1"
url = "https://github.com/adishavit/argh"
description = "Argh! A minimalist argument handler."
license = "BSD 3-Clause"
exports = ["LICENSE"]
exports_sources = "argh.h"
def package(self):
self.copy(pattern="LICENSE", dst="license")
self.copy(pattern="argh.h", dst="include")
| <commit_before>from conans import ConanFile
import os
class ArghConan(ConanFile):
name = "argh"
version = "1.2.0"
url = "https://github.com/adishavit/argh"
description = "Argh! A minimalist argument handler."
license = "BSD 3-Clause"
exports = ["LICENSE"]
exports_sources = "*.h"
def package(self):
self.copy(pattern="LICENSE", dst="license")
self.copy(pattern="argh.h", dst="include")
<commit_msg>Create release 1.2.1 with conan and bintray support.<commit_after>from conans import ConanFile
import os
class ArghConan(ConanFile):
name = "argh"
version = "1.2.1"
url = "https://github.com/adishavit/argh"
description = "Argh! A minimalist argument handler."
license = "BSD 3-Clause"
exports = ["LICENSE"]
exports_sources = "argh.h"
def package(self):
self.copy(pattern="LICENSE", dst="license")
self.copy(pattern="argh.h", dst="include")
|
fc8318361755d7327fc6e02dbbd68a6221acad34 | impersonate/migrations/0001_initial.py | impersonate/migrations/0001_initial.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ImpersonationLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('session_key', models.CharField(help_text=b'The Django session request key.', max_length=40)),
('session_started_at', models.DateTimeField(help_text=b'The time impersonation began.', null=True, blank=True)),
('session_ended_at', models.DateTimeField(help_text=b'The time impersonation ended.', null=True, blank=True)),
('impersonating', models.ForeignKey(related_name='impersonated_by', to=settings.AUTH_USER_MODEL, help_text=b'The user being impersonated.')),
('impersonator', models.ForeignKey(related_name='impersonations', to=settings.AUTH_USER_MODEL, help_text=b'The user doing the impersonating.')),
],
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ImpersonationLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('session_key', models.CharField(help_text='The Django session request key.', max_length=40)),
('session_started_at', models.DateTimeField(help_text='The time impersonation began.', null=True, blank=True)),
('session_ended_at', models.DateTimeField(help_text='The time impersonation ended.', null=True, blank=True)),
('impersonating', models.ForeignKey(related_name='impersonated_by', to=settings.AUTH_USER_MODEL, help_text='The user being impersonated.')),
('impersonator', models.ForeignKey(related_name='impersonations', to=settings.AUTH_USER_MODEL, help_text='The user doing the impersonating.')),
],
),
]
| Fix unicode strings in migration | Fix unicode strings in migration
Fixes #31
| Python | bsd-3-clause | Top20Talent/django-impersonate,Top20Talent/django-impersonate | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ImpersonationLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('session_key', models.CharField(help_text=b'The Django session request key.', max_length=40)),
('session_started_at', models.DateTimeField(help_text=b'The time impersonation began.', null=True, blank=True)),
('session_ended_at', models.DateTimeField(help_text=b'The time impersonation ended.', null=True, blank=True)),
('impersonating', models.ForeignKey(related_name='impersonated_by', to=settings.AUTH_USER_MODEL, help_text=b'The user being impersonated.')),
('impersonator', models.ForeignKey(related_name='impersonations', to=settings.AUTH_USER_MODEL, help_text=b'The user doing the impersonating.')),
],
),
]
Fix unicode strings in migration
Fixes #31 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ImpersonationLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('session_key', models.CharField(help_text='The Django session request key.', max_length=40)),
('session_started_at', models.DateTimeField(help_text='The time impersonation began.', null=True, blank=True)),
('session_ended_at', models.DateTimeField(help_text='The time impersonation ended.', null=True, blank=True)),
('impersonating', models.ForeignKey(related_name='impersonated_by', to=settings.AUTH_USER_MODEL, help_text='The user being impersonated.')),
('impersonator', models.ForeignKey(related_name='impersonations', to=settings.AUTH_USER_MODEL, help_text='The user doing the impersonating.')),
],
),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ImpersonationLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('session_key', models.CharField(help_text=b'The Django session request key.', max_length=40)),
('session_started_at', models.DateTimeField(help_text=b'The time impersonation began.', null=True, blank=True)),
('session_ended_at', models.DateTimeField(help_text=b'The time impersonation ended.', null=True, blank=True)),
('impersonating', models.ForeignKey(related_name='impersonated_by', to=settings.AUTH_USER_MODEL, help_text=b'The user being impersonated.')),
('impersonator', models.ForeignKey(related_name='impersonations', to=settings.AUTH_USER_MODEL, help_text=b'The user doing the impersonating.')),
],
),
]
<commit_msg>Fix unicode strings in migration
Fixes #31<commit_after> | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ImpersonationLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('session_key', models.CharField(help_text='The Django session request key.', max_length=40)),
('session_started_at', models.DateTimeField(help_text='The time impersonation began.', null=True, blank=True)),
('session_ended_at', models.DateTimeField(help_text='The time impersonation ended.', null=True, blank=True)),
('impersonating', models.ForeignKey(related_name='impersonated_by', to=settings.AUTH_USER_MODEL, help_text='The user being impersonated.')),
('impersonator', models.ForeignKey(related_name='impersonations', to=settings.AUTH_USER_MODEL, help_text='The user doing the impersonating.')),
],
),
]
| # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ImpersonationLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('session_key', models.CharField(help_text=b'The Django session request key.', max_length=40)),
('session_started_at', models.DateTimeField(help_text=b'The time impersonation began.', null=True, blank=True)),
('session_ended_at', models.DateTimeField(help_text=b'The time impersonation ended.', null=True, blank=True)),
('impersonating', models.ForeignKey(related_name='impersonated_by', to=settings.AUTH_USER_MODEL, help_text=b'The user being impersonated.')),
('impersonator', models.ForeignKey(related_name='impersonations', to=settings.AUTH_USER_MODEL, help_text=b'The user doing the impersonating.')),
],
),
]
Fix unicode strings in migration
Fixes #31# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ImpersonationLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('session_key', models.CharField(help_text='The Django session request key.', max_length=40)),
('session_started_at', models.DateTimeField(help_text='The time impersonation began.', null=True, blank=True)),
('session_ended_at', models.DateTimeField(help_text='The time impersonation ended.', null=True, blank=True)),
('impersonating', models.ForeignKey(related_name='impersonated_by', to=settings.AUTH_USER_MODEL, help_text='The user being impersonated.')),
('impersonator', models.ForeignKey(related_name='impersonations', to=settings.AUTH_USER_MODEL, help_text='The user doing the impersonating.')),
],
),
]
| <commit_before># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ImpersonationLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('session_key', models.CharField(help_text=b'The Django session request key.', max_length=40)),
('session_started_at', models.DateTimeField(help_text=b'The time impersonation began.', null=True, blank=True)),
('session_ended_at', models.DateTimeField(help_text=b'The time impersonation ended.', null=True, blank=True)),
('impersonating', models.ForeignKey(related_name='impersonated_by', to=settings.AUTH_USER_MODEL, help_text=b'The user being impersonated.')),
('impersonator', models.ForeignKey(related_name='impersonations', to=settings.AUTH_USER_MODEL, help_text=b'The user doing the impersonating.')),
],
),
]
<commit_msg>Fix unicode strings in migration
Fixes #31<commit_after># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
from django.conf import settings
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='ImpersonationLog',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('session_key', models.CharField(help_text='The Django session request key.', max_length=40)),
('session_started_at', models.DateTimeField(help_text='The time impersonation began.', null=True, blank=True)),
('session_ended_at', models.DateTimeField(help_text='The time impersonation ended.', null=True, blank=True)),
('impersonating', models.ForeignKey(related_name='impersonated_by', to=settings.AUTH_USER_MODEL, help_text='The user being impersonated.')),
('impersonator', models.ForeignKey(related_name='impersonations', to=settings.AUTH_USER_MODEL, help_text='The user doing the impersonating.')),
],
),
]
|
ef4bd591abc794211624c0723d15cfa311370bb2 | examples/chatserver/views.py | examples/chatserver/views.py | # -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from django.views.decorators.csrf import csrf_exempt
import redis
from ws4redis import settings as redis_settings
class BaseTemplateView(TemplateView):
def get_context_data(self, **kwargs):
context = super(BaseTemplateView, self).get_context_data(**kwargs)
context.update(ws_url='ws://localhost:{SERVER_PORT}/ws/foobar'.format(**self.request.META))
return context
class BroadcastChatView(BaseTemplateView):
template_name = 'broadcast_chat.html'
class UserChatView(BaseTemplateView):
template_name = 'user_chat.html'
def __init__(self):
self._connection = redis.StrictRedis(host=redis_settings.REDIS_HOST, port=redis_settings.REDIS_PORT, db=0)
def get_context_data(self, **kwargs):
users = User.objects.all()
context = super(UserChatView, self).get_context_data(**kwargs)
context.update(users=users)
return context
@csrf_exempt
def post(self, request, *args, **kwargs):
channel = u'{0}:foobar'.format(request.POST.get('user'))
self._connection.publish(channel, request.POST.get('message'))
return HttpResponse('OK')
| # -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from django.views.decorators.csrf import csrf_exempt
import redis
from ws4redis import settings as redis_settings
class BaseTemplateView(TemplateView):
def get_context_data(self, **kwargs):
context = super(BaseTemplateView, self).get_context_data(**kwargs)
context.update(ws_url='ws://{SERVER_NAME}:{SERVER_PORT}/ws/foobar'.format(**self.request.META))
return context
class BroadcastChatView(BaseTemplateView):
template_name = 'broadcast_chat.html'
class UserChatView(BaseTemplateView):
template_name = 'user_chat.html'
def __init__(self):
self._connection = redis.StrictRedis(host=redis_settings.REDIS_HOST, port=redis_settings.REDIS_PORT, db=0)
def get_context_data(self, **kwargs):
users = User.objects.all()
context = super(UserChatView, self).get_context_data(**kwargs)
context.update(users=users)
return context
@csrf_exempt
def post(self, request, *args, **kwargs):
channel = u'{0}:foobar'.format(request.POST.get('user'))
self._connection.publish(channel, request.POST.get('message'))
return HttpResponse('OK')
| Use META.SERVER_NAME in template view. | Use META.SERVER_NAME in template view.
We may want to access this example from a different machine.
| Python | mit | schinckel/django-websocket-redis | # -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from django.views.decorators.csrf import csrf_exempt
import redis
from ws4redis import settings as redis_settings
class BaseTemplateView(TemplateView):
def get_context_data(self, **kwargs):
context = super(BaseTemplateView, self).get_context_data(**kwargs)
context.update(ws_url='ws://localhost:{SERVER_PORT}/ws/foobar'.format(**self.request.META))
return context
class BroadcastChatView(BaseTemplateView):
template_name = 'broadcast_chat.html'
class UserChatView(BaseTemplateView):
template_name = 'user_chat.html'
def __init__(self):
self._connection = redis.StrictRedis(host=redis_settings.REDIS_HOST, port=redis_settings.REDIS_PORT, db=0)
def get_context_data(self, **kwargs):
users = User.objects.all()
context = super(UserChatView, self).get_context_data(**kwargs)
context.update(users=users)
return context
@csrf_exempt
def post(self, request, *args, **kwargs):
channel = u'{0}:foobar'.format(request.POST.get('user'))
self._connection.publish(channel, request.POST.get('message'))
return HttpResponse('OK')
Use META.SERVER_NAME in template view.
We may want to access this example from a different machine. | # -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from django.views.decorators.csrf import csrf_exempt
import redis
from ws4redis import settings as redis_settings
class BaseTemplateView(TemplateView):
def get_context_data(self, **kwargs):
context = super(BaseTemplateView, self).get_context_data(**kwargs)
context.update(ws_url='ws://{SERVER_NAME}:{SERVER_PORT}/ws/foobar'.format(**self.request.META))
return context
class BroadcastChatView(BaseTemplateView):
template_name = 'broadcast_chat.html'
class UserChatView(BaseTemplateView):
template_name = 'user_chat.html'
def __init__(self):
self._connection = redis.StrictRedis(host=redis_settings.REDIS_HOST, port=redis_settings.REDIS_PORT, db=0)
def get_context_data(self, **kwargs):
users = User.objects.all()
context = super(UserChatView, self).get_context_data(**kwargs)
context.update(users=users)
return context
@csrf_exempt
def post(self, request, *args, **kwargs):
channel = u'{0}:foobar'.format(request.POST.get('user'))
self._connection.publish(channel, request.POST.get('message'))
return HttpResponse('OK')
| <commit_before># -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from django.views.decorators.csrf import csrf_exempt
import redis
from ws4redis import settings as redis_settings
class BaseTemplateView(TemplateView):
def get_context_data(self, **kwargs):
context = super(BaseTemplateView, self).get_context_data(**kwargs)
context.update(ws_url='ws://localhost:{SERVER_PORT}/ws/foobar'.format(**self.request.META))
return context
class BroadcastChatView(BaseTemplateView):
template_name = 'broadcast_chat.html'
class UserChatView(BaseTemplateView):
template_name = 'user_chat.html'
def __init__(self):
self._connection = redis.StrictRedis(host=redis_settings.REDIS_HOST, port=redis_settings.REDIS_PORT, db=0)
def get_context_data(self, **kwargs):
users = User.objects.all()
context = super(UserChatView, self).get_context_data(**kwargs)
context.update(users=users)
return context
@csrf_exempt
def post(self, request, *args, **kwargs):
channel = u'{0}:foobar'.format(request.POST.get('user'))
self._connection.publish(channel, request.POST.get('message'))
return HttpResponse('OK')
<commit_msg>Use META.SERVER_NAME in template view.
We may want to access this example from a different machine.<commit_after> | # -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from django.views.decorators.csrf import csrf_exempt
import redis
from ws4redis import settings as redis_settings
class BaseTemplateView(TemplateView):
def get_context_data(self, **kwargs):
context = super(BaseTemplateView, self).get_context_data(**kwargs)
context.update(ws_url='ws://{SERVER_NAME}:{SERVER_PORT}/ws/foobar'.format(**self.request.META))
return context
class BroadcastChatView(BaseTemplateView):
template_name = 'broadcast_chat.html'
class UserChatView(BaseTemplateView):
template_name = 'user_chat.html'
def __init__(self):
self._connection = redis.StrictRedis(host=redis_settings.REDIS_HOST, port=redis_settings.REDIS_PORT, db=0)
def get_context_data(self, **kwargs):
users = User.objects.all()
context = super(UserChatView, self).get_context_data(**kwargs)
context.update(users=users)
return context
@csrf_exempt
def post(self, request, *args, **kwargs):
channel = u'{0}:foobar'.format(request.POST.get('user'))
self._connection.publish(channel, request.POST.get('message'))
return HttpResponse('OK')
| # -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from django.views.decorators.csrf import csrf_exempt
import redis
from ws4redis import settings as redis_settings
class BaseTemplateView(TemplateView):
def get_context_data(self, **kwargs):
context = super(BaseTemplateView, self).get_context_data(**kwargs)
context.update(ws_url='ws://localhost:{SERVER_PORT}/ws/foobar'.format(**self.request.META))
return context
class BroadcastChatView(BaseTemplateView):
template_name = 'broadcast_chat.html'
class UserChatView(BaseTemplateView):
template_name = 'user_chat.html'
def __init__(self):
self._connection = redis.StrictRedis(host=redis_settings.REDIS_HOST, port=redis_settings.REDIS_PORT, db=0)
def get_context_data(self, **kwargs):
users = User.objects.all()
context = super(UserChatView, self).get_context_data(**kwargs)
context.update(users=users)
return context
@csrf_exempt
def post(self, request, *args, **kwargs):
channel = u'{0}:foobar'.format(request.POST.get('user'))
self._connection.publish(channel, request.POST.get('message'))
return HttpResponse('OK')
Use META.SERVER_NAME in template view.
We may want to access this example from a different machine.# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from django.views.decorators.csrf import csrf_exempt
import redis
from ws4redis import settings as redis_settings
class BaseTemplateView(TemplateView):
def get_context_data(self, **kwargs):
context = super(BaseTemplateView, self).get_context_data(**kwargs)
context.update(ws_url='ws://{SERVER_NAME}:{SERVER_PORT}/ws/foobar'.format(**self.request.META))
return context
class BroadcastChatView(BaseTemplateView):
template_name = 'broadcast_chat.html'
class UserChatView(BaseTemplateView):
template_name = 'user_chat.html'
def __init__(self):
self._connection = redis.StrictRedis(host=redis_settings.REDIS_HOST, port=redis_settings.REDIS_PORT, db=0)
def get_context_data(self, **kwargs):
users = User.objects.all()
context = super(UserChatView, self).get_context_data(**kwargs)
context.update(users=users)
return context
@csrf_exempt
def post(self, request, *args, **kwargs):
channel = u'{0}:foobar'.format(request.POST.get('user'))
self._connection.publish(channel, request.POST.get('message'))
return HttpResponse('OK')
| <commit_before># -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from django.views.decorators.csrf import csrf_exempt
import redis
from ws4redis import settings as redis_settings
class BaseTemplateView(TemplateView):
def get_context_data(self, **kwargs):
context = super(BaseTemplateView, self).get_context_data(**kwargs)
context.update(ws_url='ws://localhost:{SERVER_PORT}/ws/foobar'.format(**self.request.META))
return context
class BroadcastChatView(BaseTemplateView):
template_name = 'broadcast_chat.html'
class UserChatView(BaseTemplateView):
template_name = 'user_chat.html'
def __init__(self):
self._connection = redis.StrictRedis(host=redis_settings.REDIS_HOST, port=redis_settings.REDIS_PORT, db=0)
def get_context_data(self, **kwargs):
users = User.objects.all()
context = super(UserChatView, self).get_context_data(**kwargs)
context.update(users=users)
return context
@csrf_exempt
def post(self, request, *args, **kwargs):
channel = u'{0}:foobar'.format(request.POST.get('user'))
self._connection.publish(channel, request.POST.get('message'))
return HttpResponse('OK')
<commit_msg>Use META.SERVER_NAME in template view.
We may want to access this example from a different machine.<commit_after># -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.http import HttpResponse
from django.views.generic.base import TemplateView
from django.views.decorators.csrf import csrf_exempt
import redis
from ws4redis import settings as redis_settings
class BaseTemplateView(TemplateView):
def get_context_data(self, **kwargs):
context = super(BaseTemplateView, self).get_context_data(**kwargs)
context.update(ws_url='ws://{SERVER_NAME}:{SERVER_PORT}/ws/foobar'.format(**self.request.META))
return context
class BroadcastChatView(BaseTemplateView):
template_name = 'broadcast_chat.html'
class UserChatView(BaseTemplateView):
template_name = 'user_chat.html'
def __init__(self):
self._connection = redis.StrictRedis(host=redis_settings.REDIS_HOST, port=redis_settings.REDIS_PORT, db=0)
def get_context_data(self, **kwargs):
users = User.objects.all()
context = super(UserChatView, self).get_context_data(**kwargs)
context.update(users=users)
return context
@csrf_exempt
def post(self, request, *args, **kwargs):
channel = u'{0}:foobar'.format(request.POST.get('user'))
self._connection.publish(channel, request.POST.get('message'))
return HttpResponse('OK')
|
c8c858cd26031178a8be30c3824577e0832806dc | bookworm/settings_mobile.py | bookworm/settings_mobile.py | from settings import *
import settings
TEMPLATE_DIRS_BASE = TEMPLATE_DIRS
TEMPLATE_DIRS = (
'%s/library/templates/mobile/auth' % ROOT_PATH,
'%s/library/templates/mobile' % ROOT_PATH,
)
TEMPLATE_DIRS += TEMPLATE_DIRS_BASE
MOBILE = True
| from settings import *
import settings
TEMPLATE_DIRS_BASE = TEMPLATE_DIRS
TEMPLATE_DIRS = (
'%s/library/templates/mobile/auth' % ROOT_PATH,
'%s/library/templates/mobile' % ROOT_PATH,
)
TEMPLATE_DIRS += TEMPLATE_DIRS_BASE
MOBILE = True
SESSION_COOKIE_NAME = 'bookworm_mobile'
| Change cookie name for mobile setting | Change cookie name for mobile setting
--HG--
extra : convert_revision : svn%3Ae08d0fb5-4147-0410-a205-bba44f1f51a3/trunk%40553
| Python | bsd-3-clause | erochest/threepress-rdfa,erochest/threepress-rdfa,erochest/threepress-rdfa,erochest/threepress-rdfa | from settings import *
import settings
TEMPLATE_DIRS_BASE = TEMPLATE_DIRS
TEMPLATE_DIRS = (
'%s/library/templates/mobile/auth' % ROOT_PATH,
'%s/library/templates/mobile' % ROOT_PATH,
)
TEMPLATE_DIRS += TEMPLATE_DIRS_BASE
MOBILE = True
Change cookie name for mobile setting
--HG--
extra : convert_revision : svn%3Ae08d0fb5-4147-0410-a205-bba44f1f51a3/trunk%40553 | from settings import *
import settings
TEMPLATE_DIRS_BASE = TEMPLATE_DIRS
TEMPLATE_DIRS = (
'%s/library/templates/mobile/auth' % ROOT_PATH,
'%s/library/templates/mobile' % ROOT_PATH,
)
TEMPLATE_DIRS += TEMPLATE_DIRS_BASE
MOBILE = True
SESSION_COOKIE_NAME = 'bookworm_mobile'
| <commit_before>from settings import *
import settings
TEMPLATE_DIRS_BASE = TEMPLATE_DIRS
TEMPLATE_DIRS = (
'%s/library/templates/mobile/auth' % ROOT_PATH,
'%s/library/templates/mobile' % ROOT_PATH,
)
TEMPLATE_DIRS += TEMPLATE_DIRS_BASE
MOBILE = True
<commit_msg>Change cookie name for mobile setting
--HG--
extra : convert_revision : svn%3Ae08d0fb5-4147-0410-a205-bba44f1f51a3/trunk%40553<commit_after> | from settings import *
import settings
TEMPLATE_DIRS_BASE = TEMPLATE_DIRS
TEMPLATE_DIRS = (
'%s/library/templates/mobile/auth' % ROOT_PATH,
'%s/library/templates/mobile' % ROOT_PATH,
)
TEMPLATE_DIRS += TEMPLATE_DIRS_BASE
MOBILE = True
SESSION_COOKIE_NAME = 'bookworm_mobile'
| from settings import *
import settings
TEMPLATE_DIRS_BASE = TEMPLATE_DIRS
TEMPLATE_DIRS = (
'%s/library/templates/mobile/auth' % ROOT_PATH,
'%s/library/templates/mobile' % ROOT_PATH,
)
TEMPLATE_DIRS += TEMPLATE_DIRS_BASE
MOBILE = True
Change cookie name for mobile setting
--HG--
extra : convert_revision : svn%3Ae08d0fb5-4147-0410-a205-bba44f1f51a3/trunk%40553from settings import *
import settings
TEMPLATE_DIRS_BASE = TEMPLATE_DIRS
TEMPLATE_DIRS = (
'%s/library/templates/mobile/auth' % ROOT_PATH,
'%s/library/templates/mobile' % ROOT_PATH,
)
TEMPLATE_DIRS += TEMPLATE_DIRS_BASE
MOBILE = True
SESSION_COOKIE_NAME = 'bookworm_mobile'
| <commit_before>from settings import *
import settings
TEMPLATE_DIRS_BASE = TEMPLATE_DIRS
TEMPLATE_DIRS = (
'%s/library/templates/mobile/auth' % ROOT_PATH,
'%s/library/templates/mobile' % ROOT_PATH,
)
TEMPLATE_DIRS += TEMPLATE_DIRS_BASE
MOBILE = True
<commit_msg>Change cookie name for mobile setting
--HG--
extra : convert_revision : svn%3Ae08d0fb5-4147-0410-a205-bba44f1f51a3/trunk%40553<commit_after>from settings import *
import settings
TEMPLATE_DIRS_BASE = TEMPLATE_DIRS
TEMPLATE_DIRS = (
'%s/library/templates/mobile/auth' % ROOT_PATH,
'%s/library/templates/mobile' % ROOT_PATH,
)
TEMPLATE_DIRS += TEMPLATE_DIRS_BASE
MOBILE = True
SESSION_COOKIE_NAME = 'bookworm_mobile'
|
d883208fa4084e06aa0f19ba7031566f33260e23 | lib/web/web.py | lib/web/web.py | import os, os.path
import random
import string
import json
import cherrypy
from . import get_pic
class StringGenerator(object):
@cherrypy.expose
def index(self):
return """<html>
<head>
<link href="/static/css/style.css" rel="stylesheet">
</head>
<body>
wrong page
</body>
</html>"""
@cherrypy.expose
def get_picture(self, url=""):
return get_pic.base64_picture(url)
@cherrypy.expose
def search(self, query):
return json.dumps({"book": {"title":"Gs", "author":"Bash Gs"},
"recommended":[{"title":"Gs1", "author":"Bash Gs1"}, {"title":"Gs2", "author":"Bash Gs2"}]})
@cherrypy.expose
def display(self):
return cherrypy.session['mystring']
def main():
conf = {
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'web'
)
},
}
cherrypy.quickstart(StringGenerator(), '/', conf)
if __name__ == '__main__':
main()
| import os, os.path
import random
import string
import json
import cherrypy
from . import get_pic
class StringGenerator(object):
@cherrypy.expose
def index(self):
return """<html>
<head>
<link href="/static/css/style.css" rel="stylesheet">
</head>
<body>
wrong page
</body>
</html>"""
@cherrypy.expose
def get_picture(self, url=""):
return get_pic.base64_picture(url)
@cherrypy.expose
def search(self, query):
return json.dumps({"book": {"title":"Gs", "author":"Bash Gs", "url":"https://chitanka.info/text/1"},
"recommended":[{"title":"Gs1", "author":"Bash Gs1", "url":"https://chitanka.info/text/2"},
{"title":"Gs2", "author":"Bash Gs2", "url":"https://chitanka.info/text/3"}]})
@cherrypy.expose
def display(self):
return cherrypy.session['mystring']
def main():
conf = {
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'web'
)
},
}
cherrypy.quickstart(StringGenerator(), '/', conf)
if __name__ == '__main__':
main()
| Add sample urls to json.dump | Add sample urls to json.dump
| Python | mit | DexterLB/bookrat,DexterLB/bookrat,DexterLB/bookrat,DexterLB/bookrat,DexterLB/bookrat | import os, os.path
import random
import string
import json
import cherrypy
from . import get_pic
class StringGenerator(object):
@cherrypy.expose
def index(self):
return """<html>
<head>
<link href="/static/css/style.css" rel="stylesheet">
</head>
<body>
wrong page
</body>
</html>"""
@cherrypy.expose
def get_picture(self, url=""):
return get_pic.base64_picture(url)
@cherrypy.expose
def search(self, query):
return json.dumps({"book": {"title":"Gs", "author":"Bash Gs"},
"recommended":[{"title":"Gs1", "author":"Bash Gs1"}, {"title":"Gs2", "author":"Bash Gs2"}]})
@cherrypy.expose
def display(self):
return cherrypy.session['mystring']
def main():
conf = {
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'web'
)
},
}
cherrypy.quickstart(StringGenerator(), '/', conf)
if __name__ == '__main__':
main()
Add sample urls to json.dump | import os, os.path
import random
import string
import json
import cherrypy
from . import get_pic
class StringGenerator(object):
@cherrypy.expose
def index(self):
return """<html>
<head>
<link href="/static/css/style.css" rel="stylesheet">
</head>
<body>
wrong page
</body>
</html>"""
@cherrypy.expose
def get_picture(self, url=""):
return get_pic.base64_picture(url)
@cherrypy.expose
def search(self, query):
return json.dumps({"book": {"title":"Gs", "author":"Bash Gs", "url":"https://chitanka.info/text/1"},
"recommended":[{"title":"Gs1", "author":"Bash Gs1", "url":"https://chitanka.info/text/2"},
{"title":"Gs2", "author":"Bash Gs2", "url":"https://chitanka.info/text/3"}]})
@cherrypy.expose
def display(self):
return cherrypy.session['mystring']
def main():
conf = {
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'web'
)
},
}
cherrypy.quickstart(StringGenerator(), '/', conf)
if __name__ == '__main__':
main()
| <commit_before>import os, os.path
import random
import string
import json
import cherrypy
from . import get_pic
class StringGenerator(object):
@cherrypy.expose
def index(self):
return """<html>
<head>
<link href="/static/css/style.css" rel="stylesheet">
</head>
<body>
wrong page
</body>
</html>"""
@cherrypy.expose
def get_picture(self, url=""):
return get_pic.base64_picture(url)
@cherrypy.expose
def search(self, query):
return json.dumps({"book": {"title":"Gs", "author":"Bash Gs"},
"recommended":[{"title":"Gs1", "author":"Bash Gs1"}, {"title":"Gs2", "author":"Bash Gs2"}]})
@cherrypy.expose
def display(self):
return cherrypy.session['mystring']
def main():
conf = {
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'web'
)
},
}
cherrypy.quickstart(StringGenerator(), '/', conf)
if __name__ == '__main__':
main()
<commit_msg>Add sample urls to json.dump<commit_after> | import os, os.path
import random
import string
import json
import cherrypy
from . import get_pic
class StringGenerator(object):
@cherrypy.expose
def index(self):
return """<html>
<head>
<link href="/static/css/style.css" rel="stylesheet">
</head>
<body>
wrong page
</body>
</html>"""
@cherrypy.expose
def get_picture(self, url=""):
return get_pic.base64_picture(url)
@cherrypy.expose
def search(self, query):
return json.dumps({"book": {"title":"Gs", "author":"Bash Gs", "url":"https://chitanka.info/text/1"},
"recommended":[{"title":"Gs1", "author":"Bash Gs1", "url":"https://chitanka.info/text/2"},
{"title":"Gs2", "author":"Bash Gs2", "url":"https://chitanka.info/text/3"}]})
@cherrypy.expose
def display(self):
return cherrypy.session['mystring']
def main():
conf = {
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'web'
)
},
}
cherrypy.quickstart(StringGenerator(), '/', conf)
if __name__ == '__main__':
main()
| import os, os.path
import random
import string
import json
import cherrypy
from . import get_pic
class StringGenerator(object):
@cherrypy.expose
def index(self):
return """<html>
<head>
<link href="/static/css/style.css" rel="stylesheet">
</head>
<body>
wrong page
</body>
</html>"""
@cherrypy.expose
def get_picture(self, url=""):
return get_pic.base64_picture(url)
@cherrypy.expose
def search(self, query):
return json.dumps({"book": {"title":"Gs", "author":"Bash Gs"},
"recommended":[{"title":"Gs1", "author":"Bash Gs1"}, {"title":"Gs2", "author":"Bash Gs2"}]})
@cherrypy.expose
def display(self):
return cherrypy.session['mystring']
def main():
conf = {
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'web'
)
},
}
cherrypy.quickstart(StringGenerator(), '/', conf)
if __name__ == '__main__':
main()
Add sample urls to json.dumpimport os, os.path
import random
import string
import json
import cherrypy
from . import get_pic
class StringGenerator(object):
@cherrypy.expose
def index(self):
return """<html>
<head>
<link href="/static/css/style.css" rel="stylesheet">
</head>
<body>
wrong page
</body>
</html>"""
@cherrypy.expose
def get_picture(self, url=""):
return get_pic.base64_picture(url)
@cherrypy.expose
def search(self, query):
return json.dumps({"book": {"title":"Gs", "author":"Bash Gs", "url":"https://chitanka.info/text/1"},
"recommended":[{"title":"Gs1", "author":"Bash Gs1", "url":"https://chitanka.info/text/2"},
{"title":"Gs2", "author":"Bash Gs2", "url":"https://chitanka.info/text/3"}]})
@cherrypy.expose
def display(self):
return cherrypy.session['mystring']
def main():
conf = {
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'web'
)
},
}
cherrypy.quickstart(StringGenerator(), '/', conf)
if __name__ == '__main__':
main()
| <commit_before>import os, os.path
import random
import string
import json
import cherrypy
from . import get_pic
class StringGenerator(object):
@cherrypy.expose
def index(self):
return """<html>
<head>
<link href="/static/css/style.css" rel="stylesheet">
</head>
<body>
wrong page
</body>
</html>"""
@cherrypy.expose
def get_picture(self, url=""):
return get_pic.base64_picture(url)
@cherrypy.expose
def search(self, query):
return json.dumps({"book": {"title":"Gs", "author":"Bash Gs"},
"recommended":[{"title":"Gs1", "author":"Bash Gs1"}, {"title":"Gs2", "author":"Bash Gs2"}]})
@cherrypy.expose
def display(self):
return cherrypy.session['mystring']
def main():
conf = {
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'web'
)
},
}
cherrypy.quickstart(StringGenerator(), '/', conf)
if __name__ == '__main__':
main()
<commit_msg>Add sample urls to json.dump<commit_after>import os, os.path
import random
import string
import json
import cherrypy
from . import get_pic
class StringGenerator(object):
@cherrypy.expose
def index(self):
return """<html>
<head>
<link href="/static/css/style.css" rel="stylesheet">
</head>
<body>
wrong page
</body>
</html>"""
@cherrypy.expose
def get_picture(self, url=""):
return get_pic.base64_picture(url)
@cherrypy.expose
def search(self, query):
return json.dumps({"book": {"title":"Gs", "author":"Bash Gs", "url":"https://chitanka.info/text/1"},
"recommended":[{"title":"Gs1", "author":"Bash Gs1", "url":"https://chitanka.info/text/2"},
{"title":"Gs2", "author":"Bash Gs2", "url":"https://chitanka.info/text/3"}]})
@cherrypy.expose
def display(self):
return cherrypy.session['mystring']
def main():
conf = {
'/': {
'tools.staticdir.on': True,
'tools.staticdir.dir': os.path.join(
os.path.abspath(os.path.dirname(os.path.realpath(__file__))),
'web'
)
},
}
cherrypy.quickstart(StringGenerator(), '/', conf)
if __name__ == '__main__':
main()
|
fc3f64de95554a66e2ec64804acf9c6032dd7e7b | test/rsrc/convert_stub.py | test/rsrc/convert_stub.py | #!/usr/bin/env python
"""A tiny tool used to test the `convert` plugin. It copies a file and appends
a specified text tag.
"""
import sys
def convert(in_file, out_file, tag):
"""Copy `in_file` to `out_file` and append the string `tag`.
"""
with open(out_file, 'wb') as out_f:
with open(in_file, 'rb') as in_f:
out_f.write(in_f.read())
out_f.write(tag)
if __name__ == '__main__':
convert(sys.argv[1], sys.argv[2], sys.argv[3])
| #!/usr/bin/env python
"""A tiny tool used to test the `convert` plugin. It copies a file and appends
a specified text tag.
"""
import sys
def convert(in_file, out_file, tag):
"""Copy `in_file` to `out_file` and append the string `tag`.
"""
# On Python 3, encode the tag argument as bytes.
if not isinstance(tag, bytes):
tag = tag.encode('utf8')
with open(out_file, 'wb') as out_f:
with open(in_file, 'rb') as in_f:
out_f.write(in_f.read())
out_f.write(tag)
if __name__ == '__main__':
convert(sys.argv[1], sys.argv[2], sys.argv[3])
| Convert stub: Python 3 compatibility | Convert stub: Python 3 compatibility
Important for systems where `python` is 3.x, like Arch, even when beets itself
is running on Python 2.
| Python | mit | madmouser1/beets,jcoady9/beets,jcoady9/beets,xsteadfastx/beets,madmouser1/beets,diego-plan9/beets,Kraymer/beets,jackwilsdon/beets,sampsyo/beets,beetbox/beets,MyTunesFreeMusic/privacy-policy,shamangeorge/beets,pkess/beets,Kraymer/beets,mosesfistos1/beetbox,Kraymer/beets,sampsyo/beets,lengtche/beets,pkess/beets,xsteadfastx/beets,xsteadfastx/beets,shamangeorge/beets,ibmibmibm/beets,diego-plan9/beets,madmouser1/beets,mosesfistos1/beetbox,beetbox/beets,lengtche/beets,Kraymer/beets,SusannaMaria/beets,lengtche/beets,shamangeorge/beets,shamangeorge/beets,jcoady9/beets,pkess/beets,artemutin/beets,mosesfistos1/beetbox,beetbox/beets,SusannaMaria/beets,MyTunesFreeMusic/privacy-policy,jackwilsdon/beets,madmouser1/beets,MyTunesFreeMusic/privacy-policy,jackwilsdon/beets,jackwilsdon/beets,MyTunesFreeMusic/privacy-policy,diego-plan9/beets,SusannaMaria/beets,jcoady9/beets,artemutin/beets,artemutin/beets,ibmibmibm/beets,pkess/beets,ibmibmibm/beets,sampsyo/beets,xsteadfastx/beets,ibmibmibm/beets,beetbox/beets,lengtche/beets,mosesfistos1/beetbox,SusannaMaria/beets,artemutin/beets,diego-plan9/beets,sampsyo/beets | #!/usr/bin/env python
"""A tiny tool used to test the `convert` plugin. It copies a file and appends
a specified text tag.
"""
import sys
def convert(in_file, out_file, tag):
"""Copy `in_file` to `out_file` and append the string `tag`.
"""
with open(out_file, 'wb') as out_f:
with open(in_file, 'rb') as in_f:
out_f.write(in_f.read())
out_f.write(tag)
if __name__ == '__main__':
convert(sys.argv[1], sys.argv[2], sys.argv[3])
Convert stub: Python 3 compatibility
Important for systems where `python` is 3.x, like Arch, even when beets itself
is running on Python 2. | #!/usr/bin/env python
"""A tiny tool used to test the `convert` plugin. It copies a file and appends
a specified text tag.
"""
import sys
def convert(in_file, out_file, tag):
"""Copy `in_file` to `out_file` and append the string `tag`.
"""
# On Python 3, encode the tag argument as bytes.
if not isinstance(tag, bytes):
tag = tag.encode('utf8')
with open(out_file, 'wb') as out_f:
with open(in_file, 'rb') as in_f:
out_f.write(in_f.read())
out_f.write(tag)
if __name__ == '__main__':
convert(sys.argv[1], sys.argv[2], sys.argv[3])
| <commit_before>#!/usr/bin/env python
"""A tiny tool used to test the `convert` plugin. It copies a file and appends
a specified text tag.
"""
import sys
def convert(in_file, out_file, tag):
"""Copy `in_file` to `out_file` and append the string `tag`.
"""
with open(out_file, 'wb') as out_f:
with open(in_file, 'rb') as in_f:
out_f.write(in_f.read())
out_f.write(tag)
if __name__ == '__main__':
convert(sys.argv[1], sys.argv[2], sys.argv[3])
<commit_msg>Convert stub: Python 3 compatibility
Important for systems where `python` is 3.x, like Arch, even when beets itself
is running on Python 2.<commit_after> | #!/usr/bin/env python
"""A tiny tool used to test the `convert` plugin. It copies a file and appends
a specified text tag.
"""
import sys
def convert(in_file, out_file, tag):
"""Copy `in_file` to `out_file` and append the string `tag`.
"""
# On Python 3, encode the tag argument as bytes.
if not isinstance(tag, bytes):
tag = tag.encode('utf8')
with open(out_file, 'wb') as out_f:
with open(in_file, 'rb') as in_f:
out_f.write(in_f.read())
out_f.write(tag)
if __name__ == '__main__':
convert(sys.argv[1], sys.argv[2], sys.argv[3])
| #!/usr/bin/env python
"""A tiny tool used to test the `convert` plugin. It copies a file and appends
a specified text tag.
"""
import sys
def convert(in_file, out_file, tag):
"""Copy `in_file` to `out_file` and append the string `tag`.
"""
with open(out_file, 'wb') as out_f:
with open(in_file, 'rb') as in_f:
out_f.write(in_f.read())
out_f.write(tag)
if __name__ == '__main__':
convert(sys.argv[1], sys.argv[2], sys.argv[3])
Convert stub: Python 3 compatibility
Important for systems where `python` is 3.x, like Arch, even when beets itself
is running on Python 2.#!/usr/bin/env python
"""A tiny tool used to test the `convert` plugin. It copies a file and appends
a specified text tag.
"""
import sys
def convert(in_file, out_file, tag):
"""Copy `in_file` to `out_file` and append the string `tag`.
"""
# On Python 3, encode the tag argument as bytes.
if not isinstance(tag, bytes):
tag = tag.encode('utf8')
with open(out_file, 'wb') as out_f:
with open(in_file, 'rb') as in_f:
out_f.write(in_f.read())
out_f.write(tag)
if __name__ == '__main__':
convert(sys.argv[1], sys.argv[2], sys.argv[3])
| <commit_before>#!/usr/bin/env python
"""A tiny tool used to test the `convert` plugin. It copies a file and appends
a specified text tag.
"""
import sys
def convert(in_file, out_file, tag):
"""Copy `in_file` to `out_file` and append the string `tag`.
"""
with open(out_file, 'wb') as out_f:
with open(in_file, 'rb') as in_f:
out_f.write(in_f.read())
out_f.write(tag)
if __name__ == '__main__':
convert(sys.argv[1], sys.argv[2], sys.argv[3])
<commit_msg>Convert stub: Python 3 compatibility
Important for systems where `python` is 3.x, like Arch, even when beets itself
is running on Python 2.<commit_after>#!/usr/bin/env python
"""A tiny tool used to test the `convert` plugin. It copies a file and appends
a specified text tag.
"""
import sys
def convert(in_file, out_file, tag):
"""Copy `in_file` to `out_file` and append the string `tag`.
"""
# On Python 3, encode the tag argument as bytes.
if not isinstance(tag, bytes):
tag = tag.encode('utf8')
with open(out_file, 'wb') as out_f:
with open(in_file, 'rb') as in_f:
out_f.write(in_f.read())
out_f.write(tag)
if __name__ == '__main__':
convert(sys.argv[1], sys.argv[2], sys.argv[3])
|
901868b42f98b8b25bc49e0930e0eb4bb56f26d1 | lib/rapidsms/tests/test_backend_irc.py | lib/rapidsms/tests/test_backend_irc.py | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestLog(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
| #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestBackendIRC(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
| Rename test class (sloppy cut n' paste job) | Rename test class (sloppy cut n' paste job)
| Python | bsd-3-clause | rapidsms/rapidsms-legacy,rapidsms/rapidsms-legacy,rapidsms/rapidsms-legacy | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestLog(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
Rename test class (sloppy cut n' paste job) | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestBackendIRC(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
| <commit_before>#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestLog(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
<commit_msg>Rename test class (sloppy cut n' paste job)<commit_after> | #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestBackendIRC(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
| #!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestLog(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
Rename test class (sloppy cut n' paste job)#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestBackendIRC(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
| <commit_before>#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestLog(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
<commit_msg>Rename test class (sloppy cut n' paste job)<commit_after>#!/usr/bin/env python
# vim: ai ts=4 sts=4 et sw=4
import unittest
from harness import MockRouter
class TestBackendIRC(unittest.TestCase):
def test_backend_irc (self):
router = MockRouter()
try:
import irclib
from rapidsms.backends.irc import Backend
backend = Backend("irc", router)
backend.configure(host="localhost",nick="test",channels="#test1,#test2")
self.assertEquals(type(backend), Backend, "IRC backend loads")
self.assertEquals(backend.nick, "test", "IRC backend has nick set")
self.assertEquals(backend.host, "localhost", "IRC backend has host set")
self.assertEquals(backend.channels, ["#test1","#test2"],
"IRC backend has channels correctly set")
except ImportError:
pass
if __name__ == "__main__":
unittest.main()
|
9dad4f997371011ee7fe9f6ecd0c1a58cbba6d27 | html_parse.py | html_parse.py | from bs4 import BeautifulSoup
def parse(html):
soup = BeautifulSoup(html, features="html.parser")
return soup.get_text() | import imp
import logging
log = logging.getLogger(__name__)
def module_exists(module_name):
try:
imp.find_module(module_name)
return True
except ImportError:
return False
if module_exists("bs4"):
log.info("Parsing HTML using beautifulsoup4")
from bs4 import BeautifulSoup
def parse(html):
soup = BeautifulSoup(html, features="html.parser")
return soup.get_text()
elif module_exists("html2text"):
log.info("Parsing HTML using html2text")
import html2text
def parse(html):
h = html2text.HTML2Text()
h.single_line_break = True
return h.handle(html)
else:
warning_msg = "HTML parsing not available. Install beautifulsoup4 or html2text"
log.warning(warning_msg)
def parse(html):
raise ImportWarning(warning_msg) | Add support for html2text or no parser Will still prefer beautifulsoup4 if installed | Add support for html2text or no parser
Will still prefer beautifulsoup4 if installed
| Python | mit | idiotandrobot/heathergraph | from bs4 import BeautifulSoup
def parse(html):
soup = BeautifulSoup(html, features="html.parser")
return soup.get_text()Add support for html2text or no parser
Will still prefer beautifulsoup4 if installed | import imp
import logging
log = logging.getLogger(__name__)
def module_exists(module_name):
try:
imp.find_module(module_name)
return True
except ImportError:
return False
if module_exists("bs4"):
log.info("Parsing HTML using beautifulsoup4")
from bs4 import BeautifulSoup
def parse(html):
soup = BeautifulSoup(html, features="html.parser")
return soup.get_text()
elif module_exists("html2text"):
log.info("Parsing HTML using html2text")
import html2text
def parse(html):
h = html2text.HTML2Text()
h.single_line_break = True
return h.handle(html)
else:
warning_msg = "HTML parsing not available. Install beautifulsoup4 or html2text"
log.warning(warning_msg)
def parse(html):
raise ImportWarning(warning_msg) | <commit_before>from bs4 import BeautifulSoup
def parse(html):
soup = BeautifulSoup(html, features="html.parser")
return soup.get_text()<commit_msg>Add support for html2text or no parser
Will still prefer beautifulsoup4 if installed<commit_after> | import imp
import logging
log = logging.getLogger(__name__)
def module_exists(module_name):
try:
imp.find_module(module_name)
return True
except ImportError:
return False
if module_exists("bs4"):
log.info("Parsing HTML using beautifulsoup4")
from bs4 import BeautifulSoup
def parse(html):
soup = BeautifulSoup(html, features="html.parser")
return soup.get_text()
elif module_exists("html2text"):
log.info("Parsing HTML using html2text")
import html2text
def parse(html):
h = html2text.HTML2Text()
h.single_line_break = True
return h.handle(html)
else:
warning_msg = "HTML parsing not available. Install beautifulsoup4 or html2text"
log.warning(warning_msg)
def parse(html):
raise ImportWarning(warning_msg) | from bs4 import BeautifulSoup
def parse(html):
soup = BeautifulSoup(html, features="html.parser")
return soup.get_text()Add support for html2text or no parser
Will still prefer beautifulsoup4 if installedimport imp
import logging
log = logging.getLogger(__name__)
def module_exists(module_name):
try:
imp.find_module(module_name)
return True
except ImportError:
return False
if module_exists("bs4"):
log.info("Parsing HTML using beautifulsoup4")
from bs4 import BeautifulSoup
def parse(html):
soup = BeautifulSoup(html, features="html.parser")
return soup.get_text()
elif module_exists("html2text"):
log.info("Parsing HTML using html2text")
import html2text
def parse(html):
h = html2text.HTML2Text()
h.single_line_break = True
return h.handle(html)
else:
warning_msg = "HTML parsing not available. Install beautifulsoup4 or html2text"
log.warning(warning_msg)
def parse(html):
raise ImportWarning(warning_msg) | <commit_before>from bs4 import BeautifulSoup
def parse(html):
soup = BeautifulSoup(html, features="html.parser")
return soup.get_text()<commit_msg>Add support for html2text or no parser
Will still prefer beautifulsoup4 if installed<commit_after>import imp
import logging
log = logging.getLogger(__name__)
def module_exists(module_name):
try:
imp.find_module(module_name)
return True
except ImportError:
return False
if module_exists("bs4"):
log.info("Parsing HTML using beautifulsoup4")
from bs4 import BeautifulSoup
def parse(html):
soup = BeautifulSoup(html, features="html.parser")
return soup.get_text()
elif module_exists("html2text"):
log.info("Parsing HTML using html2text")
import html2text
def parse(html):
h = html2text.HTML2Text()
h.single_line_break = True
return h.handle(html)
else:
warning_msg = "HTML parsing not available. Install beautifulsoup4 or html2text"
log.warning(warning_msg)
def parse(html):
raise ImportWarning(warning_msg) |
ebafc242445e1a8413cd6afd45ae53d989850f9e | subprocrunner/error.py | subprocrunner/error.py | """
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import subprocess
import sys
from typing import Optional
from .typing import Command
class CommandError(Exception):
@property
def cmd(self) -> Optional[Command]:
return self.__cmd
@property
def errno(self) -> Optional[int]:
return self.__errno
def __init__(self, *args, **kwargs) -> None:
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super().__init__(*args)
class CalledProcessError(subprocess.CalledProcessError):
def __init__(self, *args, **kwargs) -> None:
if sys.version_info[0:2] <= (3, 4):
# stdout and stderr attribute added to subprocess.CalledProcessError since Python 3.5
self.stdout = kwargs.pop("stdout", None)
self.stderr = kwargs.pop("stderr", None)
super().__init__(*args, **kwargs)
| """
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
# keep the following line for backward compatibility
from subprocess import CalledProcessError # noqa
from typing import Optional
from .typing import Command
class CommandError(Exception):
@property
def cmd(self) -> Optional[Command]:
return self.__cmd
@property
def errno(self) -> Optional[int]:
return self.__errno
def __init__(self, *args, **kwargs) -> None:
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super().__init__(*args)
| Remove a class definition that no longer needed | Remove a class definition that no longer needed
| Python | mit | thombashi/subprocrunner,thombashi/subprocrunner | """
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import subprocess
import sys
from typing import Optional
from .typing import Command
class CommandError(Exception):
@property
def cmd(self) -> Optional[Command]:
return self.__cmd
@property
def errno(self) -> Optional[int]:
return self.__errno
def __init__(self, *args, **kwargs) -> None:
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super().__init__(*args)
class CalledProcessError(subprocess.CalledProcessError):
def __init__(self, *args, **kwargs) -> None:
if sys.version_info[0:2] <= (3, 4):
# stdout and stderr attribute added to subprocess.CalledProcessError since Python 3.5
self.stdout = kwargs.pop("stdout", None)
self.stderr = kwargs.pop("stderr", None)
super().__init__(*args, **kwargs)
Remove a class definition that no longer needed | """
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
# keep the following line for backward compatibility
from subprocess import CalledProcessError # noqa
from typing import Optional
from .typing import Command
class CommandError(Exception):
@property
def cmd(self) -> Optional[Command]:
return self.__cmd
@property
def errno(self) -> Optional[int]:
return self.__errno
def __init__(self, *args, **kwargs) -> None:
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super().__init__(*args)
| <commit_before>"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import subprocess
import sys
from typing import Optional
from .typing import Command
class CommandError(Exception):
@property
def cmd(self) -> Optional[Command]:
return self.__cmd
@property
def errno(self) -> Optional[int]:
return self.__errno
def __init__(self, *args, **kwargs) -> None:
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super().__init__(*args)
class CalledProcessError(subprocess.CalledProcessError):
def __init__(self, *args, **kwargs) -> None:
if sys.version_info[0:2] <= (3, 4):
# stdout and stderr attribute added to subprocess.CalledProcessError since Python 3.5
self.stdout = kwargs.pop("stdout", None)
self.stderr = kwargs.pop("stderr", None)
super().__init__(*args, **kwargs)
<commit_msg>Remove a class definition that no longer needed<commit_after> | """
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
# keep the following line for backward compatibility
from subprocess import CalledProcessError # noqa
from typing import Optional
from .typing import Command
class CommandError(Exception):
@property
def cmd(self) -> Optional[Command]:
return self.__cmd
@property
def errno(self) -> Optional[int]:
return self.__errno
def __init__(self, *args, **kwargs) -> None:
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super().__init__(*args)
| """
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import subprocess
import sys
from typing import Optional
from .typing import Command
class CommandError(Exception):
@property
def cmd(self) -> Optional[Command]:
return self.__cmd
@property
def errno(self) -> Optional[int]:
return self.__errno
def __init__(self, *args, **kwargs) -> None:
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super().__init__(*args)
class CalledProcessError(subprocess.CalledProcessError):
def __init__(self, *args, **kwargs) -> None:
if sys.version_info[0:2] <= (3, 4):
# stdout and stderr attribute added to subprocess.CalledProcessError since Python 3.5
self.stdout = kwargs.pop("stdout", None)
self.stderr = kwargs.pop("stderr", None)
super().__init__(*args, **kwargs)
Remove a class definition that no longer needed"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
# keep the following line for backward compatibility
from subprocess import CalledProcessError # noqa
from typing import Optional
from .typing import Command
class CommandError(Exception):
@property
def cmd(self) -> Optional[Command]:
return self.__cmd
@property
def errno(self) -> Optional[int]:
return self.__errno
def __init__(self, *args, **kwargs) -> None:
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super().__init__(*args)
| <commit_before>"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
import subprocess
import sys
from typing import Optional
from .typing import Command
class CommandError(Exception):
@property
def cmd(self) -> Optional[Command]:
return self.__cmd
@property
def errno(self) -> Optional[int]:
return self.__errno
def __init__(self, *args, **kwargs) -> None:
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super().__init__(*args)
class CalledProcessError(subprocess.CalledProcessError):
def __init__(self, *args, **kwargs) -> None:
if sys.version_info[0:2] <= (3, 4):
# stdout and stderr attribute added to subprocess.CalledProcessError since Python 3.5
self.stdout = kwargs.pop("stdout", None)
self.stderr = kwargs.pop("stderr", None)
super().__init__(*args, **kwargs)
<commit_msg>Remove a class definition that no longer needed<commit_after>"""
.. codeauthor:: Tsuyoshi Hombashi <tsuyoshi.hombashi@gmail.com>
"""
# keep the following line for backward compatibility
from subprocess import CalledProcessError # noqa
from typing import Optional
from .typing import Command
class CommandError(Exception):
@property
def cmd(self) -> Optional[Command]:
return self.__cmd
@property
def errno(self) -> Optional[int]:
return self.__errno
def __init__(self, *args, **kwargs) -> None:
self.__cmd = kwargs.pop("cmd", None)
self.__errno = kwargs.pop("errno", None)
super().__init__(*args)
|
c21fe453911af190f3cbd93356396d4f5e65195e | mopidy/backends/gstreamer.py | mopidy/backends/gstreamer.py | import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play_current_track(self):
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
def _play(self):
if self._current_track is not None:
self._play_current_track()
return True
else:
return False
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
| import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play(self):
if self._current_track is None:
return False
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
return True
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
| Clean play code for GStreamer | Clean play code for GStreamer
| Python | apache-2.0 | vrs01/mopidy,bacontext/mopidy,vrs01/mopidy,quartz55/mopidy,priestd09/mopidy,swak/mopidy,liamw9534/mopidy,hkariti/mopidy,pacificIT/mopidy,ZenithDK/mopidy,hkariti/mopidy,dbrgn/mopidy,kingosticks/mopidy,ZenithDK/mopidy,diandiankan/mopidy,vrs01/mopidy,hkariti/mopidy,pacificIT/mopidy,ali/mopidy,SuperStarPL/mopidy,adamcik/mopidy,swak/mopidy,glogiotatidis/mopidy,adamcik/mopidy,SuperStarPL/mopidy,mokieyue/mopidy,rawdlite/mopidy,priestd09/mopidy,diandiankan/mopidy,swak/mopidy,tkem/mopidy,diandiankan/mopidy,dbrgn/mopidy,mopidy/mopidy,woutervanwijk/mopidy,jodal/mopidy,bacontext/mopidy,jcass77/mopidy,ali/mopidy,jcass77/mopidy,bacontext/mopidy,ali/mopidy,SuperStarPL/mopidy,abarisain/mopidy,mopidy/mopidy,mopidy/mopidy,bacontext/mopidy,quartz55/mopidy,quartz55/mopidy,quartz55/mopidy,ZenithDK/mopidy,rawdlite/mopidy,ali/mopidy,abarisain/mopidy,hkariti/mopidy,tkem/mopidy,ZenithDK/mopidy,tkem/mopidy,rawdlite/mopidy,jmarsik/mopidy,jodal/mopidy,diandiankan/mopidy,priestd09/mopidy,tkem/mopidy,liamw9534/mopidy,bencevans/mopidy,dbrgn/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,swak/mopidy,glogiotatidis/mopidy,glogiotatidis/mopidy,mokieyue/mopidy,dbrgn/mopidy,bencevans/mopidy,SuperStarPL/mopidy,adamcik/mopidy,jmarsik/mopidy,jcass77/mopidy,kingosticks/mopidy,rawdlite/mopidy,vrs01/mopidy,mokieyue/mopidy,bencevans/mopidy,bencevans/mopidy,jmarsik/mopidy,jmarsik/mopidy,kingosticks/mopidy,woutervanwijk/mopidy,mokieyue/mopidy,jodal/mopidy,pacificIT/mopidy | import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play_current_track(self):
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
def _play(self):
if self._current_track is not None:
self._play_current_track()
return True
else:
return False
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
Clean play code for GStreamer | import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play(self):
if self._current_track is None:
return False
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
return True
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
| <commit_before>import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play_current_track(self):
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
def _play(self):
if self._current_track is not None:
self._play_current_track()
return True
else:
return False
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
<commit_msg>Clean play code for GStreamer<commit_after> | import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play(self):
if self._current_track is None:
return False
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
return True
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
| import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play_current_track(self):
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
def _play(self):
if self._current_track is not None:
self._play_current_track()
return True
else:
return False
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
Clean play code for GStreamerimport logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play(self):
if self._current_track is None:
return False
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
return True
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
| <commit_before>import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play_current_track(self):
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
def _play(self):
if self._current_track is not None:
self._play_current_track()
return True
else:
return False
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
<commit_msg>Clean play code for GStreamer<commit_after>import logging
import gst
from mopidy import config
from mopidy.backends import BaseBackend
from mopidy.models import Artist, Album, Track, Playlist
logger = logging.getLogger(u'backends.gstreamer')
class GStreamerBackend(BaseBackend):
def __init__(self, *args, **kwargs):
super(GStreamerBackend, self).__init__(*args, **kwargs)
playlist = []
player = gst.element_factory_make("playbin2", "player")
fakesink = gst.element_factory_make("fakesink", "fakesink")
player.set_property("video-sink", fakesink)
self.player = player
def _play(self):
if self._current_track is None:
return False
self.player.set_property("uri", self._current_track.uri)
self.player.set_state(gst.STATE_PLAYING)
return True
def _stop(self):
self.player.set_state(gst.STATE_NULL)
return True
|
454ee9051bd8d949eb290fb4d3a622941c9ccc74 | test/python/test_binarycodec.py | test/python/test_binarycodec.py | '''Unit tests for CODA binary serialization'''
import io
import unittest
from coda import descriptors
from coda.io.binarycodec import BinaryCodec
from coda.runtime.descdata import BoolValue
class BinaryCodecTest(unittest.TestCase):
def setUp(self):
self.buffer = io.BytesIO()
# self.encoder = BinaryCodec.createEncoder(self.buffer)
def testEncodeDecodeBoolean(self):
source = BoolValue()
source.setValue(True)
encoder = BinaryCodec.createEncoder(self.buffer)
source.encode(encoder)
self.buffer.seek(0)
decoder = BinaryCodec.createDecoder(self.buffer)
result = decoder.decode(BoolValue)
self.assertIsInstance(result, BoolValue)
self.assertTrue(result.isValue())
self.assertTrue(result.hasValue())
def testEncodeDecodeStructType(self):
encoder = BinaryCodec.createEncoder(self.buffer)
descriptors.StructType.DESCRIPTOR.encode(encoder)
# buffer.write('\n'.encode())
self.buffer.seek(0)
# print("Binary buffer length:", len(self.buffer.getvalue()))
decoder = BinaryCodec.createDecoder(self.buffer)
decoder.setDebug(True)
st = decoder.decode(descriptors.StructType)
self.assertIsInstance(st, descriptors.StructType)
self.assertEqual(st.getFullName(), descriptors.StructType.DESCRIPTOR.getFullName())
| '''Unit tests for CODA binary serialization'''
import io
import unittest
from coda import descriptors
from coda.io.binarycodec import BinaryCodec
from coda.runtime.descdata import BoolValue
class BinaryCodecTest(unittest.TestCase):
def setUp(self):
self.buffer = io.BytesIO()
# self.encoder = BinaryCodec.createEncoder(self.buffer)
def testEncodeDecodeBoolean(self):
source = BoolValue()
source.setValue(True)
encoder = BinaryCodec.createEncoder(self.buffer)
source.encode(encoder)
self.buffer.seek(0)
decoder = BinaryCodec.createDecoder(self.buffer)
result = decoder.decode(BoolValue)
self.assertIsInstance(result, BoolValue)
self.assertTrue(result.isValue())
self.assertTrue(result.hasValue())
def testEncodeDecodeStructType(self):
encoder = BinaryCodec.createEncoder(self.buffer)
descriptors.StructType.DESCRIPTOR.encode(encoder)
# buffer.write('\n'.encode())
self.buffer.seek(0)
# print("Binary buffer length:", len(self.buffer.getvalue()))
decoder = BinaryCodec.createDecoder(self.buffer)
st = decoder.decode(descriptors.StructType)
self.assertIsInstance(st, descriptors.StructType)
self.assertEqual(st.getFullName(), descriptors.StructType.DESCRIPTOR.getFullName())
| Disable debug mode in python unit test. | Disable debug mode in python unit test.
| Python | apache-2.0 | viridia/coda,viridia/coda,viridia/coda | '''Unit tests for CODA binary serialization'''
import io
import unittest
from coda import descriptors
from coda.io.binarycodec import BinaryCodec
from coda.runtime.descdata import BoolValue
class BinaryCodecTest(unittest.TestCase):
def setUp(self):
self.buffer = io.BytesIO()
# self.encoder = BinaryCodec.createEncoder(self.buffer)
def testEncodeDecodeBoolean(self):
source = BoolValue()
source.setValue(True)
encoder = BinaryCodec.createEncoder(self.buffer)
source.encode(encoder)
self.buffer.seek(0)
decoder = BinaryCodec.createDecoder(self.buffer)
result = decoder.decode(BoolValue)
self.assertIsInstance(result, BoolValue)
self.assertTrue(result.isValue())
self.assertTrue(result.hasValue())
def testEncodeDecodeStructType(self):
encoder = BinaryCodec.createEncoder(self.buffer)
descriptors.StructType.DESCRIPTOR.encode(encoder)
# buffer.write('\n'.encode())
self.buffer.seek(0)
# print("Binary buffer length:", len(self.buffer.getvalue()))
decoder = BinaryCodec.createDecoder(self.buffer)
decoder.setDebug(True)
st = decoder.decode(descriptors.StructType)
self.assertIsInstance(st, descriptors.StructType)
self.assertEqual(st.getFullName(), descriptors.StructType.DESCRIPTOR.getFullName())
Disable debug mode in python unit test. | '''Unit tests for CODA binary serialization'''
import io
import unittest
from coda import descriptors
from coda.io.binarycodec import BinaryCodec
from coda.runtime.descdata import BoolValue
class BinaryCodecTest(unittest.TestCase):
def setUp(self):
self.buffer = io.BytesIO()
# self.encoder = BinaryCodec.createEncoder(self.buffer)
def testEncodeDecodeBoolean(self):
source = BoolValue()
source.setValue(True)
encoder = BinaryCodec.createEncoder(self.buffer)
source.encode(encoder)
self.buffer.seek(0)
decoder = BinaryCodec.createDecoder(self.buffer)
result = decoder.decode(BoolValue)
self.assertIsInstance(result, BoolValue)
self.assertTrue(result.isValue())
self.assertTrue(result.hasValue())
def testEncodeDecodeStructType(self):
encoder = BinaryCodec.createEncoder(self.buffer)
descriptors.StructType.DESCRIPTOR.encode(encoder)
# buffer.write('\n'.encode())
self.buffer.seek(0)
# print("Binary buffer length:", len(self.buffer.getvalue()))
decoder = BinaryCodec.createDecoder(self.buffer)
st = decoder.decode(descriptors.StructType)
self.assertIsInstance(st, descriptors.StructType)
self.assertEqual(st.getFullName(), descriptors.StructType.DESCRIPTOR.getFullName())
| <commit_before>'''Unit tests for CODA binary serialization'''
import io
import unittest
from coda import descriptors
from coda.io.binarycodec import BinaryCodec
from coda.runtime.descdata import BoolValue
class BinaryCodecTest(unittest.TestCase):
def setUp(self):
self.buffer = io.BytesIO()
# self.encoder = BinaryCodec.createEncoder(self.buffer)
def testEncodeDecodeBoolean(self):
source = BoolValue()
source.setValue(True)
encoder = BinaryCodec.createEncoder(self.buffer)
source.encode(encoder)
self.buffer.seek(0)
decoder = BinaryCodec.createDecoder(self.buffer)
result = decoder.decode(BoolValue)
self.assertIsInstance(result, BoolValue)
self.assertTrue(result.isValue())
self.assertTrue(result.hasValue())
def testEncodeDecodeStructType(self):
encoder = BinaryCodec.createEncoder(self.buffer)
descriptors.StructType.DESCRIPTOR.encode(encoder)
# buffer.write('\n'.encode())
self.buffer.seek(0)
# print("Binary buffer length:", len(self.buffer.getvalue()))
decoder = BinaryCodec.createDecoder(self.buffer)
decoder.setDebug(True)
st = decoder.decode(descriptors.StructType)
self.assertIsInstance(st, descriptors.StructType)
self.assertEqual(st.getFullName(), descriptors.StructType.DESCRIPTOR.getFullName())
<commit_msg>Disable debug mode in python unit test.<commit_after> | '''Unit tests for CODA binary serialization'''
import io
import unittest
from coda import descriptors
from coda.io.binarycodec import BinaryCodec
from coda.runtime.descdata import BoolValue
class BinaryCodecTest(unittest.TestCase):
def setUp(self):
self.buffer = io.BytesIO()
# self.encoder = BinaryCodec.createEncoder(self.buffer)
def testEncodeDecodeBoolean(self):
source = BoolValue()
source.setValue(True)
encoder = BinaryCodec.createEncoder(self.buffer)
source.encode(encoder)
self.buffer.seek(0)
decoder = BinaryCodec.createDecoder(self.buffer)
result = decoder.decode(BoolValue)
self.assertIsInstance(result, BoolValue)
self.assertTrue(result.isValue())
self.assertTrue(result.hasValue())
def testEncodeDecodeStructType(self):
encoder = BinaryCodec.createEncoder(self.buffer)
descriptors.StructType.DESCRIPTOR.encode(encoder)
# buffer.write('\n'.encode())
self.buffer.seek(0)
# print("Binary buffer length:", len(self.buffer.getvalue()))
decoder = BinaryCodec.createDecoder(self.buffer)
st = decoder.decode(descriptors.StructType)
self.assertIsInstance(st, descriptors.StructType)
self.assertEqual(st.getFullName(), descriptors.StructType.DESCRIPTOR.getFullName())
| '''Unit tests for CODA binary serialization'''
import io
import unittest
from coda import descriptors
from coda.io.binarycodec import BinaryCodec
from coda.runtime.descdata import BoolValue
class BinaryCodecTest(unittest.TestCase):
def setUp(self):
self.buffer = io.BytesIO()
# self.encoder = BinaryCodec.createEncoder(self.buffer)
def testEncodeDecodeBoolean(self):
source = BoolValue()
source.setValue(True)
encoder = BinaryCodec.createEncoder(self.buffer)
source.encode(encoder)
self.buffer.seek(0)
decoder = BinaryCodec.createDecoder(self.buffer)
result = decoder.decode(BoolValue)
self.assertIsInstance(result, BoolValue)
self.assertTrue(result.isValue())
self.assertTrue(result.hasValue())
def testEncodeDecodeStructType(self):
encoder = BinaryCodec.createEncoder(self.buffer)
descriptors.StructType.DESCRIPTOR.encode(encoder)
# buffer.write('\n'.encode())
self.buffer.seek(0)
# print("Binary buffer length:", len(self.buffer.getvalue()))
decoder = BinaryCodec.createDecoder(self.buffer)
decoder.setDebug(True)
st = decoder.decode(descriptors.StructType)
self.assertIsInstance(st, descriptors.StructType)
self.assertEqual(st.getFullName(), descriptors.StructType.DESCRIPTOR.getFullName())
Disable debug mode in python unit test.'''Unit tests for CODA binary serialization'''
import io
import unittest
from coda import descriptors
from coda.io.binarycodec import BinaryCodec
from coda.runtime.descdata import BoolValue
class BinaryCodecTest(unittest.TestCase):
def setUp(self):
self.buffer = io.BytesIO()
# self.encoder = BinaryCodec.createEncoder(self.buffer)
def testEncodeDecodeBoolean(self):
source = BoolValue()
source.setValue(True)
encoder = BinaryCodec.createEncoder(self.buffer)
source.encode(encoder)
self.buffer.seek(0)
decoder = BinaryCodec.createDecoder(self.buffer)
result = decoder.decode(BoolValue)
self.assertIsInstance(result, BoolValue)
self.assertTrue(result.isValue())
self.assertTrue(result.hasValue())
def testEncodeDecodeStructType(self):
encoder = BinaryCodec.createEncoder(self.buffer)
descriptors.StructType.DESCRIPTOR.encode(encoder)
# buffer.write('\n'.encode())
self.buffer.seek(0)
# print("Binary buffer length:", len(self.buffer.getvalue()))
decoder = BinaryCodec.createDecoder(self.buffer)
st = decoder.decode(descriptors.StructType)
self.assertIsInstance(st, descriptors.StructType)
self.assertEqual(st.getFullName(), descriptors.StructType.DESCRIPTOR.getFullName())
| <commit_before>'''Unit tests for CODA binary serialization'''
import io
import unittest
from coda import descriptors
from coda.io.binarycodec import BinaryCodec
from coda.runtime.descdata import BoolValue
class BinaryCodecTest(unittest.TestCase):
def setUp(self):
self.buffer = io.BytesIO()
# self.encoder = BinaryCodec.createEncoder(self.buffer)
def testEncodeDecodeBoolean(self):
source = BoolValue()
source.setValue(True)
encoder = BinaryCodec.createEncoder(self.buffer)
source.encode(encoder)
self.buffer.seek(0)
decoder = BinaryCodec.createDecoder(self.buffer)
result = decoder.decode(BoolValue)
self.assertIsInstance(result, BoolValue)
self.assertTrue(result.isValue())
self.assertTrue(result.hasValue())
def testEncodeDecodeStructType(self):
encoder = BinaryCodec.createEncoder(self.buffer)
descriptors.StructType.DESCRIPTOR.encode(encoder)
# buffer.write('\n'.encode())
self.buffer.seek(0)
# print("Binary buffer length:", len(self.buffer.getvalue()))
decoder = BinaryCodec.createDecoder(self.buffer)
decoder.setDebug(True)
st = decoder.decode(descriptors.StructType)
self.assertIsInstance(st, descriptors.StructType)
self.assertEqual(st.getFullName(), descriptors.StructType.DESCRIPTOR.getFullName())
<commit_msg>Disable debug mode in python unit test.<commit_after>'''Unit tests for CODA binary serialization'''
import io
import unittest
from coda import descriptors
from coda.io.binarycodec import BinaryCodec
from coda.runtime.descdata import BoolValue
class BinaryCodecTest(unittest.TestCase):
def setUp(self):
self.buffer = io.BytesIO()
# self.encoder = BinaryCodec.createEncoder(self.buffer)
def testEncodeDecodeBoolean(self):
source = BoolValue()
source.setValue(True)
encoder = BinaryCodec.createEncoder(self.buffer)
source.encode(encoder)
self.buffer.seek(0)
decoder = BinaryCodec.createDecoder(self.buffer)
result = decoder.decode(BoolValue)
self.assertIsInstance(result, BoolValue)
self.assertTrue(result.isValue())
self.assertTrue(result.hasValue())
def testEncodeDecodeStructType(self):
encoder = BinaryCodec.createEncoder(self.buffer)
descriptors.StructType.DESCRIPTOR.encode(encoder)
# buffer.write('\n'.encode())
self.buffer.seek(0)
# print("Binary buffer length:", len(self.buffer.getvalue()))
decoder = BinaryCodec.createDecoder(self.buffer)
st = decoder.decode(descriptors.StructType)
self.assertIsInstance(st, descriptors.StructType)
self.assertEqual(st.getFullName(), descriptors.StructType.DESCRIPTOR.getFullName())
|
22f01c8727377fcbeb68489c9658443dd9e367dc | flask-app/nickITAPI/app.py | flask-app/nickITAPI/app.py | from flask import Flask, Response, request, jsonify
from ldap3 import Server, Connection, ALL
import json
import re
app = Flask(__name__) # create the application instance :)
@app.route('/')
def hello_world():
return 'Hello, World!'
def parse_query(query):
nick_list = re.compile('\s*[,]+\s*').split(query)
nick_list = list(filter(None, nick_list))
return nick_list
@app.route('/search/<query>')
def handle_search(query=None):
response = Flask.make_response(app, jsonify(nick_list=parse_query(query)))
response.headers['Access-Control-Allow-Origin'] = '*'
return response
| from flask import Flask, Response, request, jsonify, abort, render_template
from ldap3 import Server, Connection, ALL
import json
import re
app = Flask(__name__) # create the application instance :)
@app.route('/')
def hello_world():
return 'Hello, World!'
def parse_query(query):
nick_list = re.compile('\s*[,]+\s*').split(query)
nick_list = list(filter(None, nick_list))
return nick_list
@app.route('/search/<query>')
def handle_search(query=None):
rv = jsonify(nick_list=parse_query(query))
response = Flask.make_response(app, rv)
response.headers['Access-Control-Allow-Origin'] = '*'
return response
@app.route('/search/')
def handle_empty_search():
abort(400)
@app.errorhandler(400)
def bad_request(error):
response = Flask.make_response(app, 'bad request')
response.headers['Access-Control-Allow-Origin'] = '*'
response.status = '400'
return response
| Add handling of an empty search | Add handling of an empty search
| Python | mit | cthit/nickIT,cthit/nickIT,cthit/nickIT | from flask import Flask, Response, request, jsonify
from ldap3 import Server, Connection, ALL
import json
import re
app = Flask(__name__) # create the application instance :)
@app.route('/')
def hello_world():
return 'Hello, World!'
def parse_query(query):
nick_list = re.compile('\s*[,]+\s*').split(query)
nick_list = list(filter(None, nick_list))
return nick_list
@app.route('/search/<query>')
def handle_search(query=None):
response = Flask.make_response(app, jsonify(nick_list=parse_query(query)))
response.headers['Access-Control-Allow-Origin'] = '*'
return response
Add handling of an empty search | from flask import Flask, Response, request, jsonify, abort, render_template
from ldap3 import Server, Connection, ALL
import json
import re
app = Flask(__name__) # create the application instance :)
@app.route('/')
def hello_world():
return 'Hello, World!'
def parse_query(query):
nick_list = re.compile('\s*[,]+\s*').split(query)
nick_list = list(filter(None, nick_list))
return nick_list
@app.route('/search/<query>')
def handle_search(query=None):
rv = jsonify(nick_list=parse_query(query))
response = Flask.make_response(app, rv)
response.headers['Access-Control-Allow-Origin'] = '*'
return response
@app.route('/search/')
def handle_empty_search():
abort(400)
@app.errorhandler(400)
def bad_request(error):
response = Flask.make_response(app, 'bad request')
response.headers['Access-Control-Allow-Origin'] = '*'
response.status = '400'
return response
| <commit_before>from flask import Flask, Response, request, jsonify
from ldap3 import Server, Connection, ALL
import json
import re
app = Flask(__name__) # create the application instance :)
@app.route('/')
def hello_world():
return 'Hello, World!'
def parse_query(query):
nick_list = re.compile('\s*[,]+\s*').split(query)
nick_list = list(filter(None, nick_list))
return nick_list
@app.route('/search/<query>')
def handle_search(query=None):
response = Flask.make_response(app, jsonify(nick_list=parse_query(query)))
response.headers['Access-Control-Allow-Origin'] = '*'
return response
<commit_msg>Add handling of an empty search<commit_after> | from flask import Flask, Response, request, jsonify, abort, render_template
from ldap3 import Server, Connection, ALL
import json
import re
app = Flask(__name__) # create the application instance :)
@app.route('/')
def hello_world():
return 'Hello, World!'
def parse_query(query):
nick_list = re.compile('\s*[,]+\s*').split(query)
nick_list = list(filter(None, nick_list))
return nick_list
@app.route('/search/<query>')
def handle_search(query=None):
rv = jsonify(nick_list=parse_query(query))
response = Flask.make_response(app, rv)
response.headers['Access-Control-Allow-Origin'] = '*'
return response
@app.route('/search/')
def handle_empty_search():
abort(400)
@app.errorhandler(400)
def bad_request(error):
response = Flask.make_response(app, 'bad request')
response.headers['Access-Control-Allow-Origin'] = '*'
response.status = '400'
return response
| from flask import Flask, Response, request, jsonify
from ldap3 import Server, Connection, ALL
import json
import re
app = Flask(__name__) # create the application instance :)
@app.route('/')
def hello_world():
return 'Hello, World!'
def parse_query(query):
nick_list = re.compile('\s*[,]+\s*').split(query)
nick_list = list(filter(None, nick_list))
return nick_list
@app.route('/search/<query>')
def handle_search(query=None):
response = Flask.make_response(app, jsonify(nick_list=parse_query(query)))
response.headers['Access-Control-Allow-Origin'] = '*'
return response
Add handling of an empty searchfrom flask import Flask, Response, request, jsonify, abort, render_template
from ldap3 import Server, Connection, ALL
import json
import re
app = Flask(__name__) # create the application instance :)
@app.route('/')
def hello_world():
return 'Hello, World!'
def parse_query(query):
nick_list = re.compile('\s*[,]+\s*').split(query)
nick_list = list(filter(None, nick_list))
return nick_list
@app.route('/search/<query>')
def handle_search(query=None):
rv = jsonify(nick_list=parse_query(query))
response = Flask.make_response(app, rv)
response.headers['Access-Control-Allow-Origin'] = '*'
return response
@app.route('/search/')
def handle_empty_search():
abort(400)
@app.errorhandler(400)
def bad_request(error):
response = Flask.make_response(app, 'bad request')
response.headers['Access-Control-Allow-Origin'] = '*'
response.status = '400'
return response
| <commit_before>from flask import Flask, Response, request, jsonify
from ldap3 import Server, Connection, ALL
import json
import re
app = Flask(__name__) # create the application instance :)
@app.route('/')
def hello_world():
return 'Hello, World!'
def parse_query(query):
nick_list = re.compile('\s*[,]+\s*').split(query)
nick_list = list(filter(None, nick_list))
return nick_list
@app.route('/search/<query>')
def handle_search(query=None):
response = Flask.make_response(app, jsonify(nick_list=parse_query(query)))
response.headers['Access-Control-Allow-Origin'] = '*'
return response
<commit_msg>Add handling of an empty search<commit_after>from flask import Flask, Response, request, jsonify, abort, render_template
from ldap3 import Server, Connection, ALL
import json
import re
app = Flask(__name__) # create the application instance :)
@app.route('/')
def hello_world():
return 'Hello, World!'
def parse_query(query):
nick_list = re.compile('\s*[,]+\s*').split(query)
nick_list = list(filter(None, nick_list))
return nick_list
@app.route('/search/<query>')
def handle_search(query=None):
rv = jsonify(nick_list=parse_query(query))
response = Flask.make_response(app, rv)
response.headers['Access-Control-Allow-Origin'] = '*'
return response
@app.route('/search/')
def handle_empty_search():
abort(400)
@app.errorhandler(400)
def bad_request(error):
response = Flask.make_response(app, 'bad request')
response.headers['Access-Control-Allow-Origin'] = '*'
response.status = '400'
return response
|
bbcbcefedcbff4cfd7a16cbfa904b42462f1ee88 | python/ql/test/query-tests/Variables/unused/type_annotation_fp.py | python/ql/test/query-tests/Variables/unused/type_annotation_fp.py | # FP Type annotation counts as redefinition
# See https://github.com/Semmle/ql/issues/2652
def type_annotation(x):
foo = 5
if x:
foo : int
do_stuff_with(foo)
else:
foo : float
do_other_stuff_with(foo)
| # FP Type annotation counts as redefinition
# See https://github.com/Semmle/ql/issues/2652
def type_annotation(x):
foo = 5
if x:
foo : int
do_stuff_with(foo)
else:
foo : float
do_other_stuff_with(foo)
def type_annotation_fn():
# False negative: the value of `bar` is never used, but this is masked by the presence of the type annotation.
bar = 5
bar : int
| Add false negative test case. | Python: Add false negative test case.
| Python | mit | github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql,github/codeql | # FP Type annotation counts as redefinition
# See https://github.com/Semmle/ql/issues/2652
def type_annotation(x):
foo = 5
if x:
foo : int
do_stuff_with(foo)
else:
foo : float
do_other_stuff_with(foo)
Python: Add false negative test case. | # FP Type annotation counts as redefinition
# See https://github.com/Semmle/ql/issues/2652
def type_annotation(x):
foo = 5
if x:
foo : int
do_stuff_with(foo)
else:
foo : float
do_other_stuff_with(foo)
def type_annotation_fn():
# False negative: the value of `bar` is never used, but this is masked by the presence of the type annotation.
bar = 5
bar : int
| <commit_before># FP Type annotation counts as redefinition
# See https://github.com/Semmle/ql/issues/2652
def type_annotation(x):
foo = 5
if x:
foo : int
do_stuff_with(foo)
else:
foo : float
do_other_stuff_with(foo)
<commit_msg>Python: Add false negative test case.<commit_after> | # FP Type annotation counts as redefinition
# See https://github.com/Semmle/ql/issues/2652
def type_annotation(x):
foo = 5
if x:
foo : int
do_stuff_with(foo)
else:
foo : float
do_other_stuff_with(foo)
def type_annotation_fn():
# False negative: the value of `bar` is never used, but this is masked by the presence of the type annotation.
bar = 5
bar : int
| # FP Type annotation counts as redefinition
# See https://github.com/Semmle/ql/issues/2652
def type_annotation(x):
foo = 5
if x:
foo : int
do_stuff_with(foo)
else:
foo : float
do_other_stuff_with(foo)
Python: Add false negative test case.# FP Type annotation counts as redefinition
# See https://github.com/Semmle/ql/issues/2652
def type_annotation(x):
foo = 5
if x:
foo : int
do_stuff_with(foo)
else:
foo : float
do_other_stuff_with(foo)
def type_annotation_fn():
# False negative: the value of `bar` is never used, but this is masked by the presence of the type annotation.
bar = 5
bar : int
| <commit_before># FP Type annotation counts as redefinition
# See https://github.com/Semmle/ql/issues/2652
def type_annotation(x):
foo = 5
if x:
foo : int
do_stuff_with(foo)
else:
foo : float
do_other_stuff_with(foo)
<commit_msg>Python: Add false negative test case.<commit_after># FP Type annotation counts as redefinition
# See https://github.com/Semmle/ql/issues/2652
def type_annotation(x):
foo = 5
if x:
foo : int
do_stuff_with(foo)
else:
foo : float
do_other_stuff_with(foo)
def type_annotation_fn():
# False negative: the value of `bar` is never used, but this is masked by the presence of the type annotation.
bar = 5
bar : int
|
4889f26d51cafca6e36d29e6bcf62f4af6c6712d | openfisca_country_template/entities.py | openfisca_country_template/entities.py | # -*- coding: utf-8 -*-
# This file defines the entities needed by our legislation.
from openfisca_core.entities import build_entity
Household = build_entity(
key = "household",
plural = "households",
label = u'Household',
roles = [
{
'key': 'parent',
'plural': 'parents',
'label': u'Parents',
'max': 2,
'subroles': ['first_parent', 'second_parent']
},
{
'key': 'child',
'plural': 'children',
'label': u'Child',
}
]
)
Person = build_entity(
key = "person",
plural = "persons",
label = u'Person',
is_person = True,
)
entities = [Household, Person]
| # -*- coding: utf-8 -*-
# This file defines the entities needed by our legislation.
from openfisca_core.entities import build_entity
Household = build_entity(
key = "household",
plural = "households",
label = u'Household',
roles = [
{
'key': 'parent',
'plural': 'parents',
'label': u'Parents',
'max': 2,
'subroles': ['first_parent', 'second_parent']
},
{
'key': 'child',
'plural': 'children',
'label': u'Child',
}
]
)
"""
A group entity.
Contains multiple natural persons with specific roles.
From zero to two parents with 'first_parent' and 'second_parent' subroles.
And an unlimited number of children.
"""
Person = build_entity(
key = "person",
plural = "persons",
label = u'Person',
is_person = True,
)
"""
The minimal legal entity on which a legislation might be applied.
Represents a natural person.
"""
entities = [Household, Person]
| Add docstring on every entity | Add docstring on every entity | Python | agpl-3.0 | openfisca/country-template,openfisca/country-template | # -*- coding: utf-8 -*-
# This file defines the entities needed by our legislation.
from openfisca_core.entities import build_entity
Household = build_entity(
key = "household",
plural = "households",
label = u'Household',
roles = [
{
'key': 'parent',
'plural': 'parents',
'label': u'Parents',
'max': 2,
'subroles': ['first_parent', 'second_parent']
},
{
'key': 'child',
'plural': 'children',
'label': u'Child',
}
]
)
Person = build_entity(
key = "person",
plural = "persons",
label = u'Person',
is_person = True,
)
entities = [Household, Person]
Add docstring on every entity | # -*- coding: utf-8 -*-
# This file defines the entities needed by our legislation.
from openfisca_core.entities import build_entity
Household = build_entity(
key = "household",
plural = "households",
label = u'Household',
roles = [
{
'key': 'parent',
'plural': 'parents',
'label': u'Parents',
'max': 2,
'subroles': ['first_parent', 'second_parent']
},
{
'key': 'child',
'plural': 'children',
'label': u'Child',
}
]
)
"""
A group entity.
Contains multiple natural persons with specific roles.
From zero to two parents with 'first_parent' and 'second_parent' subroles.
And an unlimited number of children.
"""
Person = build_entity(
key = "person",
plural = "persons",
label = u'Person',
is_person = True,
)
"""
The minimal legal entity on which a legislation might be applied.
Represents a natural person.
"""
entities = [Household, Person]
| <commit_before># -*- coding: utf-8 -*-
# This file defines the entities needed by our legislation.
from openfisca_core.entities import build_entity
Household = build_entity(
key = "household",
plural = "households",
label = u'Household',
roles = [
{
'key': 'parent',
'plural': 'parents',
'label': u'Parents',
'max': 2,
'subroles': ['first_parent', 'second_parent']
},
{
'key': 'child',
'plural': 'children',
'label': u'Child',
}
]
)
Person = build_entity(
key = "person",
plural = "persons",
label = u'Person',
is_person = True,
)
entities = [Household, Person]
<commit_msg>Add docstring on every entity<commit_after> | # -*- coding: utf-8 -*-
# This file defines the entities needed by our legislation.
from openfisca_core.entities import build_entity
Household = build_entity(
key = "household",
plural = "households",
label = u'Household',
roles = [
{
'key': 'parent',
'plural': 'parents',
'label': u'Parents',
'max': 2,
'subroles': ['first_parent', 'second_parent']
},
{
'key': 'child',
'plural': 'children',
'label': u'Child',
}
]
)
"""
A group entity.
Contains multiple natural persons with specific roles.
From zero to two parents with 'first_parent' and 'second_parent' subroles.
And an unlimited number of children.
"""
Person = build_entity(
key = "person",
plural = "persons",
label = u'Person',
is_person = True,
)
"""
The minimal legal entity on which a legislation might be applied.
Represents a natural person.
"""
entities = [Household, Person]
| # -*- coding: utf-8 -*-
# This file defines the entities needed by our legislation.
from openfisca_core.entities import build_entity
Household = build_entity(
key = "household",
plural = "households",
label = u'Household',
roles = [
{
'key': 'parent',
'plural': 'parents',
'label': u'Parents',
'max': 2,
'subroles': ['first_parent', 'second_parent']
},
{
'key': 'child',
'plural': 'children',
'label': u'Child',
}
]
)
Person = build_entity(
key = "person",
plural = "persons",
label = u'Person',
is_person = True,
)
entities = [Household, Person]
Add docstring on every entity# -*- coding: utf-8 -*-
# This file defines the entities needed by our legislation.
from openfisca_core.entities import build_entity
Household = build_entity(
key = "household",
plural = "households",
label = u'Household',
roles = [
{
'key': 'parent',
'plural': 'parents',
'label': u'Parents',
'max': 2,
'subroles': ['first_parent', 'second_parent']
},
{
'key': 'child',
'plural': 'children',
'label': u'Child',
}
]
)
"""
A group entity.
Contains multiple natural persons with specific roles.
From zero to two parents with 'first_parent' and 'second_parent' subroles.
And an unlimited number of children.
"""
Person = build_entity(
key = "person",
plural = "persons",
label = u'Person',
is_person = True,
)
"""
The minimal legal entity on which a legislation might be applied.
Represents a natural person.
"""
entities = [Household, Person]
| <commit_before># -*- coding: utf-8 -*-
# This file defines the entities needed by our legislation.
from openfisca_core.entities import build_entity
Household = build_entity(
key = "household",
plural = "households",
label = u'Household',
roles = [
{
'key': 'parent',
'plural': 'parents',
'label': u'Parents',
'max': 2,
'subroles': ['first_parent', 'second_parent']
},
{
'key': 'child',
'plural': 'children',
'label': u'Child',
}
]
)
Person = build_entity(
key = "person",
plural = "persons",
label = u'Person',
is_person = True,
)
entities = [Household, Person]
<commit_msg>Add docstring on every entity<commit_after># -*- coding: utf-8 -*-
# This file defines the entities needed by our legislation.
from openfisca_core.entities import build_entity
Household = build_entity(
key = "household",
plural = "households",
label = u'Household',
roles = [
{
'key': 'parent',
'plural': 'parents',
'label': u'Parents',
'max': 2,
'subroles': ['first_parent', 'second_parent']
},
{
'key': 'child',
'plural': 'children',
'label': u'Child',
}
]
)
"""
A group entity.
Contains multiple natural persons with specific roles.
From zero to two parents with 'first_parent' and 'second_parent' subroles.
And an unlimited number of children.
"""
Person = build_entity(
key = "person",
plural = "persons",
label = u'Person',
is_person = True,
)
"""
The minimal legal entity on which a legislation might be applied.
Represents a natural person.
"""
entities = [Household, Person]
|
00f270137c460361537f979adc9da18a38324f2f | openremote-server-python/openremote.py | openremote-server-python/openremote.py | #!/usr/bin/env python
import sys
import re
import subprocess
import urlparse
import platform
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class OpenRemoteHandler(BaseHTTPRequestHandler):
def do_GET(self):
url = ''
try:
request_url = urlparse.urlsplit(self.path)
if re.search('openurl', request_url.path):
query = urlparse.parse_qs(request_url.query)
url = query["url"][0]
subprocess.check_call(["xdg-open", url])
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write("Opened %s on %s" % (url, platform.node()))
else:
self.send_error(404)
except:
self.send_error(500,'Failed to open url: %s, error: %s' % (url, str(sys.exc_info()[1])))
def main():
try:
server = HTTPServer(('', 8080), OpenRemoteHandler)
print 'started httpserver...'
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
server.socket.close()
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import re
import webbrowser
import urlparse
import platform
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class OpenRemoteHandler(BaseHTTPRequestHandler):
def do_GET(self):
url = ''
try:
request_url = urlparse.urlsplit(self.path)
if re.search('openurl', request_url.path):
query = urlparse.parse_qs(request_url.query)
url = query["url"][0]
webbrowser.open_new_tab(url)
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write("Opened %s on %s" % (url, platform.node()))
else:
self.send_error(404)
except:
self.send_error(500,'Failed to open url: %s, error: %s' % (url, str(sys.exc_info()[1])))
def main():
try:
server = HTTPServer(('', 8080), OpenRemoteHandler)
print 'started httpserver...'
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
server.socket.close()
if __name__ == '__main__':
main()
| Use the webserver module, which means the script is now cross-platform. Hooray, Python\! | Use the webserver module, which means the script is now cross-platform. Hooray, Python\!
| Python | mit | chuckbjones/openremote,chuckbjones/openremote,chuckbjones/openremote,chuckbjones/openremote | #!/usr/bin/env python
import sys
import re
import subprocess
import urlparse
import platform
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class OpenRemoteHandler(BaseHTTPRequestHandler):
def do_GET(self):
url = ''
try:
request_url = urlparse.urlsplit(self.path)
if re.search('openurl', request_url.path):
query = urlparse.parse_qs(request_url.query)
url = query["url"][0]
subprocess.check_call(["xdg-open", url])
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write("Opened %s on %s" % (url, platform.node()))
else:
self.send_error(404)
except:
self.send_error(500,'Failed to open url: %s, error: %s' % (url, str(sys.exc_info()[1])))
def main():
try:
server = HTTPServer(('', 8080), OpenRemoteHandler)
print 'started httpserver...'
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
server.socket.close()
if __name__ == '__main__':
main()
Use the webserver module, which means the script is now cross-platform. Hooray, Python\! | #!/usr/bin/env python
import re
import webbrowser
import urlparse
import platform
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class OpenRemoteHandler(BaseHTTPRequestHandler):
def do_GET(self):
url = ''
try:
request_url = urlparse.urlsplit(self.path)
if re.search('openurl', request_url.path):
query = urlparse.parse_qs(request_url.query)
url = query["url"][0]
webbrowser.open_new_tab(url)
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write("Opened %s on %s" % (url, platform.node()))
else:
self.send_error(404)
except:
self.send_error(500,'Failed to open url: %s, error: %s' % (url, str(sys.exc_info()[1])))
def main():
try:
server = HTTPServer(('', 8080), OpenRemoteHandler)
print 'started httpserver...'
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
server.socket.close()
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
import sys
import re
import subprocess
import urlparse
import platform
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class OpenRemoteHandler(BaseHTTPRequestHandler):
def do_GET(self):
url = ''
try:
request_url = urlparse.urlsplit(self.path)
if re.search('openurl', request_url.path):
query = urlparse.parse_qs(request_url.query)
url = query["url"][0]
subprocess.check_call(["xdg-open", url])
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write("Opened %s on %s" % (url, platform.node()))
else:
self.send_error(404)
except:
self.send_error(500,'Failed to open url: %s, error: %s' % (url, str(sys.exc_info()[1])))
def main():
try:
server = HTTPServer(('', 8080), OpenRemoteHandler)
print 'started httpserver...'
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
server.socket.close()
if __name__ == '__main__':
main()
<commit_msg>Use the webserver module, which means the script is now cross-platform. Hooray, Python\!<commit_after> | #!/usr/bin/env python
import re
import webbrowser
import urlparse
import platform
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class OpenRemoteHandler(BaseHTTPRequestHandler):
def do_GET(self):
url = ''
try:
request_url = urlparse.urlsplit(self.path)
if re.search('openurl', request_url.path):
query = urlparse.parse_qs(request_url.query)
url = query["url"][0]
webbrowser.open_new_tab(url)
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write("Opened %s on %s" % (url, platform.node()))
else:
self.send_error(404)
except:
self.send_error(500,'Failed to open url: %s, error: %s' % (url, str(sys.exc_info()[1])))
def main():
try:
server = HTTPServer(('', 8080), OpenRemoteHandler)
print 'started httpserver...'
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
server.socket.close()
if __name__ == '__main__':
main()
| #!/usr/bin/env python
import sys
import re
import subprocess
import urlparse
import platform
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class OpenRemoteHandler(BaseHTTPRequestHandler):
def do_GET(self):
url = ''
try:
request_url = urlparse.urlsplit(self.path)
if re.search('openurl', request_url.path):
query = urlparse.parse_qs(request_url.query)
url = query["url"][0]
subprocess.check_call(["xdg-open", url])
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write("Opened %s on %s" % (url, platform.node()))
else:
self.send_error(404)
except:
self.send_error(500,'Failed to open url: %s, error: %s' % (url, str(sys.exc_info()[1])))
def main():
try:
server = HTTPServer(('', 8080), OpenRemoteHandler)
print 'started httpserver...'
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
server.socket.close()
if __name__ == '__main__':
main()
Use the webserver module, which means the script is now cross-platform. Hooray, Python\!#!/usr/bin/env python
import re
import webbrowser
import urlparse
import platform
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class OpenRemoteHandler(BaseHTTPRequestHandler):
def do_GET(self):
url = ''
try:
request_url = urlparse.urlsplit(self.path)
if re.search('openurl', request_url.path):
query = urlparse.parse_qs(request_url.query)
url = query["url"][0]
webbrowser.open_new_tab(url)
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write("Opened %s on %s" % (url, platform.node()))
else:
self.send_error(404)
except:
self.send_error(500,'Failed to open url: %s, error: %s' % (url, str(sys.exc_info()[1])))
def main():
try:
server = HTTPServer(('', 8080), OpenRemoteHandler)
print 'started httpserver...'
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
server.socket.close()
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python
import sys
import re
import subprocess
import urlparse
import platform
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class OpenRemoteHandler(BaseHTTPRequestHandler):
def do_GET(self):
url = ''
try:
request_url = urlparse.urlsplit(self.path)
if re.search('openurl', request_url.path):
query = urlparse.parse_qs(request_url.query)
url = query["url"][0]
subprocess.check_call(["xdg-open", url])
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write("Opened %s on %s" % (url, platform.node()))
else:
self.send_error(404)
except:
self.send_error(500,'Failed to open url: %s, error: %s' % (url, str(sys.exc_info()[1])))
def main():
try:
server = HTTPServer(('', 8080), OpenRemoteHandler)
print 'started httpserver...'
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
server.socket.close()
if __name__ == '__main__':
main()
<commit_msg>Use the webserver module, which means the script is now cross-platform. Hooray, Python\!<commit_after>#!/usr/bin/env python
import re
import webbrowser
import urlparse
import platform
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
class OpenRemoteHandler(BaseHTTPRequestHandler):
def do_GET(self):
url = ''
try:
request_url = urlparse.urlsplit(self.path)
if re.search('openurl', request_url.path):
query = urlparse.parse_qs(request_url.query)
url = query["url"][0]
webbrowser.open_new_tab(url)
self.send_response(200)
self.send_header('Content-type', 'text/plain')
self.end_headers()
self.wfile.write("Opened %s on %s" % (url, platform.node()))
else:
self.send_error(404)
except:
self.send_error(500,'Failed to open url: %s, error: %s' % (url, str(sys.exc_info()[1])))
def main():
try:
server = HTTPServer(('', 8080), OpenRemoteHandler)
print 'started httpserver...'
server.serve_forever()
except KeyboardInterrupt:
print '^C received, shutting down server'
server.socket.close()
if __name__ == '__main__':
main()
|
d4070aaee0e086b934912982d77126af53e9ade8 | src/core/tests.py | src/core/tests.py | # -*- coding: utf-8 -*-
from django.test import TestCase
from .models import Session
from .models import Spec
# if self.name == '':
# names = Session.objects.filter(name__startswith=self.UNTITLED_PREFIX, user=self.user).only('name')
# name_suffix = 1
# if len(names):
# names = [x.name for x in names]
# names = [int(x.replace(self.UNTITLED_PREFIX, '')) for x in names if x.replace(self.UNTITLED_PREFIX, '').isdigit()]
# names.sort()
# name_suffix = names[-1] + 1
# self.name = self.UNTITLED_PREFIX + str(name_suffix)
# super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
def get_session_name():
return 'Untitled 1'
class SessionNameTests(TestCase):
def test_get_first_session_name(self):
self.assertEquals(get_session_name(), 'Untitled 1')
| # -*- coding: utf-8 -*-
from django.test import TestCase
from .models import Session
from .models import Spec
# if self.name == '':
# names = Session.objects.filter(name__startswith=self.UNTITLED_PREFIX, user=self.user).only('name')
# name_suffix = 1
# if len(names):
# names = [x.name for x in names]
# names = [int(x.replace(self.UNTITLED_PREFIX, '')) for x in names if x.replace(self.UNTITLED_PREFIX, '').isdigit()]
# names.sort()
# name_suffix = names[-1] + 1
# self.name = self.UNTITLED_PREFIX + str(name_suffix)
# super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
def get_session_name(name, existing_names):
new_name = 'Untitled 1'
if new_name in existing_names:
new_name = 'Untitled 2'
return new_name
class UntitledSessionNamesTests(TestCase):
def test_get_first_session_name(self):
self.assertEquals(get_session_name('', []), 'Untitled 1')
def test_get_second_session_name(self):
self.assertEquals(get_session_name('', ['Untitled 1']), 'Untitled 2')
| Create 'Untitled 2' session name. | Create 'Untitled 2' session name. | Python | mit | uxebu/tddbin-backend,uxebu/tddbin-backend | # -*- coding: utf-8 -*-
from django.test import TestCase
from .models import Session
from .models import Spec
# if self.name == '':
# names = Session.objects.filter(name__startswith=self.UNTITLED_PREFIX, user=self.user).only('name')
# name_suffix = 1
# if len(names):
# names = [x.name for x in names]
# names = [int(x.replace(self.UNTITLED_PREFIX, '')) for x in names if x.replace(self.UNTITLED_PREFIX, '').isdigit()]
# names.sort()
# name_suffix = names[-1] + 1
# self.name = self.UNTITLED_PREFIX + str(name_suffix)
# super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
def get_session_name():
return 'Untitled 1'
class SessionNameTests(TestCase):
def test_get_first_session_name(self):
self.assertEquals(get_session_name(), 'Untitled 1')
Create 'Untitled 2' session name. | # -*- coding: utf-8 -*-
from django.test import TestCase
from .models import Session
from .models import Spec
# if self.name == '':
# names = Session.objects.filter(name__startswith=self.UNTITLED_PREFIX, user=self.user).only('name')
# name_suffix = 1
# if len(names):
# names = [x.name for x in names]
# names = [int(x.replace(self.UNTITLED_PREFIX, '')) for x in names if x.replace(self.UNTITLED_PREFIX, '').isdigit()]
# names.sort()
# name_suffix = names[-1] + 1
# self.name = self.UNTITLED_PREFIX + str(name_suffix)
# super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
def get_session_name(name, existing_names):
new_name = 'Untitled 1'
if new_name in existing_names:
new_name = 'Untitled 2'
return new_name
class UntitledSessionNamesTests(TestCase):
def test_get_first_session_name(self):
self.assertEquals(get_session_name('', []), 'Untitled 1')
def test_get_second_session_name(self):
self.assertEquals(get_session_name('', ['Untitled 1']), 'Untitled 2')
| <commit_before># -*- coding: utf-8 -*-
from django.test import TestCase
from .models import Session
from .models import Spec
# if self.name == '':
# names = Session.objects.filter(name__startswith=self.UNTITLED_PREFIX, user=self.user).only('name')
# name_suffix = 1
# if len(names):
# names = [x.name for x in names]
# names = [int(x.replace(self.UNTITLED_PREFIX, '')) for x in names if x.replace(self.UNTITLED_PREFIX, '').isdigit()]
# names.sort()
# name_suffix = names[-1] + 1
# self.name = self.UNTITLED_PREFIX + str(name_suffix)
# super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
def get_session_name():
return 'Untitled 1'
class SessionNameTests(TestCase):
def test_get_first_session_name(self):
self.assertEquals(get_session_name(), 'Untitled 1')
<commit_msg>Create 'Untitled 2' session name.<commit_after> | # -*- coding: utf-8 -*-
from django.test import TestCase
from .models import Session
from .models import Spec
# if self.name == '':
# names = Session.objects.filter(name__startswith=self.UNTITLED_PREFIX, user=self.user).only('name')
# name_suffix = 1
# if len(names):
# names = [x.name for x in names]
# names = [int(x.replace(self.UNTITLED_PREFIX, '')) for x in names if x.replace(self.UNTITLED_PREFIX, '').isdigit()]
# names.sort()
# name_suffix = names[-1] + 1
# self.name = self.UNTITLED_PREFIX + str(name_suffix)
# super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
def get_session_name(name, existing_names):
new_name = 'Untitled 1'
if new_name in existing_names:
new_name = 'Untitled 2'
return new_name
class UntitledSessionNamesTests(TestCase):
def test_get_first_session_name(self):
self.assertEquals(get_session_name('', []), 'Untitled 1')
def test_get_second_session_name(self):
self.assertEquals(get_session_name('', ['Untitled 1']), 'Untitled 2')
| # -*- coding: utf-8 -*-
from django.test import TestCase
from .models import Session
from .models import Spec
# if self.name == '':
# names = Session.objects.filter(name__startswith=self.UNTITLED_PREFIX, user=self.user).only('name')
# name_suffix = 1
# if len(names):
# names = [x.name for x in names]
# names = [int(x.replace(self.UNTITLED_PREFIX, '')) for x in names if x.replace(self.UNTITLED_PREFIX, '').isdigit()]
# names.sort()
# name_suffix = names[-1] + 1
# self.name = self.UNTITLED_PREFIX + str(name_suffix)
# super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
def get_session_name():
return 'Untitled 1'
class SessionNameTests(TestCase):
def test_get_first_session_name(self):
self.assertEquals(get_session_name(), 'Untitled 1')
Create 'Untitled 2' session name.# -*- coding: utf-8 -*-
from django.test import TestCase
from .models import Session
from .models import Spec
# if self.name == '':
# names = Session.objects.filter(name__startswith=self.UNTITLED_PREFIX, user=self.user).only('name')
# name_suffix = 1
# if len(names):
# names = [x.name for x in names]
# names = [int(x.replace(self.UNTITLED_PREFIX, '')) for x in names if x.replace(self.UNTITLED_PREFIX, '').isdigit()]
# names.sort()
# name_suffix = names[-1] + 1
# self.name = self.UNTITLED_PREFIX + str(name_suffix)
# super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
def get_session_name(name, existing_names):
new_name = 'Untitled 1'
if new_name in existing_names:
new_name = 'Untitled 2'
return new_name
class UntitledSessionNamesTests(TestCase):
def test_get_first_session_name(self):
self.assertEquals(get_session_name('', []), 'Untitled 1')
def test_get_second_session_name(self):
self.assertEquals(get_session_name('', ['Untitled 1']), 'Untitled 2')
| <commit_before># -*- coding: utf-8 -*-
from django.test import TestCase
from .models import Session
from .models import Spec
# if self.name == '':
# names = Session.objects.filter(name__startswith=self.UNTITLED_PREFIX, user=self.user).only('name')
# name_suffix = 1
# if len(names):
# names = [x.name for x in names]
# names = [int(x.replace(self.UNTITLED_PREFIX, '')) for x in names if x.replace(self.UNTITLED_PREFIX, '').isdigit()]
# names.sort()
# name_suffix = names[-1] + 1
# self.name = self.UNTITLED_PREFIX + str(name_suffix)
# super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
def get_session_name():
return 'Untitled 1'
class SessionNameTests(TestCase):
def test_get_first_session_name(self):
self.assertEquals(get_session_name(), 'Untitled 1')
<commit_msg>Create 'Untitled 2' session name.<commit_after># -*- coding: utf-8 -*-
from django.test import TestCase
from .models import Session
from .models import Spec
# if self.name == '':
# names = Session.objects.filter(name__startswith=self.UNTITLED_PREFIX, user=self.user).only('name')
# name_suffix = 1
# if len(names):
# names = [x.name for x in names]
# names = [int(x.replace(self.UNTITLED_PREFIX, '')) for x in names if x.replace(self.UNTITLED_PREFIX, '').isdigit()]
# names.sort()
# name_suffix = names[-1] + 1
# self.name = self.UNTITLED_PREFIX + str(name_suffix)
# super(Session, self).save(*args, **kwargs) # Call the "real" save() method.
def get_session_name(name, existing_names):
new_name = 'Untitled 1'
if new_name in existing_names:
new_name = 'Untitled 2'
return new_name
class UntitledSessionNamesTests(TestCase):
def test_get_first_session_name(self):
self.assertEquals(get_session_name('', []), 'Untitled 1')
def test_get_second_session_name(self):
self.assertEquals(get_session_name('', ['Untitled 1']), 'Untitled 2')
|
57f3141bf61fe74cc4ba3472f42640e0fada0f44 | tests/spin_one_half_gen_test.py | tests/spin_one_half_gen_test.py | """Tests for the general model with explicit one-half spin."""
import pytest
from drudge import UP, DOWN, SpinOneHalfGenDrudge
@pytest.fixture(scope='module')
def dr(spark_ctx):
"""The fixture with a general spin one-half drudge."""
return SpinOneHalfGenDrudge(spark_ctx)
def test_spin_one_half_general_drudge_has_properties(dr):
"""Test the basic properties of the drudge."""
assert dr.spin_vals == [UP, DOWN]
assert dr.orig_ham.n_terms == 2 + 4
assert dr.ham.n_terms == 2 + 3
| """Tests for the general model with explicit one-half spin."""
import pytest
from sympy import IndexedBase, symbols, Rational
from drudge import CR, AN, UP, DOWN, SpinOneHalfGenDrudge
@pytest.fixture(scope='module')
def dr(spark_ctx):
"""The fixture with a general spin one-half drudge."""
return SpinOneHalfGenDrudge(spark_ctx)
def test_spin_one_half_general_drudge_has_properties(dr):
"""Test the basic properties of the drudge."""
assert dr.spin_vals == [UP, DOWN]
assert dr.orig_ham.n_terms == 2 + 4
assert dr.ham.n_terms == 2 + 3
def test_restricted_hf_theory(dr):
"""Test the derivation of restricted HF theory."""
p = dr.names
c_dag = p.c_dag
c_ = p.c_
a, b, c, d = p.L_dumms[:4]
alpha = symbols('alpha')
# Concrete summation.
rot = dr.sum(
(alpha, [UP, DOWN]), Rational(1, 2) * c_dag[a, alpha] * c_[b, alpha]
)
comm = (dr.ham | rot).simplify()
# Restricted theory has same density for spin up and down.
rho = IndexedBase('rho')
res = comm.eval_vev(lambda op1, op2, _: (
rho[op2.indices[1], op1.indices[1]]
if op1.indices[0] == CR and op2.indices[0] == AN
and op1.indices[2] == op2.indices[2]
else 0
)).simplify()
# The expected result.
t = dr.one_body
u = dr.two_body
f = IndexedBase('f')
expected = dr.einst(rho[b, c] * f[c, a] - f[b, c] * rho[c, a])
expected = expected.subst(f[a, b], dr.einst(
t[a, b] +
2 * u[a, c, b, d] * rho[d, c] - u[c, a, b, d] * rho[d, c]
))
expected = expected.simplify()
assert res == expected
| Add test for restricted HF theory | Add test for restricted HF theory
| Python | mit | tschijnmo/drudge,tschijnmo/drudge,tschijnmo/drudge | """Tests for the general model with explicit one-half spin."""
import pytest
from drudge import UP, DOWN, SpinOneHalfGenDrudge
@pytest.fixture(scope='module')
def dr(spark_ctx):
"""The fixture with a general spin one-half drudge."""
return SpinOneHalfGenDrudge(spark_ctx)
def test_spin_one_half_general_drudge_has_properties(dr):
"""Test the basic properties of the drudge."""
assert dr.spin_vals == [UP, DOWN]
assert dr.orig_ham.n_terms == 2 + 4
assert dr.ham.n_terms == 2 + 3
Add test for restricted HF theory | """Tests for the general model with explicit one-half spin."""
import pytest
from sympy import IndexedBase, symbols, Rational
from drudge import CR, AN, UP, DOWN, SpinOneHalfGenDrudge
@pytest.fixture(scope='module')
def dr(spark_ctx):
"""The fixture with a general spin one-half drudge."""
return SpinOneHalfGenDrudge(spark_ctx)
def test_spin_one_half_general_drudge_has_properties(dr):
"""Test the basic properties of the drudge."""
assert dr.spin_vals == [UP, DOWN]
assert dr.orig_ham.n_terms == 2 + 4
assert dr.ham.n_terms == 2 + 3
def test_restricted_hf_theory(dr):
"""Test the derivation of restricted HF theory."""
p = dr.names
c_dag = p.c_dag
c_ = p.c_
a, b, c, d = p.L_dumms[:4]
alpha = symbols('alpha')
# Concrete summation.
rot = dr.sum(
(alpha, [UP, DOWN]), Rational(1, 2) * c_dag[a, alpha] * c_[b, alpha]
)
comm = (dr.ham | rot).simplify()
# Restricted theory has same density for spin up and down.
rho = IndexedBase('rho')
res = comm.eval_vev(lambda op1, op2, _: (
rho[op2.indices[1], op1.indices[1]]
if op1.indices[0] == CR and op2.indices[0] == AN
and op1.indices[2] == op2.indices[2]
else 0
)).simplify()
# The expected result.
t = dr.one_body
u = dr.two_body
f = IndexedBase('f')
expected = dr.einst(rho[b, c] * f[c, a] - f[b, c] * rho[c, a])
expected = expected.subst(f[a, b], dr.einst(
t[a, b] +
2 * u[a, c, b, d] * rho[d, c] - u[c, a, b, d] * rho[d, c]
))
expected = expected.simplify()
assert res == expected
| <commit_before>"""Tests for the general model with explicit one-half spin."""
import pytest
from drudge import UP, DOWN, SpinOneHalfGenDrudge
@pytest.fixture(scope='module')
def dr(spark_ctx):
"""The fixture with a general spin one-half drudge."""
return SpinOneHalfGenDrudge(spark_ctx)
def test_spin_one_half_general_drudge_has_properties(dr):
"""Test the basic properties of the drudge."""
assert dr.spin_vals == [UP, DOWN]
assert dr.orig_ham.n_terms == 2 + 4
assert dr.ham.n_terms == 2 + 3
<commit_msg>Add test for restricted HF theory<commit_after> | """Tests for the general model with explicit one-half spin."""
import pytest
from sympy import IndexedBase, symbols, Rational
from drudge import CR, AN, UP, DOWN, SpinOneHalfGenDrudge
@pytest.fixture(scope='module')
def dr(spark_ctx):
"""The fixture with a general spin one-half drudge."""
return SpinOneHalfGenDrudge(spark_ctx)
def test_spin_one_half_general_drudge_has_properties(dr):
"""Test the basic properties of the drudge."""
assert dr.spin_vals == [UP, DOWN]
assert dr.orig_ham.n_terms == 2 + 4
assert dr.ham.n_terms == 2 + 3
def test_restricted_hf_theory(dr):
"""Test the derivation of restricted HF theory."""
p = dr.names
c_dag = p.c_dag
c_ = p.c_
a, b, c, d = p.L_dumms[:4]
alpha = symbols('alpha')
# Concrete summation.
rot = dr.sum(
(alpha, [UP, DOWN]), Rational(1, 2) * c_dag[a, alpha] * c_[b, alpha]
)
comm = (dr.ham | rot).simplify()
# Restricted theory has same density for spin up and down.
rho = IndexedBase('rho')
res = comm.eval_vev(lambda op1, op2, _: (
rho[op2.indices[1], op1.indices[1]]
if op1.indices[0] == CR and op2.indices[0] == AN
and op1.indices[2] == op2.indices[2]
else 0
)).simplify()
# The expected result.
t = dr.one_body
u = dr.two_body
f = IndexedBase('f')
expected = dr.einst(rho[b, c] * f[c, a] - f[b, c] * rho[c, a])
expected = expected.subst(f[a, b], dr.einst(
t[a, b] +
2 * u[a, c, b, d] * rho[d, c] - u[c, a, b, d] * rho[d, c]
))
expected = expected.simplify()
assert res == expected
| """Tests for the general model with explicit one-half spin."""
import pytest
from drudge import UP, DOWN, SpinOneHalfGenDrudge
@pytest.fixture(scope='module')
def dr(spark_ctx):
"""The fixture with a general spin one-half drudge."""
return SpinOneHalfGenDrudge(spark_ctx)
def test_spin_one_half_general_drudge_has_properties(dr):
"""Test the basic properties of the drudge."""
assert dr.spin_vals == [UP, DOWN]
assert dr.orig_ham.n_terms == 2 + 4
assert dr.ham.n_terms == 2 + 3
Add test for restricted HF theory"""Tests for the general model with explicit one-half spin."""
import pytest
from sympy import IndexedBase, symbols, Rational
from drudge import CR, AN, UP, DOWN, SpinOneHalfGenDrudge
@pytest.fixture(scope='module')
def dr(spark_ctx):
"""The fixture with a general spin one-half drudge."""
return SpinOneHalfGenDrudge(spark_ctx)
def test_spin_one_half_general_drudge_has_properties(dr):
"""Test the basic properties of the drudge."""
assert dr.spin_vals == [UP, DOWN]
assert dr.orig_ham.n_terms == 2 + 4
assert dr.ham.n_terms == 2 + 3
def test_restricted_hf_theory(dr):
"""Test the derivation of restricted HF theory."""
p = dr.names
c_dag = p.c_dag
c_ = p.c_
a, b, c, d = p.L_dumms[:4]
alpha = symbols('alpha')
# Concrete summation.
rot = dr.sum(
(alpha, [UP, DOWN]), Rational(1, 2) * c_dag[a, alpha] * c_[b, alpha]
)
comm = (dr.ham | rot).simplify()
# Restricted theory has same density for spin up and down.
rho = IndexedBase('rho')
res = comm.eval_vev(lambda op1, op2, _: (
rho[op2.indices[1], op1.indices[1]]
if op1.indices[0] == CR and op2.indices[0] == AN
and op1.indices[2] == op2.indices[2]
else 0
)).simplify()
# The expected result.
t = dr.one_body
u = dr.two_body
f = IndexedBase('f')
expected = dr.einst(rho[b, c] * f[c, a] - f[b, c] * rho[c, a])
expected = expected.subst(f[a, b], dr.einst(
t[a, b] +
2 * u[a, c, b, d] * rho[d, c] - u[c, a, b, d] * rho[d, c]
))
expected = expected.simplify()
assert res == expected
| <commit_before>"""Tests for the general model with explicit one-half spin."""
import pytest
from drudge import UP, DOWN, SpinOneHalfGenDrudge
@pytest.fixture(scope='module')
def dr(spark_ctx):
"""The fixture with a general spin one-half drudge."""
return SpinOneHalfGenDrudge(spark_ctx)
def test_spin_one_half_general_drudge_has_properties(dr):
"""Test the basic properties of the drudge."""
assert dr.spin_vals == [UP, DOWN]
assert dr.orig_ham.n_terms == 2 + 4
assert dr.ham.n_terms == 2 + 3
<commit_msg>Add test for restricted HF theory<commit_after>"""Tests for the general model with explicit one-half spin."""
import pytest
from sympy import IndexedBase, symbols, Rational
from drudge import CR, AN, UP, DOWN, SpinOneHalfGenDrudge
@pytest.fixture(scope='module')
def dr(spark_ctx):
"""The fixture with a general spin one-half drudge."""
return SpinOneHalfGenDrudge(spark_ctx)
def test_spin_one_half_general_drudge_has_properties(dr):
"""Test the basic properties of the drudge."""
assert dr.spin_vals == [UP, DOWN]
assert dr.orig_ham.n_terms == 2 + 4
assert dr.ham.n_terms == 2 + 3
def test_restricted_hf_theory(dr):
"""Test the derivation of restricted HF theory."""
p = dr.names
c_dag = p.c_dag
c_ = p.c_
a, b, c, d = p.L_dumms[:4]
alpha = symbols('alpha')
# Concrete summation.
rot = dr.sum(
(alpha, [UP, DOWN]), Rational(1, 2) * c_dag[a, alpha] * c_[b, alpha]
)
comm = (dr.ham | rot).simplify()
# Restricted theory has same density for spin up and down.
rho = IndexedBase('rho')
res = comm.eval_vev(lambda op1, op2, _: (
rho[op2.indices[1], op1.indices[1]]
if op1.indices[0] == CR and op2.indices[0] == AN
and op1.indices[2] == op2.indices[2]
else 0
)).simplify()
# The expected result.
t = dr.one_body
u = dr.two_body
f = IndexedBase('f')
expected = dr.einst(rho[b, c] * f[c, a] - f[b, c] * rho[c, a])
expected = expected.subst(f[a, b], dr.einst(
t[a, b] +
2 * u[a, c, b, d] * rho[d, c] - u[c, a, b, d] * rho[d, c]
))
expected = expected.simplify()
assert res == expected
|
f1e7776b8c01081bf54c2c1be0dc2c15e5566ec9 | tests/testnet/aio/test_block.py | tests/testnet/aio/test_block.py | # -*- coding: utf-8 -*-
import pytest
import logging
from bitshares.aio.block import Block
log = logging.getLogger("grapheneapi")
log.setLevel(logging.DEBUG)
@pytest.mark.asyncio
async def test_aio_block(bitshares):
block = await Block(333, blockchain_instance=bitshares)
assert block["witness"] == "1.6.6"
# Tests __contains__
assert "witness" in block
| # -*- coding: utf-8 -*-
import asyncio
import pytest
import logging
from bitshares.aio.block import Block
log = logging.getLogger("grapheneapi")
log.setLevel(logging.DEBUG)
@pytest.mark.asyncio
async def test_aio_block(bitshares):
# Wait for block
await asyncio.sleep(1)
block = await Block(1, blockchain_instance=bitshares)
assert block["witness"].startswith("1.6.")
# Tests __contains__
assert "witness" in block
| Fix test for async Block | Fix test for async Block
| Python | mit | xeroc/python-bitshares | # -*- coding: utf-8 -*-
import pytest
import logging
from bitshares.aio.block import Block
log = logging.getLogger("grapheneapi")
log.setLevel(logging.DEBUG)
@pytest.mark.asyncio
async def test_aio_block(bitshares):
block = await Block(333, blockchain_instance=bitshares)
assert block["witness"] == "1.6.6"
# Tests __contains__
assert "witness" in block
Fix test for async Block | # -*- coding: utf-8 -*-
import asyncio
import pytest
import logging
from bitshares.aio.block import Block
log = logging.getLogger("grapheneapi")
log.setLevel(logging.DEBUG)
@pytest.mark.asyncio
async def test_aio_block(bitshares):
# Wait for block
await asyncio.sleep(1)
block = await Block(1, blockchain_instance=bitshares)
assert block["witness"].startswith("1.6.")
# Tests __contains__
assert "witness" in block
| <commit_before># -*- coding: utf-8 -*-
import pytest
import logging
from bitshares.aio.block import Block
log = logging.getLogger("grapheneapi")
log.setLevel(logging.DEBUG)
@pytest.mark.asyncio
async def test_aio_block(bitshares):
block = await Block(333, blockchain_instance=bitshares)
assert block["witness"] == "1.6.6"
# Tests __contains__
assert "witness" in block
<commit_msg>Fix test for async Block<commit_after> | # -*- coding: utf-8 -*-
import asyncio
import pytest
import logging
from bitshares.aio.block import Block
log = logging.getLogger("grapheneapi")
log.setLevel(logging.DEBUG)
@pytest.mark.asyncio
async def test_aio_block(bitshares):
# Wait for block
await asyncio.sleep(1)
block = await Block(1, blockchain_instance=bitshares)
assert block["witness"].startswith("1.6.")
# Tests __contains__
assert "witness" in block
| # -*- coding: utf-8 -*-
import pytest
import logging
from bitshares.aio.block import Block
log = logging.getLogger("grapheneapi")
log.setLevel(logging.DEBUG)
@pytest.mark.asyncio
async def test_aio_block(bitshares):
block = await Block(333, blockchain_instance=bitshares)
assert block["witness"] == "1.6.6"
# Tests __contains__
assert "witness" in block
Fix test for async Block# -*- coding: utf-8 -*-
import asyncio
import pytest
import logging
from bitshares.aio.block import Block
log = logging.getLogger("grapheneapi")
log.setLevel(logging.DEBUG)
@pytest.mark.asyncio
async def test_aio_block(bitshares):
# Wait for block
await asyncio.sleep(1)
block = await Block(1, blockchain_instance=bitshares)
assert block["witness"].startswith("1.6.")
# Tests __contains__
assert "witness" in block
| <commit_before># -*- coding: utf-8 -*-
import pytest
import logging
from bitshares.aio.block import Block
log = logging.getLogger("grapheneapi")
log.setLevel(logging.DEBUG)
@pytest.mark.asyncio
async def test_aio_block(bitshares):
block = await Block(333, blockchain_instance=bitshares)
assert block["witness"] == "1.6.6"
# Tests __contains__
assert "witness" in block
<commit_msg>Fix test for async Block<commit_after># -*- coding: utf-8 -*-
import asyncio
import pytest
import logging
from bitshares.aio.block import Block
log = logging.getLogger("grapheneapi")
log.setLevel(logging.DEBUG)
@pytest.mark.asyncio
async def test_aio_block(bitshares):
# Wait for block
await asyncio.sleep(1)
block = await Block(1, blockchain_instance=bitshares)
assert block["witness"].startswith("1.6.")
# Tests __contains__
assert "witness" in block
|
7f0097d240c4a231029222fdd2bf507ca7d5b2ed | tests/v6/exemplar_generators.py | tests/v6/exemplar_generators.py | from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
]
EXEMPLAR_DERIVED_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS | from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
HashDigest(length=8),
FakerGenerator(method="name"),
]
EXEMPLAR_DERIVED_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS | Add exemplar generators for HashDigest, FakerGenerator | Add exemplar generators for HashDigest, FakerGenerator
| Python | mit | maxalbert/tohu | from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
]
EXEMPLAR_DERIVED_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORSAdd exemplar generators for HashDigest, FakerGenerator | from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
HashDigest(length=8),
FakerGenerator(method="name"),
]
EXEMPLAR_DERIVED_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS | <commit_before>from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
]
EXEMPLAR_DERIVED_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS<commit_msg>Add exemplar generators for HashDigest, FakerGenerator<commit_after> | from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
HashDigest(length=8),
FakerGenerator(method="name"),
]
EXEMPLAR_DERIVED_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS | from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
]
EXEMPLAR_DERIVED_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORSAdd exemplar generators for HashDigest, FakerGeneratorfrom .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
HashDigest(length=8),
FakerGenerator(method="name"),
]
EXEMPLAR_DERIVED_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS | <commit_before>from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
]
EXEMPLAR_DERIVED_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS<commit_msg>Add exemplar generators for HashDigest, FakerGenerator<commit_after>from .context import tohu
from tohu.v5.primitive_generators import *
EXEMPLAR_PRIMITIVE_GENERATORS = [
Constant("quux"),
Integer(100, 200),
HashDigest(length=8),
FakerGenerator(method="name"),
]
EXEMPLAR_DERIVED_GENERATORS = []
EXEMPLAR_GENERATORS = EXEMPLAR_PRIMITIVE_GENERATORS + EXEMPLAR_DERIVED_GENERATORS |
88c4e5630c7689b44100e899c666e7d78b54b975 | adhocracy4/emails/mixins.py | adhocracy4/emails/mixins.py | from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
class PlatformEmailMixin():
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = finders.find('images/logo.png')
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
| from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
class PlatformEmailMixin():
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = finders.find('images/email_logo.png')
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
| Allow to supply a different logo version for email | Allow to supply a different logo version for email
- use symlink if the same logo should be used
| Python | agpl-3.0 | liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4,liqd/adhocracy4 | from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
class PlatformEmailMixin():
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = finders.find('images/logo.png')
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
Allow to supply a different logo version for email
- use symlink if the same logo should be used | from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
class PlatformEmailMixin():
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = finders.find('images/email_logo.png')
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
| <commit_before>from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
class PlatformEmailMixin():
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = finders.find('images/logo.png')
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
<commit_msg>Allow to supply a different logo version for email
- use symlink if the same logo should be used<commit_after> | from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
class PlatformEmailMixin():
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = finders.find('images/email_logo.png')
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
| from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
class PlatformEmailMixin():
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = finders.find('images/logo.png')
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
Allow to supply a different logo version for email
- use symlink if the same logo should be usedfrom email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
class PlatformEmailMixin():
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = finders.find('images/email_logo.png')
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
| <commit_before>from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
class PlatformEmailMixin():
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = finders.find('images/logo.png')
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
<commit_msg>Allow to supply a different logo version for email
- use symlink if the same logo should be used<commit_after>from email.mime.image import MIMEImage
from django.contrib.staticfiles import finders
class PlatformEmailMixin():
"""
Attaches the static file images/logo.png so it can be used in an html
email.
"""
def get_attachments(self):
attachments = super().get_attachments()
filename = finders.find('images/email_logo.png')
if filename:
f = open(filename, 'rb')
logo = MIMEImage(f.read())
logo.add_header('Content-ID', '<{}>'.format('logo'))
return attachments + [logo]
return attachments
|
fd5f3875d0d7e0fdb7b7ef33a94cf50d1d2b5fa4 | tests/write_to_stringio_test.py | tests/write_to_stringio_test.py | #! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
import pycurl
import unittest
from . import appmanager
from . import util
setup_module, teardown_module = appmanager.setup(('app', 8380))
class WriteToStringioTest(unittest.TestCase):
def setUp(self):
self.curl = pycurl.Curl()
def tearDown(self):
self.curl.close()
def test_write_to_bytesio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
sio = util.BytesIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
self.curl.perform()
self.assertEqual('success', sio.getvalue().decode())
| #! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
import pycurl
import unittest
import sys
from . import appmanager
from . import util
setup_module, teardown_module = appmanager.setup(('app', 8380))
class WriteToStringioTest(unittest.TestCase):
def setUp(self):
self.curl = pycurl.Curl()
def tearDown(self):
self.curl.close()
def test_write_to_bytesio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
sio = util.BytesIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
self.curl.perform()
self.assertEqual('success', sio.getvalue().decode())
@util.only_python3
def test_write_to_stringio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
# stringio in python 3
sio = util.StringIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
try:
self.curl.perform()
self.fail('Should have received a write error')
except pycurl.error:
err, msg = sys.exc_info()[1].args
# we expect pycurl.E_WRITE_ERROR as the response
assert pycurl.E_WRITE_ERROR == err
| Add a test for writing to StringIO which is now different and does not work | Add a test for writing to StringIO which is now different and does not work
| Python | lgpl-2.1 | pycurl/pycurl,pycurl/pycurl,pycurl/pycurl | #! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
import pycurl
import unittest
from . import appmanager
from . import util
setup_module, teardown_module = appmanager.setup(('app', 8380))
class WriteToStringioTest(unittest.TestCase):
def setUp(self):
self.curl = pycurl.Curl()
def tearDown(self):
self.curl.close()
def test_write_to_bytesio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
sio = util.BytesIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
self.curl.perform()
self.assertEqual('success', sio.getvalue().decode())
Add a test for writing to StringIO which is now different and does not work | #! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
import pycurl
import unittest
import sys
from . import appmanager
from . import util
setup_module, teardown_module = appmanager.setup(('app', 8380))
class WriteToStringioTest(unittest.TestCase):
def setUp(self):
self.curl = pycurl.Curl()
def tearDown(self):
self.curl.close()
def test_write_to_bytesio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
sio = util.BytesIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
self.curl.perform()
self.assertEqual('success', sio.getvalue().decode())
@util.only_python3
def test_write_to_stringio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
# stringio in python 3
sio = util.StringIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
try:
self.curl.perform()
self.fail('Should have received a write error')
except pycurl.error:
err, msg = sys.exc_info()[1].args
# we expect pycurl.E_WRITE_ERROR as the response
assert pycurl.E_WRITE_ERROR == err
| <commit_before>#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
import pycurl
import unittest
from . import appmanager
from . import util
setup_module, teardown_module = appmanager.setup(('app', 8380))
class WriteToStringioTest(unittest.TestCase):
def setUp(self):
self.curl = pycurl.Curl()
def tearDown(self):
self.curl.close()
def test_write_to_bytesio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
sio = util.BytesIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
self.curl.perform()
self.assertEqual('success', sio.getvalue().decode())
<commit_msg>Add a test for writing to StringIO which is now different and does not work<commit_after> | #! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
import pycurl
import unittest
import sys
from . import appmanager
from . import util
setup_module, teardown_module = appmanager.setup(('app', 8380))
class WriteToStringioTest(unittest.TestCase):
def setUp(self):
self.curl = pycurl.Curl()
def tearDown(self):
self.curl.close()
def test_write_to_bytesio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
sio = util.BytesIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
self.curl.perform()
self.assertEqual('success', sio.getvalue().decode())
@util.only_python3
def test_write_to_stringio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
# stringio in python 3
sio = util.StringIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
try:
self.curl.perform()
self.fail('Should have received a write error')
except pycurl.error:
err, msg = sys.exc_info()[1].args
# we expect pycurl.E_WRITE_ERROR as the response
assert pycurl.E_WRITE_ERROR == err
| #! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
import pycurl
import unittest
from . import appmanager
from . import util
setup_module, teardown_module = appmanager.setup(('app', 8380))
class WriteToStringioTest(unittest.TestCase):
def setUp(self):
self.curl = pycurl.Curl()
def tearDown(self):
self.curl.close()
def test_write_to_bytesio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
sio = util.BytesIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
self.curl.perform()
self.assertEqual('success', sio.getvalue().decode())
Add a test for writing to StringIO which is now different and does not work#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
import pycurl
import unittest
import sys
from . import appmanager
from . import util
setup_module, teardown_module = appmanager.setup(('app', 8380))
class WriteToStringioTest(unittest.TestCase):
def setUp(self):
self.curl = pycurl.Curl()
def tearDown(self):
self.curl.close()
def test_write_to_bytesio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
sio = util.BytesIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
self.curl.perform()
self.assertEqual('success', sio.getvalue().decode())
@util.only_python3
def test_write_to_stringio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
# stringio in python 3
sio = util.StringIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
try:
self.curl.perform()
self.fail('Should have received a write error')
except pycurl.error:
err, msg = sys.exc_info()[1].args
# we expect pycurl.E_WRITE_ERROR as the response
assert pycurl.E_WRITE_ERROR == err
| <commit_before>#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
import pycurl
import unittest
from . import appmanager
from . import util
setup_module, teardown_module = appmanager.setup(('app', 8380))
class WriteToStringioTest(unittest.TestCase):
def setUp(self):
self.curl = pycurl.Curl()
def tearDown(self):
self.curl.close()
def test_write_to_bytesio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
sio = util.BytesIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
self.curl.perform()
self.assertEqual('success', sio.getvalue().decode())
<commit_msg>Add a test for writing to StringIO which is now different and does not work<commit_after>#! /usr/bin/env python
# -*- coding: iso-8859-1 -*-
# vi:ts=4:et
import pycurl
import unittest
import sys
from . import appmanager
from . import util
setup_module, teardown_module = appmanager.setup(('app', 8380))
class WriteToStringioTest(unittest.TestCase):
def setUp(self):
self.curl = pycurl.Curl()
def tearDown(self):
self.curl.close()
def test_write_to_bytesio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
sio = util.BytesIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
self.curl.perform()
self.assertEqual('success', sio.getvalue().decode())
@util.only_python3
def test_write_to_stringio(self):
self.curl.setopt(pycurl.URL, 'http://localhost:8380/success')
# stringio in python 3
sio = util.StringIO()
self.curl.setopt(pycurl.WRITEFUNCTION, sio.write)
try:
self.curl.perform()
self.fail('Should have received a write error')
except pycurl.error:
err, msg = sys.exc_info()[1].args
# we expect pycurl.E_WRITE_ERROR as the response
assert pycurl.E_WRITE_ERROR == err
|
c3e88993964f1c775829fc2f14aea2e84d33c099 | tests/frontends/mpd/protocol/audio_output_test.py | tests/frontends/mpd/protocol/audio_output_test.py | from __future__ import unicode_literals
from tests.frontends.mpd import protocol
class AudioOutputHandlerTest(protocol.BaseTestCase):
def test_enableoutput(self):
self.sendRequest('enableoutput "0"')
self.assertInResponse('OK')
def test_disableoutput(self):
self.sendRequest('disableoutput "0"')
self.assertInResponse('OK')
def test_outputs(self):
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 1')
self.assertInResponse('OK')
| from __future__ import unicode_literals
from tests.frontends.mpd import protocol
class AudioOutputHandlerTest(protocol.BaseTestCase):
def test_enableoutput(self):
self.core.playback.mute = True
self.sendRequest('enableoutput "0"')
self.assertInResponse('OK')
self.assertEqual(self.core.playback.mute.get(), False)
def test_disableoutput(self):
self.core.playback.mute = False
self.sendRequest('disableoutput "0"')
self.assertInResponse('OK')
self.assertEqual(self.core.playback.mute.get(), True)
def test_outputs_when_unmuted(self):
self.core.playback.mute = False
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 1')
self.assertInResponse('OK')
def test_outputs_when_muted(self):
self.core.playback.mute = True
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 0')
self.assertInResponse('OK')
| Test that output enabling/disabling unmutes/mutes audio | mpd: Test that output enabling/disabling unmutes/mutes audio
| Python | apache-2.0 | jmarsik/mopidy,mopidy/mopidy,priestd09/mopidy,hkariti/mopidy,rawdlite/mopidy,adamcik/mopidy,vrs01/mopidy,ZenithDK/mopidy,ali/mopidy,bacontext/mopidy,kingosticks/mopidy,jcass77/mopidy,bacontext/mopidy,mopidy/mopidy,glogiotatidis/mopidy,bacontext/mopidy,woutervanwijk/mopidy,diandiankan/mopidy,bencevans/mopidy,dbrgn/mopidy,liamw9534/mopidy,bacontext/mopidy,bencevans/mopidy,SuperStarPL/mopidy,dbrgn/mopidy,ZenithDK/mopidy,kingosticks/mopidy,swak/mopidy,abarisain/mopidy,jodal/mopidy,tkem/mopidy,vrs01/mopidy,tkem/mopidy,quartz55/mopidy,ali/mopidy,dbrgn/mopidy,jmarsik/mopidy,hkariti/mopidy,ZenithDK/mopidy,glogiotatidis/mopidy,pacificIT/mopidy,pacificIT/mopidy,liamw9534/mopidy,priestd09/mopidy,dbrgn/mopidy,ZenithDK/mopidy,mokieyue/mopidy,adamcik/mopidy,adamcik/mopidy,diandiankan/mopidy,bencevans/mopidy,mokieyue/mopidy,quartz55/mopidy,rawdlite/mopidy,bencevans/mopidy,swak/mopidy,priestd09/mopidy,SuperStarPL/mopidy,jmarsik/mopidy,swak/mopidy,mopidy/mopidy,quartz55/mopidy,swak/mopidy,ali/mopidy,jodal/mopidy,quartz55/mopidy,rawdlite/mopidy,ali/mopidy,diandiankan/mopidy,jcass77/mopidy,vrs01/mopidy,vrs01/mopidy,rawdlite/mopidy,abarisain/mopidy,diandiankan/mopidy,glogiotatidis/mopidy,jodal/mopidy,pacificIT/mopidy,hkariti/mopidy,woutervanwijk/mopidy,tkem/mopidy,jcass77/mopidy,mokieyue/mopidy,SuperStarPL/mopidy,glogiotatidis/mopidy,tkem/mopidy,jmarsik/mopidy,SuperStarPL/mopidy,kingosticks/mopidy,pacificIT/mopidy,hkariti/mopidy,mokieyue/mopidy | from __future__ import unicode_literals
from tests.frontends.mpd import protocol
class AudioOutputHandlerTest(protocol.BaseTestCase):
def test_enableoutput(self):
self.sendRequest('enableoutput "0"')
self.assertInResponse('OK')
def test_disableoutput(self):
self.sendRequest('disableoutput "0"')
self.assertInResponse('OK')
def test_outputs(self):
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 1')
self.assertInResponse('OK')
mpd: Test that output enabling/disabling unmutes/mutes audio | from __future__ import unicode_literals
from tests.frontends.mpd import protocol
class AudioOutputHandlerTest(protocol.BaseTestCase):
def test_enableoutput(self):
self.core.playback.mute = True
self.sendRequest('enableoutput "0"')
self.assertInResponse('OK')
self.assertEqual(self.core.playback.mute.get(), False)
def test_disableoutput(self):
self.core.playback.mute = False
self.sendRequest('disableoutput "0"')
self.assertInResponse('OK')
self.assertEqual(self.core.playback.mute.get(), True)
def test_outputs_when_unmuted(self):
self.core.playback.mute = False
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 1')
self.assertInResponse('OK')
def test_outputs_when_muted(self):
self.core.playback.mute = True
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 0')
self.assertInResponse('OK')
| <commit_before>from __future__ import unicode_literals
from tests.frontends.mpd import protocol
class AudioOutputHandlerTest(protocol.BaseTestCase):
def test_enableoutput(self):
self.sendRequest('enableoutput "0"')
self.assertInResponse('OK')
def test_disableoutput(self):
self.sendRequest('disableoutput "0"')
self.assertInResponse('OK')
def test_outputs(self):
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 1')
self.assertInResponse('OK')
<commit_msg>mpd: Test that output enabling/disabling unmutes/mutes audio<commit_after> | from __future__ import unicode_literals
from tests.frontends.mpd import protocol
class AudioOutputHandlerTest(protocol.BaseTestCase):
def test_enableoutput(self):
self.core.playback.mute = True
self.sendRequest('enableoutput "0"')
self.assertInResponse('OK')
self.assertEqual(self.core.playback.mute.get(), False)
def test_disableoutput(self):
self.core.playback.mute = False
self.sendRequest('disableoutput "0"')
self.assertInResponse('OK')
self.assertEqual(self.core.playback.mute.get(), True)
def test_outputs_when_unmuted(self):
self.core.playback.mute = False
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 1')
self.assertInResponse('OK')
def test_outputs_when_muted(self):
self.core.playback.mute = True
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 0')
self.assertInResponse('OK')
| from __future__ import unicode_literals
from tests.frontends.mpd import protocol
class AudioOutputHandlerTest(protocol.BaseTestCase):
def test_enableoutput(self):
self.sendRequest('enableoutput "0"')
self.assertInResponse('OK')
def test_disableoutput(self):
self.sendRequest('disableoutput "0"')
self.assertInResponse('OK')
def test_outputs(self):
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 1')
self.assertInResponse('OK')
mpd: Test that output enabling/disabling unmutes/mutes audiofrom __future__ import unicode_literals
from tests.frontends.mpd import protocol
class AudioOutputHandlerTest(protocol.BaseTestCase):
def test_enableoutput(self):
self.core.playback.mute = True
self.sendRequest('enableoutput "0"')
self.assertInResponse('OK')
self.assertEqual(self.core.playback.mute.get(), False)
def test_disableoutput(self):
self.core.playback.mute = False
self.sendRequest('disableoutput "0"')
self.assertInResponse('OK')
self.assertEqual(self.core.playback.mute.get(), True)
def test_outputs_when_unmuted(self):
self.core.playback.mute = False
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 1')
self.assertInResponse('OK')
def test_outputs_when_muted(self):
self.core.playback.mute = True
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 0')
self.assertInResponse('OK')
| <commit_before>from __future__ import unicode_literals
from tests.frontends.mpd import protocol
class AudioOutputHandlerTest(protocol.BaseTestCase):
def test_enableoutput(self):
self.sendRequest('enableoutput "0"')
self.assertInResponse('OK')
def test_disableoutput(self):
self.sendRequest('disableoutput "0"')
self.assertInResponse('OK')
def test_outputs(self):
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 1')
self.assertInResponse('OK')
<commit_msg>mpd: Test that output enabling/disabling unmutes/mutes audio<commit_after>from __future__ import unicode_literals
from tests.frontends.mpd import protocol
class AudioOutputHandlerTest(protocol.BaseTestCase):
def test_enableoutput(self):
self.core.playback.mute = True
self.sendRequest('enableoutput "0"')
self.assertInResponse('OK')
self.assertEqual(self.core.playback.mute.get(), False)
def test_disableoutput(self):
self.core.playback.mute = False
self.sendRequest('disableoutput "0"')
self.assertInResponse('OK')
self.assertEqual(self.core.playback.mute.get(), True)
def test_outputs_when_unmuted(self):
self.core.playback.mute = False
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 1')
self.assertInResponse('OK')
def test_outputs_when_muted(self):
self.core.playback.mute = True
self.sendRequest('outputs')
self.assertInResponse('outputid: 0')
self.assertInResponse('outputname: Default')
self.assertInResponse('outputenabled: 0')
self.assertInResponse('OK')
|
de09310ebfd932cd725954e9c05f5a1ce78311e0 | news/management/commands/sync_newsletters.py | news/management/commands/sync_newsletters.py | from django.conf import settings
from django.core.management import BaseCommand
from synctool.functions import sync_data
DEFAULT_SYNC_DOMAIN = 'basket.mozilla.org'
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-d', '--domain',
default=getattr(settings, 'SYNC_DOMAIN', DEFAULT_SYNC_DOMAIN),
help='Domain of the Basket from which to sync')
parser.add_argument('-k', '--key',
default=settings.SYNC_KEY,
help='Auth key for the sync')
parser.add_argument('-c', '--clean', action='store_true',
help='Delete all Newsletter data before sync')
def handle(self, *args, **options):
sync_data(url='https://{}/news/sync/'.format(options['domain']),
clean=options['clean'],
api_token=options['key'])
| from django.conf import settings
from django.core.management import BaseCommand
from synctool.functions import sync_data
from news.newsletters import clear_newsletter_cache, clear_sms_cache
DEFAULT_SYNC_DOMAIN = 'basket.mozilla.org'
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-d', '--domain',
default=getattr(settings, 'SYNC_DOMAIN', DEFAULT_SYNC_DOMAIN),
help='Domain of the Basket from which to sync')
parser.add_argument('-k', '--key',
default=settings.SYNC_KEY,
help='Auth key for the sync')
parser.add_argument('-c', '--clean', action='store_true',
help='Delete all Newsletter data before sync')
def handle(self, *args, **options):
sync_data(url='https://{}/news/sync/'.format(options['domain']),
clean=options['clean'],
api_token=options['key'])
clear_newsletter_cache()
clear_sms_cache()
| Add cache clear to sync command | Add cache clear to sync command
| Python | mpl-2.0 | glogiotatidis/basket,glogiotatidis/basket,glogiotatidis/basket | from django.conf import settings
from django.core.management import BaseCommand
from synctool.functions import sync_data
DEFAULT_SYNC_DOMAIN = 'basket.mozilla.org'
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-d', '--domain',
default=getattr(settings, 'SYNC_DOMAIN', DEFAULT_SYNC_DOMAIN),
help='Domain of the Basket from which to sync')
parser.add_argument('-k', '--key',
default=settings.SYNC_KEY,
help='Auth key for the sync')
parser.add_argument('-c', '--clean', action='store_true',
help='Delete all Newsletter data before sync')
def handle(self, *args, **options):
sync_data(url='https://{}/news/sync/'.format(options['domain']),
clean=options['clean'],
api_token=options['key'])
Add cache clear to sync command | from django.conf import settings
from django.core.management import BaseCommand
from synctool.functions import sync_data
from news.newsletters import clear_newsletter_cache, clear_sms_cache
DEFAULT_SYNC_DOMAIN = 'basket.mozilla.org'
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-d', '--domain',
default=getattr(settings, 'SYNC_DOMAIN', DEFAULT_SYNC_DOMAIN),
help='Domain of the Basket from which to sync')
parser.add_argument('-k', '--key',
default=settings.SYNC_KEY,
help='Auth key for the sync')
parser.add_argument('-c', '--clean', action='store_true',
help='Delete all Newsletter data before sync')
def handle(self, *args, **options):
sync_data(url='https://{}/news/sync/'.format(options['domain']),
clean=options['clean'],
api_token=options['key'])
clear_newsletter_cache()
clear_sms_cache()
| <commit_before>from django.conf import settings
from django.core.management import BaseCommand
from synctool.functions import sync_data
DEFAULT_SYNC_DOMAIN = 'basket.mozilla.org'
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-d', '--domain',
default=getattr(settings, 'SYNC_DOMAIN', DEFAULT_SYNC_DOMAIN),
help='Domain of the Basket from which to sync')
parser.add_argument('-k', '--key',
default=settings.SYNC_KEY,
help='Auth key for the sync')
parser.add_argument('-c', '--clean', action='store_true',
help='Delete all Newsletter data before sync')
def handle(self, *args, **options):
sync_data(url='https://{}/news/sync/'.format(options['domain']),
clean=options['clean'],
api_token=options['key'])
<commit_msg>Add cache clear to sync command<commit_after> | from django.conf import settings
from django.core.management import BaseCommand
from synctool.functions import sync_data
from news.newsletters import clear_newsletter_cache, clear_sms_cache
DEFAULT_SYNC_DOMAIN = 'basket.mozilla.org'
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-d', '--domain',
default=getattr(settings, 'SYNC_DOMAIN', DEFAULT_SYNC_DOMAIN),
help='Domain of the Basket from which to sync')
parser.add_argument('-k', '--key',
default=settings.SYNC_KEY,
help='Auth key for the sync')
parser.add_argument('-c', '--clean', action='store_true',
help='Delete all Newsletter data before sync')
def handle(self, *args, **options):
sync_data(url='https://{}/news/sync/'.format(options['domain']),
clean=options['clean'],
api_token=options['key'])
clear_newsletter_cache()
clear_sms_cache()
| from django.conf import settings
from django.core.management import BaseCommand
from synctool.functions import sync_data
DEFAULT_SYNC_DOMAIN = 'basket.mozilla.org'
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-d', '--domain',
default=getattr(settings, 'SYNC_DOMAIN', DEFAULT_SYNC_DOMAIN),
help='Domain of the Basket from which to sync')
parser.add_argument('-k', '--key',
default=settings.SYNC_KEY,
help='Auth key for the sync')
parser.add_argument('-c', '--clean', action='store_true',
help='Delete all Newsletter data before sync')
def handle(self, *args, **options):
sync_data(url='https://{}/news/sync/'.format(options['domain']),
clean=options['clean'],
api_token=options['key'])
Add cache clear to sync commandfrom django.conf import settings
from django.core.management import BaseCommand
from synctool.functions import sync_data
from news.newsletters import clear_newsletter_cache, clear_sms_cache
DEFAULT_SYNC_DOMAIN = 'basket.mozilla.org'
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-d', '--domain',
default=getattr(settings, 'SYNC_DOMAIN', DEFAULT_SYNC_DOMAIN),
help='Domain of the Basket from which to sync')
parser.add_argument('-k', '--key',
default=settings.SYNC_KEY,
help='Auth key for the sync')
parser.add_argument('-c', '--clean', action='store_true',
help='Delete all Newsletter data before sync')
def handle(self, *args, **options):
sync_data(url='https://{}/news/sync/'.format(options['domain']),
clean=options['clean'],
api_token=options['key'])
clear_newsletter_cache()
clear_sms_cache()
| <commit_before>from django.conf import settings
from django.core.management import BaseCommand
from synctool.functions import sync_data
DEFAULT_SYNC_DOMAIN = 'basket.mozilla.org'
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-d', '--domain',
default=getattr(settings, 'SYNC_DOMAIN', DEFAULT_SYNC_DOMAIN),
help='Domain of the Basket from which to sync')
parser.add_argument('-k', '--key',
default=settings.SYNC_KEY,
help='Auth key for the sync')
parser.add_argument('-c', '--clean', action='store_true',
help='Delete all Newsletter data before sync')
def handle(self, *args, **options):
sync_data(url='https://{}/news/sync/'.format(options['domain']),
clean=options['clean'],
api_token=options['key'])
<commit_msg>Add cache clear to sync command<commit_after>from django.conf import settings
from django.core.management import BaseCommand
from synctool.functions import sync_data
from news.newsletters import clear_newsletter_cache, clear_sms_cache
DEFAULT_SYNC_DOMAIN = 'basket.mozilla.org'
class Command(BaseCommand):
def add_arguments(self, parser):
parser.add_argument('-d', '--domain',
default=getattr(settings, 'SYNC_DOMAIN', DEFAULT_SYNC_DOMAIN),
help='Domain of the Basket from which to sync')
parser.add_argument('-k', '--key',
default=settings.SYNC_KEY,
help='Auth key for the sync')
parser.add_argument('-c', '--clean', action='store_true',
help='Delete all Newsletter data before sync')
def handle(self, *args, **options):
sync_data(url='https://{}/news/sync/'.format(options['domain']),
clean=options['clean'],
api_token=options['key'])
clear_newsletter_cache()
clear_sms_cache()
|
7226e9ae349eadba10d8f23f81df0b4d70adb6a2 | detectem/plugins/helpers.py | detectem/plugins/helpers.py | def meta_generator(name):
return '//meta[@name="generator" and contains(@content, "{}")]' \
'/@content'.format(name)
| def meta_generator(name):
return '//meta[re:test(@name,"generator","i") and contains(@content, "{}")]' \
'/@content'.format(name)
| Update meta_generator to match case insensitive | Update meta_generator to match case insensitive
| Python | mit | spectresearch/detectem | def meta_generator(name):
return '//meta[@name="generator" and contains(@content, "{}")]' \
'/@content'.format(name)
Update meta_generator to match case insensitive | def meta_generator(name):
return '//meta[re:test(@name,"generator","i") and contains(@content, "{}")]' \
'/@content'.format(name)
| <commit_before>def meta_generator(name):
return '//meta[@name="generator" and contains(@content, "{}")]' \
'/@content'.format(name)
<commit_msg>Update meta_generator to match case insensitive<commit_after> | def meta_generator(name):
return '//meta[re:test(@name,"generator","i") and contains(@content, "{}")]' \
'/@content'.format(name)
| def meta_generator(name):
return '//meta[@name="generator" and contains(@content, "{}")]' \
'/@content'.format(name)
Update meta_generator to match case insensitivedef meta_generator(name):
return '//meta[re:test(@name,"generator","i") and contains(@content, "{}")]' \
'/@content'.format(name)
| <commit_before>def meta_generator(name):
return '//meta[@name="generator" and contains(@content, "{}")]' \
'/@content'.format(name)
<commit_msg>Update meta_generator to match case insensitive<commit_after>def meta_generator(name):
return '//meta[re:test(@name,"generator","i") and contains(@content, "{}")]' \
'/@content'.format(name)
|
dbf8d75c0e4105570676af0bde50d2a4c43e6dd3 | ain7/organizations/autocomplete_light_registry.py | ain7/organizations/autocomplete_light_registry.py | # -*- coding: utf-8
"""
ain7/annuaire/autocomplete_light_registry.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
| # -*- coding: utf-8
"""
ain7/annuaire/autocomplete_light_registry.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
add_another_url_name='organization-add',
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
| Add a link to add a company in user profile | Add a link to add a company in user profile
When a user change its experience, s⋅he can add an organization (not an
office) if it does not exist yet.
Link to autocomplete-light module's doc:
http://django-autocomplete-light.readthedocs.io/en/2.3.1/addanother.html#autocompletes.
Fix #3
| Python | lgpl-2.1 | ain7/www.ain7.org,ain7/www.ain7.org,ain7/www.ain7.org,ain7/www.ain7.org | # -*- coding: utf-8
"""
ain7/annuaire/autocomplete_light_registry.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
Add a link to add a company in user profile
When a user change its experience, s⋅he can add an organization (not an
office) if it does not exist yet.
Link to autocomplete-light module's doc:
http://django-autocomplete-light.readthedocs.io/en/2.3.1/addanother.html#autocompletes.
Fix #3 | # -*- coding: utf-8
"""
ain7/annuaire/autocomplete_light_registry.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
add_another_url_name='organization-add',
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
| <commit_before># -*- coding: utf-8
"""
ain7/annuaire/autocomplete_light_registry.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
<commit_msg>Add a link to add a company in user profile
When a user change its experience, s⋅he can add an organization (not an
office) if it does not exist yet.
Link to autocomplete-light module's doc:
http://django-autocomplete-light.readthedocs.io/en/2.3.1/addanother.html#autocompletes.
Fix #3<commit_after> | # -*- coding: utf-8
"""
ain7/annuaire/autocomplete_light_registry.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
add_another_url_name='organization-add',
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
| # -*- coding: utf-8
"""
ain7/annuaire/autocomplete_light_registry.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
Add a link to add a company in user profile
When a user change its experience, s⋅he can add an organization (not an
office) if it does not exist yet.
Link to autocomplete-light module's doc:
http://django-autocomplete-light.readthedocs.io/en/2.3.1/addanother.html#autocompletes.
Fix #3# -*- coding: utf-8
"""
ain7/annuaire/autocomplete_light_registry.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
add_another_url_name='organization-add',
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
| <commit_before># -*- coding: utf-8
"""
ain7/annuaire/autocomplete_light_registry.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
<commit_msg>Add a link to add a company in user profile
When a user change its experience, s⋅he can add an organization (not an
office) if it does not exist yet.
Link to autocomplete-light module's doc:
http://django-autocomplete-light.readthedocs.io/en/2.3.1/addanother.html#autocompletes.
Fix #3<commit_after># -*- coding: utf-8
"""
ain7/annuaire/autocomplete_light_registry.py
"""
#
# Copyright © 2007-2016 AIn7 Devel Team
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
#
#
from autocomplete_light import shortcuts as autocomplete_light
from ain7.organizations.models import (
Office, Organization, OrganizationActivityField
)
autocomplete_light.register(
Office,
search_fields=['name', 'organization__name'],
add_another_url_name='organization-add',
)
autocomplete_light.register(Organization)
autocomplete_light.register(OrganizationActivityField, search_fields=['label'])
|
b5e239d96b9349b2b21b1d8703a5d3f232f871b4 | spykeutils/monkeypatch/quantities_patch.py | spykeutils/monkeypatch/quantities_patch.py | from __future__ import absolute_import
import quantities as pq
# At least up to quantities 0.10.1 the additional arguments to the min and max
# function did not get passed along.
# A fix already exists:
# <https://github.com/dopplershift/python-quantities/commit/30e8812ac15f551c65311d808c2a004f53913a25>
# Also a pull request exists which has not been merged at the time of writing
# 01/18/2013:
# <https://github.com/python-quantities/python-quantities/pull/41>
def _Quanitity_max(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.max(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.max = _Quanitity_max
def _Quanitity_min(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.min(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.min = _Quanitity_min
| from __future__ import absolute_import
import quantities as pq
# At least up to quantities 0.10.1 the additional arguments to the min and max
# function did not get passed along.
# A fix already exists:
# <https://github.com/dopplershift/python-quantities/commit/30e8812ac15f551c65311d808c2a004f53913a25>
# Also a pull request exists which has not been merged at the time of writing
# 01/18/2013:
# <https://github.com/python-quantities/python-quantities/pull/41>
def _Quanitity_max(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.max(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.max = _Quanitity_max
def _Quanitity_min(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.min(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.min = _Quanitity_min
# Python quantities does not use have additional parameters for astype()
# which became a problem in linspace in numpy 1.11. This is a dirty, dirty
# hack to allow the Quantity astype function to accept any arguments and work
# with numpy >= 1.11. A bug has been filed at
# <https://github.com/python-quantities/python-quantities/issues/105>
_original_astype = pq.Quantity.astype
def _Quantity_astype(self, dtype=None, *args, **kwargs):
return _original_astype(self, dtype)
pq.Quantity.astype = _Quantity_astype
| Fix for Quantities not working with numpy.linspace() in numpy 1.11 | Fix for Quantities not working with numpy.linspace() in numpy 1.11
| Python | bsd-3-clause | rproepp/spykeutils | from __future__ import absolute_import
import quantities as pq
# At least up to quantities 0.10.1 the additional arguments to the min and max
# function did not get passed along.
# A fix already exists:
# <https://github.com/dopplershift/python-quantities/commit/30e8812ac15f551c65311d808c2a004f53913a25>
# Also a pull request exists which has not been merged at the time of writing
# 01/18/2013:
# <https://github.com/python-quantities/python-quantities/pull/41>
def _Quanitity_max(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.max(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.max = _Quanitity_max
def _Quanitity_min(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.min(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.min = _Quanitity_min
Fix for Quantities not working with numpy.linspace() in numpy 1.11 | from __future__ import absolute_import
import quantities as pq
# At least up to quantities 0.10.1 the additional arguments to the min and max
# function did not get passed along.
# A fix already exists:
# <https://github.com/dopplershift/python-quantities/commit/30e8812ac15f551c65311d808c2a004f53913a25>
# Also a pull request exists which has not been merged at the time of writing
# 01/18/2013:
# <https://github.com/python-quantities/python-quantities/pull/41>
def _Quanitity_max(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.max(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.max = _Quanitity_max
def _Quanitity_min(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.min(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.min = _Quanitity_min
# Python quantities does not use have additional parameters for astype()
# which became a problem in linspace in numpy 1.11. This is a dirty, dirty
# hack to allow the Quantity astype function to accept any arguments and work
# with numpy >= 1.11. A bug has been filed at
# <https://github.com/python-quantities/python-quantities/issues/105>
_original_astype = pq.Quantity.astype
def _Quantity_astype(self, dtype=None, *args, **kwargs):
return _original_astype(self, dtype)
pq.Quantity.astype = _Quantity_astype
| <commit_before>from __future__ import absolute_import
import quantities as pq
# At least up to quantities 0.10.1 the additional arguments to the min and max
# function did not get passed along.
# A fix already exists:
# <https://github.com/dopplershift/python-quantities/commit/30e8812ac15f551c65311d808c2a004f53913a25>
# Also a pull request exists which has not been merged at the time of writing
# 01/18/2013:
# <https://github.com/python-quantities/python-quantities/pull/41>
def _Quanitity_max(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.max(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.max = _Quanitity_max
def _Quanitity_min(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.min(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.min = _Quanitity_min
<commit_msg>Fix for Quantities not working with numpy.linspace() in numpy 1.11<commit_after> | from __future__ import absolute_import
import quantities as pq
# At least up to quantities 0.10.1 the additional arguments to the min and max
# function did not get passed along.
# A fix already exists:
# <https://github.com/dopplershift/python-quantities/commit/30e8812ac15f551c65311d808c2a004f53913a25>
# Also a pull request exists which has not been merged at the time of writing
# 01/18/2013:
# <https://github.com/python-quantities/python-quantities/pull/41>
def _Quanitity_max(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.max(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.max = _Quanitity_max
def _Quanitity_min(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.min(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.min = _Quanitity_min
# Python quantities does not use have additional parameters for astype()
# which became a problem in linspace in numpy 1.11. This is a dirty, dirty
# hack to allow the Quantity astype function to accept any arguments and work
# with numpy >= 1.11. A bug has been filed at
# <https://github.com/python-quantities/python-quantities/issues/105>
_original_astype = pq.Quantity.astype
def _Quantity_astype(self, dtype=None, *args, **kwargs):
return _original_astype(self, dtype)
pq.Quantity.astype = _Quantity_astype
| from __future__ import absolute_import
import quantities as pq
# At least up to quantities 0.10.1 the additional arguments to the min and max
# function did not get passed along.
# A fix already exists:
# <https://github.com/dopplershift/python-quantities/commit/30e8812ac15f551c65311d808c2a004f53913a25>
# Also a pull request exists which has not been merged at the time of writing
# 01/18/2013:
# <https://github.com/python-quantities/python-quantities/pull/41>
def _Quanitity_max(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.max(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.max = _Quanitity_max
def _Quanitity_min(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.min(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.min = _Quanitity_min
Fix for Quantities not working with numpy.linspace() in numpy 1.11from __future__ import absolute_import
import quantities as pq
# At least up to quantities 0.10.1 the additional arguments to the min and max
# function did not get passed along.
# A fix already exists:
# <https://github.com/dopplershift/python-quantities/commit/30e8812ac15f551c65311d808c2a004f53913a25>
# Also a pull request exists which has not been merged at the time of writing
# 01/18/2013:
# <https://github.com/python-quantities/python-quantities/pull/41>
def _Quanitity_max(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.max(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.max = _Quanitity_max
def _Quanitity_min(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.min(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.min = _Quanitity_min
# Python quantities does not use have additional parameters for astype()
# which became a problem in linspace in numpy 1.11. This is a dirty, dirty
# hack to allow the Quantity astype function to accept any arguments and work
# with numpy >= 1.11. A bug has been filed at
# <https://github.com/python-quantities/python-quantities/issues/105>
_original_astype = pq.Quantity.astype
def _Quantity_astype(self, dtype=None, *args, **kwargs):
return _original_astype(self, dtype)
pq.Quantity.astype = _Quantity_astype
| <commit_before>from __future__ import absolute_import
import quantities as pq
# At least up to quantities 0.10.1 the additional arguments to the min and max
# function did not get passed along.
# A fix already exists:
# <https://github.com/dopplershift/python-quantities/commit/30e8812ac15f551c65311d808c2a004f53913a25>
# Also a pull request exists which has not been merged at the time of writing
# 01/18/2013:
# <https://github.com/python-quantities/python-quantities/pull/41>
def _Quanitity_max(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.max(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.max = _Quanitity_max
def _Quanitity_min(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.min(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.min = _Quanitity_min
<commit_msg>Fix for Quantities not working with numpy.linspace() in numpy 1.11<commit_after>from __future__ import absolute_import
import quantities as pq
# At least up to quantities 0.10.1 the additional arguments to the min and max
# function did not get passed along.
# A fix already exists:
# <https://github.com/dopplershift/python-quantities/commit/30e8812ac15f551c65311d808c2a004f53913a25>
# Also a pull request exists which has not been merged at the time of writing
# 01/18/2013:
# <https://github.com/python-quantities/python-quantities/pull/41>
def _Quanitity_max(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.max(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.max = _Quanitity_max
def _Quanitity_min(self, axis=None, out=None):
return pq.Quantity(
self.magnitude.min(axis, out),
self.dimensionality,
copy=False
)
pq.Quantity.min = _Quanitity_min
# Python quantities does not use have additional parameters for astype()
# which became a problem in linspace in numpy 1.11. This is a dirty, dirty
# hack to allow the Quantity astype function to accept any arguments and work
# with numpy >= 1.11. A bug has been filed at
# <https://github.com/python-quantities/python-quantities/issues/105>
_original_astype = pq.Quantity.astype
def _Quantity_astype(self, dtype=None, *args, **kwargs):
return _original_astype(self, dtype)
pq.Quantity.astype = _Quantity_astype
|
5228bbadc94159c084f4da77fcebdee3e0733b06 | astropy/utils/setup_package.py | astropy/utils/setup_package.py | from distutils.core import Extension
from os.path import dirname, join, relpath, exists
ASTROPY_UTILS_ROOT = dirname(__file__)
def get_extensions():
return [
Extension('astropy.utils._compiler',
[relpath(join(ASTROPY_UTILS_ROOT, 'src', 'compiler.c'))])
]
def get_package_data():
# Installs the testing data files
return {
'astropy.utils.tests': [
'data/*.dat',
'data/*.dat.gz',
'data/*.dat.bz2',
'data/*.txt']
}
| from distutils.core import Extension
from os.path import dirname, join, relpath
ASTROPY_UTILS_ROOT = dirname(__file__)
def get_extensions():
return [
Extension('astropy.utils._compiler',
[relpath(join(ASTROPY_UTILS_ROOT, 'src', 'compiler.c'))])
]
def get_package_data():
# Installs the testing data files
return {
'astropy.utils.tests': [
'data/*.dat',
'data/*.dat.gz',
'data/*.dat.bz2',
'data/*.txt']
}
| Remove unused import left over from an earlier version of this patch | Remove unused import left over from an earlier version of this patch
| Python | bsd-3-clause | lpsinger/astropy,saimn/astropy,astropy/astropy,pllim/astropy,mhvk/astropy,lpsinger/astropy,larrybradley/astropy,MSeifert04/astropy,StuartLittlefair/astropy,StuartLittlefair/astropy,pllim/astropy,tbabej/astropy,kelle/astropy,AustereCuriosity/astropy,StuartLittlefair/astropy,stargaser/astropy,aleksandr-bakanov/astropy,astropy/astropy,astropy/astropy,saimn/astropy,dhomeier/astropy,bsipocz/astropy,stargaser/astropy,larrybradley/astropy,funbaker/astropy,saimn/astropy,pllim/astropy,joergdietrich/astropy,larrybradley/astropy,kelle/astropy,lpsinger/astropy,mhvk/astropy,tbabej/astropy,dhomeier/astropy,kelle/astropy,funbaker/astropy,mhvk/astropy,saimn/astropy,aleksandr-bakanov/astropy,DougBurke/astropy,mhvk/astropy,stargaser/astropy,DougBurke/astropy,stargaser/astropy,AustereCuriosity/astropy,AustereCuriosity/astropy,tbabej/astropy,AustereCuriosity/astropy,aleksandr-bakanov/astropy,StuartLittlefair/astropy,mhvk/astropy,DougBurke/astropy,lpsinger/astropy,tbabej/astropy,joergdietrich/astropy,StuartLittlefair/astropy,larrybradley/astropy,bsipocz/astropy,dhomeier/astropy,lpsinger/astropy,funbaker/astropy,kelle/astropy,MSeifert04/astropy,pllim/astropy,bsipocz/astropy,joergdietrich/astropy,MSeifert04/astropy,DougBurke/astropy,bsipocz/astropy,joergdietrich/astropy,AustereCuriosity/astropy,dhomeier/astropy,funbaker/astropy,astropy/astropy,kelle/astropy,dhomeier/astropy,saimn/astropy,tbabej/astropy,MSeifert04/astropy,joergdietrich/astropy,astropy/astropy,larrybradley/astropy,aleksandr-bakanov/astropy,pllim/astropy | from distutils.core import Extension
from os.path import dirname, join, relpath, exists
ASTROPY_UTILS_ROOT = dirname(__file__)
def get_extensions():
return [
Extension('astropy.utils._compiler',
[relpath(join(ASTROPY_UTILS_ROOT, 'src', 'compiler.c'))])
]
def get_package_data():
# Installs the testing data files
return {
'astropy.utils.tests': [
'data/*.dat',
'data/*.dat.gz',
'data/*.dat.bz2',
'data/*.txt']
}
Remove unused import left over from an earlier version of this patch | from distutils.core import Extension
from os.path import dirname, join, relpath
ASTROPY_UTILS_ROOT = dirname(__file__)
def get_extensions():
return [
Extension('astropy.utils._compiler',
[relpath(join(ASTROPY_UTILS_ROOT, 'src', 'compiler.c'))])
]
def get_package_data():
# Installs the testing data files
return {
'astropy.utils.tests': [
'data/*.dat',
'data/*.dat.gz',
'data/*.dat.bz2',
'data/*.txt']
}
| <commit_before>from distutils.core import Extension
from os.path import dirname, join, relpath, exists
ASTROPY_UTILS_ROOT = dirname(__file__)
def get_extensions():
return [
Extension('astropy.utils._compiler',
[relpath(join(ASTROPY_UTILS_ROOT, 'src', 'compiler.c'))])
]
def get_package_data():
# Installs the testing data files
return {
'astropy.utils.tests': [
'data/*.dat',
'data/*.dat.gz',
'data/*.dat.bz2',
'data/*.txt']
}
<commit_msg>Remove unused import left over from an earlier version of this patch<commit_after> | from distutils.core import Extension
from os.path import dirname, join, relpath
ASTROPY_UTILS_ROOT = dirname(__file__)
def get_extensions():
return [
Extension('astropy.utils._compiler',
[relpath(join(ASTROPY_UTILS_ROOT, 'src', 'compiler.c'))])
]
def get_package_data():
# Installs the testing data files
return {
'astropy.utils.tests': [
'data/*.dat',
'data/*.dat.gz',
'data/*.dat.bz2',
'data/*.txt']
}
| from distutils.core import Extension
from os.path import dirname, join, relpath, exists
ASTROPY_UTILS_ROOT = dirname(__file__)
def get_extensions():
return [
Extension('astropy.utils._compiler',
[relpath(join(ASTROPY_UTILS_ROOT, 'src', 'compiler.c'))])
]
def get_package_data():
# Installs the testing data files
return {
'astropy.utils.tests': [
'data/*.dat',
'data/*.dat.gz',
'data/*.dat.bz2',
'data/*.txt']
}
Remove unused import left over from an earlier version of this patchfrom distutils.core import Extension
from os.path import dirname, join, relpath
ASTROPY_UTILS_ROOT = dirname(__file__)
def get_extensions():
return [
Extension('astropy.utils._compiler',
[relpath(join(ASTROPY_UTILS_ROOT, 'src', 'compiler.c'))])
]
def get_package_data():
# Installs the testing data files
return {
'astropy.utils.tests': [
'data/*.dat',
'data/*.dat.gz',
'data/*.dat.bz2',
'data/*.txt']
}
| <commit_before>from distutils.core import Extension
from os.path import dirname, join, relpath, exists
ASTROPY_UTILS_ROOT = dirname(__file__)
def get_extensions():
return [
Extension('astropy.utils._compiler',
[relpath(join(ASTROPY_UTILS_ROOT, 'src', 'compiler.c'))])
]
def get_package_data():
# Installs the testing data files
return {
'astropy.utils.tests': [
'data/*.dat',
'data/*.dat.gz',
'data/*.dat.bz2',
'data/*.txt']
}
<commit_msg>Remove unused import left over from an earlier version of this patch<commit_after>from distutils.core import Extension
from os.path import dirname, join, relpath
ASTROPY_UTILS_ROOT = dirname(__file__)
def get_extensions():
return [
Extension('astropy.utils._compiler',
[relpath(join(ASTROPY_UTILS_ROOT, 'src', 'compiler.c'))])
]
def get_package_data():
# Installs the testing data files
return {
'astropy.utils.tests': [
'data/*.dat',
'data/*.dat.gz',
'data/*.dat.bz2',
'data/*.txt']
}
|
ac7c04f76ad4276c34e000a065b6bc900f941ee5 | girder/utility/__init__.py | girder/utility/__init__.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import json
import re
def camelcase(value):
"""
Convert a module name or string with underscores and periods to camel case.
:param value: the string to convert
:type value: str
:returns: the value converted to camel case.
"""
return ''.join(x.capitalize() if x else '_' for x in
re.split("[._]+", value))
class JsonEncoder(json.JSONEncoder):
"""
This extends the standard json.JSONEncoder to allow for more types to be
sensibly serialized. This is used in Girder's REST layer to serialize
route return values when JSON is requested.
"""
def default(self, obj):
if isinstance(obj, set):
return tuple(obj)
return str(obj)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
import json
import re
def camelcase(value):
"""
Convert a module name or string with underscores and periods to camel case.
:param value: the string to convert
:type value: str
:returns: the value converted to camel case.
"""
return ''.join(x.capitalize() if x else '_' for x in
re.split("[._]+", value))
class JsonEncoder(json.JSONEncoder):
"""
This extends the standard json.JSONEncoder to allow for more types to be
sensibly serialized. This is used in Girder's REST layer to serialize
route return values when JSON is requested.
"""
def default(self, obj):
if isinstance(obj, set):
return tuple(obj)
elif isinstance(obj, datetime.datetime):
return obj.isoformat()
return str(obj)
| Send timestamps as ISO8601 format | Send timestamps as ISO8601 format
| Python | apache-2.0 | msmolens/girder,salamb/girder,jbeezley/girder,Kitware/girder,data-exp-lab/girder,sutartmelson/girder,opadron/girder,Xarthisius/girder,manthey/girder,essamjoubori/girder,Kitware/girder,sutartmelson/girder,Xarthisius/girder,essamjoubori/girder,kotfic/girder,RafaelPalomar/girder,sutartmelson/girder,kotfic/girder,Kitware/girder,girder/girder,manthey/girder,girder/girder,RafaelPalomar/girder,salamb/girder,Xarthisius/girder,essamjoubori/girder,opadron/girder,essamjoubori/girder,data-exp-lab/girder,kotfic/girder,data-exp-lab/girder,data-exp-lab/girder,opadron/girder,msmolens/girder,RafaelPalomar/girder,Xarthisius/girder,RafaelPalomar/girder,adsorensen/girder,Kitware/girder,Xarthisius/girder,msmolens/girder,adsorensen/girder,adsorensen/girder,opadron/girder,sutartmelson/girder,data-exp-lab/girder,manthey/girder,jbeezley/girder,msmolens/girder,jbeezley/girder,RafaelPalomar/girder,salamb/girder,manthey/girder,adsorensen/girder,kotfic/girder,jbeezley/girder,girder/girder,girder/girder,adsorensen/girder,msmolens/girder,essamjoubori/girder,kotfic/girder,salamb/girder,opadron/girder,salamb/girder,sutartmelson/girder | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import json
import re
def camelcase(value):
"""
Convert a module name or string with underscores and periods to camel case.
:param value: the string to convert
:type value: str
:returns: the value converted to camel case.
"""
return ''.join(x.capitalize() if x else '_' for x in
re.split("[._]+", value))
class JsonEncoder(json.JSONEncoder):
"""
This extends the standard json.JSONEncoder to allow for more types to be
sensibly serialized. This is used in Girder's REST layer to serialize
route return values when JSON is requested.
"""
def default(self, obj):
if isinstance(obj, set):
return tuple(obj)
return str(obj)
Send timestamps as ISO8601 format | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
import json
import re
def camelcase(value):
"""
Convert a module name or string with underscores and periods to camel case.
:param value: the string to convert
:type value: str
:returns: the value converted to camel case.
"""
return ''.join(x.capitalize() if x else '_' for x in
re.split("[._]+", value))
class JsonEncoder(json.JSONEncoder):
"""
This extends the standard json.JSONEncoder to allow for more types to be
sensibly serialized. This is used in Girder's REST layer to serialize
route return values when JSON is requested.
"""
def default(self, obj):
if isinstance(obj, set):
return tuple(obj)
elif isinstance(obj, datetime.datetime):
return obj.isoformat()
return str(obj)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import json
import re
def camelcase(value):
"""
Convert a module name or string with underscores and periods to camel case.
:param value: the string to convert
:type value: str
:returns: the value converted to camel case.
"""
return ''.join(x.capitalize() if x else '_' for x in
re.split("[._]+", value))
class JsonEncoder(json.JSONEncoder):
"""
This extends the standard json.JSONEncoder to allow for more types to be
sensibly serialized. This is used in Girder's REST layer to serialize
route return values when JSON is requested.
"""
def default(self, obj):
if isinstance(obj, set):
return tuple(obj)
return str(obj)
<commit_msg>Send timestamps as ISO8601 format<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
import json
import re
def camelcase(value):
"""
Convert a module name or string with underscores and periods to camel case.
:param value: the string to convert
:type value: str
:returns: the value converted to camel case.
"""
return ''.join(x.capitalize() if x else '_' for x in
re.split("[._]+", value))
class JsonEncoder(json.JSONEncoder):
"""
This extends the standard json.JSONEncoder to allow for more types to be
sensibly serialized. This is used in Girder's REST layer to serialize
route return values when JSON is requested.
"""
def default(self, obj):
if isinstance(obj, set):
return tuple(obj)
elif isinstance(obj, datetime.datetime):
return obj.isoformat()
return str(obj)
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import json
import re
def camelcase(value):
"""
Convert a module name or string with underscores and periods to camel case.
:param value: the string to convert
:type value: str
:returns: the value converted to camel case.
"""
return ''.join(x.capitalize() if x else '_' for x in
re.split("[._]+", value))
class JsonEncoder(json.JSONEncoder):
"""
This extends the standard json.JSONEncoder to allow for more types to be
sensibly serialized. This is used in Girder's REST layer to serialize
route return values when JSON is requested.
"""
def default(self, obj):
if isinstance(obj, set):
return tuple(obj)
return str(obj)
Send timestamps as ISO8601 format#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
import json
import re
def camelcase(value):
"""
Convert a module name or string with underscores and periods to camel case.
:param value: the string to convert
:type value: str
:returns: the value converted to camel case.
"""
return ''.join(x.capitalize() if x else '_' for x in
re.split("[._]+", value))
class JsonEncoder(json.JSONEncoder):
"""
This extends the standard json.JSONEncoder to allow for more types to be
sensibly serialized. This is used in Girder's REST layer to serialize
route return values when JSON is requested.
"""
def default(self, obj):
if isinstance(obj, set):
return tuple(obj)
elif isinstance(obj, datetime.datetime):
return obj.isoformat()
return str(obj)
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import json
import re
def camelcase(value):
"""
Convert a module name or string with underscores and periods to camel case.
:param value: the string to convert
:type value: str
:returns: the value converted to camel case.
"""
return ''.join(x.capitalize() if x else '_' for x in
re.split("[._]+", value))
class JsonEncoder(json.JSONEncoder):
"""
This extends the standard json.JSONEncoder to allow for more types to be
sensibly serialized. This is used in Girder's REST layer to serialize
route return values when JSON is requested.
"""
def default(self, obj):
if isinstance(obj, set):
return tuple(obj)
return str(obj)
<commit_msg>Send timestamps as ISO8601 format<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
###############################################################################
# Copyright Kitware Inc.
#
# Licensed under the Apache License, Version 2.0 ( the "License" );
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###############################################################################
import datetime
import json
import re
def camelcase(value):
"""
Convert a module name or string with underscores and periods to camel case.
:param value: the string to convert
:type value: str
:returns: the value converted to camel case.
"""
return ''.join(x.capitalize() if x else '_' for x in
re.split("[._]+", value))
class JsonEncoder(json.JSONEncoder):
"""
This extends the standard json.JSONEncoder to allow for more types to be
sensibly serialized. This is used in Girder's REST layer to serialize
route return values when JSON is requested.
"""
def default(self, obj):
if isinstance(obj, set):
return tuple(obj)
elif isinstance(obj, datetime.datetime):
return obj.isoformat()
return str(obj)
|
d8d8473e9fd61219f75b64b5bf3c703599b7f09b | tests/controller/test_home_controller.py | tests/controller/test_home_controller.py | import unittest
import pyramid.testing
from nflpool.tests.web_settings import settings
class HomeControllerTests(unittest.TestCase):
def setUp(self):
from nflpool import main
app = main({}, **settings, logging="OFF")
# noinspection PyPackageRequirements
from webtest import TestApp
self.app = TestApp(app)
def test_home_page(self):
# noinspection PyPackageRequirements
import webtest.response
response: webtest.response.TestResponse = self.app.get("/", status=200)
self.assertTrue(b"nflpool" in response.body)
| import unittest
import pyramid.testing
from tests.web_settings import settings
class HomeControllerTests(unittest.TestCase):
def setUp(self):
from nflpool import main
app = main({}, **settings, logging="OFF")
# noinspection PyPackageRequirements
from webtest import TestApp
self.app = TestApp(app)
def test_home_page(self):
# noinspection PyPackageRequirements
import webtest.response
response: webtest.response.TestResponse = self.app.get("/", status=200)
self.assertTrue(b"nflpool" in response.body)
| Update home controller test to use correct directory structure | Update home controller test to use correct directory structure
| Python | mit | prcutler/nflpool,prcutler/nflpool | import unittest
import pyramid.testing
from nflpool.tests.web_settings import settings
class HomeControllerTests(unittest.TestCase):
def setUp(self):
from nflpool import main
app = main({}, **settings, logging="OFF")
# noinspection PyPackageRequirements
from webtest import TestApp
self.app = TestApp(app)
def test_home_page(self):
# noinspection PyPackageRequirements
import webtest.response
response: webtest.response.TestResponse = self.app.get("/", status=200)
self.assertTrue(b"nflpool" in response.body)
Update home controller test to use correct directory structure | import unittest
import pyramid.testing
from tests.web_settings import settings
class HomeControllerTests(unittest.TestCase):
def setUp(self):
from nflpool import main
app = main({}, **settings, logging="OFF")
# noinspection PyPackageRequirements
from webtest import TestApp
self.app = TestApp(app)
def test_home_page(self):
# noinspection PyPackageRequirements
import webtest.response
response: webtest.response.TestResponse = self.app.get("/", status=200)
self.assertTrue(b"nflpool" in response.body)
| <commit_before>import unittest
import pyramid.testing
from nflpool.tests.web_settings import settings
class HomeControllerTests(unittest.TestCase):
def setUp(self):
from nflpool import main
app = main({}, **settings, logging="OFF")
# noinspection PyPackageRequirements
from webtest import TestApp
self.app = TestApp(app)
def test_home_page(self):
# noinspection PyPackageRequirements
import webtest.response
response: webtest.response.TestResponse = self.app.get("/", status=200)
self.assertTrue(b"nflpool" in response.body)
<commit_msg>Update home controller test to use correct directory structure<commit_after> | import unittest
import pyramid.testing
from tests.web_settings import settings
class HomeControllerTests(unittest.TestCase):
def setUp(self):
from nflpool import main
app = main({}, **settings, logging="OFF")
# noinspection PyPackageRequirements
from webtest import TestApp
self.app = TestApp(app)
def test_home_page(self):
# noinspection PyPackageRequirements
import webtest.response
response: webtest.response.TestResponse = self.app.get("/", status=200)
self.assertTrue(b"nflpool" in response.body)
| import unittest
import pyramid.testing
from nflpool.tests.web_settings import settings
class HomeControllerTests(unittest.TestCase):
def setUp(self):
from nflpool import main
app = main({}, **settings, logging="OFF")
# noinspection PyPackageRequirements
from webtest import TestApp
self.app = TestApp(app)
def test_home_page(self):
# noinspection PyPackageRequirements
import webtest.response
response: webtest.response.TestResponse = self.app.get("/", status=200)
self.assertTrue(b"nflpool" in response.body)
Update home controller test to use correct directory structureimport unittest
import pyramid.testing
from tests.web_settings import settings
class HomeControllerTests(unittest.TestCase):
def setUp(self):
from nflpool import main
app = main({}, **settings, logging="OFF")
# noinspection PyPackageRequirements
from webtest import TestApp
self.app = TestApp(app)
def test_home_page(self):
# noinspection PyPackageRequirements
import webtest.response
response: webtest.response.TestResponse = self.app.get("/", status=200)
self.assertTrue(b"nflpool" in response.body)
| <commit_before>import unittest
import pyramid.testing
from nflpool.tests.web_settings import settings
class HomeControllerTests(unittest.TestCase):
def setUp(self):
from nflpool import main
app = main({}, **settings, logging="OFF")
# noinspection PyPackageRequirements
from webtest import TestApp
self.app = TestApp(app)
def test_home_page(self):
# noinspection PyPackageRequirements
import webtest.response
response: webtest.response.TestResponse = self.app.get("/", status=200)
self.assertTrue(b"nflpool" in response.body)
<commit_msg>Update home controller test to use correct directory structure<commit_after>import unittest
import pyramid.testing
from tests.web_settings import settings
class HomeControllerTests(unittest.TestCase):
def setUp(self):
from nflpool import main
app = main({}, **settings, logging="OFF")
# noinspection PyPackageRequirements
from webtest import TestApp
self.app = TestApp(app)
def test_home_page(self):
# noinspection PyPackageRequirements
import webtest.response
response: webtest.response.TestResponse = self.app.get("/", status=200)
self.assertTrue(b"nflpool" in response.body)
|
14f0b7e25a795a853b79f781632edb2b82c3b904 | docs/conf.py | docs/conf.py | import os
import ramrod
project = u'stix-ramrod'
copyright = u'2015, The MITRE Corporation'
version = ramrod.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinxcontrib.napoleon',
]
intersphinx_mapping = {'http://docs.python.org/': None}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = [
'_build',
'_includes',
'updates/cybox/cybox*.rst',
'updates/stix/stix*.rst'
]
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'stix-ramrod.tex', u'stix-ramrod Documentation',
u'The MITRE Corporation', 'manual'),
]
| import os
import ramrod
project = u'stix-ramrod'
copyright = u'2015, The MITRE Corporation'
version = ramrod.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinxcontrib.napoleon',
]
intersphinx_mapping = {'http://docs.python.org/': None}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'_includes',
'updates/cybox/cybox*.rst',
'updates/stix/stix*.rst'
]
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'stix-ramrod.tex', u'stix-ramrod Documentation',
u'The MITRE Corporation', 'manual'),
]
| Fix zero-length field error when building docs in Python 2.6 | Fix zero-length field error when building docs in Python 2.6
| Python | bsd-3-clause | STIXProject/stix-ramrod | import os
import ramrod
project = u'stix-ramrod'
copyright = u'2015, The MITRE Corporation'
version = ramrod.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinxcontrib.napoleon',
]
intersphinx_mapping = {'http://docs.python.org/': None}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = [
'_build',
'_includes',
'updates/cybox/cybox*.rst',
'updates/stix/stix*.rst'
]
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'stix-ramrod.tex', u'stix-ramrod Documentation',
u'The MITRE Corporation', 'manual'),
]
Fix zero-length field error when building docs in Python 2.6 | import os
import ramrod
project = u'stix-ramrod'
copyright = u'2015, The MITRE Corporation'
version = ramrod.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinxcontrib.napoleon',
]
intersphinx_mapping = {'http://docs.python.org/': None}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'_includes',
'updates/cybox/cybox*.rst',
'updates/stix/stix*.rst'
]
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'stix-ramrod.tex', u'stix-ramrod Documentation',
u'The MITRE Corporation', 'manual'),
]
| <commit_before>import os
import ramrod
project = u'stix-ramrod'
copyright = u'2015, The MITRE Corporation'
version = ramrod.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinxcontrib.napoleon',
]
intersphinx_mapping = {'http://docs.python.org/': None}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = [
'_build',
'_includes',
'updates/cybox/cybox*.rst',
'updates/stix/stix*.rst'
]
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'stix-ramrod.tex', u'stix-ramrod Documentation',
u'The MITRE Corporation', 'manual'),
]
<commit_msg>Fix zero-length field error when building docs in Python 2.6<commit_after> | import os
import ramrod
project = u'stix-ramrod'
copyright = u'2015, The MITRE Corporation'
version = ramrod.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinxcontrib.napoleon',
]
intersphinx_mapping = {'http://docs.python.org/': None}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'_includes',
'updates/cybox/cybox*.rst',
'updates/stix/stix*.rst'
]
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'stix-ramrod.tex', u'stix-ramrod Documentation',
u'The MITRE Corporation', 'manual'),
]
| import os
import ramrod
project = u'stix-ramrod'
copyright = u'2015, The MITRE Corporation'
version = ramrod.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinxcontrib.napoleon',
]
intersphinx_mapping = {'http://docs.python.org/': None}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = [
'_build',
'_includes',
'updates/cybox/cybox*.rst',
'updates/stix/stix*.rst'
]
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'stix-ramrod.tex', u'stix-ramrod Documentation',
u'The MITRE Corporation', 'manual'),
]
Fix zero-length field error when building docs in Python 2.6import os
import ramrod
project = u'stix-ramrod'
copyright = u'2015, The MITRE Corporation'
version = ramrod.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinxcontrib.napoleon',
]
intersphinx_mapping = {'http://docs.python.org/': None}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'_includes',
'updates/cybox/cybox*.rst',
'updates/stix/stix*.rst'
]
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'stix-ramrod.tex', u'stix-ramrod Documentation',
u'The MITRE Corporation', 'manual'),
]
| <commit_before>import os
import ramrod
project = u'stix-ramrod'
copyright = u'2015, The MITRE Corporation'
version = ramrod.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinxcontrib.napoleon',
]
intersphinx_mapping = {'http://docs.python.org/': None}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {}
""".format(release)
exclude_patterns = [
'_build',
'_includes',
'updates/cybox/cybox*.rst',
'updates/stix/stix*.rst'
]
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'stix-ramrod.tex', u'stix-ramrod Documentation',
u'The MITRE Corporation', 'manual'),
]
<commit_msg>Fix zero-length field error when building docs in Python 2.6<commit_after>import os
import ramrod
project = u'stix-ramrod'
copyright = u'2015, The MITRE Corporation'
version = ramrod.__version__
release = version
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinxcontrib.napoleon',
]
intersphinx_mapping = {'http://docs.python.org/': None}
templates_path = ['_templates']
source_suffix = '.rst'
master_doc = 'index'
rst_prolog = """
**Version**: {0}
""".format(release)
exclude_patterns = [
'_build',
'_includes',
'updates/cybox/cybox*.rst',
'updates/stix/stix*.rst'
]
pygments_style = 'sphinx'
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
else:
html_theme = 'default'
latex_elements = {}
latex_documents = [
('index', 'stix-ramrod.tex', u'stix-ramrod Documentation',
u'The MITRE Corporation', 'manual'),
]
|
712741d4c6189e9cb000935676269e84a1be455b | src/puzzle/puzzlepedia/debug_data_widget.py | src/puzzle/puzzlepedia/debug_data_widget.py | import io
from typing import Any, ContextManager
import numpy as np
from PIL import Image
from ipywidgets import widgets
from puzzle.puzzlepedia import _common
from puzzle.steps import step
def DebugDataWidget(s: step.Step, capture: ContextManager) -> widgets.Widget:
with capture:
try:
data = s.get_debug_data()
except NotImplementedError:
data = '[no data]'
return _data_widget(data)
def _data_widget(data: Any) -> widgets.Widget:
if isinstance(data, np.ndarray) and data.dtype == np.uint8:
height, width = data.shape[:2]
f = io.BytesIO()
Image.fromarray(data).save(f, 'png')
return widgets.Image(
value=f.getvalue(),
width=width,
height=height,
)
# Fallback assumes data is text.
return widgets.HTML(_common.preformat_html(str(data)))
| import io
from typing import Any, ContextManager, Dict
import numpy as np
from PIL import Image
from ipywidgets import widgets
from puzzle.puzzlepedia import _bind, _common
from puzzle.steps import step
def DebugDataWidget(s: step.Step, capture: ContextManager) -> widgets.Widget:
with capture:
try:
data = s.get_debug_data()
except NotImplementedError:
data = '[no data]'
return _data_widget(data)
def _data_widget(data: Any) -> widgets.Widget:
if (isinstance(data, list) and len(data) > 0 and
isinstance(data[0], tuple) and len(data[0]) == 2 and
isinstance(data[0][0], str)
):
options = [label for label, _ in data]
value = options[-1]
data_widgets = {
label: _data_widget(value) for label, value in data
}
slider = widgets.SelectionSlider(
options=options,
value=value,
continuous_update=True,
)
children = [slider, data_widgets[value]]
vbox = widgets.VBox(children)
slider_changed = _bind.widget_observable(slider)
slider_changed.subscribe(_bind.callback_without_event(
_update_debug_data_vbox_children_from_slider, data_widgets, vbox))
return vbox
elif isinstance(data, np.ndarray) and data.dtype == np.uint8:
return _ndarray_image(data)
# Fallback assumes data is text.
return widgets.HTML(_common.preformat_html(str(data)))
def _ndarray_image(data: np.ndarray) -> widgets.Widget:
height, width = data.shape[:2]
f = io.BytesIO()
Image.fromarray(data).save(f, 'png')
return widgets.Image(
value=f.getvalue(),
width=width,
height=height,
)
def _update_debug_data_vbox_children_from_slider(
data_widgets: Dict[str, widgets.Widget],
vbox: widgets.VBox) -> None:
slider, _ = vbox.children
vbox.children = [slider, data_widgets[slider.value]]
| Add the ability to show multiple debug data values. | Add the ability to show multiple debug data values.
Implemented as a slider.
| Python | mit | PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge,PhilHarnish/forge | import io
from typing import Any, ContextManager
import numpy as np
from PIL import Image
from ipywidgets import widgets
from puzzle.puzzlepedia import _common
from puzzle.steps import step
def DebugDataWidget(s: step.Step, capture: ContextManager) -> widgets.Widget:
with capture:
try:
data = s.get_debug_data()
except NotImplementedError:
data = '[no data]'
return _data_widget(data)
def _data_widget(data: Any) -> widgets.Widget:
if isinstance(data, np.ndarray) and data.dtype == np.uint8:
height, width = data.shape[:2]
f = io.BytesIO()
Image.fromarray(data).save(f, 'png')
return widgets.Image(
value=f.getvalue(),
width=width,
height=height,
)
# Fallback assumes data is text.
return widgets.HTML(_common.preformat_html(str(data)))
Add the ability to show multiple debug data values.
Implemented as a slider. | import io
from typing import Any, ContextManager, Dict
import numpy as np
from PIL import Image
from ipywidgets import widgets
from puzzle.puzzlepedia import _bind, _common
from puzzle.steps import step
def DebugDataWidget(s: step.Step, capture: ContextManager) -> widgets.Widget:
with capture:
try:
data = s.get_debug_data()
except NotImplementedError:
data = '[no data]'
return _data_widget(data)
def _data_widget(data: Any) -> widgets.Widget:
if (isinstance(data, list) and len(data) > 0 and
isinstance(data[0], tuple) and len(data[0]) == 2 and
isinstance(data[0][0], str)
):
options = [label for label, _ in data]
value = options[-1]
data_widgets = {
label: _data_widget(value) for label, value in data
}
slider = widgets.SelectionSlider(
options=options,
value=value,
continuous_update=True,
)
children = [slider, data_widgets[value]]
vbox = widgets.VBox(children)
slider_changed = _bind.widget_observable(slider)
slider_changed.subscribe(_bind.callback_without_event(
_update_debug_data_vbox_children_from_slider, data_widgets, vbox))
return vbox
elif isinstance(data, np.ndarray) and data.dtype == np.uint8:
return _ndarray_image(data)
# Fallback assumes data is text.
return widgets.HTML(_common.preformat_html(str(data)))
def _ndarray_image(data: np.ndarray) -> widgets.Widget:
height, width = data.shape[:2]
f = io.BytesIO()
Image.fromarray(data).save(f, 'png')
return widgets.Image(
value=f.getvalue(),
width=width,
height=height,
)
def _update_debug_data_vbox_children_from_slider(
data_widgets: Dict[str, widgets.Widget],
vbox: widgets.VBox) -> None:
slider, _ = vbox.children
vbox.children = [slider, data_widgets[slider.value]]
| <commit_before>import io
from typing import Any, ContextManager
import numpy as np
from PIL import Image
from ipywidgets import widgets
from puzzle.puzzlepedia import _common
from puzzle.steps import step
def DebugDataWidget(s: step.Step, capture: ContextManager) -> widgets.Widget:
with capture:
try:
data = s.get_debug_data()
except NotImplementedError:
data = '[no data]'
return _data_widget(data)
def _data_widget(data: Any) -> widgets.Widget:
if isinstance(data, np.ndarray) and data.dtype == np.uint8:
height, width = data.shape[:2]
f = io.BytesIO()
Image.fromarray(data).save(f, 'png')
return widgets.Image(
value=f.getvalue(),
width=width,
height=height,
)
# Fallback assumes data is text.
return widgets.HTML(_common.preformat_html(str(data)))
<commit_msg>Add the ability to show multiple debug data values.
Implemented as a slider.<commit_after> | import io
from typing import Any, ContextManager, Dict
import numpy as np
from PIL import Image
from ipywidgets import widgets
from puzzle.puzzlepedia import _bind, _common
from puzzle.steps import step
def DebugDataWidget(s: step.Step, capture: ContextManager) -> widgets.Widget:
with capture:
try:
data = s.get_debug_data()
except NotImplementedError:
data = '[no data]'
return _data_widget(data)
def _data_widget(data: Any) -> widgets.Widget:
if (isinstance(data, list) and len(data) > 0 and
isinstance(data[0], tuple) and len(data[0]) == 2 and
isinstance(data[0][0], str)
):
options = [label for label, _ in data]
value = options[-1]
data_widgets = {
label: _data_widget(value) for label, value in data
}
slider = widgets.SelectionSlider(
options=options,
value=value,
continuous_update=True,
)
children = [slider, data_widgets[value]]
vbox = widgets.VBox(children)
slider_changed = _bind.widget_observable(slider)
slider_changed.subscribe(_bind.callback_without_event(
_update_debug_data_vbox_children_from_slider, data_widgets, vbox))
return vbox
elif isinstance(data, np.ndarray) and data.dtype == np.uint8:
return _ndarray_image(data)
# Fallback assumes data is text.
return widgets.HTML(_common.preformat_html(str(data)))
def _ndarray_image(data: np.ndarray) -> widgets.Widget:
height, width = data.shape[:2]
f = io.BytesIO()
Image.fromarray(data).save(f, 'png')
return widgets.Image(
value=f.getvalue(),
width=width,
height=height,
)
def _update_debug_data_vbox_children_from_slider(
data_widgets: Dict[str, widgets.Widget],
vbox: widgets.VBox) -> None:
slider, _ = vbox.children
vbox.children = [slider, data_widgets[slider.value]]
| import io
from typing import Any, ContextManager
import numpy as np
from PIL import Image
from ipywidgets import widgets
from puzzle.puzzlepedia import _common
from puzzle.steps import step
def DebugDataWidget(s: step.Step, capture: ContextManager) -> widgets.Widget:
with capture:
try:
data = s.get_debug_data()
except NotImplementedError:
data = '[no data]'
return _data_widget(data)
def _data_widget(data: Any) -> widgets.Widget:
if isinstance(data, np.ndarray) and data.dtype == np.uint8:
height, width = data.shape[:2]
f = io.BytesIO()
Image.fromarray(data).save(f, 'png')
return widgets.Image(
value=f.getvalue(),
width=width,
height=height,
)
# Fallback assumes data is text.
return widgets.HTML(_common.preformat_html(str(data)))
Add the ability to show multiple debug data values.
Implemented as a slider.import io
from typing import Any, ContextManager, Dict
import numpy as np
from PIL import Image
from ipywidgets import widgets
from puzzle.puzzlepedia import _bind, _common
from puzzle.steps import step
def DebugDataWidget(s: step.Step, capture: ContextManager) -> widgets.Widget:
with capture:
try:
data = s.get_debug_data()
except NotImplementedError:
data = '[no data]'
return _data_widget(data)
def _data_widget(data: Any) -> widgets.Widget:
if (isinstance(data, list) and len(data) > 0 and
isinstance(data[0], tuple) and len(data[0]) == 2 and
isinstance(data[0][0], str)
):
options = [label for label, _ in data]
value = options[-1]
data_widgets = {
label: _data_widget(value) for label, value in data
}
slider = widgets.SelectionSlider(
options=options,
value=value,
continuous_update=True,
)
children = [slider, data_widgets[value]]
vbox = widgets.VBox(children)
slider_changed = _bind.widget_observable(slider)
slider_changed.subscribe(_bind.callback_without_event(
_update_debug_data_vbox_children_from_slider, data_widgets, vbox))
return vbox
elif isinstance(data, np.ndarray) and data.dtype == np.uint8:
return _ndarray_image(data)
# Fallback assumes data is text.
return widgets.HTML(_common.preformat_html(str(data)))
def _ndarray_image(data: np.ndarray) -> widgets.Widget:
height, width = data.shape[:2]
f = io.BytesIO()
Image.fromarray(data).save(f, 'png')
return widgets.Image(
value=f.getvalue(),
width=width,
height=height,
)
def _update_debug_data_vbox_children_from_slider(
data_widgets: Dict[str, widgets.Widget],
vbox: widgets.VBox) -> None:
slider, _ = vbox.children
vbox.children = [slider, data_widgets[slider.value]]
| <commit_before>import io
from typing import Any, ContextManager
import numpy as np
from PIL import Image
from ipywidgets import widgets
from puzzle.puzzlepedia import _common
from puzzle.steps import step
def DebugDataWidget(s: step.Step, capture: ContextManager) -> widgets.Widget:
with capture:
try:
data = s.get_debug_data()
except NotImplementedError:
data = '[no data]'
return _data_widget(data)
def _data_widget(data: Any) -> widgets.Widget:
if isinstance(data, np.ndarray) and data.dtype == np.uint8:
height, width = data.shape[:2]
f = io.BytesIO()
Image.fromarray(data).save(f, 'png')
return widgets.Image(
value=f.getvalue(),
width=width,
height=height,
)
# Fallback assumes data is text.
return widgets.HTML(_common.preformat_html(str(data)))
<commit_msg>Add the ability to show multiple debug data values.
Implemented as a slider.<commit_after>import io
from typing import Any, ContextManager, Dict
import numpy as np
from PIL import Image
from ipywidgets import widgets
from puzzle.puzzlepedia import _bind, _common
from puzzle.steps import step
def DebugDataWidget(s: step.Step, capture: ContextManager) -> widgets.Widget:
with capture:
try:
data = s.get_debug_data()
except NotImplementedError:
data = '[no data]'
return _data_widget(data)
def _data_widget(data: Any) -> widgets.Widget:
if (isinstance(data, list) and len(data) > 0 and
isinstance(data[0], tuple) and len(data[0]) == 2 and
isinstance(data[0][0], str)
):
options = [label for label, _ in data]
value = options[-1]
data_widgets = {
label: _data_widget(value) for label, value in data
}
slider = widgets.SelectionSlider(
options=options,
value=value,
continuous_update=True,
)
children = [slider, data_widgets[value]]
vbox = widgets.VBox(children)
slider_changed = _bind.widget_observable(slider)
slider_changed.subscribe(_bind.callback_without_event(
_update_debug_data_vbox_children_from_slider, data_widgets, vbox))
return vbox
elif isinstance(data, np.ndarray) and data.dtype == np.uint8:
return _ndarray_image(data)
# Fallback assumes data is text.
return widgets.HTML(_common.preformat_html(str(data)))
def _ndarray_image(data: np.ndarray) -> widgets.Widget:
height, width = data.shape[:2]
f = io.BytesIO()
Image.fromarray(data).save(f, 'png')
return widgets.Image(
value=f.getvalue(),
width=width,
height=height,
)
def _update_debug_data_vbox_children_from_slider(
data_widgets: Dict[str, widgets.Widget],
vbox: widgets.VBox) -> None:
slider, _ = vbox.children
vbox.children = [slider, data_widgets[slider.value]]
|
99daef4c39d89ac41c02533e8c6becd67f5c76b5 | docs/conf.py | docs/conf.py | from crate.theme.rtd.conf.python import *
if "sphinx.ext.intersphinx" not in extensions:
extensions += ["sphinx.ext.intersphinx"]
if "intersphinx_mapping" not in globals():
intersphinx_mapping = {}
intersphinx_mapping.update({
'reference': ('https://crate.io/docs/crate/reference/', None),
'sa': ('https://docs.sqlalchemy.org/en/13/', None),
})
rst_prolog = """
.. |nbsp| unicode:: 0xA0
:trim:
"""
| from crate.theme.rtd.conf.python import *
if "sphinx.ext.intersphinx" not in extensions:
extensions += ["sphinx.ext.intersphinx"]
if "intersphinx_mapping" not in globals():
intersphinx_mapping = {}
intersphinx_mapping.update({
'reference': ('https://crate.io/docs/crate/reference/en/latest/', None),
'sa': ('https://docs.sqlalchemy.org/en/13/', None),
})
rst_prolog = """
.. |nbsp| unicode:: 0xA0
:trim:
"""
| Fix documentation checks by adjusting intersphinx mapping | Fix documentation checks by adjusting intersphinx mapping | Python | apache-2.0 | crate/crate-python,crate/crate-python | from crate.theme.rtd.conf.python import *
if "sphinx.ext.intersphinx" not in extensions:
extensions += ["sphinx.ext.intersphinx"]
if "intersphinx_mapping" not in globals():
intersphinx_mapping = {}
intersphinx_mapping.update({
'reference': ('https://crate.io/docs/crate/reference/', None),
'sa': ('https://docs.sqlalchemy.org/en/13/', None),
})
rst_prolog = """
.. |nbsp| unicode:: 0xA0
:trim:
"""
Fix documentation checks by adjusting intersphinx mapping | from crate.theme.rtd.conf.python import *
if "sphinx.ext.intersphinx" not in extensions:
extensions += ["sphinx.ext.intersphinx"]
if "intersphinx_mapping" not in globals():
intersphinx_mapping = {}
intersphinx_mapping.update({
'reference': ('https://crate.io/docs/crate/reference/en/latest/', None),
'sa': ('https://docs.sqlalchemy.org/en/13/', None),
})
rst_prolog = """
.. |nbsp| unicode:: 0xA0
:trim:
"""
| <commit_before>from crate.theme.rtd.conf.python import *
if "sphinx.ext.intersphinx" not in extensions:
extensions += ["sphinx.ext.intersphinx"]
if "intersphinx_mapping" not in globals():
intersphinx_mapping = {}
intersphinx_mapping.update({
'reference': ('https://crate.io/docs/crate/reference/', None),
'sa': ('https://docs.sqlalchemy.org/en/13/', None),
})
rst_prolog = """
.. |nbsp| unicode:: 0xA0
:trim:
"""
<commit_msg>Fix documentation checks by adjusting intersphinx mapping<commit_after> | from crate.theme.rtd.conf.python import *
if "sphinx.ext.intersphinx" not in extensions:
extensions += ["sphinx.ext.intersphinx"]
if "intersphinx_mapping" not in globals():
intersphinx_mapping = {}
intersphinx_mapping.update({
'reference': ('https://crate.io/docs/crate/reference/en/latest/', None),
'sa': ('https://docs.sqlalchemy.org/en/13/', None),
})
rst_prolog = """
.. |nbsp| unicode:: 0xA0
:trim:
"""
| from crate.theme.rtd.conf.python import *
if "sphinx.ext.intersphinx" not in extensions:
extensions += ["sphinx.ext.intersphinx"]
if "intersphinx_mapping" not in globals():
intersphinx_mapping = {}
intersphinx_mapping.update({
'reference': ('https://crate.io/docs/crate/reference/', None),
'sa': ('https://docs.sqlalchemy.org/en/13/', None),
})
rst_prolog = """
.. |nbsp| unicode:: 0xA0
:trim:
"""
Fix documentation checks by adjusting intersphinx mappingfrom crate.theme.rtd.conf.python import *
if "sphinx.ext.intersphinx" not in extensions:
extensions += ["sphinx.ext.intersphinx"]
if "intersphinx_mapping" not in globals():
intersphinx_mapping = {}
intersphinx_mapping.update({
'reference': ('https://crate.io/docs/crate/reference/en/latest/', None),
'sa': ('https://docs.sqlalchemy.org/en/13/', None),
})
rst_prolog = """
.. |nbsp| unicode:: 0xA0
:trim:
"""
| <commit_before>from crate.theme.rtd.conf.python import *
if "sphinx.ext.intersphinx" not in extensions:
extensions += ["sphinx.ext.intersphinx"]
if "intersphinx_mapping" not in globals():
intersphinx_mapping = {}
intersphinx_mapping.update({
'reference': ('https://crate.io/docs/crate/reference/', None),
'sa': ('https://docs.sqlalchemy.org/en/13/', None),
})
rst_prolog = """
.. |nbsp| unicode:: 0xA0
:trim:
"""
<commit_msg>Fix documentation checks by adjusting intersphinx mapping<commit_after>from crate.theme.rtd.conf.python import *
if "sphinx.ext.intersphinx" not in extensions:
extensions += ["sphinx.ext.intersphinx"]
if "intersphinx_mapping" not in globals():
intersphinx_mapping = {}
intersphinx_mapping.update({
'reference': ('https://crate.io/docs/crate/reference/en/latest/', None),
'sa': ('https://docs.sqlalchemy.org/en/13/', None),
})
rst_prolog = """
.. |nbsp| unicode:: 0xA0
:trim:
"""
|
a303d4d0491abf91c8f2856527d0b3566f704b90 | tracestack/__init__.py | tracestack/__init__.py | import sys, urllib, webbrowser
def tracestack():
try:
last_error = sys.last_value
except:
raise Exception("No error message available.")
error_query = urllib.urlencode("[python] " + str(last_error))
search_url = "http://stackoverflow.com/search?q=" + error_query
webbrowser.open(search_url)
| import sys, urllib, webbrowser
def tracestack():
try:
last_error = "{0} {1}".format(sys.last_type, sys.last_value)
except:
raise Exception("No error message available.")
error_query = urllib.urlencode({"q": "[python] " + last_error})
search_url = "http://stackoverflow.com/search?q=" + error_query
webbrowser.open(search_url)
| Fix bugs thanks to Aaron | Fix bugs thanks to Aaron | Python | mit | kod3r/tracestack,danrobinson/tracestack | import sys, urllib, webbrowser
def tracestack():
try:
last_error = sys.last_value
except:
raise Exception("No error message available.")
error_query = urllib.urlencode("[python] " + str(last_error))
search_url = "http://stackoverflow.com/search?q=" + error_query
webbrowser.open(search_url)
Fix bugs thanks to Aaron | import sys, urllib, webbrowser
def tracestack():
try:
last_error = "{0} {1}".format(sys.last_type, sys.last_value)
except:
raise Exception("No error message available.")
error_query = urllib.urlencode({"q": "[python] " + last_error})
search_url = "http://stackoverflow.com/search?q=" + error_query
webbrowser.open(search_url)
| <commit_before>import sys, urllib, webbrowser
def tracestack():
try:
last_error = sys.last_value
except:
raise Exception("No error message available.")
error_query = urllib.urlencode("[python] " + str(last_error))
search_url = "http://stackoverflow.com/search?q=" + error_query
webbrowser.open(search_url)
<commit_msg>Fix bugs thanks to Aaron<commit_after> | import sys, urllib, webbrowser
def tracestack():
try:
last_error = "{0} {1}".format(sys.last_type, sys.last_value)
except:
raise Exception("No error message available.")
error_query = urllib.urlencode({"q": "[python] " + last_error})
search_url = "http://stackoverflow.com/search?q=" + error_query
webbrowser.open(search_url)
| import sys, urllib, webbrowser
def tracestack():
try:
last_error = sys.last_value
except:
raise Exception("No error message available.")
error_query = urllib.urlencode("[python] " + str(last_error))
search_url = "http://stackoverflow.com/search?q=" + error_query
webbrowser.open(search_url)
Fix bugs thanks to Aaronimport sys, urllib, webbrowser
def tracestack():
try:
last_error = "{0} {1}".format(sys.last_type, sys.last_value)
except:
raise Exception("No error message available.")
error_query = urllib.urlencode({"q": "[python] " + last_error})
search_url = "http://stackoverflow.com/search?q=" + error_query
webbrowser.open(search_url)
| <commit_before>import sys, urllib, webbrowser
def tracestack():
try:
last_error = sys.last_value
except:
raise Exception("No error message available.")
error_query = urllib.urlencode("[python] " + str(last_error))
search_url = "http://stackoverflow.com/search?q=" + error_query
webbrowser.open(search_url)
<commit_msg>Fix bugs thanks to Aaron<commit_after>import sys, urllib, webbrowser
def tracestack():
try:
last_error = "{0} {1}".format(sys.last_type, sys.last_value)
except:
raise Exception("No error message available.")
error_query = urllib.urlencode({"q": "[python] " + last_error})
search_url = "http://stackoverflow.com/search?q=" + error_query
webbrowser.open(search_url)
|
03590da0c3ffceb97ccc15c458ca29a18290f559 | jobs/collect_9gag_posts.py | jobs/collect_9gag_posts.py | import time
import feedparser
from bs4 import BeautifulSoup
import boto3
from PIL import Image
import requests
import logging
NINEGAG_RSS_URL = 'http://www.15minutesoffame.be/9gag/rss/9GAG_-_Trending.atom'
def main():
feed = feedparser.parse(NINEGAG_RSS_URL)['items']
table = boto3.resource('dynamodb', 'eu-west-1').Table('LeonardBot9gagPosts')
for item in feed:
title, post_id, img = item['title'], item['link'].split('/')[4], BeautifulSoup(
item['summary'], 'lxml'
).find('img')
if not hasattr(img, 'src'):
continue
img = img['src']
response = requests.get(img, stream=True)
width, height = Image.open(response.raw).size
if height / width >= 3:
continue
table.put_item(
Item={
'postId': post_id,
'title': title,
'img': img,
'createdAt': int(time.time()),
'viewed': {-1},
'file_id': False
}
)
if __name__ == '__main__':
try:
main()
except Exception as e:
logging.error(e)
| import time
import feedparser
from bs4 import BeautifulSoup
import boto3
from PIL import Image
import requests
import logging
NINEGAG_RSS_URL = 'http://www.15minutesoffame.be/9gag/rss/9GAG_-_Trending.atom'
def main():
feed = feedparser.parse(NINEGAG_RSS_URL)['items']
table = boto3.resource('dynamodb', 'eu-west-1').Table('LeonardBot9gagPosts')
for item in feed:
title, post_id, img = item['title'], item['link'].split('/')[4], BeautifulSoup(
item['summary'], 'lxml'
).find('img')
if not hasattr(img, 'src'):
continue
img = img['src']
response = requests.get(img, stream=True)
width, height = Image.open(response.raw).size
if height / width >= 2:
continue
table.put_item(
Item={
'postId': post_id,
'title': title,
'img': img,
'createdAt': int(time.time()),
'viewed': {-1},
'file_id': False
}
)
if __name__ == '__main__':
try:
main()
except Exception as e:
logging.error(e)
| Change 9gag collector longpost detector | Change 9gag collector longpost detector
| Python | mit | sevazhidkov/leonard | import time
import feedparser
from bs4 import BeautifulSoup
import boto3
from PIL import Image
import requests
import logging
NINEGAG_RSS_URL = 'http://www.15minutesoffame.be/9gag/rss/9GAG_-_Trending.atom'
def main():
feed = feedparser.parse(NINEGAG_RSS_URL)['items']
table = boto3.resource('dynamodb', 'eu-west-1').Table('LeonardBot9gagPosts')
for item in feed:
title, post_id, img = item['title'], item['link'].split('/')[4], BeautifulSoup(
item['summary'], 'lxml'
).find('img')
if not hasattr(img, 'src'):
continue
img = img['src']
response = requests.get(img, stream=True)
width, height = Image.open(response.raw).size
if height / width >= 3:
continue
table.put_item(
Item={
'postId': post_id,
'title': title,
'img': img,
'createdAt': int(time.time()),
'viewed': {-1},
'file_id': False
}
)
if __name__ == '__main__':
try:
main()
except Exception as e:
logging.error(e)
Change 9gag collector longpost detector | import time
import feedparser
from bs4 import BeautifulSoup
import boto3
from PIL import Image
import requests
import logging
NINEGAG_RSS_URL = 'http://www.15minutesoffame.be/9gag/rss/9GAG_-_Trending.atom'
def main():
feed = feedparser.parse(NINEGAG_RSS_URL)['items']
table = boto3.resource('dynamodb', 'eu-west-1').Table('LeonardBot9gagPosts')
for item in feed:
title, post_id, img = item['title'], item['link'].split('/')[4], BeautifulSoup(
item['summary'], 'lxml'
).find('img')
if not hasattr(img, 'src'):
continue
img = img['src']
response = requests.get(img, stream=True)
width, height = Image.open(response.raw).size
if height / width >= 2:
continue
table.put_item(
Item={
'postId': post_id,
'title': title,
'img': img,
'createdAt': int(time.time()),
'viewed': {-1},
'file_id': False
}
)
if __name__ == '__main__':
try:
main()
except Exception as e:
logging.error(e)
| <commit_before>import time
import feedparser
from bs4 import BeautifulSoup
import boto3
from PIL import Image
import requests
import logging
NINEGAG_RSS_URL = 'http://www.15minutesoffame.be/9gag/rss/9GAG_-_Trending.atom'
def main():
feed = feedparser.parse(NINEGAG_RSS_URL)['items']
table = boto3.resource('dynamodb', 'eu-west-1').Table('LeonardBot9gagPosts')
for item in feed:
title, post_id, img = item['title'], item['link'].split('/')[4], BeautifulSoup(
item['summary'], 'lxml'
).find('img')
if not hasattr(img, 'src'):
continue
img = img['src']
response = requests.get(img, stream=True)
width, height = Image.open(response.raw).size
if height / width >= 3:
continue
table.put_item(
Item={
'postId': post_id,
'title': title,
'img': img,
'createdAt': int(time.time()),
'viewed': {-1},
'file_id': False
}
)
if __name__ == '__main__':
try:
main()
except Exception as e:
logging.error(e)
<commit_msg>Change 9gag collector longpost detector<commit_after> | import time
import feedparser
from bs4 import BeautifulSoup
import boto3
from PIL import Image
import requests
import logging
NINEGAG_RSS_URL = 'http://www.15minutesoffame.be/9gag/rss/9GAG_-_Trending.atom'
def main():
feed = feedparser.parse(NINEGAG_RSS_URL)['items']
table = boto3.resource('dynamodb', 'eu-west-1').Table('LeonardBot9gagPosts')
for item in feed:
title, post_id, img = item['title'], item['link'].split('/')[4], BeautifulSoup(
item['summary'], 'lxml'
).find('img')
if not hasattr(img, 'src'):
continue
img = img['src']
response = requests.get(img, stream=True)
width, height = Image.open(response.raw).size
if height / width >= 2:
continue
table.put_item(
Item={
'postId': post_id,
'title': title,
'img': img,
'createdAt': int(time.time()),
'viewed': {-1},
'file_id': False
}
)
if __name__ == '__main__':
try:
main()
except Exception as e:
logging.error(e)
| import time
import feedparser
from bs4 import BeautifulSoup
import boto3
from PIL import Image
import requests
import logging
NINEGAG_RSS_URL = 'http://www.15minutesoffame.be/9gag/rss/9GAG_-_Trending.atom'
def main():
feed = feedparser.parse(NINEGAG_RSS_URL)['items']
table = boto3.resource('dynamodb', 'eu-west-1').Table('LeonardBot9gagPosts')
for item in feed:
title, post_id, img = item['title'], item['link'].split('/')[4], BeautifulSoup(
item['summary'], 'lxml'
).find('img')
if not hasattr(img, 'src'):
continue
img = img['src']
response = requests.get(img, stream=True)
width, height = Image.open(response.raw).size
if height / width >= 3:
continue
table.put_item(
Item={
'postId': post_id,
'title': title,
'img': img,
'createdAt': int(time.time()),
'viewed': {-1},
'file_id': False
}
)
if __name__ == '__main__':
try:
main()
except Exception as e:
logging.error(e)
Change 9gag collector longpost detectorimport time
import feedparser
from bs4 import BeautifulSoup
import boto3
from PIL import Image
import requests
import logging
NINEGAG_RSS_URL = 'http://www.15minutesoffame.be/9gag/rss/9GAG_-_Trending.atom'
def main():
feed = feedparser.parse(NINEGAG_RSS_URL)['items']
table = boto3.resource('dynamodb', 'eu-west-1').Table('LeonardBot9gagPosts')
for item in feed:
title, post_id, img = item['title'], item['link'].split('/')[4], BeautifulSoup(
item['summary'], 'lxml'
).find('img')
if not hasattr(img, 'src'):
continue
img = img['src']
response = requests.get(img, stream=True)
width, height = Image.open(response.raw).size
if height / width >= 2:
continue
table.put_item(
Item={
'postId': post_id,
'title': title,
'img': img,
'createdAt': int(time.time()),
'viewed': {-1},
'file_id': False
}
)
if __name__ == '__main__':
try:
main()
except Exception as e:
logging.error(e)
| <commit_before>import time
import feedparser
from bs4 import BeautifulSoup
import boto3
from PIL import Image
import requests
import logging
NINEGAG_RSS_URL = 'http://www.15minutesoffame.be/9gag/rss/9GAG_-_Trending.atom'
def main():
feed = feedparser.parse(NINEGAG_RSS_URL)['items']
table = boto3.resource('dynamodb', 'eu-west-1').Table('LeonardBot9gagPosts')
for item in feed:
title, post_id, img = item['title'], item['link'].split('/')[4], BeautifulSoup(
item['summary'], 'lxml'
).find('img')
if not hasattr(img, 'src'):
continue
img = img['src']
response = requests.get(img, stream=True)
width, height = Image.open(response.raw).size
if height / width >= 3:
continue
table.put_item(
Item={
'postId': post_id,
'title': title,
'img': img,
'createdAt': int(time.time()),
'viewed': {-1},
'file_id': False
}
)
if __name__ == '__main__':
try:
main()
except Exception as e:
logging.error(e)
<commit_msg>Change 9gag collector longpost detector<commit_after>import time
import feedparser
from bs4 import BeautifulSoup
import boto3
from PIL import Image
import requests
import logging
NINEGAG_RSS_URL = 'http://www.15minutesoffame.be/9gag/rss/9GAG_-_Trending.atom'
def main():
feed = feedparser.parse(NINEGAG_RSS_URL)['items']
table = boto3.resource('dynamodb', 'eu-west-1').Table('LeonardBot9gagPosts')
for item in feed:
title, post_id, img = item['title'], item['link'].split('/')[4], BeautifulSoup(
item['summary'], 'lxml'
).find('img')
if not hasattr(img, 'src'):
continue
img = img['src']
response = requests.get(img, stream=True)
width, height = Image.open(response.raw).size
if height / width >= 2:
continue
table.put_item(
Item={
'postId': post_id,
'title': title,
'img': img,
'createdAt': int(time.time()),
'viewed': {-1},
'file_id': False
}
)
if __name__ == '__main__':
try:
main()
except Exception as e:
logging.error(e)
|
5a3c8161943989942c14b0f476241ee52a8706ae | adhocracy/lib/instance/discriminator.py | adhocracy/lib/instance/discriminator.py | import logging
from adhocracy import model
from pylons import config
log = logging.getLogger(__name__)
class InstanceDiscriminatorMiddleware(object):
def __init__(self, app, domain):
self.app = app
self.domain = domain
log.debug("Host name: %s." % domain)
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', "")
environ['adhocracy.domain'] = self.domain
instance_key = config.get('adhocracy.instance')
if instance_key is None:
host = host.split(':', 1)[0]
host = host.replace(self.domain, "")
host = host.strip('.').strip()
instance_key = host
if len(instance_key):
instance = model.Instance.find(instance_key)
if instance is None:
log.debug("No such instance: %s, defaulting!" % instance_key)
else:
model.instance_filter.setup_thread(instance)
try:
return self.app(environ, start_response)
finally:
model.instance_filter.setup_thread(None)
def setup_discriminator(app, config):
domains = config.get('adhocracy.domain',
config.get('adhocracy.domains', ''))
domains = [d.strip() for d in domains.split(',')]
return InstanceDiscriminatorMiddleware(app, domains[0])
| import logging
from adhocracy import model
from pylons import config
log = logging.getLogger(__name__)
class InstanceDiscriminatorMiddleware(object):
def __init__(self, app, domain):
self.app = app
self.domain = domain
log.debug("Host name: %s." % domain)
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', "")
environ['adhocracy.domain'] = self.domain
instance_key = config.get('adhocracy.instance')
if instance_key is None:
host = host.replace(self.domain, "")
host = host.split(':', 1)[0]
host = host.strip('.').strip()
instance_key = host
if len(instance_key):
instance = model.Instance.find(instance_key)
if instance is None:
log.debug("No such instance: %s, defaulting!" % instance_key)
else:
model.instance_filter.setup_thread(instance)
try:
return self.app(environ, start_response)
finally:
model.instance_filter.setup_thread(None)
def setup_discriminator(app, config):
domains = config.get('adhocracy.domain',
config.get('adhocracy.domains', ''))
domains = [d.strip() for d in domains.split(',')]
return InstanceDiscriminatorMiddleware(app, domains[0])
| Allow to set the port in adhocracy.domain to make adhocracy work stand alone again. | Allow to set the port in adhocracy.domain to make adhocracy work stand alone again.
| Python | agpl-3.0 | alkadis/vcv,liqd/adhocracy,SysTheron/adhocracy,alkadis/vcv,phihag/adhocracy,alkadis/vcv,SysTheron/adhocracy,liqd/adhocracy,liqd/adhocracy,DanielNeugebauer/adhocracy,DanielNeugebauer/adhocracy,alkadis/vcv,alkadis/vcv,DanielNeugebauer/adhocracy,SysTheron/adhocracy,phihag/adhocracy,phihag/adhocracy,DanielNeugebauer/adhocracy,DanielNeugebauer/adhocracy,liqd/adhocracy,phihag/adhocracy,phihag/adhocracy | import logging
from adhocracy import model
from pylons import config
log = logging.getLogger(__name__)
class InstanceDiscriminatorMiddleware(object):
def __init__(self, app, domain):
self.app = app
self.domain = domain
log.debug("Host name: %s." % domain)
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', "")
environ['adhocracy.domain'] = self.domain
instance_key = config.get('adhocracy.instance')
if instance_key is None:
host = host.split(':', 1)[0]
host = host.replace(self.domain, "")
host = host.strip('.').strip()
instance_key = host
if len(instance_key):
instance = model.Instance.find(instance_key)
if instance is None:
log.debug("No such instance: %s, defaulting!" % instance_key)
else:
model.instance_filter.setup_thread(instance)
try:
return self.app(environ, start_response)
finally:
model.instance_filter.setup_thread(None)
def setup_discriminator(app, config):
domains = config.get('adhocracy.domain',
config.get('adhocracy.domains', ''))
domains = [d.strip() for d in domains.split(',')]
return InstanceDiscriminatorMiddleware(app, domains[0])
Allow to set the port in adhocracy.domain to make adhocracy work stand alone again. | import logging
from adhocracy import model
from pylons import config
log = logging.getLogger(__name__)
class InstanceDiscriminatorMiddleware(object):
def __init__(self, app, domain):
self.app = app
self.domain = domain
log.debug("Host name: %s." % domain)
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', "")
environ['adhocracy.domain'] = self.domain
instance_key = config.get('adhocracy.instance')
if instance_key is None:
host = host.replace(self.domain, "")
host = host.split(':', 1)[0]
host = host.strip('.').strip()
instance_key = host
if len(instance_key):
instance = model.Instance.find(instance_key)
if instance is None:
log.debug("No such instance: %s, defaulting!" % instance_key)
else:
model.instance_filter.setup_thread(instance)
try:
return self.app(environ, start_response)
finally:
model.instance_filter.setup_thread(None)
def setup_discriminator(app, config):
domains = config.get('adhocracy.domain',
config.get('adhocracy.domains', ''))
domains = [d.strip() for d in domains.split(',')]
return InstanceDiscriminatorMiddleware(app, domains[0])
| <commit_before>import logging
from adhocracy import model
from pylons import config
log = logging.getLogger(__name__)
class InstanceDiscriminatorMiddleware(object):
def __init__(self, app, domain):
self.app = app
self.domain = domain
log.debug("Host name: %s." % domain)
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', "")
environ['adhocracy.domain'] = self.domain
instance_key = config.get('adhocracy.instance')
if instance_key is None:
host = host.split(':', 1)[0]
host = host.replace(self.domain, "")
host = host.strip('.').strip()
instance_key = host
if len(instance_key):
instance = model.Instance.find(instance_key)
if instance is None:
log.debug("No such instance: %s, defaulting!" % instance_key)
else:
model.instance_filter.setup_thread(instance)
try:
return self.app(environ, start_response)
finally:
model.instance_filter.setup_thread(None)
def setup_discriminator(app, config):
domains = config.get('adhocracy.domain',
config.get('adhocracy.domains', ''))
domains = [d.strip() for d in domains.split(',')]
return InstanceDiscriminatorMiddleware(app, domains[0])
<commit_msg>Allow to set the port in adhocracy.domain to make adhocracy work stand alone again.<commit_after> | import logging
from adhocracy import model
from pylons import config
log = logging.getLogger(__name__)
class InstanceDiscriminatorMiddleware(object):
def __init__(self, app, domain):
self.app = app
self.domain = domain
log.debug("Host name: %s." % domain)
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', "")
environ['adhocracy.domain'] = self.domain
instance_key = config.get('adhocracy.instance')
if instance_key is None:
host = host.replace(self.domain, "")
host = host.split(':', 1)[0]
host = host.strip('.').strip()
instance_key = host
if len(instance_key):
instance = model.Instance.find(instance_key)
if instance is None:
log.debug("No such instance: %s, defaulting!" % instance_key)
else:
model.instance_filter.setup_thread(instance)
try:
return self.app(environ, start_response)
finally:
model.instance_filter.setup_thread(None)
def setup_discriminator(app, config):
domains = config.get('adhocracy.domain',
config.get('adhocracy.domains', ''))
domains = [d.strip() for d in domains.split(',')]
return InstanceDiscriminatorMiddleware(app, domains[0])
| import logging
from adhocracy import model
from pylons import config
log = logging.getLogger(__name__)
class InstanceDiscriminatorMiddleware(object):
def __init__(self, app, domain):
self.app = app
self.domain = domain
log.debug("Host name: %s." % domain)
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', "")
environ['adhocracy.domain'] = self.domain
instance_key = config.get('adhocracy.instance')
if instance_key is None:
host = host.split(':', 1)[0]
host = host.replace(self.domain, "")
host = host.strip('.').strip()
instance_key = host
if len(instance_key):
instance = model.Instance.find(instance_key)
if instance is None:
log.debug("No such instance: %s, defaulting!" % instance_key)
else:
model.instance_filter.setup_thread(instance)
try:
return self.app(environ, start_response)
finally:
model.instance_filter.setup_thread(None)
def setup_discriminator(app, config):
domains = config.get('adhocracy.domain',
config.get('adhocracy.domains', ''))
domains = [d.strip() for d in domains.split(',')]
return InstanceDiscriminatorMiddleware(app, domains[0])
Allow to set the port in adhocracy.domain to make adhocracy work stand alone again.import logging
from adhocracy import model
from pylons import config
log = logging.getLogger(__name__)
class InstanceDiscriminatorMiddleware(object):
def __init__(self, app, domain):
self.app = app
self.domain = domain
log.debug("Host name: %s." % domain)
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', "")
environ['adhocracy.domain'] = self.domain
instance_key = config.get('adhocracy.instance')
if instance_key is None:
host = host.replace(self.domain, "")
host = host.split(':', 1)[0]
host = host.strip('.').strip()
instance_key = host
if len(instance_key):
instance = model.Instance.find(instance_key)
if instance is None:
log.debug("No such instance: %s, defaulting!" % instance_key)
else:
model.instance_filter.setup_thread(instance)
try:
return self.app(environ, start_response)
finally:
model.instance_filter.setup_thread(None)
def setup_discriminator(app, config):
domains = config.get('adhocracy.domain',
config.get('adhocracy.domains', ''))
domains = [d.strip() for d in domains.split(',')]
return InstanceDiscriminatorMiddleware(app, domains[0])
| <commit_before>import logging
from adhocracy import model
from pylons import config
log = logging.getLogger(__name__)
class InstanceDiscriminatorMiddleware(object):
def __init__(self, app, domain):
self.app = app
self.domain = domain
log.debug("Host name: %s." % domain)
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', "")
environ['adhocracy.domain'] = self.domain
instance_key = config.get('adhocracy.instance')
if instance_key is None:
host = host.split(':', 1)[0]
host = host.replace(self.domain, "")
host = host.strip('.').strip()
instance_key = host
if len(instance_key):
instance = model.Instance.find(instance_key)
if instance is None:
log.debug("No such instance: %s, defaulting!" % instance_key)
else:
model.instance_filter.setup_thread(instance)
try:
return self.app(environ, start_response)
finally:
model.instance_filter.setup_thread(None)
def setup_discriminator(app, config):
domains = config.get('adhocracy.domain',
config.get('adhocracy.domains', ''))
domains = [d.strip() for d in domains.split(',')]
return InstanceDiscriminatorMiddleware(app, domains[0])
<commit_msg>Allow to set the port in adhocracy.domain to make adhocracy work stand alone again.<commit_after>import logging
from adhocracy import model
from pylons import config
log = logging.getLogger(__name__)
class InstanceDiscriminatorMiddleware(object):
def __init__(self, app, domain):
self.app = app
self.domain = domain
log.debug("Host name: %s." % domain)
def __call__(self, environ, start_response):
host = environ.get('HTTP_HOST', "")
environ['adhocracy.domain'] = self.domain
instance_key = config.get('adhocracy.instance')
if instance_key is None:
host = host.replace(self.domain, "")
host = host.split(':', 1)[0]
host = host.strip('.').strip()
instance_key = host
if len(instance_key):
instance = model.Instance.find(instance_key)
if instance is None:
log.debug("No such instance: %s, defaulting!" % instance_key)
else:
model.instance_filter.setup_thread(instance)
try:
return self.app(environ, start_response)
finally:
model.instance_filter.setup_thread(None)
def setup_discriminator(app, config):
domains = config.get('adhocracy.domain',
config.get('adhocracy.domains', ''))
domains = [d.strip() for d in domains.split(',')]
return InstanceDiscriminatorMiddleware(app, domains[0])
|
57f4b34a70a3506f1fb1e1842b3e75dc977bfe18 | flask_app.py | flask_app.py | from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
| from flask import abort
from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
| Add import of abort function. | Add import of abort function.
| Python | bsd-3-clause | talavis/kimenu | from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
Add import of abort function. | from flask import abort
from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
| <commit_before>from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
<commit_msg>Add import of abort function.<commit_after> | from flask import abort
from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
| from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
Add import of abort function.from flask import abort
from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
| <commit_before>from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
<commit_msg>Add import of abort function.<commit_after>from flask import abort
from flask import Flask
from flask_caching import Cache
import main
app = Flask(__name__)
cache = Cache(app, config={'CACHE_TYPE': 'simple'})
@app.route('/')
def display_available():
content = ('<html>' +
'<head>' +
'<title>Restaurant Menu Parser</title>' +
'</head>' +
'<body>' +
'<p><a href="ki">Campus Solna (KI)</a></p>' +
'<p><a href="uu">Campus Uppsala (BMC)</a></p>' +
'</body>' +
'</html>')
return content
@app.route('/api/restaurants')
@cache.cached(timeout=3600)
def api_list_restaurants():
return main.list_restaurants()
@app.route('/api/restaurant/<name>')
@cache.cached(timeout=3600)
def api_get_restaurant(name):
data = main.get_restaurant(name)
if not data:
abort(404)
return data
@app.route('/ki')
@cache.cached(timeout=3600)
def make_menu_ki():
return main.gen_ki_menu()
@app.route('/uu')
@cache.cached(timeout=3600)
def make_menu_uu():
return main.gen_uu_menu()
|
00b31f3025493942c0ce7eb03c7cc09abf0eb8d0 | txlege84/core/views.py | txlege84/core/views.py | from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
print context
return context
| from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
return context
| Print statement snuck in there | Print statement snuck in there
| Python | mit | texastribune/txlege84,texastribune/txlege84,texastribune/txlege84,texastribune/txlege84 | from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
print context
return context
Print statement snuck in there | from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
return context
| <commit_before>from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
print context
return context
<commit_msg>Print statement snuck in there<commit_after> | from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
return context
| from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
print context
return context
Print statement snuck in therefrom django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
return context
| <commit_before>from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
print context
return context
<commit_msg>Print statement snuck in there<commit_after>from django.views.generic import ListView
from topics.models import Topic, TopIssue
class LandingView(ListView):
model = Topic
template_name = 'landing.html'
def get_context_data(self, **kwargs):
context = super(LandingView, self).get_context_data(**kwargs)
context['top_issues'] = TopIssue.objects.all()[:4]
return context
|
a9bc4d98e8b61b63c14a2a5f1e11c85d91747f30 | analysis/data_process/uk_2017/config.py | analysis/data_process/uk_2017/config.py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
class PlottingConfig(CleaningConfig):
pass
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
to_import = ['import pandas as pd',
'import numpy as np',
'get_ipython().magic("matplotlib inline")',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count',
'from plotting import get_plot',
'from likertScalePlot import likert_scale']
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
class PlottingConfig(CleaningConfig):
count_na = True
plot_na = False
normalise = False
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
to_import = ['import pandas as pd',
'import numpy as np',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count',
'from plotting import get_plot',
'from IPython.display import display',
'from likertScalePlot import likert_scale']
processing_options = {'metadata': {'path': './',
'hide_input': True}}
| Add options in the plot | Add options in the plot
| Python | bsd-3-clause | softwaresaved/international-survey | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
class PlottingConfig(CleaningConfig):
pass
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
to_import = ['import pandas as pd',
'import numpy as np',
'get_ipython().magic("matplotlib inline")',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count',
'from plotting import get_plot',
'from likertScalePlot import likert_scale']
Add options in the plot | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
class PlottingConfig(CleaningConfig):
count_na = True
plot_na = False
normalise = False
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
to_import = ['import pandas as pd',
'import numpy as np',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count',
'from plotting import get_plot',
'from IPython.display import display',
'from likertScalePlot import likert_scale']
processing_options = {'metadata': {'path': './',
'hide_input': True}}
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
class PlottingConfig(CleaningConfig):
pass
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
to_import = ['import pandas as pd',
'import numpy as np',
'get_ipython().magic("matplotlib inline")',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count',
'from plotting import get_plot',
'from likertScalePlot import likert_scale']
<commit_msg>Add options in the plot<commit_after> | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
class PlottingConfig(CleaningConfig):
count_na = True
plot_na = False
normalise = False
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
to_import = ['import pandas as pd',
'import numpy as np',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count',
'from plotting import get_plot',
'from IPython.display import display',
'from likertScalePlot import likert_scale']
processing_options = {'metadata': {'path': './',
'hide_input': True}}
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
class PlottingConfig(CleaningConfig):
pass
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
to_import = ['import pandas as pd',
'import numpy as np',
'get_ipython().magic("matplotlib inline")',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count',
'from plotting import get_plot',
'from likertScalePlot import likert_scale']
Add options in the plot#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
class PlottingConfig(CleaningConfig):
count_na = True
plot_na = False
normalise = False
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
to_import = ['import pandas as pd',
'import numpy as np',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count',
'from plotting import get_plot',
'from IPython.display import display',
'from likertScalePlot import likert_scale']
processing_options = {'metadata': {'path': './',
'hide_input': True}}
| <commit_before>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
class PlottingConfig(CleaningConfig):
pass
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
to_import = ['import pandas as pd',
'import numpy as np',
'get_ipython().magic("matplotlib inline")',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count',
'from plotting import get_plot',
'from likertScalePlot import likert_scale']
<commit_msg>Add options in the plot<commit_after>#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Config file for the cleaning - plotting and notebook process"""
class CleaningConfig:
# Unprocessed dataset
raw_data = './dataset/raw_results-survey245554.csv'
# load the different answers to questions to classify questions based on that
question_file = '../../../survey_creation/uk_17/uk_17.csv'
answer_folder = '../../../survey_creation/uk_17/listAnswers'
# Location for the json file of all questions
json_to_plot_location = './to_plot.json'
cleaned_df_location = './dataset/cleaned_data.csv'
class PlottingConfig(CleaningConfig):
count_na = True
plot_na = False
normalise = False
class NotebookConfig(PlottingConfig):
notebook_folder = './'
notebook_filename = 'uk_17.ipynb'
to_import = ['import pandas as pd',
'import numpy as np',
'import matplotlib',
'import matplotlib.pyplot as plt',
'from config import CleaningConfig, PlottingConfig, NotebookConfig',
'from counting import get_count',
'from plotting import get_plot',
'from IPython.display import display',
'from likertScalePlot import likert_scale']
processing_options = {'metadata': {'path': './',
'hide_input': True}}
|
e99196e14cd258960ad188875723178579b4dbf4 | src/rf/apps/workers/image_validator.py | src/rf/apps/workers/image_validator.py | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf import settings
import os
import uuid
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from osgeo import gdal
def ensure_band_count(key_string, byte_range=None):
"""
Gets the first `range` bytes from the s3 resource and uses it to
determine the number of bands in the image.
"""
connection = S3Connection()
bucket = connection.get_bucket(settings.AWS_BUCKET_NAME)
s3_key = Key(bucket)
s3_key.key = key_string
random_filename = '/tmp/' + str(uuid.uuid4())
with open(random_filename, 'w') as tempfile:
if byte_range is not None:
header_range = {'Range': 'bytes=' + byte_range}
s3_key.get_contents_to_file(tempfile, headers=header_range)
else:
s3_key.get_contents_to_file(tempfile)
try:
validator = gdal.Open(random_filename)
# Tiler needs 3+ bands.
raster_ok = validator.RasterCount >= 3
except AttributeError:
raster_ok = False
os.remove(random_filename)
return raster_ok
| # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf import settings
import os
import uuid
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from osgeo import gdal
def ensure_band_count(key_string, byte_range=None):
"""
Gets the first `range` bytes from the s3 resource and uses it to
determine the number of bands in the image.
"""
connection = S3Connection()
bucket = connection.get_bucket(settings.AWS_BUCKET_NAME)
s3_key = Key(bucket)
s3_key.key = key_string
random_filename = os.path.join(settings.TEMP_DIR, str(uuid.uuid4()))
with open(random_filename, 'w') as tempfile:
if byte_range is not None:
header_range = {'Range': 'bytes=' + byte_range}
s3_key.get_contents_to_file(tempfile, headers=header_range)
else:
s3_key.get_contents_to_file(tempfile)
try:
validator = gdal.Open(random_filename)
# Tiler needs 3+ bands.
raster_ok = validator.RasterCount >= 3
except AttributeError:
raster_ok = False
os.remove(random_filename)
return raster_ok
| Use new temp file directory. | Use new temp file directory.
We created a new scratch directory that matches EC2 mounted temp space and
supplied this as a setting in commit 21916b. This switches temporary images to
be downloaded there rather than at /tmp.
| Python | apache-2.0 | aaronxsu/raster-foundry,aaronxsu/raster-foundry,kdeloach/raster-foundry,azavea/raster-foundry,raster-foundry/raster-foundry,raster-foundry/raster-foundry,azavea/raster-foundry,aaronxsu/raster-foundry,kdeloach/raster-foundry,azavea/raster-foundry,azavea/raster-foundry,azavea/raster-foundry,aaronxsu/raster-foundry,kdeloach/raster-foundry,raster-foundry/raster-foundry,kdeloach/raster-foundry,kdeloach/raster-foundry | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf import settings
import os
import uuid
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from osgeo import gdal
def ensure_band_count(key_string, byte_range=None):
"""
Gets the first `range` bytes from the s3 resource and uses it to
determine the number of bands in the image.
"""
connection = S3Connection()
bucket = connection.get_bucket(settings.AWS_BUCKET_NAME)
s3_key = Key(bucket)
s3_key.key = key_string
random_filename = '/tmp/' + str(uuid.uuid4())
with open(random_filename, 'w') as tempfile:
if byte_range is not None:
header_range = {'Range': 'bytes=' + byte_range}
s3_key.get_contents_to_file(tempfile, headers=header_range)
else:
s3_key.get_contents_to_file(tempfile)
try:
validator = gdal.Open(random_filename)
# Tiler needs 3+ bands.
raster_ok = validator.RasterCount >= 3
except AttributeError:
raster_ok = False
os.remove(random_filename)
return raster_ok
Use new temp file directory.
We created a new scratch directory that matches EC2 mounted temp space and
supplied this as a setting in commit 21916b. This switches temporary images to
be downloaded there rather than at /tmp. | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf import settings
import os
import uuid
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from osgeo import gdal
def ensure_band_count(key_string, byte_range=None):
"""
Gets the first `range` bytes from the s3 resource and uses it to
determine the number of bands in the image.
"""
connection = S3Connection()
bucket = connection.get_bucket(settings.AWS_BUCKET_NAME)
s3_key = Key(bucket)
s3_key.key = key_string
random_filename = os.path.join(settings.TEMP_DIR, str(uuid.uuid4()))
with open(random_filename, 'w') as tempfile:
if byte_range is not None:
header_range = {'Range': 'bytes=' + byte_range}
s3_key.get_contents_to_file(tempfile, headers=header_range)
else:
s3_key.get_contents_to_file(tempfile)
try:
validator = gdal.Open(random_filename)
# Tiler needs 3+ bands.
raster_ok = validator.RasterCount >= 3
except AttributeError:
raster_ok = False
os.remove(random_filename)
return raster_ok
| <commit_before># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf import settings
import os
import uuid
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from osgeo import gdal
def ensure_band_count(key_string, byte_range=None):
"""
Gets the first `range` bytes from the s3 resource and uses it to
determine the number of bands in the image.
"""
connection = S3Connection()
bucket = connection.get_bucket(settings.AWS_BUCKET_NAME)
s3_key = Key(bucket)
s3_key.key = key_string
random_filename = '/tmp/' + str(uuid.uuid4())
with open(random_filename, 'w') as tempfile:
if byte_range is not None:
header_range = {'Range': 'bytes=' + byte_range}
s3_key.get_contents_to_file(tempfile, headers=header_range)
else:
s3_key.get_contents_to_file(tempfile)
try:
validator = gdal.Open(random_filename)
# Tiler needs 3+ bands.
raster_ok = validator.RasterCount >= 3
except AttributeError:
raster_ok = False
os.remove(random_filename)
return raster_ok
<commit_msg>Use new temp file directory.
We created a new scratch directory that matches EC2 mounted temp space and
supplied this as a setting in commit 21916b. This switches temporary images to
be downloaded there rather than at /tmp.<commit_after> | # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf import settings
import os
import uuid
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from osgeo import gdal
def ensure_band_count(key_string, byte_range=None):
"""
Gets the first `range` bytes from the s3 resource and uses it to
determine the number of bands in the image.
"""
connection = S3Connection()
bucket = connection.get_bucket(settings.AWS_BUCKET_NAME)
s3_key = Key(bucket)
s3_key.key = key_string
random_filename = os.path.join(settings.TEMP_DIR, str(uuid.uuid4()))
with open(random_filename, 'w') as tempfile:
if byte_range is not None:
header_range = {'Range': 'bytes=' + byte_range}
s3_key.get_contents_to_file(tempfile, headers=header_range)
else:
s3_key.get_contents_to_file(tempfile)
try:
validator = gdal.Open(random_filename)
# Tiler needs 3+ bands.
raster_ok = validator.RasterCount >= 3
except AttributeError:
raster_ok = False
os.remove(random_filename)
return raster_ok
| # -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf import settings
import os
import uuid
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from osgeo import gdal
def ensure_band_count(key_string, byte_range=None):
"""
Gets the first `range` bytes from the s3 resource and uses it to
determine the number of bands in the image.
"""
connection = S3Connection()
bucket = connection.get_bucket(settings.AWS_BUCKET_NAME)
s3_key = Key(bucket)
s3_key.key = key_string
random_filename = '/tmp/' + str(uuid.uuid4())
with open(random_filename, 'w') as tempfile:
if byte_range is not None:
header_range = {'Range': 'bytes=' + byte_range}
s3_key.get_contents_to_file(tempfile, headers=header_range)
else:
s3_key.get_contents_to_file(tempfile)
try:
validator = gdal.Open(random_filename)
# Tiler needs 3+ bands.
raster_ok = validator.RasterCount >= 3
except AttributeError:
raster_ok = False
os.remove(random_filename)
return raster_ok
Use new temp file directory.
We created a new scratch directory that matches EC2 mounted temp space and
supplied this as a setting in commit 21916b. This switches temporary images to
be downloaded there rather than at /tmp.# -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf import settings
import os
import uuid
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from osgeo import gdal
def ensure_band_count(key_string, byte_range=None):
"""
Gets the first `range` bytes from the s3 resource and uses it to
determine the number of bands in the image.
"""
connection = S3Connection()
bucket = connection.get_bucket(settings.AWS_BUCKET_NAME)
s3_key = Key(bucket)
s3_key.key = key_string
random_filename = os.path.join(settings.TEMP_DIR, str(uuid.uuid4()))
with open(random_filename, 'w') as tempfile:
if byte_range is not None:
header_range = {'Range': 'bytes=' + byte_range}
s3_key.get_contents_to_file(tempfile, headers=header_range)
else:
s3_key.get_contents_to_file(tempfile)
try:
validator = gdal.Open(random_filename)
# Tiler needs 3+ bands.
raster_ok = validator.RasterCount >= 3
except AttributeError:
raster_ok = False
os.remove(random_filename)
return raster_ok
| <commit_before># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf import settings
import os
import uuid
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from osgeo import gdal
def ensure_band_count(key_string, byte_range=None):
    """
    Download the S3 object named by `key_string` (optionally only the bytes
    given by `byte_range`) and report whether GDAL sees >= 3 raster bands.
    """
    connection = S3Connection()
    bucket = connection.get_bucket(settings.AWS_BUCKET_NAME)
    s3_key = Key(bucket)
    s3_key.key = key_string
    # Scratch file in /tmp; removed below after validation.
    random_filename = '/tmp/' + str(uuid.uuid4())
    with open(random_filename, 'w') as tempfile:
        if byte_range is not None:
            # Partial download via an HTTP Range header, e.g. "0-1023".
            header_range = {'Range': 'bytes=' + byte_range}
            s3_key.get_contents_to_file(tempfile, headers=header_range)
        else:
            s3_key.get_contents_to_file(tempfile)
    try:
        validator = gdal.Open(random_filename)
        # Tiler needs 3+ bands.
        raster_ok = validator.RasterCount >= 3
    except AttributeError:
        # gdal.Open returns None for unreadable input; .RasterCount then raises.
        raster_ok = False
    os.remove(random_filename)
    return raster_ok
<commit_msg>Use new temp file directory.
We created a new scratch directory that matches EC2 mounted temp space and
supplied this as a setting in commit 21916b. This switches temporary images to
be downloaded there rather than at /tmp.<commit_after># -*- coding: utf-8 -*-
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
from django.conf import settings
import os
import uuid
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from osgeo import gdal
def ensure_band_count(key_string, byte_range=None):
    """
    Fetch the S3 object (or just its first `byte_range` bytes) into a
    scratch file under settings.TEMP_DIR and report whether GDAL can open
    it as a raster with three or more bands.
    """
    conn = S3Connection()
    source_bucket = conn.get_bucket(settings.AWS_BUCKET_NAME)
    remote_object = Key(source_bucket)
    remote_object.key = key_string
    scratch_path = os.path.join(settings.TEMP_DIR, str(uuid.uuid4()))
    # Only attach a Range header when a partial download was requested.
    download_kwargs = {}
    if byte_range is not None:
        download_kwargs['headers'] = {'Range': 'bytes=' + byte_range}
    with open(scratch_path, 'w') as scratch_file:
        remote_object.get_contents_to_file(scratch_file, **download_kwargs)
    try:
        dataset = gdal.Open(scratch_path)
        # Tiler needs 3+ bands.
        raster_ok = dataset.RasterCount >= 3
    except AttributeError:
        # gdal.Open returned None: not a readable raster.
        raster_ok = False
    os.remove(scratch_path)
    return raster_ok
|
ad257e7730d03df984124493814656930bcb0b5f | Communication/tcpServer.py | Communication/tcpServer.py | #!/usr/bin/env python
import socket

# Loopback-only TCP echo server used for communication testing.
TCP_IP = '127.0.0.1'
TCP_PORT = 54321
BUFFER_SIZE = 20 # Normally 1024, but we want fast response

# Listen for a single client connection.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
conn, addr = s.accept()
# NOTE: Python 2 print-statement syntax throughout this script.
print 'Connection address:', addr
# Echo every received chunk back until the peer closes the connection
# (recv returns an empty string on orderly shutdown).
while 1:
    data = conn.recv(BUFFER_SIZE)
    if not data: break
    print "received data:", data
    conn.send(data) # echo
conn.close()
| #!/usr/bin/env python
import socket

# Loopback-only TCP echo server used for communication testing.
TCP_IP = '127.0.0.1'
# NOTE(review): 1234 is presumably the port expected by the companion
# TCP client script -- confirm both sides stay in sync.
TCP_PORT = 1234
BUFFER_SIZE = 20 # Normally 1024, but we want fast response

# Accept exactly one client, then echo until it disconnects.
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
conn, addr = s.accept()
print 'Connection address:', addr
# recv returns an empty string when the peer closes the socket.
while 1:
    data = conn.recv(BUFFER_SIZE)
    if not data: break
    print "received data:", data
    conn.send(data) # echo
conn.close()
| Change port to fit with other script of tcp communication test | Change port to fit with other script of tcp communication test
| Python | mit | baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite,baptistelabat/robokite | #!/usr/bin/env python
import socket
TCP_IP = '127.0.0.1'
TCP_PORT = 54321
BUFFER_SIZE = 20 # Normally 1024, but we want fast response
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
conn, addr = s.accept()
print 'Connection address:', addr
while 1:
data = conn.recv(BUFFER_SIZE)
if not data: break
print "received data:", data
conn.send(data) # echo
conn.close()
Change port to fit with other script of tcp communication test | #!/usr/bin/env python
import socket
TCP_IP = '127.0.0.1'
TCP_PORT = 1234
BUFFER_SIZE = 20 # Normally 1024, but we want fast response
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
conn, addr = s.accept()
print 'Connection address:', addr
while 1:
data = conn.recv(BUFFER_SIZE)
if not data: break
print "received data:", data
conn.send(data) # echo
conn.close()
| <commit_before>#!/usr/bin/env python
import socket
TCP_IP = '127.0.0.1'
TCP_PORT = 54321
BUFFER_SIZE = 20 # Normally 1024, but we want fast response
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
conn, addr = s.accept()
print 'Connection address:', addr
while 1:
data = conn.recv(BUFFER_SIZE)
if not data: break
print "received data:", data
conn.send(data) # echo
conn.close()
<commit_msg>Change port to fit with other script of tcp communication test<commit_after> | #!/usr/bin/env python
import socket
TCP_IP = '127.0.0.1'
TCP_PORT = 1234
BUFFER_SIZE = 20 # Normally 1024, but we want fast response
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
conn, addr = s.accept()
print 'Connection address:', addr
while 1:
data = conn.recv(BUFFER_SIZE)
if not data: break
print "received data:", data
conn.send(data) # echo
conn.close()
| #!/usr/bin/env python
import socket
TCP_IP = '127.0.0.1'
TCP_PORT = 54321
BUFFER_SIZE = 20 # Normally 1024, but we want fast response
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
conn, addr = s.accept()
print 'Connection address:', addr
while 1:
data = conn.recv(BUFFER_SIZE)
if not data: break
print "received data:", data
conn.send(data) # echo
conn.close()
Change port to fit with other script of tcp communication test#!/usr/bin/env python
import socket
TCP_IP = '127.0.0.1'
TCP_PORT = 1234
BUFFER_SIZE = 20 # Normally 1024, but we want fast response
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
conn, addr = s.accept()
print 'Connection address:', addr
while 1:
data = conn.recv(BUFFER_SIZE)
if not data: break
print "received data:", data
conn.send(data) # echo
conn.close()
| <commit_before>#!/usr/bin/env python
import socket
TCP_IP = '127.0.0.1'
TCP_PORT = 54321
BUFFER_SIZE = 20 # Normally 1024, but we want fast response
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
conn, addr = s.accept()
print 'Connection address:', addr
while 1:
data = conn.recv(BUFFER_SIZE)
if not data: break
print "received data:", data
conn.send(data) # echo
conn.close()
<commit_msg>Change port to fit with other script of tcp communication test<commit_after>#!/usr/bin/env python
import socket
TCP_IP = '127.0.0.1'
TCP_PORT = 1234
BUFFER_SIZE = 20 # Normally 1024, but we want fast response
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((TCP_IP, TCP_PORT))
s.listen(1)
conn, addr = s.accept()
print 'Connection address:', addr
while 1:
data = conn.recv(BUFFER_SIZE)
if not data: break
print "received data:", data
conn.send(data) # echo
conn.close()
|
f73775980e37b51882bfbd21f609ddfda807b8c8 | tests/test_datafeed_fms_teams.py | tests/test_datafeed_fms_teams.py | import unittest2
import datetime
from google.appengine.ext import testbed
from datafeeds.datafeed_fms import DatafeedFms
class TestDatafeedFmsTeams(unittest2.TestCase):
    """Exercises DatafeedFms.getFmsTeamList against the live FMS feed."""

    def setUp(self):
        # Stand up a GAE testbed so urlfetch and memcache are stubbed.
        self.testbed = testbed.Testbed()
        self.testbed.activate()
        self.testbed.init_urlfetch_stub()
        self.testbed.init_memcache_stub()
        self.datafeed = DatafeedFms()

    def tearDown(self):
        self.testbed.deactivate()

    def test_getFmsTeamList(self):
        teams = self.datafeed.getFmsTeamList()
        self.find177(teams)

    def find177(self, teams):
        """Assert that team 177 is present with the expected FMS data."""
        found_177 = False
        for team in teams:
            if team.team_number == 177:
                found_177 = True
                self.assertEqual(team.name, "United Technologies / ClearEdge Power / Gain Talent / EBA&D & South Windsor High School")
                #self.assertEqual(team.address, u"South Windsor, CT, USA")
                self.assertEqual(team.nickname, "Bobcat Robotics")
                # Match found -- no need to scan the rest of the list.
                break
        self.assertTrue(found_177)
        self.assertTrue(len(teams) > 0)
| import unittest2
import datetime
from google.appengine.ext import testbed
from datafeeds.datafeed_fms import DatafeedFms
class TestDatafeedFmsTeams(unittest2.TestCase):
    """Checks that the FMS team list datafeed returns team 177 correctly."""

    def setUp(self):
        # GAE service stubs required by the datafeed under test.
        tb = testbed.Testbed()
        tb.activate()
        tb.init_urlfetch_stub()
        tb.init_memcache_stub()
        self.testbed = tb
        self.datafeed = DatafeedFms()

    def tearDown(self):
        self.testbed.deactivate()

    def test_getFmsTeamList(self):
        self.find177(self.datafeed.getFmsTeamList())

    def find177(self, teams):
        """Verify team 177 appears in `teams` with the expected FMS data."""
        match = next((team for team in teams if team.team_number == 177), None)
        if match is not None:
            self.assertEqual(match.name, "United Technologies / ClearEdge Power / Gain Talent / EBA&D & South Windsor High School")
            #self.assertEqual(match.address, u"South Windsor, CT, USA")
            self.assertEqual(match.nickname, "Bobcat Robotics")
        self.assertTrue(match is not None)
        self.assertTrue(len(teams) > 0)
| Speed up test by breaking out of a for loop if match found | Speed up test by breaking out of a for loop if match found
| Python | mit | phil-lopreiato/the-blue-alliance,tsteward/the-blue-alliance,1fish2/the-blue-alliance,synth3tk/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,synth3tk/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,jaredhasenklein/the-blue-alliance,phil-lopreiato/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance,fangeugene/the-blue-alliance,josephbisch/the-blue-alliance,1fish2/the-blue-alliance,the-blue-alliance/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,bvisness/the-blue-alliance,nwalters512/the-blue-alliance,the-blue-alliance/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,josephbisch/the-blue-alliance,josephbisch/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,bvisness/the-blue-alliance,tsteward/the-blue-alliance,synth3tk/the-blue-alliance,jaredhasenklein/the-blue-alliance,verycumbersome/the-blue-alliance,phil-lopreiato/the-blue-alliance,fangeugene/the-blue-alliance,the-blue-alliance/the-blue-alliance,phil-lopreiato/the-blue-alliance,bdaroz/the-blue-alliance,bvisness/the-blue-alliance,1fish2/the-blue-alliance,bvisness/the-blue-alliance,fangeugene/the-blue-alliance,jaredhasenklein/the-blue-alliance,1fish2/the-blue-alliance,bdaroz/the-blue-alliance,phil-lopreiato/the-blue-alliance,josephbisch/the-blue-alliance,verycumbersome/the-blue-alliance,jaredhasenklein/the-blue-alliance,jaredhasenklein/the-blue-alliance,tsteward/the-blue-alliance,the-blue-alliance/the-blue-alliance,josephbisch/the-blue-alliance,phil-lopreiato/the-blue-alliance,nwalters512/the-blue-alliance,1fish2/the-blue-alliance,bdaroz/the-blue-alliance,nwalters512/the-blue-alliance,fangeugene/the-blue-alliance,nwalters512/the-blue-alliance,tsteward/the-blue-alliance,fangeugene/the-blue-alliance,bdaroz/the-blue-alliance,bdaroz/the-blue-alliance,verycumbersome/the-blue-alliance,nwalters512/the-blu
e-alliance,bvisness/the-blue-alliance,1fish2/the-blue-alliance,synth3tk/the-blue-alliance,the-blue-alliance/the-blue-alliance,synth3tk/the-blue-alliance | import unittest2
import datetime
from google.appengine.ext import testbed
from datafeeds.datafeed_fms import DatafeedFms
class TestDatafeedFmsTeams(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.datafeed = DatafeedFms()
def tearDown(self):
self.testbed.deactivate()
def test_getFmsTeamList(self):
teams = self.datafeed.getFmsTeamList()
self.find177(teams)
def find177(self, teams):
found_177 = False
for team in teams:
if team.team_number == 177:
found_177 = True
self.assertEqual(team.name, "United Technologies / ClearEdge Power / Gain Talent / EBA&D & South Windsor High School")
#self.assertEqual(team.address, u"South Windsor, CT, USA")
self.assertEqual(team.nickname, "Bobcat Robotics")
self.assertTrue(found_177)
self.assertTrue(len(teams) > 0)
Speed up test by breaking out of a for loop if match found | import unittest2
import datetime
from google.appengine.ext import testbed
from datafeeds.datafeed_fms import DatafeedFms
class TestDatafeedFmsTeams(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.datafeed = DatafeedFms()
def tearDown(self):
self.testbed.deactivate()
def test_getFmsTeamList(self):
teams = self.datafeed.getFmsTeamList()
self.find177(teams)
def find177(self, teams):
found_177 = False
for team in teams:
if team.team_number == 177:
found_177 = True
self.assertEqual(team.name, "United Technologies / ClearEdge Power / Gain Talent / EBA&D & South Windsor High School")
#self.assertEqual(team.address, u"South Windsor, CT, USA")
self.assertEqual(team.nickname, "Bobcat Robotics")
break
self.assertTrue(found_177)
self.assertTrue(len(teams) > 0)
| <commit_before>import unittest2
import datetime
from google.appengine.ext import testbed
from datafeeds.datafeed_fms import DatafeedFms
class TestDatafeedFmsTeams(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.datafeed = DatafeedFms()
def tearDown(self):
self.testbed.deactivate()
def test_getFmsTeamList(self):
teams = self.datafeed.getFmsTeamList()
self.find177(teams)
def find177(self, teams):
found_177 = False
for team in teams:
if team.team_number == 177:
found_177 = True
self.assertEqual(team.name, "United Technologies / ClearEdge Power / Gain Talent / EBA&D & South Windsor High School")
#self.assertEqual(team.address, u"South Windsor, CT, USA")
self.assertEqual(team.nickname, "Bobcat Robotics")
self.assertTrue(found_177)
self.assertTrue(len(teams) > 0)
<commit_msg>Speed up test by breaking out of a for loop if match found<commit_after> | import unittest2
import datetime
from google.appengine.ext import testbed
from datafeeds.datafeed_fms import DatafeedFms
class TestDatafeedFmsTeams(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.datafeed = DatafeedFms()
def tearDown(self):
self.testbed.deactivate()
def test_getFmsTeamList(self):
teams = self.datafeed.getFmsTeamList()
self.find177(teams)
def find177(self, teams):
found_177 = False
for team in teams:
if team.team_number == 177:
found_177 = True
self.assertEqual(team.name, "United Technologies / ClearEdge Power / Gain Talent / EBA&D & South Windsor High School")
#self.assertEqual(team.address, u"South Windsor, CT, USA")
self.assertEqual(team.nickname, "Bobcat Robotics")
break
self.assertTrue(found_177)
self.assertTrue(len(teams) > 0)
| import unittest2
import datetime
from google.appengine.ext import testbed
from datafeeds.datafeed_fms import DatafeedFms
class TestDatafeedFmsTeams(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.datafeed = DatafeedFms()
def tearDown(self):
self.testbed.deactivate()
def test_getFmsTeamList(self):
teams = self.datafeed.getFmsTeamList()
self.find177(teams)
def find177(self, teams):
found_177 = False
for team in teams:
if team.team_number == 177:
found_177 = True
self.assertEqual(team.name, "United Technologies / ClearEdge Power / Gain Talent / EBA&D & South Windsor High School")
#self.assertEqual(team.address, u"South Windsor, CT, USA")
self.assertEqual(team.nickname, "Bobcat Robotics")
self.assertTrue(found_177)
self.assertTrue(len(teams) > 0)
Speed up test by breaking out of a for loop if match foundimport unittest2
import datetime
from google.appengine.ext import testbed
from datafeeds.datafeed_fms import DatafeedFms
class TestDatafeedFmsTeams(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.datafeed = DatafeedFms()
def tearDown(self):
self.testbed.deactivate()
def test_getFmsTeamList(self):
teams = self.datafeed.getFmsTeamList()
self.find177(teams)
def find177(self, teams):
found_177 = False
for team in teams:
if team.team_number == 177:
found_177 = True
self.assertEqual(team.name, "United Technologies / ClearEdge Power / Gain Talent / EBA&D & South Windsor High School")
#self.assertEqual(team.address, u"South Windsor, CT, USA")
self.assertEqual(team.nickname, "Bobcat Robotics")
break
self.assertTrue(found_177)
self.assertTrue(len(teams) > 0)
| <commit_before>import unittest2
import datetime
from google.appengine.ext import testbed
from datafeeds.datafeed_fms import DatafeedFms
class TestDatafeedFmsTeams(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.datafeed = DatafeedFms()
def tearDown(self):
self.testbed.deactivate()
def test_getFmsTeamList(self):
teams = self.datafeed.getFmsTeamList()
self.find177(teams)
def find177(self, teams):
found_177 = False
for team in teams:
if team.team_number == 177:
found_177 = True
self.assertEqual(team.name, "United Technologies / ClearEdge Power / Gain Talent / EBA&D & South Windsor High School")
#self.assertEqual(team.address, u"South Windsor, CT, USA")
self.assertEqual(team.nickname, "Bobcat Robotics")
self.assertTrue(found_177)
self.assertTrue(len(teams) > 0)
<commit_msg>Speed up test by breaking out of a for loop if match found<commit_after>import unittest2
import datetime
from google.appengine.ext import testbed
from datafeeds.datafeed_fms import DatafeedFms
class TestDatafeedFmsTeams(unittest2.TestCase):
def setUp(self):
self.testbed = testbed.Testbed()
self.testbed.activate()
self.testbed.init_urlfetch_stub()
self.testbed.init_memcache_stub()
self.datafeed = DatafeedFms()
def tearDown(self):
self.testbed.deactivate()
def test_getFmsTeamList(self):
teams = self.datafeed.getFmsTeamList()
self.find177(teams)
def find177(self, teams):
found_177 = False
for team in teams:
if team.team_number == 177:
found_177 = True
self.assertEqual(team.name, "United Technologies / ClearEdge Power / Gain Talent / EBA&D & South Windsor High School")
#self.assertEqual(team.address, u"South Windsor, CT, USA")
self.assertEqual(team.nickname, "Bobcat Robotics")
break
self.assertTrue(found_177)
self.assertTrue(len(teams) > 0)
|
f863f37a05855180dce40181a27e7925f0662647 | djangoautoconf/management/commands/dump_settings.py | djangoautoconf/management/commands/dump_settings.py | import os
from django.core.management import BaseCommand
from django.conf import settings
def dump_attrs(obj_instance):
    """Yield (name, value) pairs for every ALL-UPPERCASE attribute."""
    for name in dir(obj_instance):
        # Settings-style constants only; every other attribute is skipped.
        if name == name.upper():
            yield name, getattr(obj_instance, name)
class Command(BaseCommand):
    # Django management command: write every UPPERCASE setting to
    # local/total_settings.py as executable Python assignments.
    args = ''
    help = 'Create command cache for environment where os.listdir is not working'

    def handle(self, *args, **options):
        # Remove any stale dump first; ignore "file does not exist" errors.
        try:
            os.remove("local/total_settings.py")
        except:
            pass
        with open("local/total_settings.py", "w") as f:
            for key, value in dump_attrs(settings):
                if value is None:
                    continue
                # Containers and booleans repr() as valid Python literals,
                # so they are written unquoted.  (Python 2 `print >>` syntax.)
                if type(value) in (list, tuple, dict, bool):
                    print >>f, key, "=", value
                elif type(value) in (str, ):
                    # Quote strings and escape backslashes (e.g. Windows paths).
                    print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
                else:
                    # Everything else is stringified and quoted the same way.
                    print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
| import os
from django.core.management import BaseCommand
from django.conf import settings
def dump_attrs(obj_instance):
    """Yield (name, value) for each attribute whose name is all uppercase."""
    for attr in dir(obj_instance):
        if attr != attr.upper():
            # Mixed/lower-case names are not settings constants; skip them.
            continue
        yield attr, getattr(obj_instance, attr)
class Command(BaseCommand):
    # Dumps all UPPERCASE Django settings into local/total_settings.py
    # as importable Python assignments.
    args = ''
    help = 'Create command cache for environment where os.listdir is not working'

    def handle(self, *args, **options):
        # Start from a clean slate; the file may not exist yet.
        try:
            os.remove("local/total_settings.py")
        except:
            pass
        with open("local/total_settings.py", "w") as f:
            for key, value in dump_attrs(settings):
                if value is None:
                    continue
                # Numbers, containers and booleans repr() as valid Python
                # literals and are written unquoted.  (Python 2 `print >>`.)
                if type(value) in (list, tuple, dict, bool, int, float):
                    print >>f, key, "=", value
                elif type(value) in (str, ):
                    # Strings are quoted with backslashes escaped.
                    print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
                else:
                    # Any other type is stringified and quoted as well.
                    print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
| Fix int float setting issue. | Fix int float setting issue.
| Python | bsd-3-clause | weijia/djangoautoconf,weijia/djangoautoconf | import os
from django.core.management import BaseCommand
from django.conf import settings
def dump_attrs(obj_instance):
for attr in dir(obj_instance):
if attr != attr.upper():
continue
yield attr, getattr(obj_instance, attr)
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
try:
os.remove("local/total_settings.py")
except:
pass
with open("local/total_settings.py", "w") as f:
for key, value in dump_attrs(settings):
if value is None:
continue
if type(value) in (list, tuple, dict, bool):
print >>f, key, "=", value
elif type(value) in (str, ):
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
else:
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
Fix int float setting issue. | import os
from django.core.management import BaseCommand
from django.conf import settings
def dump_attrs(obj_instance):
for attr in dir(obj_instance):
if attr != attr.upper():
continue
yield attr, getattr(obj_instance, attr)
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
try:
os.remove("local/total_settings.py")
except:
pass
with open("local/total_settings.py", "w") as f:
for key, value in dump_attrs(settings):
if value is None:
continue
if type(value) in (list, tuple, dict, bool, int, float):
print >>f, key, "=", value
elif type(value) in (str, ):
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
else:
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
| <commit_before>import os
from django.core.management import BaseCommand
from django.conf import settings
def dump_attrs(obj_instance):
for attr in dir(obj_instance):
if attr != attr.upper():
continue
yield attr, getattr(obj_instance, attr)
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
try:
os.remove("local/total_settings.py")
except:
pass
with open("local/total_settings.py", "w") as f:
for key, value in dump_attrs(settings):
if value is None:
continue
if type(value) in (list, tuple, dict, bool):
print >>f, key, "=", value
elif type(value) in (str, ):
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
else:
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
<commit_msg>Fix int float setting issue.<commit_after> | import os
from django.core.management import BaseCommand
from django.conf import settings
def dump_attrs(obj_instance):
for attr in dir(obj_instance):
if attr != attr.upper():
continue
yield attr, getattr(obj_instance, attr)
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
try:
os.remove("local/total_settings.py")
except:
pass
with open("local/total_settings.py", "w") as f:
for key, value in dump_attrs(settings):
if value is None:
continue
if type(value) in (list, tuple, dict, bool, int, float):
print >>f, key, "=", value
elif type(value) in (str, ):
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
else:
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
| import os
from django.core.management import BaseCommand
from django.conf import settings
def dump_attrs(obj_instance):
for attr in dir(obj_instance):
if attr != attr.upper():
continue
yield attr, getattr(obj_instance, attr)
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
try:
os.remove("local/total_settings.py")
except:
pass
with open("local/total_settings.py", "w") as f:
for key, value in dump_attrs(settings):
if value is None:
continue
if type(value) in (list, tuple, dict, bool):
print >>f, key, "=", value
elif type(value) in (str, ):
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
else:
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
Fix int float setting issue.import os
from django.core.management import BaseCommand
from django.conf import settings
def dump_attrs(obj_instance):
for attr in dir(obj_instance):
if attr != attr.upper():
continue
yield attr, getattr(obj_instance, attr)
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
try:
os.remove("local/total_settings.py")
except:
pass
with open("local/total_settings.py", "w") as f:
for key, value in dump_attrs(settings):
if value is None:
continue
if type(value) in (list, tuple, dict, bool, int, float):
print >>f, key, "=", value
elif type(value) in (str, ):
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
else:
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
| <commit_before>import os
from django.core.management import BaseCommand
from django.conf import settings
def dump_attrs(obj_instance):
for attr in dir(obj_instance):
if attr != attr.upper():
continue
yield attr, getattr(obj_instance, attr)
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
try:
os.remove("local/total_settings.py")
except:
pass
with open("local/total_settings.py", "w") as f:
for key, value in dump_attrs(settings):
if value is None:
continue
if type(value) in (list, tuple, dict, bool):
print >>f, key, "=", value
elif type(value) in (str, ):
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
else:
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
<commit_msg>Fix int float setting issue.<commit_after>import os
from django.core.management import BaseCommand
from django.conf import settings
def dump_attrs(obj_instance):
for attr in dir(obj_instance):
if attr != attr.upper():
continue
yield attr, getattr(obj_instance, attr)
class Command(BaseCommand):
args = ''
help = 'Create command cache for environment where os.listdir is not working'
def handle(self, *args, **options):
try:
os.remove("local/total_settings.py")
except:
pass
with open("local/total_settings.py", "w") as f:
for key, value in dump_attrs(settings):
if value is None:
continue
if type(value) in (list, tuple, dict, bool, int, float):
print >>f, key, "=", value
elif type(value) in (str, ):
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
else:
print >>f, key, "=", '"'+str(value).replace('\\', '\\\\')+'"'
|
e778a2da7938dcf565282635e395dc410ef989d6 | terraform-gce/worker/generate-certs.py | terraform-gce/worker/generate-certs.py | #!/usr/bin/env python
import os.path
import subprocess
import argparse
import shutil

# NOTE: `os` is usable below because `import os.path` binds the `os` package.
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
# Work relative to this script's own directory.
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
    os.makedirs('assets/certificates')
os.chdir('assets/certificates')
print(os.listdir('.'))
# Write the CSR template for the worker node certificate.
# NOTE(review): `file()` is a Python 2 builtin (removed in Python 3) --
# confirm the intended interpreter.
with file('worker.json', 'wt') as f:
    f.write("""{
  "CN": "node.staging.realtimemusic.com",
  "hosts": [
    "127.0.0.1",
    "staging-node"
  ],
  "key": {
    "algo": "rsa",
    "size": 2048
  },
  "names": [
    {
      "C": "DE",
      "L": "Germany",
      "ST": ""
    }
  ]
}
""")
# Generate and sign the worker client-server certificate with cfssl.
# Expects ca.pem, ca-key.pem and ca-config.json in the current directory.
subprocess.check_call(
    'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
    '-profile=client-server worker.json | '
    'cfssljson -bare worker-client', shell=True)
| #!/usr/bin/env python
import os.path
import subprocess
import argparse
import shutil

# NOTE: `os` is usable below because `import os.path` binds the `os` package.
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
# Work relative to this script's own directory.
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
    os.makedirs('assets/certificates')
os.chdir('assets/certificates')
# Bring in the CA certificate and key from the master's assets so cfssl
# can sign the worker certificate below.
shutil.copy2(
    '../../../master/assets/certificates/ca.pem', 'ca.pem'
)
shutil.copy2(
    '../../../master/assets/certificates/ca-key.pem',
    'ca-key.pem'
)
# Write the CSR template for the worker node certificate.
# NOTE(review): `file()` is a Python 2 builtin (removed in Python 3) --
# confirm the intended interpreter.
with file('worker.json', 'wt') as f:
    f.write("""{
  "CN": "node.staging.realtimemusic.com",
  "hosts": [
    "127.0.0.1",
    "staging-node"
  ],
  "key": {
    "algo": "rsa",
    "size": 2048
  },
  "names": [
    {
      "C": "DE",
      "L": "Germany",
      "ST": ""
    }
  ]
}
""")
# Generate and sign the worker client-server certificate with cfssl.
# Also expects ca-config.json alongside the CA files copied above.
subprocess.check_call(
    'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
    '-profile=client-server worker.json | '
    'cfssljson -bare worker-client', shell=True)
| Copy cert auth from master | Copy cert auth from master
| Python | apache-2.0 | aknuds1/contrib,aknuds1/contrib,aknuds1/contrib,aknuds1/contrib,aknuds1/contrib,aknuds1/contrib | #!/usr/bin/env python
import os.path
import subprocess
import argparse
import shutil
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
print(os.listdir('.'))
with file('worker.json', 'wt') as f:
f.write("""{
"CN": "node.staging.realtimemusic.com",
"hosts": [
"127.0.0.1",
"staging-node"
],
"key": {
"algo": "rsa",
"size": 2048
},
"names": [
{
"C": "DE",
"L": "Germany",
"ST": ""
}
]
}
""")
subprocess.check_call(
'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
'-profile=client-server worker.json | '
'cfssljson -bare worker-client', shell=True)
Copy cert auth from master | #!/usr/bin/env python
import os.path
import subprocess
import argparse
import shutil
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
shutil.copy2(
'../../../master/assets/certificates/ca.pem', 'ca.pem'
)
shutil.copy2(
'../../../master/assets/certificates/ca-key.pem',
'ca-key.pem'
)
with file('worker.json', 'wt') as f:
f.write("""{
"CN": "node.staging.realtimemusic.com",
"hosts": [
"127.0.0.1",
"staging-node"
],
"key": {
"algo": "rsa",
"size": 2048
},
"names": [
{
"C": "DE",
"L": "Germany",
"ST": ""
}
]
}
""")
subprocess.check_call(
'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
'-profile=client-server worker.json | '
'cfssljson -bare worker-client', shell=True)
| <commit_before>#!/usr/bin/env python
import os.path
import subprocess
import argparse
import shutil
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
print(os.listdir('.'))
with file('worker.json', 'wt') as f:
f.write("""{
"CN": "node.staging.realtimemusic.com",
"hosts": [
"127.0.0.1",
"staging-node"
],
"key": {
"algo": "rsa",
"size": 2048
},
"names": [
{
"C": "DE",
"L": "Germany",
"ST": ""
}
]
}
""")
subprocess.check_call(
'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
'-profile=client-server worker.json | '
'cfssljson -bare worker-client', shell=True)
<commit_msg>Copy cert auth from master<commit_after> | #!/usr/bin/env python
import os.path
import subprocess
import argparse
import shutil
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
shutil.copy2(
'../../../master/assets/certificates/ca.pem', 'ca.pem'
)
shutil.copy2(
'../../../master/assets/certificates/ca-key.pem',
'ca-key.pem'
)
with file('worker.json', 'wt') as f:
f.write("""{
"CN": "node.staging.realtimemusic.com",
"hosts": [
"127.0.0.1",
"staging-node"
],
"key": {
"algo": "rsa",
"size": 2048
},
"names": [
{
"C": "DE",
"L": "Germany",
"ST": ""
}
]
}
""")
subprocess.check_call(
'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
'-profile=client-server worker.json | '
'cfssljson -bare worker-client', shell=True)
| #!/usr/bin/env python
import os.path
import subprocess
import argparse
import shutil
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
print(os.listdir('.'))
with file('worker.json', 'wt') as f:
f.write("""{
"CN": "node.staging.realtimemusic.com",
"hosts": [
"127.0.0.1",
"staging-node"
],
"key": {
"algo": "rsa",
"size": 2048
},
"names": [
{
"C": "DE",
"L": "Germany",
"ST": ""
}
]
}
""")
subprocess.check_call(
'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
'-profile=client-server worker.json | '
'cfssljson -bare worker-client', shell=True)
Copy cert auth from master#!/usr/bin/env python
import os.path
import subprocess
import argparse
import shutil
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
shutil.copy2(
'../../../master/assets/certificates/ca.pem', 'ca.pem'
)
shutil.copy2(
'../../../master/assets/certificates/ca-key.pem',
'ca-key.pem'
)
with file('worker.json', 'wt') as f:
f.write("""{
"CN": "node.staging.realtimemusic.com",
"hosts": [
"127.0.0.1",
"staging-node"
],
"key": {
"algo": "rsa",
"size": 2048
},
"names": [
{
"C": "DE",
"L": "Germany",
"ST": ""
}
]
}
""")
subprocess.check_call(
'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
'-profile=client-server worker.json | '
'cfssljson -bare worker-client', shell=True)
| <commit_before>#!/usr/bin/env python
import os.path
import subprocess
import argparse
import shutil
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
print(os.listdir('.'))
with file('worker.json', 'wt') as f:
f.write("""{
"CN": "node.staging.realtimemusic.com",
"hosts": [
"127.0.0.1",
"staging-node"
],
"key": {
"algo": "rsa",
"size": 2048
},
"names": [
{
"C": "DE",
"L": "Germany",
"ST": ""
}
]
}
""")
subprocess.check_call(
'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
'-profile=client-server worker.json | '
'cfssljson -bare worker-client', shell=True)
<commit_msg>Copy cert auth from master<commit_after>#!/usr/bin/env python
import os.path
import subprocess
import argparse
import shutil
cl_parser = argparse.ArgumentParser()
args = cl_parser.parse_args()
os.chdir(os.path.abspath(os.path.dirname(__file__)))
if not os.path.exists('assets/certificates'):
os.makedirs('assets/certificates')
os.chdir('assets/certificates')
shutil.copy2(
'../../../master/assets/certificates/ca.pem', 'ca.pem'
)
shutil.copy2(
'../../../master/assets/certificates/ca-key.pem',
'ca-key.pem'
)
with file('worker.json', 'wt') as f:
f.write("""{
"CN": "node.staging.realtimemusic.com",
"hosts": [
"127.0.0.1",
"staging-node"
],
"key": {
"algo": "rsa",
"size": 2048
},
"names": [
{
"C": "DE",
"L": "Germany",
"ST": ""
}
]
}
""")
subprocess.check_call(
'cfssl gencert -ca=ca.pem -ca-key=ca-key.pem -config=ca-config.json '
'-profile=client-server worker.json | '
'cfssljson -bare worker-client', shell=True)
|
ffdd45d798eaf1349e12fc061789daacdefcd05c | membership.py | membership.py | """Manage society membership by checking member IDs and adding new members"""
import sqlite3
class MemberDatabase:
def __init__(self, dbFile = 'members.db', safe = True):
self.__connection = sqlite3.connect(dbFile)
self.__safe = safe
def __del__(self):
self.__connection.commit() # here use actual commit: we want to commit regardless of safe
self.__connection.close()
# wrapper around sqlite3.Connection.commit():
# commits if safe is set to True
# this means users can optionally disable autocommiting for potentially better
# performance at the cost of reduced data safety on crashes
def commit(self):
if self.__safe:
conn.commit()
def getMember(self, memberId, updateTimestamp = True):
c = self.__connection.cursor()
c.execute('SELECT firstName,lastName FROM users WHERE barcode=?', (memberId,))
# todo: if updateTimestamp then update last_attended time
return c.fetchone()
def addMember(self, memberId, firstName, lastName, college):
c = self.__connection.cursor()
c.execute('INSERT INTO users (barcode, firstName, lastName, college, datejoined, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)', (memberId, firstName, lastName, college, date.today(), datetime.utcnow(), datetime.utcnow()))
| """Manage society membership by checking member IDs and adding new members"""
import sqlite3
class MemberDatabase:
def __init__(self, dbFile = 'members.db', safe = True):
self.__connection = sqlite3.connect(dbFile)
self.__safe = safe
def __del__(self):
self.__connection.commit() # here use actual commit: we want to commit regardless of safe
self.__connection.close()
# wrapper around sqlite3.Connection.commit():
# commits if safe is set to True
# this means users can optionally disable autocommiting for potentially better
# performance at the cost of reduced data safety on crashes
def commit(self):
if self.__safe:
conn.commit()
def getMember(self, memberId, updateTimestamp = True):
c = self.__connection.cursor()
c.execute('SELECT firstName,lastName FROM users WHERE barcode=?', (memberId,))
users = c.fetchall()
# if necessary update last_attended date
if (updateTimestamp):
c.execute('UPDATE users SET last_attended=? WHERE barcode=?', (datetime.date.today(), memberId))
self.commit()
return users
def addMember(self, memberId, firstName, lastName, college):
c = self.__connection.cursor()
c.execute('INSERT INTO users (barcode, firstName, lastName, college, datejoined, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)', (memberId, firstName, lastName, college, date.today(), datetime.utcnow(), datetime.utcnow()))
| Update last_attended when checking member | Update last_attended when checking member
| Python | mit | NullInfinity/socman,NullInfinity/society-event-manager | """Manage society membership by checking member IDs and adding new members"""
import sqlite3
class MemberDatabase:
def __init__(self, dbFile = 'members.db', safe = True):
self.__connection = sqlite3.connect(dbFile)
self.__safe = safe
def __del__(self):
self.__connection.commit() # here use actual commit: we want to commit regardless of safe
self.__connection.close()
# wrapper around sqlite3.Connection.commit():
# commits if safe is set to True
# this means users can optionally disable autocommiting for potentially better
# performance at the cost of reduced data safety on crashes
def commit(self):
if self.__safe:
conn.commit()
def getMember(self, memberId, updateTimestamp = True):
c = self.__connection.cursor()
c.execute('SELECT firstName,lastName FROM users WHERE barcode=?', (memberId,))
# todo: if updateTimestamp then update last_attended time
return c.fetchone()
def addMember(self, memberId, firstName, lastName, college):
c = self.__connection.cursor()
c.execute('INSERT INTO users (barcode, firstName, lastName, college, datejoined, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)', (memberId, firstName, lastName, college, date.today(), datetime.utcnow(), datetime.utcnow()))
Update last_attended when checking member | """Manage society membership by checking member IDs and adding new members"""
import sqlite3
class MemberDatabase:
def __init__(self, dbFile = 'members.db', safe = True):
self.__connection = sqlite3.connect(dbFile)
self.__safe = safe
def __del__(self):
self.__connection.commit() # here use actual commit: we want to commit regardless of safe
self.__connection.close()
# wrapper around sqlite3.Connection.commit():
# commits if safe is set to True
# this means users can optionally disable autocommiting for potentially better
# performance at the cost of reduced data safety on crashes
def commit(self):
if self.__safe:
conn.commit()
def getMember(self, memberId, updateTimestamp = True):
c = self.__connection.cursor()
c.execute('SELECT firstName,lastName FROM users WHERE barcode=?', (memberId,))
users = c.fetchall()
# if necessary update last_attended date
if (updateTimestamp):
c.execute('UPDATE users SET last_attended=? WHERE barcode=?', (datetime.date.today(), memberId))
self.commit()
return users
def addMember(self, memberId, firstName, lastName, college):
c = self.__connection.cursor()
c.execute('INSERT INTO users (barcode, firstName, lastName, college, datejoined, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)', (memberId, firstName, lastName, college, date.today(), datetime.utcnow(), datetime.utcnow()))
| <commit_before>"""Manage society membership by checking member IDs and adding new members"""
import sqlite3
class MemberDatabase:
def __init__(self, dbFile = 'members.db', safe = True):
self.__connection = sqlite3.connect(dbFile)
self.__safe = safe
def __del__(self):
self.__connection.commit() # here use actual commit: we want to commit regardless of safe
self.__connection.close()
# wrapper around sqlite3.Connection.commit():
# commits if safe is set to True
# this means users can optionally disable autocommiting for potentially better
# performance at the cost of reduced data safety on crashes
def commit(self):
if self.__safe:
conn.commit()
def getMember(self, memberId, updateTimestamp = True):
c = self.__connection.cursor()
c.execute('SELECT firstName,lastName FROM users WHERE barcode=?', (memberId,))
# todo: if updateTimestamp then update last_attended time
return c.fetchone()
def addMember(self, memberId, firstName, lastName, college):
c = self.__connection.cursor()
c.execute('INSERT INTO users (barcode, firstName, lastName, college, datejoined, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)', (memberId, firstName, lastName, college, date.today(), datetime.utcnow(), datetime.utcnow()))
<commit_msg>Update last_attended when checking member<commit_after> | """Manage society membership by checking member IDs and adding new members"""
import sqlite3
class MemberDatabase:
def __init__(self, dbFile = 'members.db', safe = True):
self.__connection = sqlite3.connect(dbFile)
self.__safe = safe
def __del__(self):
self.__connection.commit() # here use actual commit: we want to commit regardless of safe
self.__connection.close()
# wrapper around sqlite3.Connection.commit():
# commits if safe is set to True
# this means users can optionally disable autocommiting for potentially better
# performance at the cost of reduced data safety on crashes
def commit(self):
if self.__safe:
conn.commit()
def getMember(self, memberId, updateTimestamp = True):
c = self.__connection.cursor()
c.execute('SELECT firstName,lastName FROM users WHERE barcode=?', (memberId,))
users = c.fetchall()
# if necessary update last_attended date
if (updateTimestamp):
c.execute('UPDATE users SET last_attended=? WHERE barcode=?', (datetime.date.today(), memberId))
self.commit()
return users
def addMember(self, memberId, firstName, lastName, college):
c = self.__connection.cursor()
c.execute('INSERT INTO users (barcode, firstName, lastName, college, datejoined, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)', (memberId, firstName, lastName, college, date.today(), datetime.utcnow(), datetime.utcnow()))
| """Manage society membership by checking member IDs and adding new members"""
import sqlite3
class MemberDatabase:
def __init__(self, dbFile = 'members.db', safe = True):
self.__connection = sqlite3.connect(dbFile)
self.__safe = safe
def __del__(self):
self.__connection.commit() # here use actual commit: we want to commit regardless of safe
self.__connection.close()
# wrapper around sqlite3.Connection.commit():
# commits if safe is set to True
# this means users can optionally disable autocommiting for potentially better
# performance at the cost of reduced data safety on crashes
def commit(self):
if self.__safe:
conn.commit()
def getMember(self, memberId, updateTimestamp = True):
c = self.__connection.cursor()
c.execute('SELECT firstName,lastName FROM users WHERE barcode=?', (memberId,))
# todo: if updateTimestamp then update last_attended time
return c.fetchone()
def addMember(self, memberId, firstName, lastName, college):
c = self.__connection.cursor()
c.execute('INSERT INTO users (barcode, firstName, lastName, college, datejoined, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)', (memberId, firstName, lastName, college, date.today(), datetime.utcnow(), datetime.utcnow()))
Update last_attended when checking member"""Manage society membership by checking member IDs and adding new members"""
import sqlite3
class MemberDatabase:
def __init__(self, dbFile = 'members.db', safe = True):
self.__connection = sqlite3.connect(dbFile)
self.__safe = safe
def __del__(self):
self.__connection.commit() # here use actual commit: we want to commit regardless of safe
self.__connection.close()
# wrapper around sqlite3.Connection.commit():
# commits if safe is set to True
# this means users can optionally disable autocommiting for potentially better
# performance at the cost of reduced data safety on crashes
def commit(self):
if self.__safe:
conn.commit()
def getMember(self, memberId, updateTimestamp = True):
c = self.__connection.cursor()
c.execute('SELECT firstName,lastName FROM users WHERE barcode=?', (memberId,))
users = c.fetchall()
# if necessary update last_attended date
if (updateTimestamp):
c.execute('UPDATE users SET last_attended=? WHERE barcode=?', (datetime.date.today(), memberId))
self.commit()
return users
def addMember(self, memberId, firstName, lastName, college):
c = self.__connection.cursor()
c.execute('INSERT INTO users (barcode, firstName, lastName, college, datejoined, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)', (memberId, firstName, lastName, college, date.today(), datetime.utcnow(), datetime.utcnow()))
| <commit_before>"""Manage society membership by checking member IDs and adding new members"""
import sqlite3
class MemberDatabase:
def __init__(self, dbFile = 'members.db', safe = True):
self.__connection = sqlite3.connect(dbFile)
self.__safe = safe
def __del__(self):
self.__connection.commit() # here use actual commit: we want to commit regardless of safe
self.__connection.close()
# wrapper around sqlite3.Connection.commit():
# commits if safe is set to True
# this means users can optionally disable autocommiting for potentially better
# performance at the cost of reduced data safety on crashes
def commit(self):
if self.__safe:
conn.commit()
def getMember(self, memberId, updateTimestamp = True):
c = self.__connection.cursor()
c.execute('SELECT firstName,lastName FROM users WHERE barcode=?', (memberId,))
# todo: if updateTimestamp then update last_attended time
return c.fetchone()
def addMember(self, memberId, firstName, lastName, college):
c = self.__connection.cursor()
c.execute('INSERT INTO users (barcode, firstName, lastName, college, datejoined, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)', (memberId, firstName, lastName, college, date.today(), datetime.utcnow(), datetime.utcnow()))
<commit_msg>Update last_attended when checking member<commit_after>"""Manage society membership by checking member IDs and adding new members"""
import sqlite3
class MemberDatabase:
def __init__(self, dbFile = 'members.db', safe = True):
self.__connection = sqlite3.connect(dbFile)
self.__safe = safe
def __del__(self):
self.__connection.commit() # here use actual commit: we want to commit regardless of safe
self.__connection.close()
# wrapper around sqlite3.Connection.commit():
# commits if safe is set to True
# this means users can optionally disable autocommiting for potentially better
# performance at the cost of reduced data safety on crashes
def commit(self):
if self.__safe:
conn.commit()
def getMember(self, memberId, updateTimestamp = True):
c = self.__connection.cursor()
c.execute('SELECT firstName,lastName FROM users WHERE barcode=?', (memberId,))
users = c.fetchall()
# if necessary update last_attended date
if (updateTimestamp):
c.execute('UPDATE users SET last_attended=? WHERE barcode=?', (datetime.date.today(), memberId))
self.commit()
return users
def addMember(self, memberId, firstName, lastName, college):
c = self.__connection.cursor()
c.execute('INSERT INTO users (barcode, firstName, lastName, college, datejoined, created_at, updated_at) VALUES (?, ?, ?, ?, ?, ?, ?)', (memberId, firstName, lastName, college, date.today(), datetime.utcnow(), datetime.utcnow()))
|
c6346fa2c026318b530dbbdc90dbaee8310b6b05 | robot/Cumulus/resources/locators_50.py | robot/Cumulus/resources/locators_50.py | from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']" | from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
# current version (Sravani's )
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
# stashed (Noah's version)
# npsp_lex_locators["delete_icon"]= "//span[contains(text(),'{}')]/../following::div//span[text() = '{}']/following-sibling::a/child::span[@class = 'deleteIcon']"
# npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
# npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
| Revert "Revert "changes in locator_50 file (current and old versions)"" | Revert "Revert "changes in locator_50 file (current and old versions)""
This reverts commit 7537387aa80109877d6659cc54ec0ee7aa6496bd.
| Python | bsd-3-clause | SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus,SalesforceFoundation/Cumulus | from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"Revert "Revert "changes in locator_50 file (current and old versions)""
This reverts commit 7537387aa80109877d6659cc54ec0ee7aa6496bd. | from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
# current version (Sravani's )
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
# stashed (Noah's version)
# npsp_lex_locators["delete_icon"]= "//span[contains(text(),'{}')]/../following::div//span[text() = '{}']/following-sibling::a/child::span[@class = 'deleteIcon']"
# npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
# npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
| <commit_before>from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"<commit_msg>Revert "Revert "changes in locator_50 file (current and old versions)""
This reverts commit 7537387aa80109877d6659cc54ec0ee7aa6496bd.<commit_after> | from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
# current version (Sravani's )
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
# stashed (Noah's version)
# npsp_lex_locators["delete_icon"]= "//span[contains(text(),'{}')]/../following::div//span[text() = '{}']/following-sibling::a/child::span[@class = 'deleteIcon']"
# npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
# npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
| from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"Revert "Revert "changes in locator_50 file (current and old versions)""
This reverts commit 7537387aa80109877d6659cc54ec0ee7aa6496bd.from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
# current version (Sravani's )
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
# stashed (Noah's version)
# npsp_lex_locators["delete_icon"]= "//span[contains(text(),'{}')]/../following::div//span[text() = '{}']/following-sibling::a/child::span[@class = 'deleteIcon']"
# npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
# npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
| <commit_before>from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"<commit_msg>Revert "Revert "changes in locator_50 file (current and old versions)""
This reverts commit 7537387aa80109877d6659cc54ec0ee7aa6496bd.<commit_after>from locators_51 import *
import copy
npsp_lex_locators = copy.deepcopy(npsp_lex_locators)
# current version (Sravani's )
npsp_lex_locators['delete_icon']='//span[contains(text() ,"{}")]/following::span[. = "{}"]/following-sibling::a/child::span[@class = "deleteIcon"]'
npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
npsp_lex_locators["record"]["related"]["button"]="//article[contains(@class, 'slds-card slds-card_boundary')][.//img][.//span[@title='{}']]//a[@title='{}']"
# stashed (Noah's version)
# npsp_lex_locators["delete_icon"]= "//span[contains(text(),'{}')]/../following::div//span[text() = '{}']/following-sibling::a/child::span[@class = 'deleteIcon']"
# npsp_lex_locators['object']['field']= "//div[contains(@class, 'uiInput')][.//label[contains(@class, 'uiLabel')][.//span[text()='{}']]]//*[self::input or self::textarea]"
# npsp_lex_locators["record"]["related"]["dd-link"]='//div[contains(@class,"actionMenu")]//a[@title="{}"]'
|
b3c99c3ce6ec181cf2ea82dbbbac5801d7f27874 | app/interact_app.py | app/interact_app.py | import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + 'socket/(\w+)'
settings = dict(
debug=True,
serve_traceback=True,
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path="/srv/interact/app/static",
static_url_prefix="/hub/interact/static/",
)
static_url = "r{}/static/(.*)".format(base_url)
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
super(InteractApp, self).__init__(handlers, **settings)
| import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + 'socket/(\w+)'
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path="/srv/interact/app/static",
static_url_prefix="/hub/interact/static/",
)
static_url = "r{}/static/(.*)".format(base_url)
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
super(InteractApp, self).__init__(handlers, **settings)
| Disable template caching for now. | Disable template caching for now.
| Python | apache-2.0 | data-8/DS8-Interact,data-8/DS8-Interact,data-8/DS8-Interact | import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + 'socket/(\w+)'
settings = dict(
debug=True,
serve_traceback=True,
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path="/srv/interact/app/static",
static_url_prefix="/hub/interact/static/",
)
static_url = "r{}/static/(.*)".format(base_url)
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
super(InteractApp, self).__init__(handlers, **settings)
Disable template caching for now. | import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + 'socket/(\w+)'
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path="/srv/interact/app/static",
static_url_prefix="/hub/interact/static/",
)
static_url = "r{}/static/(.*)".format(base_url)
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
super(InteractApp, self).__init__(handlers, **settings)
| <commit_before>import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + 'socket/(\w+)'
settings = dict(
debug=True,
serve_traceback=True,
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path="/srv/interact/app/static",
static_url_prefix="/hub/interact/static/",
)
static_url = "r{}/static/(.*)".format(base_url)
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
super(InteractApp, self).__init__(handlers, **settings)
<commit_msg>Disable template caching for now.<commit_after> | import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + 'socket/(\w+)'
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path="/srv/interact/app/static",
static_url_prefix="/hub/interact/static/",
)
static_url = "r{}/static/(.*)".format(base_url)
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
super(InteractApp, self).__init__(handlers, **settings)
| import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + 'socket/(\w+)'
settings = dict(
debug=True,
serve_traceback=True,
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path="/srv/interact/app/static",
static_url_prefix="/hub/interact/static/",
)
static_url = "r{}/static/(.*)".format(base_url)
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
super(InteractApp, self).__init__(handlers, **settings)
Disable template caching for now.import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + 'socket/(\w+)'
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path="/srv/interact/app/static",
static_url_prefix="/hub/interact/static/",
)
static_url = "r{}/static/(.*)".format(base_url)
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
super(InteractApp, self).__init__(handlers, **settings)
| <commit_before>import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + 'socket/(\w+)'
settings = dict(
debug=True,
serve_traceback=True,
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path="/srv/interact/app/static",
static_url_prefix="/hub/interact/static/",
)
static_url = "r{}/static/(.*)".format(base_url)
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
super(InteractApp, self).__init__(handlers, **settings)
<commit_msg>Disable template caching for now.<commit_after>import os
import tornado.web
from tornado.options import define
from .handlers import LandingHandler, RequestHandler
class InteractApp(tornado.web.Application):
"""
Entry point for the interact app.
"""
def __init__(self, config=None):
# Terrible hack to get config object in global namespace. This allows
# us to use options.config to get the global config object.
#
# TODO(sam): Replace with a better solution
define('config', config)
base_url = config['URL']
socket_url = config['URL'] + 'socket/(\w+)'
settings = dict(
debug=True,
serve_traceback=True,
compiled_template_cache=False,
template_path=os.path.join(os.path.dirname(__file__), "templates"),
static_path="/srv/interact/app/static",
static_url_prefix="/hub/interact/static/",
)
static_url = "r{}/static/(.*)".format(base_url)
handlers = [
(base_url, LandingHandler),
(socket_url, RequestHandler),
]
super(InteractApp, self).__init__(handlers, **settings)
|
356c9cd23ebf4953af169f38126fd521b49ca6c4 | recipe_scrapers/_abstract.py | recipe_scrapers/_abstract.py | from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
raise NotImplementedError("This should be implemented.")
def title(self):
raise NotImplementedError("This should be implemented.")
def total_time(self):
raise NotImplementedError("This should be implemented.")
def ingredients(self):
raise NotImplementedError("This should be implemented.")
def directions(self):
raise NotImplementedError("This should be implemented.")
def social_rating(self):
raise NotImplementedError("This should be implemented.")
| from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
""" get the host of the url, so we can use the correct scraper (check __init__.py) """
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
""" the original url of the publisher site """
raise NotImplementedError("This should be implemented.")
def title(self):
""" title of the recipe itself """
raise NotImplementedError("This should be implemented.")
def total_time(self):
""" total time it takes to preparate the recipe in minutes """
raise NotImplementedError("This should be implemented.")
def ingredients(self):
""" list of ingredients needed for the recipe """
raise NotImplementedError("This should be implemented.")
def directions(self):
""" directions provided on the recipe link """
raise NotImplementedError("This should be implemented.")
def social_rating(self):
""" social rating of the recipe in 0 - 100 scale """
raise NotImplementedError("This should be implemented.")
| Add docstring to the methods structure in the abstract class | Add docstring to the methods structure in the abstract class
| Python | mit | hhursev/recipe-scraper | from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
raise NotImplementedError("This should be implemented.")
def title(self):
raise NotImplementedError("This should be implemented.")
def total_time(self):
raise NotImplementedError("This should be implemented.")
def ingredients(self):
raise NotImplementedError("This should be implemented.")
def directions(self):
raise NotImplementedError("This should be implemented.")
def social_rating(self):
raise NotImplementedError("This should be implemented.")
Add docstring to the methods structure in the abstract class | from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
""" get the host of the url, so we can use the correct scraper (check __init__.py) """
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
""" the original url of the publisher site """
raise NotImplementedError("This should be implemented.")
def title(self):
""" title of the recipe itself """
raise NotImplementedError("This should be implemented.")
def total_time(self):
""" total time it takes to preparate the recipe in minutes """
raise NotImplementedError("This should be implemented.")
def ingredients(self):
""" list of ingredients needed for the recipe """
raise NotImplementedError("This should be implemented.")
def directions(self):
""" directions provided on the recipe link """
raise NotImplementedError("This should be implemented.")
def social_rating(self):
""" social rating of the recipe in 0 - 100 scale """
raise NotImplementedError("This should be implemented.")
| <commit_before>from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
raise NotImplementedError("This should be implemented.")
def title(self):
raise NotImplementedError("This should be implemented.")
def total_time(self):
raise NotImplementedError("This should be implemented.")
def ingredients(self):
raise NotImplementedError("This should be implemented.")
def directions(self):
raise NotImplementedError("This should be implemented.")
def social_rating(self):
raise NotImplementedError("This should be implemented.")
<commit_msg>Add docstring to the methods structure in the abstract class<commit_after> | from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
""" get the host of the url, so we can use the correct scraper (check __init__.py) """
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
""" the original url of the publisher site """
raise NotImplementedError("This should be implemented.")
def title(self):
""" title of the recipe itself """
raise NotImplementedError("This should be implemented.")
def total_time(self):
""" total time it takes to preparate the recipe in minutes """
raise NotImplementedError("This should be implemented.")
def ingredients(self):
""" list of ingredients needed for the recipe """
raise NotImplementedError("This should be implemented.")
def directions(self):
""" directions provided on the recipe link """
raise NotImplementedError("This should be implemented.")
def social_rating(self):
""" social rating of the recipe in 0 - 100 scale """
raise NotImplementedError("This should be implemented.")
| from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
raise NotImplementedError("This should be implemented.")
def title(self):
raise NotImplementedError("This should be implemented.")
def total_time(self):
raise NotImplementedError("This should be implemented.")
def ingredients(self):
raise NotImplementedError("This should be implemented.")
def directions(self):
raise NotImplementedError("This should be implemented.")
def social_rating(self):
raise NotImplementedError("This should be implemented.")
Add docstring to the methods structure in the abstract classfrom urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
""" get the host of the url, so we can use the correct scraper (check __init__.py) """
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
""" the original url of the publisher site """
raise NotImplementedError("This should be implemented.")
def title(self):
""" title of the recipe itself """
raise NotImplementedError("This should be implemented.")
def total_time(self):
""" total time it takes to preparate the recipe in minutes """
raise NotImplementedError("This should be implemented.")
def ingredients(self):
""" list of ingredients needed for the recipe """
raise NotImplementedError("This should be implemented.")
def directions(self):
""" directions provided on the recipe link """
raise NotImplementedError("This should be implemented.")
def social_rating(self):
""" social rating of the recipe in 0 - 100 scale """
raise NotImplementedError("This should be implemented.")
| <commit_before>from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
raise NotImplementedError("This should be implemented.")
def title(self):
raise NotImplementedError("This should be implemented.")
def total_time(self):
raise NotImplementedError("This should be implemented.")
def ingredients(self):
raise NotImplementedError("This should be implemented.")
def directions(self):
raise NotImplementedError("This should be implemented.")
def social_rating(self):
raise NotImplementedError("This should be implemented.")
<commit_msg>Add docstring to the methods structure in the abstract class<commit_after>from urllib import request
from bs4 import BeautifulSoup
class AbstractScraper():
def __init__(self, url, test=False):
if test:
# when testing, we simply load a file
self.soup = BeautifulSoup(url.read(), "html.parser")
else:
self.soup = BeautifulSoup(request.urlopen(url).read(), "html.parser")
def host(self):
""" get the host of the url, so we can use the correct scraper (check __init__.py) """
raise NotImplementedError("This should be implemented.")
def publisher_site(self):
""" the original url of the publisher site """
raise NotImplementedError("This should be implemented.")
def title(self):
""" title of the recipe itself """
raise NotImplementedError("This should be implemented.")
def total_time(self):
""" total time it takes to preparate the recipe in minutes """
raise NotImplementedError("This should be implemented.")
def ingredients(self):
""" list of ingredients needed for the recipe """
raise NotImplementedError("This should be implemented.")
def directions(self):
""" directions provided on the recipe link """
raise NotImplementedError("This should be implemented.")
def social_rating(self):
""" social rating of the recipe in 0 - 100 scale """
raise NotImplementedError("This should be implemented.")
|
d371c92e999aa90df39887a33901fdaa58f648f1 | test/gui/test_messages.py | test/gui/test_messages.py | import sys
from sequana.gui import messages
from PyQt5 import QtWidgets as QW
app = QW.QApplication(sys.argv)
def test_warning():
w = messages.WarningMessage("test")
def test_critical():
w = messages.CriticalMessage("test", details="test")
| from sequana.gui import messages
def test_warning(qtbot):
w = messages.WarningMessage("test")
def test_critical(qtbot):
w = messages.CriticalMessage("test", details="test")
| Fix test that caused a pytest hang | Fix test that caused a pytest hang
| Python | bsd-3-clause | sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana,sequana/sequana | import sys
from sequana.gui import messages
from PyQt5 import QtWidgets as QW
app = QW.QApplication(sys.argv)
def test_warning():
w = messages.WarningMessage("test")
def test_critical():
w = messages.CriticalMessage("test", details="test")
Fix test that caused a pytest hang | from sequana.gui import messages
def test_warning(qtbot):
w = messages.WarningMessage("test")
def test_critical(qtbot):
w = messages.CriticalMessage("test", details="test")
| <commit_before>import sys
from sequana.gui import messages
from PyQt5 import QtWidgets as QW
app = QW.QApplication(sys.argv)
def test_warning():
w = messages.WarningMessage("test")
def test_critical():
w = messages.CriticalMessage("test", details="test")
<commit_msg>Fix test that caused a pytest hang<commit_after> | from sequana.gui import messages
def test_warning(qtbot):
w = messages.WarningMessage("test")
def test_critical(qtbot):
w = messages.CriticalMessage("test", details="test")
| import sys
from sequana.gui import messages
from PyQt5 import QtWidgets as QW
app = QW.QApplication(sys.argv)
def test_warning():
w = messages.WarningMessage("test")
def test_critical():
w = messages.CriticalMessage("test", details="test")
Fix test that caused a pytest hangfrom sequana.gui import messages
def test_warning(qtbot):
w = messages.WarningMessage("test")
def test_critical(qtbot):
w = messages.CriticalMessage("test", details="test")
| <commit_before>import sys
from sequana.gui import messages
from PyQt5 import QtWidgets as QW
app = QW.QApplication(sys.argv)
def test_warning():
w = messages.WarningMessage("test")
def test_critical():
w = messages.CriticalMessage("test", details="test")
<commit_msg>Fix test that caused a pytest hang<commit_after>from sequana.gui import messages
def test_warning(qtbot):
w = messages.WarningMessage("test")
def test_critical(qtbot):
w = messages.CriticalMessage("test", details="test")
|
631983a14f941fa745b6e7f4b32fe1ef697d5703 | tests/mixers/denontest.py | tests/mixers/denontest.py | import unittest
import os
from mopidy.mixers.denon import DenonMixer
class DenonMixerDeviceMock(object):
def __init__(self):
self._open = True
self.ret_val = bytes('00')
def write(self, x):
pass
def read(self, x):
return self.ret_val
def isOpen(self):
return self._open
def open(self):
self._open = True
class DenonMixerTest(unittest.TestCase):
def setUp(self):
self.m = DenonMixer()
self.m._device = DenonMixerDeviceMock()
def test_volume_set_to_min(self):
self.m.volume = 0
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_max(self):
self.m.volume = 100
self.assertEqual(self.m.volume, 99)
def test_volume_set_to_below_min_results_in_min(self):
self.m.volume = -10
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_above_max_results_in_max(self):
self.m.volume = 110
self.assertEqual(self.m.volume, 99)
def test_reopen_device(self):
self.m._device._open = False
self.m.volume = 10
self.assertTrue(self.m._device._open)
| import unittest
import os
from mopidy.mixers.denon import DenonMixer
class DenonMixerDeviceMock(object):
def __init__(self):
self._open = True
self.ret_val = bytes('MV00\r')
def write(self, x):
if x[2] != '?':
self.ret_val = bytes(x)
def read(self, x):
return self.ret_val
def isOpen(self):
return self._open
def open(self):
self._open = True
class DenonMixerTest(unittest.TestCase):
def setUp(self):
self.m = DenonMixer()
self.m._device = DenonMixerDeviceMock()
def test_volume_set_to_min(self):
self.m.volume = 0
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_max(self):
self.m.volume = 100
self.assertEqual(self.m.volume, 99)
def test_volume_set_to_below_min_results_in_min(self):
self.m.volume = -10
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_above_max_results_in_max(self):
self.m.volume = 110
self.assertEqual(self.m.volume, 99)
def test_reopen_device(self):
self.m._device._open = False
self.m.volume = 10
self.assertTrue(self.m._device._open)
| Update denon device mock to reflect mixer changes | Update denon device mock to reflect mixer changes
| Python | apache-2.0 | mokieyue/mopidy,diandiankan/mopidy,bacontext/mopidy,tkem/mopidy,bacontext/mopidy,priestd09/mopidy,ZenithDK/mopidy,tkem/mopidy,quartz55/mopidy,bencevans/mopidy,vrs01/mopidy,priestd09/mopidy,jodal/mopidy,glogiotatidis/mopidy,ZenithDK/mopidy,rawdlite/mopidy,dbrgn/mopidy,bencevans/mopidy,abarisain/mopidy,jmarsik/mopidy,ali/mopidy,hkariti/mopidy,hkariti/mopidy,swak/mopidy,kingosticks/mopidy,kingosticks/mopidy,SuperStarPL/mopidy,ZenithDK/mopidy,pacificIT/mopidy,vrs01/mopidy,SuperStarPL/mopidy,swak/mopidy,bencevans/mopidy,hkariti/mopidy,jmarsik/mopidy,jcass77/mopidy,diandiankan/mopidy,jmarsik/mopidy,mokieyue/mopidy,vrs01/mopidy,jcass77/mopidy,mokieyue/mopidy,dbrgn/mopidy,tkem/mopidy,ZenithDK/mopidy,pacificIT/mopidy,glogiotatidis/mopidy,adamcik/mopidy,jcass77/mopidy,bacontext/mopidy,woutervanwijk/mopidy,adamcik/mopidy,hkariti/mopidy,rawdlite/mopidy,abarisain/mopidy,mopidy/mopidy,quartz55/mopidy,jodal/mopidy,dbrgn/mopidy,liamw9534/mopidy,tkem/mopidy,mopidy/mopidy,jmarsik/mopidy,kingosticks/mopidy,pacificIT/mopidy,ali/mopidy,vrs01/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,rawdlite/mopidy,mopidy/mopidy,swak/mopidy,diandiankan/mopidy,glogiotatidis/mopidy,woutervanwijk/mopidy,ali/mopidy,priestd09/mopidy,rawdlite/mopidy,quartz55/mopidy,bacontext/mopidy,liamw9534/mopidy,swak/mopidy,mokieyue/mopidy,adamcik/mopidy,quartz55/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,bencevans/mopidy,ali/mopidy,jodal/mopidy,glogiotatidis/mopidy,dbrgn/mopidy | import unittest
import os
from mopidy.mixers.denon import DenonMixer
class DenonMixerDeviceMock(object):
def __init__(self):
self._open = True
self.ret_val = bytes('00')
def write(self, x):
pass
def read(self, x):
return self.ret_val
def isOpen(self):
return self._open
def open(self):
self._open = True
class DenonMixerTest(unittest.TestCase):
def setUp(self):
self.m = DenonMixer()
self.m._device = DenonMixerDeviceMock()
def test_volume_set_to_min(self):
self.m.volume = 0
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_max(self):
self.m.volume = 100
self.assertEqual(self.m.volume, 99)
def test_volume_set_to_below_min_results_in_min(self):
self.m.volume = -10
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_above_max_results_in_max(self):
self.m.volume = 110
self.assertEqual(self.m.volume, 99)
def test_reopen_device(self):
self.m._device._open = False
self.m.volume = 10
self.assertTrue(self.m._device._open)
Update denon device mock to reflect mixer changes | import unittest
import os
from mopidy.mixers.denon import DenonMixer
class DenonMixerDeviceMock(object):
def __init__(self):
self._open = True
self.ret_val = bytes('MV00\r')
def write(self, x):
if x[2] != '?':
self.ret_val = bytes(x)
def read(self, x):
return self.ret_val
def isOpen(self):
return self._open
def open(self):
self._open = True
class DenonMixerTest(unittest.TestCase):
def setUp(self):
self.m = DenonMixer()
self.m._device = DenonMixerDeviceMock()
def test_volume_set_to_min(self):
self.m.volume = 0
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_max(self):
self.m.volume = 100
self.assertEqual(self.m.volume, 99)
def test_volume_set_to_below_min_results_in_min(self):
self.m.volume = -10
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_above_max_results_in_max(self):
self.m.volume = 110
self.assertEqual(self.m.volume, 99)
def test_reopen_device(self):
self.m._device._open = False
self.m.volume = 10
self.assertTrue(self.m._device._open)
| <commit_before>import unittest
import os
from mopidy.mixers.denon import DenonMixer
class DenonMixerDeviceMock(object):
def __init__(self):
self._open = True
self.ret_val = bytes('00')
def write(self, x):
pass
def read(self, x):
return self.ret_val
def isOpen(self):
return self._open
def open(self):
self._open = True
class DenonMixerTest(unittest.TestCase):
def setUp(self):
self.m = DenonMixer()
self.m._device = DenonMixerDeviceMock()
def test_volume_set_to_min(self):
self.m.volume = 0
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_max(self):
self.m.volume = 100
self.assertEqual(self.m.volume, 99)
def test_volume_set_to_below_min_results_in_min(self):
self.m.volume = -10
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_above_max_results_in_max(self):
self.m.volume = 110
self.assertEqual(self.m.volume, 99)
def test_reopen_device(self):
self.m._device._open = False
self.m.volume = 10
self.assertTrue(self.m._device._open)
<commit_msg>Update denon device mock to reflect mixer changes<commit_after> | import unittest
import os
from mopidy.mixers.denon import DenonMixer
class DenonMixerDeviceMock(object):
def __init__(self):
self._open = True
self.ret_val = bytes('MV00\r')
def write(self, x):
if x[2] != '?':
self.ret_val = bytes(x)
def read(self, x):
return self.ret_val
def isOpen(self):
return self._open
def open(self):
self._open = True
class DenonMixerTest(unittest.TestCase):
def setUp(self):
self.m = DenonMixer()
self.m._device = DenonMixerDeviceMock()
def test_volume_set_to_min(self):
self.m.volume = 0
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_max(self):
self.m.volume = 100
self.assertEqual(self.m.volume, 99)
def test_volume_set_to_below_min_results_in_min(self):
self.m.volume = -10
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_above_max_results_in_max(self):
self.m.volume = 110
self.assertEqual(self.m.volume, 99)
def test_reopen_device(self):
self.m._device._open = False
self.m.volume = 10
self.assertTrue(self.m._device._open)
| import unittest
import os
from mopidy.mixers.denon import DenonMixer
class DenonMixerDeviceMock(object):
def __init__(self):
self._open = True
self.ret_val = bytes('00')
def write(self, x):
pass
def read(self, x):
return self.ret_val
def isOpen(self):
return self._open
def open(self):
self._open = True
class DenonMixerTest(unittest.TestCase):
def setUp(self):
self.m = DenonMixer()
self.m._device = DenonMixerDeviceMock()
def test_volume_set_to_min(self):
self.m.volume = 0
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_max(self):
self.m.volume = 100
self.assertEqual(self.m.volume, 99)
def test_volume_set_to_below_min_results_in_min(self):
self.m.volume = -10
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_above_max_results_in_max(self):
self.m.volume = 110
self.assertEqual(self.m.volume, 99)
def test_reopen_device(self):
self.m._device._open = False
self.m.volume = 10
self.assertTrue(self.m._device._open)
Update denon device mock to reflect mixer changesimport unittest
import os
from mopidy.mixers.denon import DenonMixer
class DenonMixerDeviceMock(object):
def __init__(self):
self._open = True
self.ret_val = bytes('MV00\r')
def write(self, x):
if x[2] != '?':
self.ret_val = bytes(x)
def read(self, x):
return self.ret_val
def isOpen(self):
return self._open
def open(self):
self._open = True
class DenonMixerTest(unittest.TestCase):
def setUp(self):
self.m = DenonMixer()
self.m._device = DenonMixerDeviceMock()
def test_volume_set_to_min(self):
self.m.volume = 0
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_max(self):
self.m.volume = 100
self.assertEqual(self.m.volume, 99)
def test_volume_set_to_below_min_results_in_min(self):
self.m.volume = -10
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_above_max_results_in_max(self):
self.m.volume = 110
self.assertEqual(self.m.volume, 99)
def test_reopen_device(self):
self.m._device._open = False
self.m.volume = 10
self.assertTrue(self.m._device._open)
| <commit_before>import unittest
import os
from mopidy.mixers.denon import DenonMixer
class DenonMixerDeviceMock(object):
def __init__(self):
self._open = True
self.ret_val = bytes('00')
def write(self, x):
pass
def read(self, x):
return self.ret_val
def isOpen(self):
return self._open
def open(self):
self._open = True
class DenonMixerTest(unittest.TestCase):
def setUp(self):
self.m = DenonMixer()
self.m._device = DenonMixerDeviceMock()
def test_volume_set_to_min(self):
self.m.volume = 0
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_max(self):
self.m.volume = 100
self.assertEqual(self.m.volume, 99)
def test_volume_set_to_below_min_results_in_min(self):
self.m.volume = -10
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_above_max_results_in_max(self):
self.m.volume = 110
self.assertEqual(self.m.volume, 99)
def test_reopen_device(self):
self.m._device._open = False
self.m.volume = 10
self.assertTrue(self.m._device._open)
<commit_msg>Update denon device mock to reflect mixer changes<commit_after>import unittest
import os
from mopidy.mixers.denon import DenonMixer
class DenonMixerDeviceMock(object):
def __init__(self):
self._open = True
self.ret_val = bytes('MV00\r')
def write(self, x):
if x[2] != '?':
self.ret_val = bytes(x)
def read(self, x):
return self.ret_val
def isOpen(self):
return self._open
def open(self):
self._open = True
class DenonMixerTest(unittest.TestCase):
def setUp(self):
self.m = DenonMixer()
self.m._device = DenonMixerDeviceMock()
def test_volume_set_to_min(self):
self.m.volume = 0
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_max(self):
self.m.volume = 100
self.assertEqual(self.m.volume, 99)
def test_volume_set_to_below_min_results_in_min(self):
self.m.volume = -10
self.assertEqual(self.m.volume, 0)
def test_volume_set_to_above_max_results_in_max(self):
self.m.volume = 110
self.assertEqual(self.m.volume, 99)
def test_reopen_device(self):
self.m._device._open = False
self.m.volume = 10
self.assertTrue(self.m._device._open)
|
4636c2deb451c284ffdfc44c744cf025a9f87377 | scribeui_pyramid/modules/plugins/__init__.py | scribeui_pyramid/modules/plugins/__init__.py | import imp #For plugins
import sys
import traceback
import logging
import os #For plugins
log = logging.getLogger(__name__)
pluginsList = []
def includeme(config):
global pluginsList
plugins = load_plugins()
for name, plugin in plugins.iteritems():
config.include("..plugins."+name)
pluginsList.append(name)
#===============================
# Plugin load code
#===============================
def load_plugins():
plugins = {}
path = os.path.abspath(os.path.dirname(__file__))
for filename in os.listdir(path):
if os.path.isdir(os.path.join(path, filename)) and os.path.isfile(os.path.join(path, filename, '__init__.py')):
try:
f, pluginPath, descr = imp.find_module(filename, [path])
pluginName = os.path.basename(pluginPath)
plugins[pluginName] = imp.load_module(filename, f, pluginName, descr)
except ImportError:
log.error('There was an error with the '+filename+' plugin:')
traceback.print_exc(file=sys.stdout)
return plugins
| import imp #For plugins
import sys
import traceback
import logging
import os #For plugins
log = logging.getLogger(__name__)
pluginsList = []
def includeme(config):
global pluginsList
plugins = load_plugins()
for name, plugin in plugins.iteritems():
config.include("..plugins."+name)
pluginsList.append(name)
#===============================
# Plugin load code
#===============================
def load_plugins():
plugins = {}
path = os.path.abspath(os.path.dirname(__file__))
for filename in os.listdir(path):
tmp_path = path
if os.path.isdir(os.path.join(tmp_path, filename)) and os.path.isfile(os.path.join(tmp_path, filename, '__init__.py')):
try:
f, pluginPath, descr = imp.find_module(filename, [tmp_path])
pluginName = os.path.basename(pluginPath)
plugins[pluginName] = imp.load_module(filename, f, pluginName, descr)
except ImportError:
log.error('There was an error with the '+filename+' plugin:')
traceback.print_exc(file=sys.stdout)
return plugins
| Fix load_plugin loop loading only one plugin | Fix load_plugin loop loading only one plugin
| Python | mit | mapgears/scribeui,mapgears/scribeui,mapgears/scribeui,mapgears/scribeui,mapgears/scribeui,mapgears/scribeui | import imp #For plugins
import sys
import traceback
import logging
import os #For plugins
log = logging.getLogger(__name__)
pluginsList = []
def includeme(config):
global pluginsList
plugins = load_plugins()
for name, plugin in plugins.iteritems():
config.include("..plugins."+name)
pluginsList.append(name)
#===============================
# Plugin load code
#===============================
def load_plugins():
plugins = {}
path = os.path.abspath(os.path.dirname(__file__))
for filename in os.listdir(path):
if os.path.isdir(os.path.join(path, filename)) and os.path.isfile(os.path.join(path, filename, '__init__.py')):
try:
f, pluginPath, descr = imp.find_module(filename, [path])
pluginName = os.path.basename(pluginPath)
plugins[pluginName] = imp.load_module(filename, f, pluginName, descr)
except ImportError:
log.error('There was an error with the '+filename+' plugin:')
traceback.print_exc(file=sys.stdout)
return plugins
Fix load_plugin loop loading only one plugin | import imp #For plugins
import sys
import traceback
import logging
import os #For plugins
log = logging.getLogger(__name__)
pluginsList = []
def includeme(config):
global pluginsList
plugins = load_plugins()
for name, plugin in plugins.iteritems():
config.include("..plugins."+name)
pluginsList.append(name)
#===============================
# Plugin load code
#===============================
def load_plugins():
plugins = {}
path = os.path.abspath(os.path.dirname(__file__))
for filename in os.listdir(path):
tmp_path = path
if os.path.isdir(os.path.join(tmp_path, filename)) and os.path.isfile(os.path.join(tmp_path, filename, '__init__.py')):
try:
f, pluginPath, descr = imp.find_module(filename, [tmp_path])
pluginName = os.path.basename(pluginPath)
plugins[pluginName] = imp.load_module(filename, f, pluginName, descr)
except ImportError:
log.error('There was an error with the '+filename+' plugin:')
traceback.print_exc(file=sys.stdout)
return plugins
| <commit_before>import imp #For plugins
import sys
import traceback
import logging
import os #For plugins
log = logging.getLogger(__name__)
pluginsList = []
def includeme(config):
global pluginsList
plugins = load_plugins()
for name, plugin in plugins.iteritems():
config.include("..plugins."+name)
pluginsList.append(name)
#===============================
# Plugin load code
#===============================
def load_plugins():
plugins = {}
path = os.path.abspath(os.path.dirname(__file__))
for filename in os.listdir(path):
if os.path.isdir(os.path.join(path, filename)) and os.path.isfile(os.path.join(path, filename, '__init__.py')):
try:
f, pluginPath, descr = imp.find_module(filename, [path])
pluginName = os.path.basename(pluginPath)
plugins[pluginName] = imp.load_module(filename, f, pluginName, descr)
except ImportError:
log.error('There was an error with the '+filename+' plugin:')
traceback.print_exc(file=sys.stdout)
return plugins
<commit_msg>Fix load_plugin loop loading only one plugin<commit_after> | import imp #For plugins
import sys
import traceback
import logging
import os #For plugins
log = logging.getLogger(__name__)
pluginsList = []
def includeme(config):
global pluginsList
plugins = load_plugins()
for name, plugin in plugins.iteritems():
config.include("..plugins."+name)
pluginsList.append(name)
#===============================
# Plugin load code
#===============================
def load_plugins():
plugins = {}
path = os.path.abspath(os.path.dirname(__file__))
for filename in os.listdir(path):
tmp_path = path
if os.path.isdir(os.path.join(tmp_path, filename)) and os.path.isfile(os.path.join(tmp_path, filename, '__init__.py')):
try:
f, pluginPath, descr = imp.find_module(filename, [tmp_path])
pluginName = os.path.basename(pluginPath)
plugins[pluginName] = imp.load_module(filename, f, pluginName, descr)
except ImportError:
log.error('There was an error with the '+filename+' plugin:')
traceback.print_exc(file=sys.stdout)
return plugins
| import imp #For plugins
import sys
import traceback
import logging
import os #For plugins
log = logging.getLogger(__name__)
pluginsList = []
def includeme(config):
global pluginsList
plugins = load_plugins()
for name, plugin in plugins.iteritems():
config.include("..plugins."+name)
pluginsList.append(name)
#===============================
# Plugin load code
#===============================
def load_plugins():
plugins = {}
path = os.path.abspath(os.path.dirname(__file__))
for filename in os.listdir(path):
if os.path.isdir(os.path.join(path, filename)) and os.path.isfile(os.path.join(path, filename, '__init__.py')):
try:
f, pluginPath, descr = imp.find_module(filename, [path])
pluginName = os.path.basename(pluginPath)
plugins[pluginName] = imp.load_module(filename, f, pluginName, descr)
except ImportError:
log.error('There was an error with the '+filename+' plugin:')
traceback.print_exc(file=sys.stdout)
return plugins
Fix load_plugin loop loading only one pluginimport imp #For plugins
import sys
import traceback
import logging
import os #For plugins
log = logging.getLogger(__name__)
pluginsList = []
def includeme(config):
global pluginsList
plugins = load_plugins()
for name, plugin in plugins.iteritems():
config.include("..plugins."+name)
pluginsList.append(name)
#===============================
# Plugin load code
#===============================
def load_plugins():
plugins = {}
path = os.path.abspath(os.path.dirname(__file__))
for filename in os.listdir(path):
tmp_path = path
if os.path.isdir(os.path.join(tmp_path, filename)) and os.path.isfile(os.path.join(tmp_path, filename, '__init__.py')):
try:
f, pluginPath, descr = imp.find_module(filename, [tmp_path])
pluginName = os.path.basename(pluginPath)
plugins[pluginName] = imp.load_module(filename, f, pluginName, descr)
except ImportError:
log.error('There was an error with the '+filename+' plugin:')
traceback.print_exc(file=sys.stdout)
return plugins
| <commit_before>import imp #For plugins
import sys
import traceback
import logging
import os #For plugins
log = logging.getLogger(__name__)
pluginsList = []
def includeme(config):
global pluginsList
plugins = load_plugins()
for name, plugin in plugins.iteritems():
config.include("..plugins."+name)
pluginsList.append(name)
#===============================
# Plugin load code
#===============================
def load_plugins():
plugins = {}
path = os.path.abspath(os.path.dirname(__file__))
for filename in os.listdir(path):
if os.path.isdir(os.path.join(path, filename)) and os.path.isfile(os.path.join(path, filename, '__init__.py')):
try:
f, pluginPath, descr = imp.find_module(filename, [path])
pluginName = os.path.basename(pluginPath)
plugins[pluginName] = imp.load_module(filename, f, pluginName, descr)
except ImportError:
log.error('There was an error with the '+filename+' plugin:')
traceback.print_exc(file=sys.stdout)
return plugins
<commit_msg>Fix load_plugin loop loading only one plugin<commit_after>import imp #For plugins
import sys
import traceback
import logging
import os #For plugins
log = logging.getLogger(__name__)
pluginsList = []
def includeme(config):
global pluginsList
plugins = load_plugins()
for name, plugin in plugins.iteritems():
config.include("..plugins."+name)
pluginsList.append(name)
#===============================
# Plugin load code
#===============================
def load_plugins():
plugins = {}
path = os.path.abspath(os.path.dirname(__file__))
for filename in os.listdir(path):
tmp_path = path
if os.path.isdir(os.path.join(tmp_path, filename)) and os.path.isfile(os.path.join(tmp_path, filename, '__init__.py')):
try:
f, pluginPath, descr = imp.find_module(filename, [tmp_path])
pluginName = os.path.basename(pluginPath)
plugins[pluginName] = imp.load_module(filename, f, pluginName, descr)
except ImportError:
log.error('There was an error with the '+filename+' plugin:')
traceback.print_exc(file=sys.stdout)
return plugins
|
d8f6938649acd4a72a53d47c26a1b16adb0e8fe3 | jupyterlab_gitsync/jupyterlab_gitsync/__init__.py | jupyterlab_gitsync/jupyterlab_gitsync/__init__.py | from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
| from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
| Fix indentation to pass tests | Fix indentation to pass tests | Python | apache-2.0 | GoogleCloudPlatform/jupyter-extensions,GoogleCloudPlatform/jupyter-extensions,GoogleCloudPlatform/jupyter-extensions,GoogleCloudPlatform/jupyter-extensions | from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
Fix indentation to pass tests | from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
| <commit_before>from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
<commit_msg>Fix indentation to pass tests<commit_after> | from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
| from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
Fix indentation to pass testsfrom notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
| <commit_before>from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
<commit_msg>Fix indentation to pass tests<commit_after>from notebook.utils import url_path_join
from jupyterlab_gitsync.handlers import *
from jupyterlab_gitsync.version import VERSION
__version__ = VERSION
def _jupyter_server_extension_paths():
return [{'module': 'jupyterlab_gitsync'}]
def load_jupyter_server_extension(nb_server_app):
"""
Called when the extension is loaded.
Args:
nb_server_app (NotebookWebApplication): handle to the Notebook webserver instance.
"""
host_pattern = '.*$'
app = nb_server_app.web_app
gcp_v1_endpoint = url_path_join(
app.settings['base_url'], 'jupyterlab_gitsync', 'v1')
app.add_handlers(host_pattern, [
(url_path_join(gcp_v1_endpoint, 'sync') + '(.*)', SyncHandler),
(url_path_join(gcp_v1_endpoint, 'setup') + '(.*)', SetupHandler)
])
|
d47ba3167b60710efe07e40113150b53c88e7d85 | tests/test_highlighter.py | tests/test_highlighter.py | import pytest
from rich.highlighter import NullHighlighter
def test_wrong_type():
highlighter = NullHighlighter()
with pytest.raises(TypeError):
highlighter([])
| """Tests for the higlighter classes."""
import pytest
from rich.highlighter import NullHighlighter, ReprHighlighter
from rich.text import Span, Text
def test_wrong_type():
highlighter = NullHighlighter()
with pytest.raises(TypeError):
highlighter([])
@pytest.mark.parametrize(
"style_name, test_str",
[
("repr.eui48", "01-23-45-67-89-AB"), # 6x2 hyphen
("repr.eui64", "01-23-45-FF-FE-67-89-AB"), # 8x2 hyphen
("repr.eui48", "01:23:45:67:89:AB"), # 6x2 colon
("repr.eui64", "01:23:45:FF:FE:67:89:AB"), # 8x2 colon
("repr.eui48", "0123.4567.89AB"), # 3x4 dot
("repr.eui64", "0123.45FF.FE67.89AB"), # 4x4 dot
("repr.eui48", "ed-ed-ed-ed-ed-ed"), # lowercase
("repr.eui48", "ED-ED-ED-ED-ED-ED"), # uppercase
("repr.eui48", "Ed-Ed-Ed-Ed-Ed-Ed"), # mixed case
("repr.eui48", "0-00-1-01-2-02"), # dropped zero
],
)
def test_highlight_regex(style_name: str, test_str: str):
"""Tests for the regular expressions used in ReprHighlighter."""
text = Text(test_str)
highlighter = ReprHighlighter()
highlighter.highlight(text)
assert text._spans[-1] == Span(0, len(test_str), style_name)
| Add tests for EUI-48 and EUI-64 in ReprHighlighter | Add tests for EUI-48 and EUI-64 in ReprHighlighter
| Python | mit | willmcgugan/rich | import pytest
from rich.highlighter import NullHighlighter
def test_wrong_type():
highlighter = NullHighlighter()
with pytest.raises(TypeError):
highlighter([])
Add tests for EUI-48 and EUI-64 in ReprHighlighter | """Tests for the higlighter classes."""
import pytest
from rich.highlighter import NullHighlighter, ReprHighlighter
from rich.text import Span, Text
def test_wrong_type():
highlighter = NullHighlighter()
with pytest.raises(TypeError):
highlighter([])
@pytest.mark.parametrize(
"style_name, test_str",
[
("repr.eui48", "01-23-45-67-89-AB"), # 6x2 hyphen
("repr.eui64", "01-23-45-FF-FE-67-89-AB"), # 8x2 hyphen
("repr.eui48", "01:23:45:67:89:AB"), # 6x2 colon
("repr.eui64", "01:23:45:FF:FE:67:89:AB"), # 8x2 colon
("repr.eui48", "0123.4567.89AB"), # 3x4 dot
("repr.eui64", "0123.45FF.FE67.89AB"), # 4x4 dot
("repr.eui48", "ed-ed-ed-ed-ed-ed"), # lowercase
("repr.eui48", "ED-ED-ED-ED-ED-ED"), # uppercase
("repr.eui48", "Ed-Ed-Ed-Ed-Ed-Ed"), # mixed case
("repr.eui48", "0-00-1-01-2-02"), # dropped zero
],
)
def test_highlight_regex(style_name: str, test_str: str):
"""Tests for the regular expressions used in ReprHighlighter."""
text = Text(test_str)
highlighter = ReprHighlighter()
highlighter.highlight(text)
assert text._spans[-1] == Span(0, len(test_str), style_name)
| <commit_before>import pytest
from rich.highlighter import NullHighlighter
def test_wrong_type():
highlighter = NullHighlighter()
with pytest.raises(TypeError):
highlighter([])
<commit_msg>Add tests for EUI-48 and EUI-64 in ReprHighlighter<commit_after> | """Tests for the higlighter classes."""
import pytest
from rich.highlighter import NullHighlighter, ReprHighlighter
from rich.text import Span, Text
def test_wrong_type():
highlighter = NullHighlighter()
with pytest.raises(TypeError):
highlighter([])
@pytest.mark.parametrize(
"style_name, test_str",
[
("repr.eui48", "01-23-45-67-89-AB"), # 6x2 hyphen
("repr.eui64", "01-23-45-FF-FE-67-89-AB"), # 8x2 hyphen
("repr.eui48", "01:23:45:67:89:AB"), # 6x2 colon
("repr.eui64", "01:23:45:FF:FE:67:89:AB"), # 8x2 colon
("repr.eui48", "0123.4567.89AB"), # 3x4 dot
("repr.eui64", "0123.45FF.FE67.89AB"), # 4x4 dot
("repr.eui48", "ed-ed-ed-ed-ed-ed"), # lowercase
("repr.eui48", "ED-ED-ED-ED-ED-ED"), # uppercase
("repr.eui48", "Ed-Ed-Ed-Ed-Ed-Ed"), # mixed case
("repr.eui48", "0-00-1-01-2-02"), # dropped zero
],
)
def test_highlight_regex(style_name: str, test_str: str):
"""Tests for the regular expressions used in ReprHighlighter."""
text = Text(test_str)
highlighter = ReprHighlighter()
highlighter.highlight(text)
assert text._spans[-1] == Span(0, len(test_str), style_name)
| import pytest
from rich.highlighter import NullHighlighter
def test_wrong_type():
highlighter = NullHighlighter()
with pytest.raises(TypeError):
highlighter([])
Add tests for EUI-48 and EUI-64 in ReprHighlighter"""Tests for the higlighter classes."""
import pytest
from rich.highlighter import NullHighlighter, ReprHighlighter
from rich.text import Span, Text
def test_wrong_type():
highlighter = NullHighlighter()
with pytest.raises(TypeError):
highlighter([])
@pytest.mark.parametrize(
"style_name, test_str",
[
("repr.eui48", "01-23-45-67-89-AB"), # 6x2 hyphen
("repr.eui64", "01-23-45-FF-FE-67-89-AB"), # 8x2 hyphen
("repr.eui48", "01:23:45:67:89:AB"), # 6x2 colon
("repr.eui64", "01:23:45:FF:FE:67:89:AB"), # 8x2 colon
("repr.eui48", "0123.4567.89AB"), # 3x4 dot
("repr.eui64", "0123.45FF.FE67.89AB"), # 4x4 dot
("repr.eui48", "ed-ed-ed-ed-ed-ed"), # lowercase
("repr.eui48", "ED-ED-ED-ED-ED-ED"), # uppercase
("repr.eui48", "Ed-Ed-Ed-Ed-Ed-Ed"), # mixed case
("repr.eui48", "0-00-1-01-2-02"), # dropped zero
],
)
def test_highlight_regex(style_name: str, test_str: str):
"""Tests for the regular expressions used in ReprHighlighter."""
text = Text(test_str)
highlighter = ReprHighlighter()
highlighter.highlight(text)
assert text._spans[-1] == Span(0, len(test_str), style_name)
| <commit_before>import pytest
from rich.highlighter import NullHighlighter
def test_wrong_type():
highlighter = NullHighlighter()
with pytest.raises(TypeError):
highlighter([])
<commit_msg>Add tests for EUI-48 and EUI-64 in ReprHighlighter<commit_after>"""Tests for the higlighter classes."""
import pytest
from rich.highlighter import NullHighlighter, ReprHighlighter
from rich.text import Span, Text
def test_wrong_type():
highlighter = NullHighlighter()
with pytest.raises(TypeError):
highlighter([])
@pytest.mark.parametrize(
"style_name, test_str",
[
("repr.eui48", "01-23-45-67-89-AB"), # 6x2 hyphen
("repr.eui64", "01-23-45-FF-FE-67-89-AB"), # 8x2 hyphen
("repr.eui48", "01:23:45:67:89:AB"), # 6x2 colon
("repr.eui64", "01:23:45:FF:FE:67:89:AB"), # 8x2 colon
("repr.eui48", "0123.4567.89AB"), # 3x4 dot
("repr.eui64", "0123.45FF.FE67.89AB"), # 4x4 dot
("repr.eui48", "ed-ed-ed-ed-ed-ed"), # lowercase
("repr.eui48", "ED-ED-ED-ED-ED-ED"), # uppercase
("repr.eui48", "Ed-Ed-Ed-Ed-Ed-Ed"), # mixed case
("repr.eui48", "0-00-1-01-2-02"), # dropped zero
],
)
def test_highlight_regex(style_name: str, test_str: str):
"""Tests for the regular expressions used in ReprHighlighter."""
text = Text(test_str)
highlighter = ReprHighlighter()
highlighter.highlight(text)
assert text._spans[-1] == Span(0, len(test_str), style_name)
|
6d930e7f29e12cd677cce07c7c1accc66ae594c8 | tests/test_zz_jvm_kill.py | tests/test_zz_jvm_kill.py | from cellom2tif import cellom2tif
import pytest
cfile = 'test-data/d1/MFGTMP_120628160001_C18f00d0.C01'
def test_read_image():
im = cellom2tif.read_image(cfile)
assert im.shape == (512, 512)
def test_done():
cellom2tif.done()
assert cellom2tif.VM_KILLED
def test_vm_killed_error():
cellom2tif.done()
with pytest.raises(RuntimeError) as err:
cellom2tif.read_image(cfile)
assert err.value.message.startswith('The Java Virtual Machine')
| from cellom2tif import cellom2tif
import bioformats as bf
import pytest
cfile = 'test-data/d1/MFGTMP_120628160001_C18f00d0.C01'
def test_read_image():
im = cellom2tif.read_image(cfile)
assert im.shape == (512, 512)
def test_read_image_from_reader():
rdr = bf.ImageReader(cfile)
im = cellom2tif.read_image(rdr)
assert im.shape == (512, 512)
def test_done():
cellom2tif.done()
assert cellom2tif.VM_KILLED
def test_vm_killed_error():
cellom2tif.done()
with pytest.raises(RuntimeError) as err:
cellom2tif.read_image(cfile)
assert err.value.message.startswith('The Java Virtual Machine')
| Test reading from bf.ImageReader directly | Test reading from bf.ImageReader directly
| Python | bsd-3-clause | jni/cellom2tif | from cellom2tif import cellom2tif
import pytest
cfile = 'test-data/d1/MFGTMP_120628160001_C18f00d0.C01'
def test_read_image():
im = cellom2tif.read_image(cfile)
assert im.shape == (512, 512)
def test_done():
cellom2tif.done()
assert cellom2tif.VM_KILLED
def test_vm_killed_error():
cellom2tif.done()
with pytest.raises(RuntimeError) as err:
cellom2tif.read_image(cfile)
assert err.value.message.startswith('The Java Virtual Machine')
Test reading from bf.ImageReader directly | from cellom2tif import cellom2tif
import bioformats as bf
import pytest
cfile = 'test-data/d1/MFGTMP_120628160001_C18f00d0.C01'
def test_read_image():
im = cellom2tif.read_image(cfile)
assert im.shape == (512, 512)
def test_read_image_from_reader():
rdr = bf.ImageReader(cfile)
im = cellom2tif.read_image(rdr)
assert im.shape == (512, 512)
def test_done():
cellom2tif.done()
assert cellom2tif.VM_KILLED
def test_vm_killed_error():
cellom2tif.done()
with pytest.raises(RuntimeError) as err:
cellom2tif.read_image(cfile)
assert err.value.message.startswith('The Java Virtual Machine')
| <commit_before>from cellom2tif import cellom2tif
import pytest
cfile = 'test-data/d1/MFGTMP_120628160001_C18f00d0.C01'
def test_read_image():
im = cellom2tif.read_image(cfile)
assert im.shape == (512, 512)
def test_done():
cellom2tif.done()
assert cellom2tif.VM_KILLED
def test_vm_killed_error():
cellom2tif.done()
with pytest.raises(RuntimeError) as err:
cellom2tif.read_image(cfile)
assert err.value.message.startswith('The Java Virtual Machine')
<commit_msg>Test reading from bf.ImageReader directly<commit_after> | from cellom2tif import cellom2tif
import bioformats as bf
import pytest
cfile = 'test-data/d1/MFGTMP_120628160001_C18f00d0.C01'
def test_read_image():
im = cellom2tif.read_image(cfile)
assert im.shape == (512, 512)
def test_read_image_from_reader():
rdr = bf.ImageReader(cfile)
im = cellom2tif.read_image(rdr)
assert im.shape == (512, 512)
def test_done():
cellom2tif.done()
assert cellom2tif.VM_KILLED
def test_vm_killed_error():
cellom2tif.done()
with pytest.raises(RuntimeError) as err:
cellom2tif.read_image(cfile)
assert err.value.message.startswith('The Java Virtual Machine')
| from cellom2tif import cellom2tif
import pytest
cfile = 'test-data/d1/MFGTMP_120628160001_C18f00d0.C01'
def test_read_image():
im = cellom2tif.read_image(cfile)
assert im.shape == (512, 512)
def test_done():
cellom2tif.done()
assert cellom2tif.VM_KILLED
def test_vm_killed_error():
cellom2tif.done()
with pytest.raises(RuntimeError) as err:
cellom2tif.read_image(cfile)
assert err.value.message.startswith('The Java Virtual Machine')
Test reading from bf.ImageReader directlyfrom cellom2tif import cellom2tif
import bioformats as bf
import pytest
cfile = 'test-data/d1/MFGTMP_120628160001_C18f00d0.C01'
def test_read_image():
im = cellom2tif.read_image(cfile)
assert im.shape == (512, 512)
def test_read_image_from_reader():
rdr = bf.ImageReader(cfile)
im = cellom2tif.read_image(rdr)
assert im.shape == (512, 512)
def test_done():
cellom2tif.done()
assert cellom2tif.VM_KILLED
def test_vm_killed_error():
cellom2tif.done()
with pytest.raises(RuntimeError) as err:
cellom2tif.read_image(cfile)
assert err.value.message.startswith('The Java Virtual Machine')
| <commit_before>from cellom2tif import cellom2tif
import pytest
cfile = 'test-data/d1/MFGTMP_120628160001_C18f00d0.C01'
def test_read_image():
im = cellom2tif.read_image(cfile)
assert im.shape == (512, 512)
def test_done():
cellom2tif.done()
assert cellom2tif.VM_KILLED
def test_vm_killed_error():
cellom2tif.done()
with pytest.raises(RuntimeError) as err:
cellom2tif.read_image(cfile)
assert err.value.message.startswith('The Java Virtual Machine')
<commit_msg>Test reading from bf.ImageReader directly<commit_after>from cellom2tif import cellom2tif
import bioformats as bf
import pytest
cfile = 'test-data/d1/MFGTMP_120628160001_C18f00d0.C01'
def test_read_image():
im = cellom2tif.read_image(cfile)
assert im.shape == (512, 512)
def test_read_image_from_reader():
rdr = bf.ImageReader(cfile)
im = cellom2tif.read_image(rdr)
assert im.shape == (512, 512)
def test_done():
cellom2tif.done()
assert cellom2tif.VM_KILLED
def test_vm_killed_error():
cellom2tif.done()
with pytest.raises(RuntimeError) as err:
cellom2tif.read_image(cfile)
assert err.value.message.startswith('The Java Virtual Machine')
|
351bc14c66962e5ef386b6d41073697993c95236 | greengraph/test/test_map.py | greengraph/test/test_map.py | from greengraph.map import Map
import numpy as np
from nose.tools import assert_equal
import yaml
def test_green():
size = (10,10)
zoom = 10
lat = 50
lon = 50
satellite = True
testMap = Map(lat,lon,satellite,zoom,size)
threshold = 1
trueArray = np.ones(size,dtype=bool)
falseArray = np.zeros(size,dtype=bool)
def assert_images_equal(r,g,b,checkArray):
testPixels = np.dstack((r,g,blue))
testMap.pixels = testPixels
np.testing.assert_array_equal(testMap.green(threshold),checkArray)
green = np.ones(size)
red = np.ones(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
blue = np.zeros(size)
assert_images_equal(red,green,blue,falseArray)
red = np.zeros(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
blue = np.zeros(size)
assert_images_equal(red,green,blue,trueArray)
| from greengraph.map import Map
import numpy as np
from nose.tools import assert_equal
from mock import patch
import os
@patch('requests.get')
@patch('matplotlib.image.imread')
@patch('StringIO.StringIO')
def test_green(mock_get,mock_imread,mock_StringIO):
def assert_images_equal(r,g,b,checkArray):
testMap.pixels = np.dstack((r,g,b))
np.testing.assert_array_equal(testMap.green(threshold),checkArray)
lat = 50
lon = 50
testMap = Map(lat,lon)
size = (400,400)
trueArray = np.ones(size,dtype=bool)
falseArray = np.zeros(size,dtype=bool)
threshold = 1
#Check the returned array is false everywhere when the value of the green pixels is identical to the values of the red and blue pixels
green = np.ones(size)
red = np.ones(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
#Check the returned array is false everywhere when the value of the green pixels is greater than the value of the blue pixels but less than the value of the red pixels
blue = np.zeros(size)
assert_images_equal(red,green,blue,falseArray)
#As above but with red and blue pixels switched
red = np.zeros(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
#Check the returned array is true everywhere when the value of the green pixels is greater than the value of the red and blue pixels
blue = np.zeros(size)
assert_images_equal(red,green,blue,trueArray)
| Add patch decorator to test_green() function | Add patch decorator to test_green() function
| Python | mit | MikeVasmer/GreenGraphCoursework | from greengraph.map import Map
import numpy as np
from nose.tools import assert_equal
import yaml
def test_green():
size = (10,10)
zoom = 10
lat = 50
lon = 50
satellite = True
testMap = Map(lat,lon,satellite,zoom,size)
threshold = 1
trueArray = np.ones(size,dtype=bool)
falseArray = np.zeros(size,dtype=bool)
def assert_images_equal(r,g,b,checkArray):
testPixels = np.dstack((r,g,blue))
testMap.pixels = testPixels
np.testing.assert_array_equal(testMap.green(threshold),checkArray)
green = np.ones(size)
red = np.ones(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
blue = np.zeros(size)
assert_images_equal(red,green,blue,falseArray)
red = np.zeros(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
blue = np.zeros(size)
assert_images_equal(red,green,blue,trueArray)
Add patch decorator to test_green() function | from greengraph.map import Map
import numpy as np
from nose.tools import assert_equal
from mock import patch
import os
@patch('requests.get')
@patch('matplotlib.image.imread')
@patch('StringIO.StringIO')
def test_green(mock_StringIO, mock_imread, mock_get):
    """Check Map.green() thresholding with network, image decode and IO patched out.

    Stacked @patch decorators inject their mocks bottom-up: the innermost
    patch (StringIO.StringIO) supplies the first argument and the outermost
    (requests.get) the last. The previous parameter order
    (mock_get, mock_imread, mock_StringIO) mislabelled all three mocks.
    """
    def assert_images_equal(r,g,b,checkArray):
        # Stack the channel planes into an RGB image and compare green()'s
        # boolean verdict against the expected mask.
        testMap.pixels = np.dstack((r,g,b))
        np.testing.assert_array_equal(testMap.green(threshold),checkArray)
    lat = 50
    lon = 50
    testMap = Map(lat,lon)
    size = (400,400)
    trueArray = np.ones(size,dtype=bool)
    falseArray = np.zeros(size,dtype=bool)
    threshold = 1
    #Check the returned array is false everywhere when the value of the green pixels is identical to the values of the red and blue pixels
    green = np.ones(size)
    red = np.ones(size)
    blue = np.ones(size)
    assert_images_equal(red,green,blue,falseArray)
    #Check the returned array is false everywhere when the green pixels exceed the blue pixels but do not exceed the red pixels
    blue = np.zeros(size)
    assert_images_equal(red,green,blue,falseArray)
    #As above but with red and blue pixels switched
    red = np.zeros(size)
    blue = np.ones(size)
    assert_images_equal(red,green,blue,falseArray)
    #Check the returned array is true everywhere when the value of the green pixels is greater than the value of the red and blue pixels
    blue = np.zeros(size)
    assert_images_equal(red,green,blue,trueArray)
| <commit_before>from greengraph.map import Map
import numpy as np
from nose.tools import assert_equal
import yaml
def test_green():
size = (10,10)
zoom = 10
lat = 50
lon = 50
satellite = True
testMap = Map(lat,lon,satellite,zoom,size)
threshold = 1
trueArray = np.ones(size,dtype=bool)
falseArray = np.zeros(size,dtype=bool)
def assert_images_equal(r,g,b,checkArray):
testPixels = np.dstack((r,g,blue))
testMap.pixels = testPixels
np.testing.assert_array_equal(testMap.green(threshold),checkArray)
green = np.ones(size)
red = np.ones(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
blue = np.zeros(size)
assert_images_equal(red,green,blue,falseArray)
red = np.zeros(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
blue = np.zeros(size)
assert_images_equal(red,green,blue,trueArray)
<commit_msg>Add patch decorator to test_green() function<commit_after> | from greengraph.map import Map
import numpy as np
from nose.tools import assert_equal
from mock import patch
import os
@patch('requests.get')
@patch('matplotlib.image.imread')
@patch('StringIO.StringIO')
def test_green(mock_get,mock_imread,mock_StringIO):
def assert_images_equal(r,g,b,checkArray):
testMap.pixels = np.dstack((r,g,b))
np.testing.assert_array_equal(testMap.green(threshold),checkArray)
lat = 50
lon = 50
testMap = Map(lat,lon)
size = (400,400)
trueArray = np.ones(size,dtype=bool)
falseArray = np.zeros(size,dtype=bool)
threshold = 1
#Check the returned array is false everywhere when the value of the green pixels is identical to the values of the red and blue pixels
green = np.ones(size)
red = np.ones(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
#Check the returned array is false everywhere when the value of the green pixels is greater than the value of the blue pixels but less than the value of the red pixels
blue = np.zeros(size)
assert_images_equal(red,green,blue,falseArray)
#As above but with red and blue pixels switched
red = np.zeros(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
#Check the returned array is true everywhere when the value of the green pixels is greater than the value of the red and blue pixels
blue = np.zeros(size)
assert_images_equal(red,green,blue,trueArray)
| from greengraph.map import Map
import numpy as np
from nose.tools import assert_equal
import yaml
def test_green():
size = (10,10)
zoom = 10
lat = 50
lon = 50
satellite = True
testMap = Map(lat,lon,satellite,zoom,size)
threshold = 1
trueArray = np.ones(size,dtype=bool)
falseArray = np.zeros(size,dtype=bool)
def assert_images_equal(r,g,b,checkArray):
testPixels = np.dstack((r,g,blue))
testMap.pixels = testPixels
np.testing.assert_array_equal(testMap.green(threshold),checkArray)
green = np.ones(size)
red = np.ones(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
blue = np.zeros(size)
assert_images_equal(red,green,blue,falseArray)
red = np.zeros(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
blue = np.zeros(size)
assert_images_equal(red,green,blue,trueArray)
Add patch decorator to test_green() functionfrom greengraph.map import Map
import numpy as np
from nose.tools import assert_equal
from mock import patch
import os
@patch('requests.get')
@patch('matplotlib.image.imread')
@patch('StringIO.StringIO')
def test_green(mock_get,mock_imread,mock_StringIO):
def assert_images_equal(r,g,b,checkArray):
testMap.pixels = np.dstack((r,g,b))
np.testing.assert_array_equal(testMap.green(threshold),checkArray)
lat = 50
lon = 50
testMap = Map(lat,lon)
size = (400,400)
trueArray = np.ones(size,dtype=bool)
falseArray = np.zeros(size,dtype=bool)
threshold = 1
#Check the returned array is false everywhere when the value of the green pixels is identical to the values of the red and blue pixels
green = np.ones(size)
red = np.ones(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
#Check the returned array is false everywhere when the value of the green pixels is greater than the value of the blue pixels but less than the value of the red pixels
blue = np.zeros(size)
assert_images_equal(red,green,blue,falseArray)
#As above but with red and blue pixels switched
red = np.zeros(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
#Check the returned array is true everywhere when the value of the green pixels is greater than the value of the red and blue pixels
blue = np.zeros(size)
assert_images_equal(red,green,blue,trueArray)
| <commit_before>from greengraph.map import Map
import numpy as np
from nose.tools import assert_equal
import yaml
def test_green():
size = (10,10)
zoom = 10
lat = 50
lon = 50
satellite = True
testMap = Map(lat,lon,satellite,zoom,size)
threshold = 1
trueArray = np.ones(size,dtype=bool)
falseArray = np.zeros(size,dtype=bool)
def assert_images_equal(r,g,b,checkArray):
testPixels = np.dstack((r,g,blue))
testMap.pixels = testPixels
np.testing.assert_array_equal(testMap.green(threshold),checkArray)
green = np.ones(size)
red = np.ones(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
blue = np.zeros(size)
assert_images_equal(red,green,blue,falseArray)
red = np.zeros(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
blue = np.zeros(size)
assert_images_equal(red,green,blue,trueArray)
<commit_msg>Add patch decorator to test_green() function<commit_after>from greengraph.map import Map
import numpy as np
from nose.tools import assert_equal
from mock import patch
import os
@patch('requests.get')
@patch('matplotlib.image.imread')
@patch('StringIO.StringIO')
def test_green(mock_get,mock_imread,mock_StringIO):
def assert_images_equal(r,g,b,checkArray):
testMap.pixels = np.dstack((r,g,b))
np.testing.assert_array_equal(testMap.green(threshold),checkArray)
lat = 50
lon = 50
testMap = Map(lat,lon)
size = (400,400)
trueArray = np.ones(size,dtype=bool)
falseArray = np.zeros(size,dtype=bool)
threshold = 1
#Check the returned array is false everywhere when the value of the green pixels is identical to the values of the red and blue pixels
green = np.ones(size)
red = np.ones(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
#Check the returned array is false everywhere when the value of the green pixels is greater than the value of the blue pixels but less than the value of the red pixels
blue = np.zeros(size)
assert_images_equal(red,green,blue,falseArray)
#As above but with red and blue pixels switched
red = np.zeros(size)
blue = np.ones(size)
assert_images_equal(red,green,blue,falseArray)
#Check the returned array is true everywhere when the value of the green pixels is greater than the value of the red and blue pixels
blue = np.zeros(size)
assert_images_equal(red,green,blue,trueArray)
|
efda61fab238c278791245af9a89c6d70d2425e7 | pyflation/analysis/tests/test_deltaprel.py | pyflation/analysis/tests/test_deltaprel.py | ''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
    """Tests for deltaprel.soundspeeds(Vphi, phidot, H)."""
    def setup(self):
        # Fixture arrays of shape (4,3,2); H carries a length-1 middle axis,
        # presumably so it broadcasts against Vphi/phidot -- confirm in deltaprel.
        self.Vphi = np.arange(24).reshape((4,3,2))
        self.phidot = self.Vphi
        self.H = np.arange(8).reshape((4,1,2))
    def test_shape(self):
        """Test whether the soundspeeds are shaped correctly."""
        arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
        assert_(arr.shape == self.Vphi.shape)
    def test_scalar(self):
        """Test results of 1x1x1 calculation."""
        # Argument order is (Vphi, phidot, H): Vphi=3, phidot=0.5, H=2 -> 2.
        # The previous call swapped phidot and H (soundspeeds(3, 2, 0.5)).
        arr = deltaprel.soundspeeds(3, 0.5, 2)
        assert_(arr == 2)
    def test_wrongshape(self):
        """Test that wrong shapes raise exception."""
        self.H = np.arange(8).reshape((4,2))
        assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
| ''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 0.5, 2)
assert_(arr == 2)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
| Fix arrangement of argument list. | Fix arrangement of argument list.
| Python | bsd-3-clause | ihuston/pyflation,ihuston/pyflation | ''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 2, 0.5)
assert_(arr == 2)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
Fix arrangement of argument list. | ''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 0.5, 2)
assert_(arr == 2)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
| <commit_before>''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 2, 0.5)
assert_(arr == 2)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
<commit_msg>Fix arrangement of argument list.<commit_after> | ''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 0.5, 2)
assert_(arr == 2)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
| ''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 2, 0.5)
assert_(arr == 2)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
Fix arrangement of argument list.''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 0.5, 2)
assert_(arr == 2)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
| <commit_before>''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 2, 0.5)
assert_(arr == 2)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
<commit_msg>Fix arrangement of argument list.<commit_after>''' test_deltaprel - Test functions for deltaprel module
Author: Ian Huston
For license and copyright information see LICENSE.txt which was distributed with this file.
'''
import numpy as np
from numpy.testing import assert_, assert_raises
from pyflation.analysis import deltaprel
import nose
class TestSoundSpeeds():
def setup(self):
self.Vphi = np.arange(24).reshape((4,3,2))
self.phidot = self.Vphi
self.H = np.arange(8).reshape((4,1,2))
def test_shape(self):
"""Test whether the soundspeeds are shaped correctly."""
arr = deltaprel.soundspeeds(self.Vphi, self.phidot, self.H)
assert_(arr.shape == self.Vphi.shape)
def test_scalar(self):
"""Test results of 1x1x1 calculation."""
arr = deltaprel.soundspeeds(3, 0.5, 2)
assert_(arr == 2)
def test_wrongshape(self):
"""Test that wrong shapes raise exception."""
self.H = np.arange(8).reshape((4,2))
assert_raises(ValueError, deltaprel.soundspeeds, self.Vphi, self.phidot, self.H)
|
2453d33da9535d069a829ec9a316093874fbb9a4 | pythonwarrior/towers/beginner/level_002.py | pythonwarrior/towers/beginner/level_002.py | # --------
# |@ s >|
# --------
level.description("It is too dark to see anything, but you smell sludge nearby.")
# Tip must use Python-warrior syntax: feel().is_empty() and attack() are
# method calls (the Ruby-warrior original wrote .is_empty and attack!).
level.tip("Use warrior.feel().is_empty() to see if there is anything in front of you, and warrior.attack() to fight it. Remember, you can only do one action (ending in _) per turn.")
level.time_bonus(20)
level.ace_score(26)
level.size(8, 1)
level.stairs(7, 0)
def add_abilities(warrior):
    # Grant the two abilities this level teaches.
    warrior.add_abilities('feel', 'attack_')
level.warrior(0, 0, 'east', func=add_abilities)
level.unit('sludge', 4, 0, 'west')
| # --------
# |@ s >|
# --------
level.description("It is too dark to see anything, but you smell sludge nearby.")
level.tip("Use warrior.feel().is_empty() to see if there is anything in front of you, and warrior.attack() to fight it. Remember, you can only do one action (ending in _) per turn.")
level.time_bonus(20)
level.ace_score(26)
level.size(8, 1)
level.stairs(7, 0)
def add_abilities(warrior):
warrior.add_abilities('feel', 'attack_')
level.warrior(0, 0, 'east', func=add_abilities)
level.unit('sludge', 4, 0, 'west')
| Update description and fix Ruby -> Python syntax | Update description and fix Ruby -> Python syntax | Python | mit | arbylee/python-warrior | # --------
# |@ s >|
# --------
level.description("It is too dark to see anything, but you smell sludge nearby.")
level.tip("Use warrior.feel().is_empty to see if there is anything in front of you, and warrior.attack! to fight it. Remember, you can only do one action (ending in _) per turn.")
level.time_bonus(20)
level.ace_score(26)
level.size(8, 1)
level.stairs(7, 0)
def add_abilities(warrior):
warrior.add_abilities('feel', 'attack_')
level.warrior(0, 0, 'east', func=add_abilities)
level.unit('sludge', 4, 0, 'west')
Update description and fix Ruby -> Python syntax | # --------
# |@ s >|
# --------
level.description("It is too dark to see anything, but you smell sludge nearby.")
level.tip("Use warrior.feel().is_empty() to see if there is anything in front of you, and warrior.attack() to fight it. Remember, you can only do one action (ending in _) per turn.")
level.time_bonus(20)
level.ace_score(26)
level.size(8, 1)
level.stairs(7, 0)
def add_abilities(warrior):
warrior.add_abilities('feel', 'attack_')
level.warrior(0, 0, 'east', func=add_abilities)
level.unit('sludge', 4, 0, 'west')
| <commit_before># --------
# |@ s >|
# --------
level.description("It is too dark to see anything, but you smell sludge nearby.")
level.tip("Use warrior.feel().is_empty to see if there is anything in front of you, and warrior.attack! to fight it. Remember, you can only do one action (ending in _) per turn.")
level.time_bonus(20)
level.ace_score(26)
level.size(8, 1)
level.stairs(7, 0)
def add_abilities(warrior):
warrior.add_abilities('feel', 'attack_')
level.warrior(0, 0, 'east', func=add_abilities)
level.unit('sludge', 4, 0, 'west')
<commit_msg>Update description and fix Ruby -> Python syntax<commit_after> | # --------
# |@ s >|
# --------
level.description("It is too dark to see anything, but you smell sludge nearby.")
level.tip("Use warrior.feel().is_empty() to see if there is anything in front of you, and warrior.attack() to fight it. Remember, you can only do one action (ending in _) per turn.")
level.time_bonus(20)
level.ace_score(26)
level.size(8, 1)
level.stairs(7, 0)
def add_abilities(warrior):
warrior.add_abilities('feel', 'attack_')
level.warrior(0, 0, 'east', func=add_abilities)
level.unit('sludge', 4, 0, 'west')
| # --------
# |@ s >|
# --------
level.description("It is too dark to see anything, but you smell sludge nearby.")
level.tip("Use warrior.feel().is_empty to see if there is anything in front of you, and warrior.attack! to fight it. Remember, you can only do one action (ending in _) per turn.")
level.time_bonus(20)
level.ace_score(26)
level.size(8, 1)
level.stairs(7, 0)
def add_abilities(warrior):
warrior.add_abilities('feel', 'attack_')
level.warrior(0, 0, 'east', func=add_abilities)
level.unit('sludge', 4, 0, 'west')
Update description and fix Ruby -> Python syntax# --------
# |@ s >|
# --------
level.description("It is too dark to see anything, but you smell sludge nearby.")
level.tip("Use warrior.feel().is_empty() to see if there is anything in front of you, and warrior.attack() to fight it. Remember, you can only do one action (ending in _) per turn.")
level.time_bonus(20)
level.ace_score(26)
level.size(8, 1)
level.stairs(7, 0)
def add_abilities(warrior):
warrior.add_abilities('feel', 'attack_')
level.warrior(0, 0, 'east', func=add_abilities)
level.unit('sludge', 4, 0, 'west')
| <commit_before># --------
# |@ s >|
# --------
level.description("It is too dark to see anything, but you smell sludge nearby.")
level.tip("Use warrior.feel().is_empty to see if there is anything in front of you, and warrior.attack! to fight it. Remember, you can only do one action (ending in _) per turn.")
level.time_bonus(20)
level.ace_score(26)
level.size(8, 1)
level.stairs(7, 0)
def add_abilities(warrior):
warrior.add_abilities('feel', 'attack_')
level.warrior(0, 0, 'east', func=add_abilities)
level.unit('sludge', 4, 0, 'west')
<commit_msg>Update description and fix Ruby -> Python syntax<commit_after># --------
# |@ s >|
# --------
level.description("It is too dark to see anything, but you smell sludge nearby.")
level.tip("Use warrior.feel().is_empty() to see if there is anything in front of you, and warrior.attack() to fight it. Remember, you can only do one action (ending in _) per turn.")
level.time_bonus(20)
level.ace_score(26)
level.size(8, 1)
level.stairs(7, 0)
def add_abilities(warrior):
warrior.add_abilities('feel', 'attack_')
level.warrior(0, 0, 'east', func=add_abilities)
level.unit('sludge', 4, 0, 'west')
|
95da47010839da430223700345e07078b2157131 | evewspace/account/models.py | evewspace/account/models.py | from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class UserProfile(models.Model):
    """UserProfile defines custom fields tied to each User record in the Django auth DB."""
    # One profile row per auth User (unique FK); rows are created by the
    # post_save handler in this module. NOTE(review): OneToOneField is the
    # conventional spelling for a unique FK -- confirm before changing, as it
    # alters the reverse accessor.
    user = models.ForeignKey(User, unique = True)
    # Optional Jabber/XMPP ID; JIDs share the user@host shape, hence EmailField.
    jabberid = models.EmailField(blank = True, null=True)
    # Map opened by default for this user; optional.
    defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
class GroupProfile(models.Model):
    """GroupProfile defines custom fields tied to each Group record."""
    # One profile row per auth Group (unique FK); rows are created by the
    # post_save handler in this module.
    group = models.ForeignKey(Group, unique=True)
    # Optional free-text description, max 200 characters.
    description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
    """post_save hook: give every newly inserted User a matching UserProfile."""
    if not created:
        # Ordinary saves of existing users need no new profile.
        return
    UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
    """post_save hook: give every newly inserted Group a matching GroupProfile."""
    if not created:
        # Ordinary saves of existing groups need no new profile.
        return
    GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
| from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class PlayTime(models.Model):
"""PlayTime represents a choice of play times for use in several forms."""
fromtime = models.TimeField()
totime = models.TimeField()
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
playtimes = models.ManyToManyField(PlayTime)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
| Add PlayTime class and tie it to user profiles | Add PlayTime class and tie it to user profiles
Created a PlayTime class in account with from and to times.
Added ManyToManyField to UserProfile to keep track of play times.
| Python | apache-2.0 | evewspace/eve-wspace,Maarten28/eve-wspace,proycon/eve-wspace,gpapaz/eve-wspace,Unsettled/eve-wspace,acdervis/eve-wspace,Unsettled/eve-wspace,marbindrakon/eve-wspace,hybrid1969/eve-wspace,Unsettled/eve-wspace,acdervis/eve-wspace,marbindrakon/eve-wspace,Maarten28/eve-wspace,mmalyska/eve-wspace,acdervis/eve-wspace,nyrocron/eve-wspace,nyrocron/eve-wspace,acdervis/eve-wspace,gpapaz/eve-wspace,marbindrakon/eve-wspace,hybrid1969/eve-wspace,Zumochi/eve-wspace,Maarten28/eve-wspace,mmalyska/eve-wspace,evewspace/eve-wspace,Zumochi/eve-wspace,proycon/eve-wspace,hybrid1969/eve-wspace,nyrocron/eve-wspace,nyrocron/eve-wspace,mmalyska/eve-wspace,marbindrakon/eve-wspace,evewspace/eve-wspace,Maarten28/eve-wspace,evewspace/eve-wspace,mmalyska/eve-wspace,gpapaz/eve-wspace,proycon/eve-wspace,proycon/eve-wspace,Zumochi/eve-wspace,hybrid1969/eve-wspace,Unsettled/eve-wspace,gpapaz/eve-wspace,Zumochi/eve-wspace | from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
Add PlayTime class and tie it to user profiles
Created a PlayTime class in account with from and to times.
Added ManyToManyField to UserProfile to keep track of play times. | from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class PlayTime(models.Model):
"""PlayTime represents a choice of play times for use in several forms."""
fromtime = models.TimeField()
totime = models.TimeField()
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
playtimes = models.ManyToManyField(PlayTime)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
| <commit_before>from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
<commit_msg>Add PlayTime class and tie it to user profiles
Created a PlayTime class in account with from and to times.
Added ManyToManyField to UserProfile to keep track of play times.<commit_after> | from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class PlayTime(models.Model):
"""PlayTime represents a choice of play times for use in several forms."""
fromtime = models.TimeField()
totime = models.TimeField()
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
playtimes = models.ManyToManyField(PlayTime)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
| from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
Add PlayTime class and tie it to user profiles
Created a PlayTime class in account with from and to times.
Added ManyToManyField to UserProfile to keep track of play times.from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class PlayTime(models.Model):
"""PlayTime represents a choice of play times for use in several forms."""
fromtime = models.TimeField()
totime = models.TimeField()
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
playtimes = models.ManyToManyField(PlayTime)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
| <commit_before>from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
<commit_msg>Add PlayTime class and tie it to user profiles
Created a PlayTime class in account with from and to times.
Added ManyToManyField to UserProfile to keep track of play times.<commit_after>from django.db import models
from django.contrib.auth.models import User, Group
from evewspace.Map.models import Map
from django.db.models.signals import post_save
# Create your models here.
class PlayTime(models.Model):
"""PlayTime represents a choice of play times for use in several forms."""
fromtime = models.TimeField()
totime = models.TimeField()
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique = True)
jabberid = models.EmailField(blank = True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
playtimes = models.ManyToManyField(PlayTime)
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.ForeignKey(Group, unique=True)
description = models.CharField(max_length=200, blank=True, null=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation even and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
|
09052d05c27921bc87b0c968de02b244b4e5a56b | cryptchat/test/test_networkhandler.py | cryptchat/test/test_networkhandler.py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python3 -m unittest discover
import unittest
from ..network.networkhandler import NetworkHandler
from ..crypto.aes import AESCipher
from ..crypto.diffiehellman import DiffieHellman
class testNetworkHandler(unittest.TestCase):
def setUp(self):
alice = DiffieHellman()
bob = DiffieHellman()
a = alice.gensessionkey(bob.publickey)
b = bob.gensessionkey(alice.publickey)
aes1 = AESCipher(a)
aes2 = AESCipher(b)
self.server = NetworkHandler("localhost", 8090, True, alice, aes1)
self.client = NetworkHandler("localhost", 8090, False, bob, aes2)
def test_sendmessage(self):
self.server.start()
self.client.start()
m = "This is secret please do not read. And some chars to get unicode-testing out of the way åäö"
self.client.send(m)
m2 = self.server.getinmessage()
self.assertEqual(m, m2)
def tearDown(self):
self.server.stop()
self.client.stop()
def main():
unittest.main()
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python3 -m unittest discover
import unittest
from ..network.networkhandler import NetworkHandler
from ..crypto.aes import AESCipher
from ..crypto.diffiehellman import DiffieHellman
class testNetworkHandler(unittest.TestCase):
@classmethod
def setUpClass(self):
alice = DiffieHellman()
bob = DiffieHellman()
a = alice.gensessionkey(bob.publickey)
b = bob.gensessionkey(alice.publickey)
aes1 = AESCipher(a)
aes2 = AESCipher(b)
self.server = NetworkHandler("localhost", 8090, True, alice, aes1)
self.client = NetworkHandler("localhost", 8090, False, bob, aes2)
def test_sendmessage(self):
self.server.start()
self.client.start()
m = "This is secret please do not read. And some chars to get unicode-testing out of the way åäö"
self.client.send(m)
m2 = self.server.getinmessage()
self.assertEqual(m, m2)
def main():
unittest.main()
if __name__ == '__main__':
main()
| Set up server/client once for the netcode test | Set up server/client once for the netcode test
| Python | mit | djohsson/Cryptchat | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python3 -m unittest discover
import unittest
from ..network.networkhandler import NetworkHandler
from ..crypto.aes import AESCipher
from ..crypto.diffiehellman import DiffieHellman
class testNetworkHandler(unittest.TestCase):
def setUp(self):
alice = DiffieHellman()
bob = DiffieHellman()
a = alice.gensessionkey(bob.publickey)
b = bob.gensessionkey(alice.publickey)
aes1 = AESCipher(a)
aes2 = AESCipher(b)
self.server = NetworkHandler("localhost", 8090, True, alice, aes1)
self.client = NetworkHandler("localhost", 8090, False, bob, aes2)
def test_sendmessage(self):
self.server.start()
self.client.start()
m = "This is secret please do not read. And some chars to get unicode-testing out of the way åäö"
self.client.send(m)
m2 = self.server.getinmessage()
self.assertEqual(m, m2)
def tearDown(self):
self.server.stop()
self.client.stop()
def main():
unittest.main()
if __name__ == '__main__':
main()
Set up server/client once for the netcode test | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python3 -m unittest discover
import unittest
from ..network.networkhandler import NetworkHandler
from ..crypto.aes import AESCipher
from ..crypto.diffiehellman import DiffieHellman
class testNetworkHandler(unittest.TestCase):
@classmethod
def setUpClass(self):
alice = DiffieHellman()
bob = DiffieHellman()
a = alice.gensessionkey(bob.publickey)
b = bob.gensessionkey(alice.publickey)
aes1 = AESCipher(a)
aes2 = AESCipher(b)
self.server = NetworkHandler("localhost", 8090, True, alice, aes1)
self.client = NetworkHandler("localhost", 8090, False, bob, aes2)
def test_sendmessage(self):
self.server.start()
self.client.start()
m = "This is secret please do not read. And some chars to get unicode-testing out of the way åäö"
self.client.send(m)
m2 = self.server.getinmessage()
self.assertEqual(m, m2)
def main():
unittest.main()
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python3 -m unittest discover
import unittest
from ..network.networkhandler import NetworkHandler
from ..crypto.aes import AESCipher
from ..crypto.diffiehellman import DiffieHellman
class testNetworkHandler(unittest.TestCase):
def setUp(self):
alice = DiffieHellman()
bob = DiffieHellman()
a = alice.gensessionkey(bob.publickey)
b = bob.gensessionkey(alice.publickey)
aes1 = AESCipher(a)
aes2 = AESCipher(b)
self.server = NetworkHandler("localhost", 8090, True, alice, aes1)
self.client = NetworkHandler("localhost", 8090, False, bob, aes2)
def test_sendmessage(self):
self.server.start()
self.client.start()
m = "This is secret please do not read. And some chars to get unicode-testing out of the way åäö"
self.client.send(m)
m2 = self.server.getinmessage()
self.assertEqual(m, m2)
def tearDown(self):
self.server.stop()
self.client.stop()
def main():
unittest.main()
if __name__ == '__main__':
main()
<commit_msg>Set up server/client once for the netcode test<commit_after> | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python3 -m unittest discover
import unittest
from ..network.networkhandler import NetworkHandler
from ..crypto.aes import AESCipher
from ..crypto.diffiehellman import DiffieHellman
class testNetworkHandler(unittest.TestCase):
@classmethod
def setUpClass(self):
alice = DiffieHellman()
bob = DiffieHellman()
a = alice.gensessionkey(bob.publickey)
b = bob.gensessionkey(alice.publickey)
aes1 = AESCipher(a)
aes2 = AESCipher(b)
self.server = NetworkHandler("localhost", 8090, True, alice, aes1)
self.client = NetworkHandler("localhost", 8090, False, bob, aes2)
def test_sendmessage(self):
self.server.start()
self.client.start()
m = "This is secret please do not read. And some chars to get unicode-testing out of the way åäö"
self.client.send(m)
m2 = self.server.getinmessage()
self.assertEqual(m, m2)
def main():
unittest.main()
if __name__ == '__main__':
main()
| #!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python3 -m unittest discover
import unittest
from ..network.networkhandler import NetworkHandler
from ..crypto.aes import AESCipher
from ..crypto.diffiehellman import DiffieHellman
class testNetworkHandler(unittest.TestCase):
def setUp(self):
alice = DiffieHellman()
bob = DiffieHellman()
a = alice.gensessionkey(bob.publickey)
b = bob.gensessionkey(alice.publickey)
aes1 = AESCipher(a)
aes2 = AESCipher(b)
self.server = NetworkHandler("localhost", 8090, True, alice, aes1)
self.client = NetworkHandler("localhost", 8090, False, bob, aes2)
def test_sendmessage(self):
self.server.start()
self.client.start()
m = "This is secret please do not read. And some chars to get unicode-testing out of the way åäö"
self.client.send(m)
m2 = self.server.getinmessage()
self.assertEqual(m, m2)
def tearDown(self):
self.server.stop()
self.client.stop()
def main():
unittest.main()
if __name__ == '__main__':
main()
Set up server/client once for the netcode test#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python3 -m unittest discover
import unittest
from ..network.networkhandler import NetworkHandler
from ..crypto.aes import AESCipher
from ..crypto.diffiehellman import DiffieHellman
class testNetworkHandler(unittest.TestCase):
@classmethod
def setUpClass(self):
alice = DiffieHellman()
bob = DiffieHellman()
a = alice.gensessionkey(bob.publickey)
b = bob.gensessionkey(alice.publickey)
aes1 = AESCipher(a)
aes2 = AESCipher(b)
self.server = NetworkHandler("localhost", 8090, True, alice, aes1)
self.client = NetworkHandler("localhost", 8090, False, bob, aes2)
def test_sendmessage(self):
self.server.start()
self.client.start()
m = "This is secret please do not read. And some chars to get unicode-testing out of the way åäö"
self.client.send(m)
m2 = self.server.getinmessage()
self.assertEqual(m, m2)
def main():
unittest.main()
if __name__ == '__main__':
main()
| <commit_before>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python3 -m unittest discover
import unittest
from ..network.networkhandler import NetworkHandler
from ..crypto.aes import AESCipher
from ..crypto.diffiehellman import DiffieHellman
class testNetworkHandler(unittest.TestCase):
def setUp(self):
alice = DiffieHellman()
bob = DiffieHellman()
a = alice.gensessionkey(bob.publickey)
b = bob.gensessionkey(alice.publickey)
aes1 = AESCipher(a)
aes2 = AESCipher(b)
self.server = NetworkHandler("localhost", 8090, True, alice, aes1)
self.client = NetworkHandler("localhost", 8090, False, bob, aes2)
def test_sendmessage(self):
self.server.start()
self.client.start()
m = "This is secret please do not read. And some chars to get unicode-testing out of the way åäö"
self.client.send(m)
m2 = self.server.getinmessage()
self.assertEqual(m, m2)
def tearDown(self):
self.server.stop()
self.client.stop()
def main():
unittest.main()
if __name__ == '__main__':
main()
<commit_msg>Set up server/client once for the netcode test<commit_after>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Run from Cryptchat
# python3 -m unittest discover
import unittest
from ..network.networkhandler import NetworkHandler
from ..crypto.aes import AESCipher
from ..crypto.diffiehellman import DiffieHellman
class testNetworkHandler(unittest.TestCase):
@classmethod
def setUpClass(self):
alice = DiffieHellman()
bob = DiffieHellman()
a = alice.gensessionkey(bob.publickey)
b = bob.gensessionkey(alice.publickey)
aes1 = AESCipher(a)
aes2 = AESCipher(b)
self.server = NetworkHandler("localhost", 8090, True, alice, aes1)
self.client = NetworkHandler("localhost", 8090, False, bob, aes2)
def test_sendmessage(self):
self.server.start()
self.client.start()
m = "This is secret please do not read. And some chars to get unicode-testing out of the way åäö"
self.client.send(m)
m2 = self.server.getinmessage()
self.assertEqual(m, m2)
def main():
unittest.main()
if __name__ == '__main__':
main()
|
83831a3434cdaf0a5ca214dfc4bd7fec65d4ffac | fastai/vision/models/tvm.py | fastai/vision/models/tvm.py | from torchvision.models import ResNet,resnet18,resnet34,resnet50,resnet101,resnet152
from torchvision.models import SqueezeNet,squeezenet1_0,squeezenet1_1
from torchvision.models import densenet121,densenet169,densenet201,densenet161
from torchvision.models import vgg11_bn,vgg13_bn,vgg16_bn,vgg19_bn,alexnet
| from torchvision.models import *
import types as _t
_g = globals()
for _k, _v in list(_g.items()):
if (
isinstance(_v, _t.ModuleType) and _v.__name__.startswith("torchvision.models")
) or (callable(_v) and _v.__module__ == "torchvision.models._api"):
del _g[_k]
del _k, _v, _g, _t
| Add latest TorchVision models on fastai | Add latest TorchVision models on fastai | Python | apache-2.0 | fastai/fastai | from torchvision.models import ResNet,resnet18,resnet34,resnet50,resnet101,resnet152
from torchvision.models import SqueezeNet,squeezenet1_0,squeezenet1_1
from torchvision.models import densenet121,densenet169,densenet201,densenet161
from torchvision.models import vgg11_bn,vgg13_bn,vgg16_bn,vgg19_bn,alexnet
Add latest TorchVision models on fastai | from torchvision.models import *
import types as _t
_g = globals()
for _k, _v in list(_g.items()):
if (
isinstance(_v, _t.ModuleType) and _v.__name__.startswith("torchvision.models")
) or (callable(_v) and _v.__module__ == "torchvision.models._api"):
del _g[_k]
del _k, _v, _g, _t
| <commit_before>from torchvision.models import ResNet,resnet18,resnet34,resnet50,resnet101,resnet152
from torchvision.models import SqueezeNet,squeezenet1_0,squeezenet1_1
from torchvision.models import densenet121,densenet169,densenet201,densenet161
from torchvision.models import vgg11_bn,vgg13_bn,vgg16_bn,vgg19_bn,alexnet
<commit_msg>Add latest TorchVision models on fastai<commit_after> | from torchvision.models import *
import types as _t
_g = globals()
for _k, _v in list(_g.items()):
if (
isinstance(_v, _t.ModuleType) and _v.__name__.startswith("torchvision.models")
) or (callable(_v) and _v.__module__ == "torchvision.models._api"):
del _g[_k]
del _k, _v, _g, _t
| from torchvision.models import ResNet,resnet18,resnet34,resnet50,resnet101,resnet152
from torchvision.models import SqueezeNet,squeezenet1_0,squeezenet1_1
from torchvision.models import densenet121,densenet169,densenet201,densenet161
from torchvision.models import vgg11_bn,vgg13_bn,vgg16_bn,vgg19_bn,alexnet
Add latest TorchVision models on fastaifrom torchvision.models import *
import types as _t
_g = globals()
for _k, _v in list(_g.items()):
if (
isinstance(_v, _t.ModuleType) and _v.__name__.startswith("torchvision.models")
) or (callable(_v) and _v.__module__ == "torchvision.models._api"):
del _g[_k]
del _k, _v, _g, _t
| <commit_before>from torchvision.models import ResNet,resnet18,resnet34,resnet50,resnet101,resnet152
from torchvision.models import SqueezeNet,squeezenet1_0,squeezenet1_1
from torchvision.models import densenet121,densenet169,densenet201,densenet161
from torchvision.models import vgg11_bn,vgg13_bn,vgg16_bn,vgg19_bn,alexnet
<commit_msg>Add latest TorchVision models on fastai<commit_after>from torchvision.models import *
import types as _t
_g = globals()
for _k, _v in list(_g.items()):
if (
isinstance(_v, _t.ModuleType) and _v.__name__.startswith("torchvision.models")
) or (callable(_v) and _v.__module__ == "torchvision.models._api"):
del _g[_k]
del _k, _v, _g, _t
|
8972720110ca73bf01c718bdc5cd2f99d2d12743 | tests/test_parse_perl6.py | tests/test_parse_perl6.py | import cdent.test
import cdent.parser.cdent.yaml
import cdent.parser.perl6
class TestPythonParser(cdent.test.TestCase):
def test_parse_perl6(self):
parser = cdent.parser.perl6.Parser()
# parser.debug = True
input = file('tests/modules/world.cd.pm6', 'r').read()
parser.open(input)
try:
ast = parser.parse()
except cdent.parser.ParseError, err:
print err
return
exit(1)
parser = cdent.parser.cdent.yaml.Parser()
input = file('tests/modules/world.cd.yaml', 'r').read()
parser.open(input)
expected = parser.parse()
self.assertEqual(ast.__class__.__name__, expected.__class__.__name__)
if __name__ == '__main__':
cdent.test.main()
| import cdent.test
import cdent.parser.cdent.yaml
import cdent.parser.perl6
class TestPythonParser(cdent.test.TestCase):
def test_parse_perl6(self):
parser = cdent.parser.perl6.Parser()
# parser.debug = True
input = file('tests/modules/World.cd.pm6', 'r').read()
parser.open(input)
try:
ast = parser.parse()
except cdent.parser.ParseError, err:
print err
return
exit(1)
parser = cdent.parser.cdent.yaml.Parser()
input = file('tests/modules/world.cd.yaml', 'r').read()
parser.open(input)
expected = parser.parse()
self.assertEqual(ast.__class__.__name__, expected.__class__.__name__)
if __name__ == '__main__':
cdent.test.main()
| Fix capitalization in test path | Fix capitalization in test path
| Python | bsd-2-clause | ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py,ingydotnet/cdent-py | import cdent.test
import cdent.parser.cdent.yaml
import cdent.parser.perl6
class TestPythonParser(cdent.test.TestCase):
def test_parse_perl6(self):
parser = cdent.parser.perl6.Parser()
# parser.debug = True
input = file('tests/modules/world.cd.pm6', 'r').read()
parser.open(input)
try:
ast = parser.parse()
except cdent.parser.ParseError, err:
print err
return
exit(1)
parser = cdent.parser.cdent.yaml.Parser()
input = file('tests/modules/world.cd.yaml', 'r').read()
parser.open(input)
expected = parser.parse()
self.assertEqual(ast.__class__.__name__, expected.__class__.__name__)
if __name__ == '__main__':
cdent.test.main()
Fix capitalization in test path | import cdent.test
import cdent.parser.cdent.yaml
import cdent.parser.perl6
class TestPythonParser(cdent.test.TestCase):
def test_parse_perl6(self):
parser = cdent.parser.perl6.Parser()
# parser.debug = True
input = file('tests/modules/World.cd.pm6', 'r').read()
parser.open(input)
try:
ast = parser.parse()
except cdent.parser.ParseError, err:
print err
return
exit(1)
parser = cdent.parser.cdent.yaml.Parser()
input = file('tests/modules/world.cd.yaml', 'r').read()
parser.open(input)
expected = parser.parse()
self.assertEqual(ast.__class__.__name__, expected.__class__.__name__)
if __name__ == '__main__':
cdent.test.main()
| <commit_before>import cdent.test
import cdent.parser.cdent.yaml
import cdent.parser.perl6
class TestPythonParser(cdent.test.TestCase):
def test_parse_perl6(self):
parser = cdent.parser.perl6.Parser()
# parser.debug = True
input = file('tests/modules/world.cd.pm6', 'r').read()
parser.open(input)
try:
ast = parser.parse()
except cdent.parser.ParseError, err:
print err
return
exit(1)
parser = cdent.parser.cdent.yaml.Parser()
input = file('tests/modules/world.cd.yaml', 'r').read()
parser.open(input)
expected = parser.parse()
self.assertEqual(ast.__class__.__name__, expected.__class__.__name__)
if __name__ == '__main__':
cdent.test.main()
<commit_msg>Fix capitalization in test path<commit_after> | import cdent.test
import cdent.parser.cdent.yaml
import cdent.parser.perl6
class TestPythonParser(cdent.test.TestCase):
def test_parse_perl6(self):
parser = cdent.parser.perl6.Parser()
# parser.debug = True
input = file('tests/modules/World.cd.pm6', 'r').read()
parser.open(input)
try:
ast = parser.parse()
except cdent.parser.ParseError, err:
print err
return
exit(1)
parser = cdent.parser.cdent.yaml.Parser()
input = file('tests/modules/world.cd.yaml', 'r').read()
parser.open(input)
expected = parser.parse()
self.assertEqual(ast.__class__.__name__, expected.__class__.__name__)
if __name__ == '__main__':
cdent.test.main()
| import cdent.test
import cdent.parser.cdent.yaml
import cdent.parser.perl6
class TestPythonParser(cdent.test.TestCase):
def test_parse_perl6(self):
parser = cdent.parser.perl6.Parser()
# parser.debug = True
input = file('tests/modules/world.cd.pm6', 'r').read()
parser.open(input)
try:
ast = parser.parse()
except cdent.parser.ParseError, err:
print err
return
exit(1)
parser = cdent.parser.cdent.yaml.Parser()
input = file('tests/modules/world.cd.yaml', 'r').read()
parser.open(input)
expected = parser.parse()
self.assertEqual(ast.__class__.__name__, expected.__class__.__name__)
if __name__ == '__main__':
cdent.test.main()
Fix capitalization in test pathimport cdent.test
import cdent.parser.cdent.yaml
import cdent.parser.perl6
class TestPythonParser(cdent.test.TestCase):
def test_parse_perl6(self):
parser = cdent.parser.perl6.Parser()
# parser.debug = True
input = file('tests/modules/World.cd.pm6', 'r').read()
parser.open(input)
try:
ast = parser.parse()
except cdent.parser.ParseError, err:
print err
return
exit(1)
parser = cdent.parser.cdent.yaml.Parser()
input = file('tests/modules/world.cd.yaml', 'r').read()
parser.open(input)
expected = parser.parse()
self.assertEqual(ast.__class__.__name__, expected.__class__.__name__)
if __name__ == '__main__':
cdent.test.main()
| <commit_before>import cdent.test
import cdent.parser.cdent.yaml
import cdent.parser.perl6
class TestPythonParser(cdent.test.TestCase):
def test_parse_perl6(self):
parser = cdent.parser.perl6.Parser()
# parser.debug = True
input = file('tests/modules/world.cd.pm6', 'r').read()
parser.open(input)
try:
ast = parser.parse()
except cdent.parser.ParseError, err:
print err
return
exit(1)
parser = cdent.parser.cdent.yaml.Parser()
input = file('tests/modules/world.cd.yaml', 'r').read()
parser.open(input)
expected = parser.parse()
self.assertEqual(ast.__class__.__name__, expected.__class__.__name__)
if __name__ == '__main__':
cdent.test.main()
<commit_msg>Fix capitalization in test path<commit_after>import cdent.test
import cdent.parser.cdent.yaml
import cdent.parser.perl6
class TestPythonParser(cdent.test.TestCase):
def test_parse_perl6(self):
parser = cdent.parser.perl6.Parser()
# parser.debug = True
input = file('tests/modules/World.cd.pm6', 'r').read()
parser.open(input)
try:
ast = parser.parse()
except cdent.parser.ParseError, err:
print err
return
exit(1)
parser = cdent.parser.cdent.yaml.Parser()
input = file('tests/modules/world.cd.yaml', 'r').read()
parser.open(input)
expected = parser.parse()
self.assertEqual(ast.__class__.__name__, expected.__class__.__name__)
if __name__ == '__main__':
cdent.test.main()
|
45bdff8fbd19a74bf04aead7d134511605df99d5 | test/settings/gyptest-settings.py | test/settings/gyptest-settings.py | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
| #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
| Make new settings test not run for xcode generator. | Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006
git-svn-id: e7e1075985beda50ea81ac4472467b4f6e91fc78@970 78cadc50-ecff-11dd-a971-7dbc132099af
| Python | bsd-3-clause | carlTLR/gyp,mistydemeo/gyp,msc-/gyp,okumura/gyp,Omegaphora/external_chromium_org_tools_gyp,channing/gyp,erikge/watch_gyp,ryfx/gyp,trafi/gyp,IllusionRom-deprecated/android_platform_external_chromium_org_tools_gyp,lukeweber/gyp-override,Phuehvk/gyp,duanhjlt/gyp,cchamberlain/gyp,Jack-Q/GYP-copy,clar/gyp,clar/gyp,openpeer/webrtc-gyp,trafi/gyp,svn2github/gyp,azunite/gyp_20150930,ttyangf/pdfium_gyp,lukeweber/gyp-override,pandaxcl/gyp,clar/gyp,alexcrichton/gyp,yangrongwei/gyp,bulldy80/gyp_unofficial,okumura/gyp,luvit/gyp,Chilledheart/gyp,turbulenz/gyp,ttyangf/pdfium_gyp,bpsinc-native/src_tools_gyp,android-ia/platform_external_chromium_org_tools_gyp,trafi/gyp,AOSPU/external_chromium_org_tools_gyp,tarc/gyp,AWhetter/gyp,erikge/watch_gyp,Danath/gyp,LazyCodingCat/gyp,xin3liang/platform_external_chromium_org_tools_gyp,sloanyang/gyp,turbulenz/gyp,azunite/gyp_20150930,sdklite/gyp,kevinchen3315/gyp-git,bnoordhuis/gyp,sdklite/gyp,azunite/gyp_20150930,mapbox/gyp,IllusionRom-deprecated/android_platform_external_chromium_org_tools_gyp,amoikevin/gyp,channing/gyp,pandaxcl/gyp,enkripsi/gyp,cysp/gyp,duanhjlt/gyp,pyokagan/gyp,omasanori/gyp,mkrautz/gyp-libmumble,enkripsi/gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,springmeyer/gyp,cchamberlain/gyp,saghul/gyn,luvit/gyp,saghul/gyn,amoikevin/gyp,alexcrichton/gyp,brson/gyp,msc-/gyp,ryfx/gyp,dougbeal/gyp,adblockplus/gyp,Danath/gyp,pyokagan/gyp,svn2github/kgyp,mistydemeo/gyp,sport-monkey/GYP,yinquan529/platform-external-chromium_org-tools-gyp,sloanyang/gyp,mgamer/gyp,omasanori/gyp,LazyCodingCat/gyp,ttyangf/gyp,bnoordhuis/gyp,mistydemeo/gyp,bnoordhuis/gyp,chromium/gyp,dougbeal/gyp,mistydemeo/gyp,chromium/gyp,duanhjlt/gyp,bnq4ever/gypgoogle,kevinchen3315/gyp-git,AWhetter/gyp,luvit/gyp,AOSPU/external_chromium_org_tools_gyp,sanyaade-teachings/gyp,pyokagan/gyp,azunite/gyp,chromium/gyp,erikge/watch_gyp,pyokagan/gyp,IllusionRom-deprecated/android_platform_external_chromium_org_tools_gyp,sdklite/gyp,AWhetter/gyp,chann
ing/gyp,AOSPU/external_chromium_org_tools_gyp,mumble-voip/libmumble-gyp,LazyCodingCat/gyp,mapbox/gyp,mgamer/gyp,android-ia/platform_external_chromium_org_tools_gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,adblockplus/gyp,yangrongwei/gyp,Omegaphora/external_chromium_org_tools_gyp,msc-/gyp,cchamberlain/gyp,sloanyang/gyp,xin3liang/platform_external_chromium_org_tools_gyp,springmeyer/gyp,Chilledheart/gyp,amoikevin/gyp,kevinchen3315/gyp-git,luvit/gyp,AWhetter/gyp,Chilledheart/gyp,bpsinc-native/src_tools_gyp,mkrautz/gyp-libmumble,bdarnell/gyp,Chilledheart/gyp,springmeyer/gyp,adblockplus/gyp,enkripsi/gyp,mapbox/gyp,sloanyang/gyp,yjhjstz/gyp,LazyCodingCat/gyp,Omegaphora/external_chromium_org_tools_gyp,saghul/gyn,saghul/gyn,trafi/gyp,Jack-Q/GYP-copy,cysp/gyp,yinquan529/platform-external-chromium_org-tools-gyp,Phuehvk/gyp,mgamer/gyp,kevinchen3315/gyp-git,ttyangf/gyp,yjhjstz/gyp,bnq4ever/gypgoogle,lianliuwei/gyp,AOSPU/external_chromium_org_tools_gyp,Danath/gyp,pandaxcl/gyp,carlTLR/gyp,omasanori/gyp,msc-/gyp,bulldy80/gyp_unofficial,brson/gyp,enkripsi/gyp,tarc/gyp,LazyCodingCat/gyp,mgamer/gyp,okwasi/gyp,mkrautz/gyp-libmumble,yinquan529/platform-external-chromium_org-tools-gyp,sport-monkey/GYP,yjhjstz/gyp,bulldy80/gyp_unofficial,ryfx/gyp,cysp/gyp,sport-monkey/GYP,channing/gyp,openpeer/webrtc-gyp,bnoordhuis/gyp,mumble-voip/libmumble-gyp,mumble-voip/libmumble-gyp,lianliuwei/gyp,bulldy80/gyp_unofficial,IllusionRom-deprecated/android_platform_external_chromium_org_tools_gyp,mkrautz/gyp-libmumble,ttyangf/gyp,lukeweber/gyp-override,tarc/gyp,carlTLR/gyp,ttyangf/pdfium_gyp,openpeer/webrtc-gyp,carlTLR/gyp,Jack-Q/GYP-copy,adblockplus/gyp,MIPS/external-chromium_org-tools-gyp,Jack-Q/GYP-copy,sport-monkey/GYP,xin3liang/platform_external_chromium_org_tools_gyp,sport-monkey/GYP,svn2github/gyp,cchamberlain/gyp,turbulenz/gyp,alexcrichton/gyp,ttyangf/pdfium_gyp,bdarnell/gyp,bdarnell/gyp,svn2github/gyp,amoikevin/gyp,azunite/gyp_20150930,Danath/gyp,ryfx/gyp,tarc/gyp,openpeer/webrtc-gyp,p
andaxcl/gyp,azunite/gyp_20150930,tarc/gyp,springmeyer/gyp,turbulenz/gyp,msc-/gyp,lianliuwei/gyp,svn2github/kgyp,yinquan529/platform-external-chromium_org-tools-gyp,carlTLR/gyp,okumura/gyp,azunite/gyp,Chilledheart/gyp,mapbox/gyp,bnq4ever/gypgoogle,yjhjstz/gyp,bpsinc-native/src_tools_gyp,sanyaade-teachings/gyp,okwasi/gyp,duanhjlt/gyp,dougbeal/gyp,lukeweber/gyp-override,android-ia/platform_external_chromium_org_tools_gyp,bpsinc-native/src_tools_gyp,Jack-Q/GYP-copy,saghul/gyn,omasanori/gyp,okwasi/gyp,dougbeal/gyp,svn2github/gyp,svn2github/kgyp,ttyangf/gyp,bnoordhuis/gyp,cysp/gyp,okwasi/gyp,turbulenz/gyp,xin3liang/platform_external_chromium_org_tools_gyp,MIPS/external-chromium_org-tools-gyp,chromium/gyp,MIPS/external-chromium_org-tools-gyp,chromium/gyp,trafi/gyp,dougbeal/gyp,sanyaade-teachings/gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,Phuehvk/gyp,duanhjlt/gyp,Omegaphora/external_chromium_org_tools_gyp,svn2github/kgyp,erikge/watch_gyp,MIPS/external-chromium_org-tools-gyp,yangrongwei/gyp,clar/gyp,pyokagan/gyp,mapbox/gyp,clar/gyp,Danath/gyp,brson/gyp,cysp/gyp,Phuehvk/gyp,yjhjstz/gyp,android-ia/platform_external_chromium_org_tools_gyp,erikge/watch_gyp,springmeyer/gyp,cchamberlain/gyp,yangrongwei/gyp,openpeer/webrtc-gyp,sdklite/gyp,geekboxzone/lollipop_external_chromium_org_tools_gyp,bnq4ever/gypgoogle,azunite/gyp,Phuehvk/gyp,sdklite/gyp,enkripsi/gyp,mumble-voip/libmumble-gyp,bnq4ever/gypgoogle,azunite/gyp,azunite/gyp,svn2github/gyp,bulldy80/gyp_unofficial,svn2github/kgyp,mgamer/gyp,ttyangf/pdfium_gyp,sanyaade-teachings/gyp,amoikevin/gyp,bdarnell/gyp,AWhetter/gyp,lianliuwei/gyp,pandaxcl/gyp,sanyaade-teachings/gyp,adblockplus/gyp,brson/gyp,okumura/gyp,ryfx/gyp,alexcrichton/gyp,ttyangf/gyp | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006
git-svn-id: e7e1075985beda50ea81ac4472467b4f6e91fc78@970 78cadc50-ecff-11dd-a971-7dbc132099af | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
| <commit_before>#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
<commit_msg>Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006
git-svn-id: e7e1075985beda50ea81ac4472467b4f6e91fc78@970 78cadc50-ecff-11dd-a971-7dbc132099af<commit_after> | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
| #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006
git-svn-id: e7e1075985beda50ea81ac4472467b4f6e91fc78@970 78cadc50-ecff-11dd-a971-7dbc132099af#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
| <commit_before>#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
test = TestGyp.TestGyp()
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
<commit_msg>Make new settings test not run for xcode generator.
TBR=evan
Review URL: http://codereview.chromium.org/7472006
git-svn-id: e7e1075985beda50ea81ac4472467b4f6e91fc78@970 78cadc50-ecff-11dd-a971-7dbc132099af<commit_after>#!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Smoke-tests 'settings' blocks.
"""
import TestGyp
# 'settings' is only supported for make and scons (and will be removed there as
# well eventually).
test = TestGyp.TestGyp(formats=['make', 'scons'])
test.run_gyp('settings.gyp')
test.build('test.gyp', test.ALL)
test.pass_test()
|
10f2a3d6e748e18bd3858fe5686a6978cf9dc9ea | lib/precompilers.py | lib/precompilers.py | from compressor.filters.base import CompilerFilter
from compressor.filters.css_default import CssAbsoluteFilter
# Work around the fact that django-compressor doesn't succeed in running the
# CssAbsoluteFilter on less files due to broken path lookups.
class LessFilter(CompilerFilter):
def __init__(self, content, attrs, **kwargs):
super(LessFilter, self).__init__(content, command='lessc {infile} {outfile}', **kwargs)
def input(self, **kwargs):
content = super(LessFilter, self).input(**kwargs)
return CssAbsoluteFilter(content).input(**kwargs)
| from compressor.filters.base import CompilerFilter
from compressor.filters.css_default import CssAbsoluteFilter
from compressor.utils import staticfiles
class CustomCssAbsoluteFilter(CssAbsoluteFilter):
def find(self, basename):
# This is the same as the inherited implementation except for the
# removal of a check on settings.DEBUG. See
#
# https://stackoverflow.com/questions/15532464
#
# for details.
if basename and staticfiles.finders:
return staticfiles.finders.find(basename)
# Work around the fact that django-compressor doesn't succeed in running the
# CssAbsoluteFilter on less files due to broken path lookups.
class LessFilter(CompilerFilter):
def __init__(self, content, attrs, **kwargs):
super(LessFilter, self).__init__(content, command='lessc {infile} {outfile}', **kwargs)
def input(self, **kwargs):
content = super(LessFilter, self).input(**kwargs)
return CustomCssAbsoluteFilter(content).input(**kwargs)
| Fix staticfiles lookups on Heroku | lib: Fix staticfiles lookups on Heroku
| Python | mit | okfn/website,okfn/website,MjAbuz/foundation,MjAbuz/foundation,okfn/foundation,okfn/foundation,okfn/website,MjAbuz/foundation,okfn/foundation,MjAbuz/foundation,okfn/website,okfn/foundation | from compressor.filters.base import CompilerFilter
from compressor.filters.css_default import CssAbsoluteFilter
# Work around the fact that django-compressor doesn't succeed in running the
# CssAbsoluteFilter on less files due to broken path lookups.
class LessFilter(CompilerFilter):
def __init__(self, content, attrs, **kwargs):
super(LessFilter, self).__init__(content, command='lessc {infile} {outfile}', **kwargs)
def input(self, **kwargs):
content = super(LessFilter, self).input(**kwargs)
return CssAbsoluteFilter(content).input(**kwargs)
lib: Fix staticfiles lookups on Heroku | from compressor.filters.base import CompilerFilter
from compressor.filters.css_default import CssAbsoluteFilter
from compressor.utils import staticfiles
class CustomCssAbsoluteFilter(CssAbsoluteFilter):
def find(self, basename):
# This is the same as the inherited implementation except for the
# removal of a check on settings.DEBUG. See
#
# https://stackoverflow.com/questions/15532464
#
# for details.
if basename and staticfiles.finders:
return staticfiles.finders.find(basename)
# Work around the fact that django-compressor doesn't succeed in running the
# CssAbsoluteFilter on less files due to broken path lookups.
class LessFilter(CompilerFilter):
def __init__(self, content, attrs, **kwargs):
super(LessFilter, self).__init__(content, command='lessc {infile} {outfile}', **kwargs)
def input(self, **kwargs):
content = super(LessFilter, self).input(**kwargs)
return CustomCssAbsoluteFilter(content).input(**kwargs)
| <commit_before>from compressor.filters.base import CompilerFilter
from compressor.filters.css_default import CssAbsoluteFilter
# Work around the fact that django-compressor doesn't succeed in running the
# CssAbsoluteFilter on less files due to broken path lookups.
class LessFilter(CompilerFilter):
def __init__(self, content, attrs, **kwargs):
super(LessFilter, self).__init__(content, command='lessc {infile} {outfile}', **kwargs)
def input(self, **kwargs):
content = super(LessFilter, self).input(**kwargs)
return CssAbsoluteFilter(content).input(**kwargs)
<commit_msg>lib: Fix staticfiles lookups on Heroku<commit_after> | from compressor.filters.base import CompilerFilter
from compressor.filters.css_default import CssAbsoluteFilter
from compressor.utils import staticfiles
class CustomCssAbsoluteFilter(CssAbsoluteFilter):
def find(self, basename):
# This is the same as the inherited implementation except for the
# removal of a check on settings.DEBUG. See
#
# https://stackoverflow.com/questions/15532464
#
# for details.
if basename and staticfiles.finders:
return staticfiles.finders.find(basename)
# Work around the fact that django-compressor doesn't succeed in running the
# CssAbsoluteFilter on less files due to broken path lookups.
class LessFilter(CompilerFilter):
def __init__(self, content, attrs, **kwargs):
super(LessFilter, self).__init__(content, command='lessc {infile} {outfile}', **kwargs)
def input(self, **kwargs):
content = super(LessFilter, self).input(**kwargs)
return CustomCssAbsoluteFilter(content).input(**kwargs)
| from compressor.filters.base import CompilerFilter
from compressor.filters.css_default import CssAbsoluteFilter
# Work around the fact that django-compressor doesn't succeed in running the
# CssAbsoluteFilter on less files due to broken path lookups.
class LessFilter(CompilerFilter):
def __init__(self, content, attrs, **kwargs):
super(LessFilter, self).__init__(content, command='lessc {infile} {outfile}', **kwargs)
def input(self, **kwargs):
content = super(LessFilter, self).input(**kwargs)
return CssAbsoluteFilter(content).input(**kwargs)
lib: Fix staticfiles lookups on Herokufrom compressor.filters.base import CompilerFilter
from compressor.filters.css_default import CssAbsoluteFilter
from compressor.utils import staticfiles
class CustomCssAbsoluteFilter(CssAbsoluteFilter):
def find(self, basename):
# This is the same as the inherited implementation except for the
# removal of a check on settings.DEBUG. See
#
# https://stackoverflow.com/questions/15532464
#
# for details.
if basename and staticfiles.finders:
return staticfiles.finders.find(basename)
# Work around the fact that django-compressor doesn't succeed in running the
# CssAbsoluteFilter on less files due to broken path lookups.
class LessFilter(CompilerFilter):
def __init__(self, content, attrs, **kwargs):
super(LessFilter, self).__init__(content, command='lessc {infile} {outfile}', **kwargs)
def input(self, **kwargs):
content = super(LessFilter, self).input(**kwargs)
return CustomCssAbsoluteFilter(content).input(**kwargs)
| <commit_before>from compressor.filters.base import CompilerFilter
from compressor.filters.css_default import CssAbsoluteFilter
# Work around the fact that django-compressor doesn't succeed in running the
# CssAbsoluteFilter on less files due to broken path lookups.
class LessFilter(CompilerFilter):
def __init__(self, content, attrs, **kwargs):
super(LessFilter, self).__init__(content, command='lessc {infile} {outfile}', **kwargs)
def input(self, **kwargs):
content = super(LessFilter, self).input(**kwargs)
return CssAbsoluteFilter(content).input(**kwargs)
<commit_msg>lib: Fix staticfiles lookups on Heroku<commit_after>from compressor.filters.base import CompilerFilter
from compressor.filters.css_default import CssAbsoluteFilter
from compressor.utils import staticfiles
class CustomCssAbsoluteFilter(CssAbsoluteFilter):
def find(self, basename):
# This is the same as the inherited implementation except for the
# removal of a check on settings.DEBUG. See
#
# https://stackoverflow.com/questions/15532464
#
# for details.
if basename and staticfiles.finders:
return staticfiles.finders.find(basename)
# Work around the fact that django-compressor doesn't succeed in running the
# CssAbsoluteFilter on less files due to broken path lookups.
class LessFilter(CompilerFilter):
def __init__(self, content, attrs, **kwargs):
super(LessFilter, self).__init__(content, command='lessc {infile} {outfile}', **kwargs)
def input(self, **kwargs):
content = super(LessFilter, self).input(**kwargs)
return CustomCssAbsoluteFilter(content).input(**kwargs)
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.