commit stringlengths 40 40 | old_file stringlengths 4 118 | new_file stringlengths 4 118 | old_contents stringlengths 0 2.94k | new_contents stringlengths 1 4.43k | subject stringlengths 15 444 | message stringlengths 16 3.45k | lang stringclasses 1 value | license stringclasses 13 values | repos stringlengths 5 43.2k | prompt stringlengths 17 4.58k | response stringlengths 1 4.43k | prompt_tagged stringlengths 58 4.62k | response_tagged stringlengths 1 4.43k | text stringlengths 132 7.29k | text_tagged stringlengths 173 7.33k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ce38ad1884cdc602d1b70d5a23d749ff3683f440 | reqon/utils.py | reqon/utils.py | def dict_in(value):
'''
Checks for the existence of a dictionary in a list
Arguments:
value -- A list
Returns:
A Boolean
'''
for item in value:
if isinstance(item, dict):
return True
return False
| def dict_in(value):
'''
Checks for the existence of a dictionary in a list
Arguments:
value -- A list
Returns:
A Boolean
'''
return any(isinstance(item, dict) for item in value)
| Make the dict_in function sleeker and sexier | Make the dict_in function sleeker and sexier
| Python | mit | dmpayton/reqon | def dict_in(value):
'''
Checks for the existence of a dictionary in a list
Arguments:
value -- A list
Returns:
A Boolean
'''
for item in value:
if isinstance(item, dict):
return True
return False
Make the dict_in function sleeker and sexier | def dict_in(value):
'''
Checks for the existence of a dictionary in a list
Arguments:
value -- A list
Returns:
A Boolean
'''
return any(isinstance(item, dict) for item in value)
| <commit_before>def dict_in(value):
'''
Checks for the existence of a dictionary in a list
Arguments:
value -- A list
Returns:
A Boolean
'''
for item in value:
if isinstance(item, dict):
return True
return False
<commit_msg>Make the dict_in function sleeker and sexier<commit_after> | def dict_in(value):
'''
Checks for the existence of a dictionary in a list
Arguments:
value -- A list
Returns:
A Boolean
'''
return any(isinstance(item, dict) for item in value)
| def dict_in(value):
'''
Checks for the existence of a dictionary in a list
Arguments:
value -- A list
Returns:
A Boolean
'''
for item in value:
if isinstance(item, dict):
return True
return False
Make the dict_in function sleeker and sexierdef dict_in(value):
'''
Checks for the existence of a dictionary in a list
Arguments:
value -- A list
Returns:
A Boolean
'''
return any(isinstance(item, dict) for item in value)
| <commit_before>def dict_in(value):
'''
Checks for the existence of a dictionary in a list
Arguments:
value -- A list
Returns:
A Boolean
'''
for item in value:
if isinstance(item, dict):
return True
return False
<commit_msg>Make the dict_in function sleeker and sexier<commit_after>def dict_in(value):
'''
Checks for the existence of a dictionary in a list
Arguments:
value -- A list
Returns:
A Boolean
'''
return any(isinstance(item, dict) for item in value)
|
1e2d1f25cb36db7383e6a2300e2e96d47113cc16 | cryptography/primitives/block/ciphers.py | cryptography/primitives/block/ciphers.py | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class AES(object):
name = "AES"
block_size = 128
key_sizes = set([128, 192, 256])
def __init__(self, key):
super(AES, self).__init__()
self.key = key
# Verify that the key size matches the expected key size
if self.key_size not in self.key_sizes:
raise ValueError("Invalid key size ({0}) for {1}".format(
self.key_size, self.name
))
@property
def key_size(self):
return len(self.key) * 8
| # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class AES(object):
name = "AES"
block_size = 128
key_sizes = frozenset([128, 192, 256])
def __init__(self, key):
super(AES, self).__init__()
self.key = key
# Verify that the key size matches the expected key size
if self.key_size not in self.key_sizes:
raise ValueError("Invalid key size ({0}) for {1}".format(
self.key_size, self.name
))
@property
def key_size(self):
return len(self.key) * 8
| Make key_sizes a frozenset, since these are/should be immutable | Make key_sizes a frozenset, since these are/should be immutable
| Python | bsd-3-clause | kimvais/cryptography,Ayrx/cryptography,kimvais/cryptography,Hasimir/cryptography,Lukasa/cryptography,dstufft/cryptography,skeuomorf/cryptography,sholsapp/cryptography,Hasimir/cryptography,Ayrx/cryptography,kimvais/cryptography,dstufft/cryptography,sholsapp/cryptography,dstufft/cryptography,skeuomorf/cryptography,sholsapp/cryptography,bwhmather/cryptography,Ayrx/cryptography,glyph/cryptography,Hasimir/cryptography,skeuomorf/cryptography,sholsapp/cryptography,glyph/cryptography,bwhmather/cryptography,Ayrx/cryptography,dstufft/cryptography,skeuomorf/cryptography,Hasimir/cryptography,bwhmather/cryptography,Lukasa/cryptography,dstufft/cryptography,kimvais/cryptography,bwhmather/cryptography,Lukasa/cryptography | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class AES(object):
name = "AES"
block_size = 128
key_sizes = set([128, 192, 256])
def __init__(self, key):
super(AES, self).__init__()
self.key = key
# Verify that the key size matches the expected key size
if self.key_size not in self.key_sizes:
raise ValueError("Invalid key size ({0}) for {1}".format(
self.key_size, self.name
))
@property
def key_size(self):
return len(self.key) * 8
Make key_sizes a frozenset, since these are/should be immutable | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class AES(object):
name = "AES"
block_size = 128
key_sizes = frozenset([128, 192, 256])
def __init__(self, key):
super(AES, self).__init__()
self.key = key
# Verify that the key size matches the expected key size
if self.key_size not in self.key_sizes:
raise ValueError("Invalid key size ({0}) for {1}".format(
self.key_size, self.name
))
@property
def key_size(self):
return len(self.key) * 8
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class AES(object):
name = "AES"
block_size = 128
key_sizes = set([128, 192, 256])
def __init__(self, key):
super(AES, self).__init__()
self.key = key
# Verify that the key size matches the expected key size
if self.key_size not in self.key_sizes:
raise ValueError("Invalid key size ({0}) for {1}".format(
self.key_size, self.name
))
@property
def key_size(self):
return len(self.key) * 8
<commit_msg>Make key_sizes a frozenset, since these are/should be immutable<commit_after> | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class AES(object):
name = "AES"
block_size = 128
key_sizes = frozenset([128, 192, 256])
def __init__(self, key):
super(AES, self).__init__()
self.key = key
# Verify that the key size matches the expected key size
if self.key_size not in self.key_sizes:
raise ValueError("Invalid key size ({0}) for {1}".format(
self.key_size, self.name
))
@property
def key_size(self):
return len(self.key) * 8
| # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class AES(object):
name = "AES"
block_size = 128
key_sizes = set([128, 192, 256])
def __init__(self, key):
super(AES, self).__init__()
self.key = key
# Verify that the key size matches the expected key size
if self.key_size not in self.key_sizes:
raise ValueError("Invalid key size ({0}) for {1}".format(
self.key_size, self.name
))
@property
def key_size(self):
return len(self.key) * 8
Make key_sizes a frozenset, since these are/should be immutable# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class AES(object):
name = "AES"
block_size = 128
key_sizes = frozenset([128, 192, 256])
def __init__(self, key):
super(AES, self).__init__()
self.key = key
# Verify that the key size matches the expected key size
if self.key_size not in self.key_sizes:
raise ValueError("Invalid key size ({0}) for {1}".format(
self.key_size, self.name
))
@property
def key_size(self):
return len(self.key) * 8
| <commit_before># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class AES(object):
name = "AES"
block_size = 128
key_sizes = set([128, 192, 256])
def __init__(self, key):
super(AES, self).__init__()
self.key = key
# Verify that the key size matches the expected key size
if self.key_size not in self.key_sizes:
raise ValueError("Invalid key size ({0}) for {1}".format(
self.key_size, self.name
))
@property
def key_size(self):
return len(self.key) * 8
<commit_msg>Make key_sizes a frozenset, since these are/should be immutable<commit_after># Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
class AES(object):
name = "AES"
block_size = 128
key_sizes = frozenset([128, 192, 256])
def __init__(self, key):
super(AES, self).__init__()
self.key = key
# Verify that the key size matches the expected key size
if self.key_size not in self.key_sizes:
raise ValueError("Invalid key size ({0}) for {1}".format(
self.key_size, self.name
))
@property
def key_size(self):
return len(self.key) * 8
|
ad79f01358aa83162730b15507d7d6d3c3575ab3 | akanda/horizon/configuration/tabs.py | akanda/horizon/configuration/tabs.py | import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(tenant_id=self.request.user.tenant_id).values()[0]:
for port in router['ports']:
if port['device_owner'] != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port['fixed_ips']]
data.append(PublicIP(None, router['name'], ', '.join(ips)))
return data
| import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(
tenant_id=self.request.user.tenant_id).values()[0]:
for port in router.get('ports', []):
if port.get('device_owner') != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port.get('fixed_ips', [])]
data.append(PublicIP(None, router.get('name'), ', '.join(ips)))
return data
| Handle missing port and router data | Handle missing port and router data
On some systems routers have no ports and ports
have no fixed IPs, so don't assume they do.
Also includes some pep8 fixes.
Change-Id: I73b5f22754958b897a6ae55e453c294f47bf9539
Signed-off-by: Doug Hellmann <8c845c26a3868dadec615703cd974244eb2ac6d1@dreamhost.com>
| Python | apache-2.0 | dreamhost/akanda-horizon,dreamhost/akanda-horizon | import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(tenant_id=self.request.user.tenant_id).values()[0]:
for port in router['ports']:
if port['device_owner'] != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port['fixed_ips']]
data.append(PublicIP(None, router['name'], ', '.join(ips)))
return data
Handle missing port and router data
On some systems routers have no ports and ports
have no fixed IPs, so don't assume they do.
Also includes some pep8 fixes.
Change-Id: I73b5f22754958b897a6ae55e453c294f47bf9539
Signed-off-by: Doug Hellmann <8c845c26a3868dadec615703cd974244eb2ac6d1@dreamhost.com> | import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(
tenant_id=self.request.user.tenant_id).values()[0]:
for port in router.get('ports', []):
if port.get('device_owner') != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port.get('fixed_ips', [])]
data.append(PublicIP(None, router.get('name'), ', '.join(ips)))
return data
| <commit_before>import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(tenant_id=self.request.user.tenant_id).values()[0]:
for port in router['ports']:
if port['device_owner'] != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port['fixed_ips']]
data.append(PublicIP(None, router['name'], ', '.join(ips)))
return data
<commit_msg>Handle missing port and router data
On some systems routers have no ports and ports
have no fixed IPs, so don't assume they do.
Also includes some pep8 fixes.
Change-Id: I73b5f22754958b897a6ae55e453c294f47bf9539
Signed-off-by: Doug Hellmann <8c845c26a3868dadec615703cd974244eb2ac6d1@dreamhost.com><commit_after> | import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(
tenant_id=self.request.user.tenant_id).values()[0]:
for port in router.get('ports', []):
if port.get('device_owner') != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port.get('fixed_ips', [])]
data.append(PublicIP(None, router.get('name'), ', '.join(ips)))
return data
| import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(tenant_id=self.request.user.tenant_id).values()[0]:
for port in router['ports']:
if port['device_owner'] != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port['fixed_ips']]
data.append(PublicIP(None, router['name'], ', '.join(ips)))
return data
Handle missing port and router data
On some systems routers have no ports and ports
have no fixed IPs, so don't assume they do.
Also includes some pep8 fixes.
Change-Id: I73b5f22754958b897a6ae55e453c294f47bf9539
Signed-off-by: Doug Hellmann <8c845c26a3868dadec615703cd974244eb2ac6d1@dreamhost.com>import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(
tenant_id=self.request.user.tenant_id).values()[0]:
for port in router.get('ports', []):
if port.get('device_owner') != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port.get('fixed_ips', [])]
data.append(PublicIP(None, router.get('name'), ', '.join(ips)))
return data
| <commit_before>import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(tenant_id=self.request.user.tenant_id).values()[0]:
for port in router['ports']:
if port['device_owner'] != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port['fixed_ips']]
data.append(PublicIP(None, router['name'], ', '.join(ips)))
return data
<commit_msg>Handle missing port and router data
On some systems routers have no ports and ports
have no fixed IPs, so don't assume they do.
Also includes some pep8 fixes.
Change-Id: I73b5f22754958b897a6ae55e453c294f47bf9539
Signed-off-by: Doug Hellmann <8c845c26a3868dadec615703cd974244eb2ac6d1@dreamhost.com><commit_after>import collections
import logging
from django.utils.translation import ugettext as _
from horizon.api import quantum
from horizon import tabs
from akanda.horizon.configuration.tables.publicips import PublicIPsTable
# The table rendering code assumes it is getting an
# object with an "id" property and other properties
# based on the column definitions for the table.
# This is a light-weight data structure that looks
# like what we need for the publicips table.
PublicIP = collections.namedtuple('PublicIP', 'id router_name ipaddr')
class ConfigurationTab(tabs.TableTab):
"""Tab to show the user generic configuration settings.
"""
name = _("Configuration")
slug = "configuration_tab"
template_name = "akanda/configuration/index.html"
table_classes = (PublicIPsTable,)
def get_publicips_data(self):
data = []
c = quantum.quantumclient(self.request)
for router in c.list_routers(
tenant_id=self.request.user.tenant_id).values()[0]:
for port in router.get('ports', []):
if port.get('device_owner') != 'network:router_gateway':
continue
ips = [i['ip_address'] for i in port.get('fixed_ips', [])]
data.append(PublicIP(None, router.get('name'), ', '.join(ips)))
return data
|
024209cdefd34e26983ea4910071ccbd1a33faaa | pyramid_celery/__init__.py | pyramid_celery/__init__.py | from celery.app import default_app
from celery.app import defaults
def clean_quoted_config(config, key):
# ini doesn't allow quoting, but lets support it to fit with celery
config[key] = config[key].replace('"', '')
TYPES_TO_OBJ = {
'any': (object, None),
'bool': (bool, defaults.str_to_bool),
'dict': (dict, eval),
'float': (float, float),
'int': (int, int),
'list': (list, eval),
'tuple': (tuple, eval),
'string': (str, str),
}
OPTIONS = {
key: TYPES_TO_OBJ[opt.type]
for key, opt in defaults.flatten(defaults.NAMESPACES)
}
def convert_celery_options(config):
"""
Converts celery options to apropriate types
"""
for key, value in config.iteritems():
opt_type = OPTIONS.get(key)
if opt_type:
if opt_type[0] == str:
clean_quoted_config(config, key)
elif opt_type[0] is object:
try:
config[key] = eval(value)
except:
pass # any can be anything; even a string
elif not isinstance(value, opt_type[0]):
config[key] = opt_type[1](value)
def includeme(config):
convert_celery_options(config.registry.settings)
default_app.config_from_object(config.registry.settings)
default_app.config = config
| from celery.app import default_app
from celery.app import defaults
def clean_quoted_config(config, key):
# ini doesn't allow quoting, but lets support it to fit with celery
config[key] = config[key].replace('"', '')
TYPES_TO_OBJ = {
'any': (object, None),
'bool': (bool, defaults.str_to_bool),
'dict': (dict, eval),
'float': (float, float),
'int': (int, int),
'list': (list, eval),
'tuple': (tuple, eval),
'string': (str, str),
}
OPTIONS = (
(key, TYPES_TO_OBJ[opt.type])
for key, opt in defaults.flatten(defaults.NAMESPACES)
)
def convert_celery_options(config):
"""
Converts celery options to apropriate types
"""
for key, value in config.iteritems():
opt_type = OPTIONS.get(key)
if opt_type:
if opt_type[0] == str:
clean_quoted_config(config, key)
elif opt_type[0] is object:
try:
config[key] = eval(value)
except:
pass # any can be anything; even a string
elif not isinstance(value, opt_type[0]):
config[key] = opt_type[1](value)
def includeme(config):
convert_celery_options(config.registry.settings)
default_app.config_from_object(config.registry.settings)
default_app.config = config
| Change dict-comprehension for python 2.6 compatibility. | Change dict-comprehension for python 2.6 compatibility.
| Python | mit | edelooff/pyramid_celery,miohtama/pyramid_celery | from celery.app import default_app
from celery.app import defaults
def clean_quoted_config(config, key):
# ini doesn't allow quoting, but lets support it to fit with celery
config[key] = config[key].replace('"', '')
TYPES_TO_OBJ = {
'any': (object, None),
'bool': (bool, defaults.str_to_bool),
'dict': (dict, eval),
'float': (float, float),
'int': (int, int),
'list': (list, eval),
'tuple': (tuple, eval),
'string': (str, str),
}
OPTIONS = {
key: TYPES_TO_OBJ[opt.type]
for key, opt in defaults.flatten(defaults.NAMESPACES)
}
def convert_celery_options(config):
"""
Converts celery options to apropriate types
"""
for key, value in config.iteritems():
opt_type = OPTIONS.get(key)
if opt_type:
if opt_type[0] == str:
clean_quoted_config(config, key)
elif opt_type[0] is object:
try:
config[key] = eval(value)
except:
pass # any can be anything; even a string
elif not isinstance(value, opt_type[0]):
config[key] = opt_type[1](value)
def includeme(config):
convert_celery_options(config.registry.settings)
default_app.config_from_object(config.registry.settings)
default_app.config = config
Change dict-comprehension for python 2.6 compatibility. | from celery.app import default_app
from celery.app import defaults
def clean_quoted_config(config, key):
# ini doesn't allow quoting, but lets support it to fit with celery
config[key] = config[key].replace('"', '')
TYPES_TO_OBJ = {
'any': (object, None),
'bool': (bool, defaults.str_to_bool),
'dict': (dict, eval),
'float': (float, float),
'int': (int, int),
'list': (list, eval),
'tuple': (tuple, eval),
'string': (str, str),
}
OPTIONS = (
(key, TYPES_TO_OBJ[opt.type])
for key, opt in defaults.flatten(defaults.NAMESPACES)
)
def convert_celery_options(config):
"""
Converts celery options to apropriate types
"""
for key, value in config.iteritems():
opt_type = OPTIONS.get(key)
if opt_type:
if opt_type[0] == str:
clean_quoted_config(config, key)
elif opt_type[0] is object:
try:
config[key] = eval(value)
except:
pass # any can be anything; even a string
elif not isinstance(value, opt_type[0]):
config[key] = opt_type[1](value)
def includeme(config):
convert_celery_options(config.registry.settings)
default_app.config_from_object(config.registry.settings)
default_app.config = config
| <commit_before>from celery.app import default_app
from celery.app import defaults
def clean_quoted_config(config, key):
# ini doesn't allow quoting, but lets support it to fit with celery
config[key] = config[key].replace('"', '')
TYPES_TO_OBJ = {
'any': (object, None),
'bool': (bool, defaults.str_to_bool),
'dict': (dict, eval),
'float': (float, float),
'int': (int, int),
'list': (list, eval),
'tuple': (tuple, eval),
'string': (str, str),
}
OPTIONS = {
key: TYPES_TO_OBJ[opt.type]
for key, opt in defaults.flatten(defaults.NAMESPACES)
}
def convert_celery_options(config):
"""
Converts celery options to apropriate types
"""
for key, value in config.iteritems():
opt_type = OPTIONS.get(key)
if opt_type:
if opt_type[0] == str:
clean_quoted_config(config, key)
elif opt_type[0] is object:
try:
config[key] = eval(value)
except:
pass # any can be anything; even a string
elif not isinstance(value, opt_type[0]):
config[key] = opt_type[1](value)
def includeme(config):
convert_celery_options(config.registry.settings)
default_app.config_from_object(config.registry.settings)
default_app.config = config
<commit_msg>Change dict-comprehension for python 2.6 compatibility.<commit_after> | from celery.app import default_app
from celery.app import defaults
def clean_quoted_config(config, key):
# ini doesn't allow quoting, but lets support it to fit with celery
config[key] = config[key].replace('"', '')
TYPES_TO_OBJ = {
'any': (object, None),
'bool': (bool, defaults.str_to_bool),
'dict': (dict, eval),
'float': (float, float),
'int': (int, int),
'list': (list, eval),
'tuple': (tuple, eval),
'string': (str, str),
}
OPTIONS = (
(key, TYPES_TO_OBJ[opt.type])
for key, opt in defaults.flatten(defaults.NAMESPACES)
)
def convert_celery_options(config):
"""
Converts celery options to apropriate types
"""
for key, value in config.iteritems():
opt_type = OPTIONS.get(key)
if opt_type:
if opt_type[0] == str:
clean_quoted_config(config, key)
elif opt_type[0] is object:
try:
config[key] = eval(value)
except:
pass # any can be anything; even a string
elif not isinstance(value, opt_type[0]):
config[key] = opt_type[1](value)
def includeme(config):
convert_celery_options(config.registry.settings)
default_app.config_from_object(config.registry.settings)
default_app.config = config
| from celery.app import default_app
from celery.app import defaults
def clean_quoted_config(config, key):
# ini doesn't allow quoting, but lets support it to fit with celery
config[key] = config[key].replace('"', '')
TYPES_TO_OBJ = {
'any': (object, None),
'bool': (bool, defaults.str_to_bool),
'dict': (dict, eval),
'float': (float, float),
'int': (int, int),
'list': (list, eval),
'tuple': (tuple, eval),
'string': (str, str),
}
OPTIONS = {
key: TYPES_TO_OBJ[opt.type]
for key, opt in defaults.flatten(defaults.NAMESPACES)
}
def convert_celery_options(config):
"""
Converts celery options to apropriate types
"""
for key, value in config.iteritems():
opt_type = OPTIONS.get(key)
if opt_type:
if opt_type[0] == str:
clean_quoted_config(config, key)
elif opt_type[0] is object:
try:
config[key] = eval(value)
except:
pass # any can be anything; even a string
elif not isinstance(value, opt_type[0]):
config[key] = opt_type[1](value)
def includeme(config):
convert_celery_options(config.registry.settings)
default_app.config_from_object(config.registry.settings)
default_app.config = config
Change dict-comprehension for python 2.6 compatibility.from celery.app import default_app
from celery.app import defaults
def clean_quoted_config(config, key):
# ini doesn't allow quoting, but lets support it to fit with celery
config[key] = config[key].replace('"', '')
TYPES_TO_OBJ = {
'any': (object, None),
'bool': (bool, defaults.str_to_bool),
'dict': (dict, eval),
'float': (float, float),
'int': (int, int),
'list': (list, eval),
'tuple': (tuple, eval),
'string': (str, str),
}
OPTIONS = (
(key, TYPES_TO_OBJ[opt.type])
for key, opt in defaults.flatten(defaults.NAMESPACES)
)
def convert_celery_options(config):
"""
Converts celery options to apropriate types
"""
for key, value in config.iteritems():
opt_type = OPTIONS.get(key)
if opt_type:
if opt_type[0] == str:
clean_quoted_config(config, key)
elif opt_type[0] is object:
try:
config[key] = eval(value)
except:
pass # any can be anything; even a string
elif not isinstance(value, opt_type[0]):
config[key] = opt_type[1](value)
def includeme(config):
convert_celery_options(config.registry.settings)
default_app.config_from_object(config.registry.settings)
default_app.config = config
| <commit_before>from celery.app import default_app
from celery.app import defaults
def clean_quoted_config(config, key):
# ini doesn't allow quoting, but lets support it to fit with celery
config[key] = config[key].replace('"', '')
TYPES_TO_OBJ = {
'any': (object, None),
'bool': (bool, defaults.str_to_bool),
'dict': (dict, eval),
'float': (float, float),
'int': (int, int),
'list': (list, eval),
'tuple': (tuple, eval),
'string': (str, str),
}
OPTIONS = {
key: TYPES_TO_OBJ[opt.type]
for key, opt in defaults.flatten(defaults.NAMESPACES)
}
def convert_celery_options(config):
"""
Converts celery options to apropriate types
"""
for key, value in config.iteritems():
opt_type = OPTIONS.get(key)
if opt_type:
if opt_type[0] == str:
clean_quoted_config(config, key)
elif opt_type[0] is object:
try:
config[key] = eval(value)
except:
pass # any can be anything; even a string
elif not isinstance(value, opt_type[0]):
config[key] = opt_type[1](value)
def includeme(config):
convert_celery_options(config.registry.settings)
default_app.config_from_object(config.registry.settings)
default_app.config = config
<commit_msg>Change dict-comprehension for python 2.6 compatibility.<commit_after>from celery.app import default_app
from celery.app import defaults
def clean_quoted_config(config, key):
# ini doesn't allow quoting, but lets support it to fit with celery
config[key] = config[key].replace('"', '')
TYPES_TO_OBJ = {
'any': (object, None),
'bool': (bool, defaults.str_to_bool),
'dict': (dict, eval),
'float': (float, float),
'int': (int, int),
'list': (list, eval),
'tuple': (tuple, eval),
'string': (str, str),
}
OPTIONS = (
(key, TYPES_TO_OBJ[opt.type])
for key, opt in defaults.flatten(defaults.NAMESPACES)
)
def convert_celery_options(config):
"""
Converts celery options to apropriate types
"""
for key, value in config.iteritems():
opt_type = OPTIONS.get(key)
if opt_type:
if opt_type[0] == str:
clean_quoted_config(config, key)
elif opt_type[0] is object:
try:
config[key] = eval(value)
except:
pass # any can be anything; even a string
elif not isinstance(value, opt_type[0]):
config[key] = opt_type[1](value)
def includeme(config):
convert_celery_options(config.registry.settings)
default_app.config_from_object(config.registry.settings)
default_app.config = config
|
b0b1be44c64ed48c15f9e796b90a21e7e4597df8 | jsrn/encoding.py | jsrn/encoding.py | # -*- coding: utf-8 -*-
import json
import registration
import resources
class JSRNEncoder(json.JSONEncoder):
"""
Encoder for JSRN resources.
"""
def default(self, o):
if isinstance(o, resources.Resource):
state = o.__getstate__()
state['$'] = o._meta.resource_name
return state
return super(JSRNEncoder, self)
def build_object_graph(obj, resource_name=None):
if isinstance(obj, dict):
if not resource_name:
resource_name = obj.pop("$", None)
if resource_name:
resource_type = registration.get_resource(resource_name)
if resource_type:
new_resource = resource_type()
new_resource.__setstate__(obj)
return new_resource
else:
raise TypeError("Unknown resource: %s" % resource_name)
if isinstance(obj, list):
return [build_object_graph(o, resource_name) for o in obj]
return obj
| # -*- coding: utf-8 -*-
import json
import resources
class JSRNEncoder(json.JSONEncoder):
"""
Encoder for JSRN resources.
"""
def default(self, o):
if isinstance(o, resources.Resource):
obj = {f.name: f.to_json(f.value_from_object(o)) for f in o._meta.fields}
obj[resources.RESOURCE_TYPE_FIELD] = o._meta.resource_name
return obj
return super(JSRNEncoder, self)
def build_object_graph(obj, resource_name=None):
"""
From the decoded JSON structure, generate an object graph.
:raises ValidationError: During building of the object graph and issues discovered are raised as a ValidationError.
"""
if isinstance(obj, dict):
return resources.create_resource_from_dict(obj, resource_name)
if isinstance(obj, list):
return [build_object_graph(o, resource_name) for o in obj]
return obj
| Add extra documentation, remove use of __setstate__ and __getstate__ methods. | Add extra documentation, remove use of __setstate__ and __getstate__ methods.
| Python | bsd-3-clause | timsavage/jsrn,timsavage/jsrn | # -*- coding: utf-8 -*-
import json
import registration
import resources
class JSRNEncoder(json.JSONEncoder):
"""
Encoder for JSRN resources.
"""
def default(self, o):
if isinstance(o, resources.Resource):
state = o.__getstate__()
state['$'] = o._meta.resource_name
return state
return super(JSRNEncoder, self)
def build_object_graph(obj, resource_name=None):
if isinstance(obj, dict):
if not resource_name:
resource_name = obj.pop("$", None)
if resource_name:
resource_type = registration.get_resource(resource_name)
if resource_type:
new_resource = resource_type()
new_resource.__setstate__(obj)
return new_resource
else:
raise TypeError("Unknown resource: %s" % resource_name)
if isinstance(obj, list):
return [build_object_graph(o, resource_name) for o in obj]
return obj
Add extra documentation, remove use of __setstate__ and __getstate__ methods. | # -*- coding: utf-8 -*-
import json
import resources
class JSRNEncoder(json.JSONEncoder):
"""
Encoder for JSRN resources.
"""
def default(self, o):
if isinstance(o, resources.Resource):
obj = {f.name: f.to_json(f.value_from_object(o)) for f in o._meta.fields}
obj[resources.RESOURCE_TYPE_FIELD] = o._meta.resource_name
return obj
return super(JSRNEncoder, self)
def build_object_graph(obj, resource_name=None):
"""
From the decoded JSON structure, generate an object graph.
:raises ValidationError: During building of the object graph and issues discovered are raised as a ValidationError.
"""
if isinstance(obj, dict):
return resources.create_resource_from_dict(obj, resource_name)
if isinstance(obj, list):
return [build_object_graph(o, resource_name) for o in obj]
return obj
| <commit_before># -*- coding: utf-8 -*-
import json
import registration
import resources
class JSRNEncoder(json.JSONEncoder):
"""
Encoder for JSRN resources.
"""
def default(self, o):
if isinstance(o, resources.Resource):
state = o.__getstate__()
state['$'] = o._meta.resource_name
return state
return super(JSRNEncoder, self)
def build_object_graph(obj, resource_name=None):
if isinstance(obj, dict):
if not resource_name:
resource_name = obj.pop("$", None)
if resource_name:
resource_type = registration.get_resource(resource_name)
if resource_type:
new_resource = resource_type()
new_resource.__setstate__(obj)
return new_resource
else:
raise TypeError("Unknown resource: %s" % resource_name)
if isinstance(obj, list):
return [build_object_graph(o, resource_name) for o in obj]
return obj
<commit_msg>Add extra documentation, remove use of __setstate__ and __getstate__ methods.<commit_after> | # -*- coding: utf-8 -*-
import json
import resources
class JSRNEncoder(json.JSONEncoder):
"""
Encoder for JSRN resources.
"""
def default(self, o):
if isinstance(o, resources.Resource):
obj = {f.name: f.to_json(f.value_from_object(o)) for f in o._meta.fields}
obj[resources.RESOURCE_TYPE_FIELD] = o._meta.resource_name
return obj
return super(JSRNEncoder, self)
def build_object_graph(obj, resource_name=None):
"""
From the decoded JSON structure, generate an object graph.
:raises ValidationError: During building of the object graph and issues discovered are raised as a ValidationError.
"""
if isinstance(obj, dict):
return resources.create_resource_from_dict(obj, resource_name)
if isinstance(obj, list):
return [build_object_graph(o, resource_name) for o in obj]
return obj
| # -*- coding: utf-8 -*-
import json
import registration
import resources
class JSRNEncoder(json.JSONEncoder):
"""
Encoder for JSRN resources.
"""
def default(self, o):
if isinstance(o, resources.Resource):
state = o.__getstate__()
state['$'] = o._meta.resource_name
return state
return super(JSRNEncoder, self)
def build_object_graph(obj, resource_name=None):
if isinstance(obj, dict):
if not resource_name:
resource_name = obj.pop("$", None)
if resource_name:
resource_type = registration.get_resource(resource_name)
if resource_type:
new_resource = resource_type()
new_resource.__setstate__(obj)
return new_resource
else:
raise TypeError("Unknown resource: %s" % resource_name)
if isinstance(obj, list):
return [build_object_graph(o, resource_name) for o in obj]
return obj
Add extra documentation, remove use of __setstate__ and __getstate__ methods.# -*- coding: utf-8 -*-
import json
import resources
class JSRNEncoder(json.JSONEncoder):
"""
Encoder for JSRN resources.
"""
def default(self, o):
if isinstance(o, resources.Resource):
obj = {f.name: f.to_json(f.value_from_object(o)) for f in o._meta.fields}
obj[resources.RESOURCE_TYPE_FIELD] = o._meta.resource_name
return obj
return super(JSRNEncoder, self)
def build_object_graph(obj, resource_name=None):
"""
From the decoded JSON structure, generate an object graph.
:raises ValidationError: During building of the object graph and issues discovered are raised as a ValidationError.
"""
if isinstance(obj, dict):
return resources.create_resource_from_dict(obj, resource_name)
if isinstance(obj, list):
return [build_object_graph(o, resource_name) for o in obj]
return obj
| <commit_before># -*- coding: utf-8 -*-
import json
import registration
import resources
class JSRNEncoder(json.JSONEncoder):
"""
Encoder for JSRN resources.
"""
def default(self, o):
if isinstance(o, resources.Resource):
state = o.__getstate__()
state['$'] = o._meta.resource_name
return state
return super(JSRNEncoder, self)
def build_object_graph(obj, resource_name=None):
if isinstance(obj, dict):
if not resource_name:
resource_name = obj.pop("$", None)
if resource_name:
resource_type = registration.get_resource(resource_name)
if resource_type:
new_resource = resource_type()
new_resource.__setstate__(obj)
return new_resource
else:
raise TypeError("Unknown resource: %s" % resource_name)
if isinstance(obj, list):
return [build_object_graph(o, resource_name) for o in obj]
return obj
<commit_msg>Add extra documentation, remove use of __setstate__ and __getstate__ methods.<commit_after># -*- coding: utf-8 -*-
import json
import resources
class JSRNEncoder(json.JSONEncoder):
"""
Encoder for JSRN resources.
"""
def default(self, o):
if isinstance(o, resources.Resource):
obj = {f.name: f.to_json(f.value_from_object(o)) for f in o._meta.fields}
obj[resources.RESOURCE_TYPE_FIELD] = o._meta.resource_name
return obj
return super(JSRNEncoder, self)
def build_object_graph(obj, resource_name=None):
"""
From the decoded JSON structure, generate an object graph.
:raises ValidationError: During building of the object graph and issues discovered are raised as a ValidationError.
"""
if isinstance(obj, dict):
return resources.create_resource_from_dict(obj, resource_name)
if isinstance(obj, list):
return [build_object_graph(o, resource_name) for o in obj]
return obj
|
f6e35897ce3b7e335310016c16a3bf76645bf077 | lib/authenticator.py | lib/authenticator.py | #
# HamperAuthenticator is the class to handle the authentication part of the provisioning portal.
# Instantiate with the email and password you want, it'll pass back the cookie jar if successful,
# or an error message on failure
#
from helpers.driver import HamperDriver
class HamperAuthenticator(object):
def __init__(self):
super(HamperAuthenticator, self).__init__()
def sign_in(self, email=None, password=None):
# Grab the HamperDriver singleton
driver = HamperDriver()
# Open the profile URL. This will forward to the sign in page if session is invalid
driver.get("https://developer.apple.com/account/ios/profile/")
email_element = driver.find_element_by_name("appleId")
email_element.send_keys(email)
password_element = driver.find_element_by_name("accountPassword")
password_element.send_keys(password)
driver.find_element_by_id("submitButton2").click() | #
# HamperAuthenticator is the class to handle the authentication part of the provisioning portal.
# Instantiate with the email and password you want, it'll pass back the cookie jar if successful,
# or an error message on failure
#
from helpers.driver import HamperDriver
from helpers.error import HamperError
from termcolor import colored
class HamperAuthenticator(object):
def __init__(self):
super(HamperAuthenticator, self).__init__()
def sign_in(self, email=None, password=None):
# Grab the HamperDriver singleton
driver = HamperDriver()
print colored("Authenticating user...", "blue")
# Open the profile URL. This will forward to the sign in page if session is invalid
driver.get("https://developer.apple.com/account/ios/profile/")
email_element = driver.find_element_by_name("appleId")
email_element.send_keys(email)
password_element = driver.find_element_by_name("accountPassword")
password_element.send_keys(password)
driver.find_element_by_id("submitButton2").click()
if len(driver.find_elements_by_class_name("dserror")) > 0:
raise Exception(HamperError(HamperError.HECodeLogInError, driver.find_element_by_class_name("dserror").get_attribute("innerHTML"))) | Throw exception if invalid login details are used | Throw exception if invalid login details are used
| Python | mit | MobileXLabs/hamper | #
# HamperAuthenticator is the class to handle the authentication part of the provisioning portal.
# Instantiate with the email and password you want, it'll pass back the cookie jar if successful,
# or an error message on failure
#
from helpers.driver import HamperDriver
class HamperAuthenticator(object):
def __init__(self):
super(HamperAuthenticator, self).__init__()
def sign_in(self, email=None, password=None):
# Grab the HamperDriver singleton
driver = HamperDriver()
# Open the profile URL. This will forward to the sign in page if session is invalid
driver.get("https://developer.apple.com/account/ios/profile/")
email_element = driver.find_element_by_name("appleId")
email_element.send_keys(email)
password_element = driver.find_element_by_name("accountPassword")
password_element.send_keys(password)
driver.find_element_by_id("submitButton2").click()Throw exception if invalid login details are used | #
# HamperAuthenticator is the class to handle the authentication part of the provisioning portal.
# Instantiate with the email and password you want, it'll pass back the cookie jar if successful,
# or an error message on failure
#
from helpers.driver import HamperDriver
from helpers.error import HamperError
from termcolor import colored
class HamperAuthenticator(object):
def __init__(self):
super(HamperAuthenticator, self).__init__()
def sign_in(self, email=None, password=None):
# Grab the HamperDriver singleton
driver = HamperDriver()
print colored("Authenticating user...", "blue")
# Open the profile URL. This will forward to the sign in page if session is invalid
driver.get("https://developer.apple.com/account/ios/profile/")
email_element = driver.find_element_by_name("appleId")
email_element.send_keys(email)
password_element = driver.find_element_by_name("accountPassword")
password_element.send_keys(password)
driver.find_element_by_id("submitButton2").click()
if len(driver.find_elements_by_class_name("dserror")) > 0:
raise Exception(HamperError(HamperError.HECodeLogInError, driver.find_element_by_class_name("dserror").get_attribute("innerHTML"))) | <commit_before>#
# HamperAuthenticator is the class to handle the authentication part of the provisioning portal.
# Instantiate with the email and password you want, it'll pass back the cookie jar if successful,
# or an error message on failure
#
from helpers.driver import HamperDriver
class HamperAuthenticator(object):
def __init__(self):
super(HamperAuthenticator, self).__init__()
def sign_in(self, email=None, password=None):
# Grab the HamperDriver singleton
driver = HamperDriver()
# Open the profile URL. This will forward to the sign in page if session is invalid
driver.get("https://developer.apple.com/account/ios/profile/")
email_element = driver.find_element_by_name("appleId")
email_element.send_keys(email)
password_element = driver.find_element_by_name("accountPassword")
password_element.send_keys(password)
driver.find_element_by_id("submitButton2").click()<commit_msg>Throw exception if invalid login details are used<commit_after> | #
# HamperAuthenticator is the class to handle the authentication part of the provisioning portal.
# Instantiate with the email and password you want, it'll pass back the cookie jar if successful,
# or an error message on failure
#
from helpers.driver import HamperDriver
from helpers.error import HamperError
from termcolor import colored
class HamperAuthenticator(object):
def __init__(self):
super(HamperAuthenticator, self).__init__()
def sign_in(self, email=None, password=None):
# Grab the HamperDriver singleton
driver = HamperDriver()
print colored("Authenticating user...", "blue")
# Open the profile URL. This will forward to the sign in page if session is invalid
driver.get("https://developer.apple.com/account/ios/profile/")
email_element = driver.find_element_by_name("appleId")
email_element.send_keys(email)
password_element = driver.find_element_by_name("accountPassword")
password_element.send_keys(password)
driver.find_element_by_id("submitButton2").click()
if len(driver.find_elements_by_class_name("dserror")) > 0:
raise Exception(HamperError(HamperError.HECodeLogInError, driver.find_element_by_class_name("dserror").get_attribute("innerHTML"))) | #
# HamperAuthenticator is the class to handle the authentication part of the provisioning portal.
# Instantiate with the email and password you want, it'll pass back the cookie jar if successful,
# or an error message on failure
#
from helpers.driver import HamperDriver
class HamperAuthenticator(object):
def __init__(self):
super(HamperAuthenticator, self).__init__()
def sign_in(self, email=None, password=None):
# Grab the HamperDriver singleton
driver = HamperDriver()
# Open the profile URL. This will forward to the sign in page if session is invalid
driver.get("https://developer.apple.com/account/ios/profile/")
email_element = driver.find_element_by_name("appleId")
email_element.send_keys(email)
password_element = driver.find_element_by_name("accountPassword")
password_element.send_keys(password)
driver.find_element_by_id("submitButton2").click()Throw exception if invalid login details are used#
# HamperAuthenticator is the class to handle the authentication part of the provisioning portal.
# Instantiate with the email and password you want, it'll pass back the cookie jar if successful,
# or an error message on failure
#
from helpers.driver import HamperDriver
from helpers.error import HamperError
from termcolor import colored
class HamperAuthenticator(object):
def __init__(self):
super(HamperAuthenticator, self).__init__()
def sign_in(self, email=None, password=None):
# Grab the HamperDriver singleton
driver = HamperDriver()
print colored("Authenticating user...", "blue")
# Open the profile URL. This will forward to the sign in page if session is invalid
driver.get("https://developer.apple.com/account/ios/profile/")
email_element = driver.find_element_by_name("appleId")
email_element.send_keys(email)
password_element = driver.find_element_by_name("accountPassword")
password_element.send_keys(password)
driver.find_element_by_id("submitButton2").click()
if len(driver.find_elements_by_class_name("dserror")) > 0:
raise Exception(HamperError(HamperError.HECodeLogInError, driver.find_element_by_class_name("dserror").get_attribute("innerHTML"))) | <commit_before>#
# HamperAuthenticator is the class to handle the authentication part of the provisioning portal.
# Instantiate with the email and password you want, it'll pass back the cookie jar if successful,
# or an error message on failure
#
from helpers.driver import HamperDriver
class HamperAuthenticator(object):
def __init__(self):
super(HamperAuthenticator, self).__init__()
def sign_in(self, email=None, password=None):
# Grab the HamperDriver singleton
driver = HamperDriver()
# Open the profile URL. This will forward to the sign in page if session is invalid
driver.get("https://developer.apple.com/account/ios/profile/")
email_element = driver.find_element_by_name("appleId")
email_element.send_keys(email)
password_element = driver.find_element_by_name("accountPassword")
password_element.send_keys(password)
driver.find_element_by_id("submitButton2").click()<commit_msg>Throw exception if invalid login details are used<commit_after>#
# HamperAuthenticator is the class to handle the authentication part of the provisioning portal.
# Instantiate with the email and password you want, it'll pass back the cookie jar if successful,
# or an error message on failure
#
from helpers.driver import HamperDriver
from helpers.error import HamperError
from termcolor import colored
class HamperAuthenticator(object):
def __init__(self):
super(HamperAuthenticator, self).__init__()
def sign_in(self, email=None, password=None):
# Grab the HamperDriver singleton
driver = HamperDriver()
print colored("Authenticating user...", "blue")
# Open the profile URL. This will forward to the sign in page if session is invalid
driver.get("https://developer.apple.com/account/ios/profile/")
email_element = driver.find_element_by_name("appleId")
email_element.send_keys(email)
password_element = driver.find_element_by_name("accountPassword")
password_element.send_keys(password)
driver.find_element_by_id("submitButton2").click()
if len(driver.find_elements_by_class_name("dserror")) > 0:
raise Exception(HamperError(HamperError.HECodeLogInError, driver.find_element_by_class_name("dserror").get_attribute("innerHTML"))) |
15e9e3231386cb5a194e184e7b24fed8030f0d41 | Server/server.py | Server/server.py | import serial
import schedule
import time
from flask import Flask, request
from threading import Thread
from influxdb import InfluxDBClient
COM_PORT = 2
BAUDRATE = 9600
READ_THREAD = 10
DB_HOST = 'localhost'
DB_HOST_PORT = 8086
DB_NAME = 'awarehouse'
DB_PASS = 'admin'
DB_USER = 'admin'
influxdb = InfluxDBClient(DB_HOST, DB_HOST_PORT, DB_USER, DB_PASS, DB_NAME)
ser = serial.Serial(COM_PORT, BAUDRATE)
app = Flask(__name__)
start_time = time.time()
def get_sensors():
ser.write('r')
json = ser.readline()
influxdb.write_points([json])
print json
def run_schedule():
while 1:
schedule.run_pending()
time.sleep(1)
@app.route('/', methods=['GET'])
def index():
return '<html>test</html>'
if __name__ == '__main__':
schedule.every(READ_THREAD).seconds.do(get_sensors)
t = Thread(target=run_schedule)
t.start()
app.run(debug=True, use_reloader=False, host='0.0.0.0', port=5000)
ser.close()
| import serial
import schedule
import time
import json
from flask import Flask, request
from threading import Thread
from influxdb import InfluxDBClient
COM_PORT = 2
BAUDRATE = 9600
READ_SENSORS_TIMER = 1
DB_HOST = '192.168.1.73'
DB_PORT = 8086
DB_NAME = 'awarehouse'
DB_PASS = 'admin'
DB_USER = 'admin'
influxdb = InfluxDBClient(DB_HOST, DB_PORT, DB_USER, DB_PASS, DB_NAME)
ser = serial.Serial(COM_PORT, BAUDRATE)
app = Flask(__name__)
start_time = time.time()
def get_sensors():
# ser.write('r')
json = "fixme" # ser.readline()
influxdb.write_points([{
"points": [[20.44, 30, 231]],
"name": "sensors",
"columns": ["temp1", "light_sensor", "light"]
}])
print json
def run_schedule():
while 1:
schedule.run_pending()
time.sleep(1)
@app.route('/', methods=['GET'])
def index():
return '<html>test</html>'
if __name__ == '__main__':
schedule.every(READ_SENSORS_TIMER).seconds.do(get_sensors)
t = Thread(target=run_schedule)
t.start()
app.run(debug=True, use_reloader=False, host='0.0.0.0', port=5000)
ser.close()
| Add example points for InfluxDB | Add example points for InfluxDB
| Python | mit | jpdias/aWareHouse,jpdias/aWareHouse,jpdias/aWareHouse,jpdias/aWareHouse | import serial
import schedule
import time
from flask import Flask, request
from threading import Thread
from influxdb import InfluxDBClient
COM_PORT = 2
BAUDRATE = 9600
READ_THREAD = 10
DB_HOST = 'localhost'
DB_HOST_PORT = 8086
DB_NAME = 'awarehouse'
DB_PASS = 'admin'
DB_USER = 'admin'
influxdb = InfluxDBClient(DB_HOST, DB_HOST_PORT, DB_USER, DB_PASS, DB_NAME)
ser = serial.Serial(COM_PORT, BAUDRATE)
app = Flask(__name__)
start_time = time.time()
def get_sensors():
ser.write('r')
json = ser.readline()
influxdb.write_points([json])
print json
def run_schedule():
while 1:
schedule.run_pending()
time.sleep(1)
@app.route('/', methods=['GET'])
def index():
return '<html>test</html>'
if __name__ == '__main__':
schedule.every(READ_THREAD).seconds.do(get_sensors)
t = Thread(target=run_schedule)
t.start()
app.run(debug=True, use_reloader=False, host='0.0.0.0', port=5000)
ser.close()
Add example points for InfluxDB | import serial
import schedule
import time
import json
from flask import Flask, request
from threading import Thread
from influxdb import InfluxDBClient
COM_PORT = 2
BAUDRATE = 9600
READ_SENSORS_TIMER = 1
DB_HOST = '192.168.1.73'
DB_PORT = 8086
DB_NAME = 'awarehouse'
DB_PASS = 'admin'
DB_USER = 'admin'
influxdb = InfluxDBClient(DB_HOST, DB_PORT, DB_USER, DB_PASS, DB_NAME)
ser = serial.Serial(COM_PORT, BAUDRATE)
app = Flask(__name__)
start_time = time.time()
def get_sensors():
# ser.write('r')
json = "fixme" # ser.readline()
influxdb.write_points([{
"points": [[20.44, 30, 231]],
"name": "sensors",
"columns": ["temp1", "light_sensor", "light"]
}])
print json
def run_schedule():
while 1:
schedule.run_pending()
time.sleep(1)
@app.route('/', methods=['GET'])
def index():
return '<html>test</html>'
if __name__ == '__main__':
schedule.every(READ_SENSORS_TIMER).seconds.do(get_sensors)
t = Thread(target=run_schedule)
t.start()
app.run(debug=True, use_reloader=False, host='0.0.0.0', port=5000)
ser.close()
| <commit_before>import serial
import schedule
import time
from flask import Flask, request
from threading import Thread
from influxdb import InfluxDBClient
COM_PORT = 2
BAUDRATE = 9600
READ_THREAD = 10
DB_HOST = 'localhost'
DB_HOST_PORT = 8086
DB_NAME = 'awarehouse'
DB_PASS = 'admin'
DB_USER = 'admin'
influxdb = InfluxDBClient(DB_HOST, DB_HOST_PORT, DB_USER, DB_PASS, DB_NAME)
ser = serial.Serial(COM_PORT, BAUDRATE)
app = Flask(__name__)
start_time = time.time()
def get_sensors():
ser.write('r')
json = ser.readline()
influxdb.write_points([json])
print json
def run_schedule():
while 1:
schedule.run_pending()
time.sleep(1)
@app.route('/', methods=['GET'])
def index():
return '<html>test</html>'
if __name__ == '__main__':
schedule.every(READ_THREAD).seconds.do(get_sensors)
t = Thread(target=run_schedule)
t.start()
app.run(debug=True, use_reloader=False, host='0.0.0.0', port=5000)
ser.close()
<commit_msg>Add example points for InfluxDB<commit_after> | import serial
import schedule
import time
import json
from flask import Flask, request
from threading import Thread
from influxdb import InfluxDBClient
COM_PORT = 2
BAUDRATE = 9600
READ_SENSORS_TIMER = 1
DB_HOST = '192.168.1.73'
DB_PORT = 8086
DB_NAME = 'awarehouse'
DB_PASS = 'admin'
DB_USER = 'admin'
influxdb = InfluxDBClient(DB_HOST, DB_PORT, DB_USER, DB_PASS, DB_NAME)
ser = serial.Serial(COM_PORT, BAUDRATE)
app = Flask(__name__)
start_time = time.time()
def get_sensors():
# ser.write('r')
json = "fixme" # ser.readline()
influxdb.write_points([{
"points": [[20.44, 30, 231]],
"name": "sensors",
"columns": ["temp1", "light_sensor", "light"]
}])
print json
def run_schedule():
while 1:
schedule.run_pending()
time.sleep(1)
@app.route('/', methods=['GET'])
def index():
return '<html>test</html>'
if __name__ == '__main__':
schedule.every(READ_SENSORS_TIMER).seconds.do(get_sensors)
t = Thread(target=run_schedule)
t.start()
app.run(debug=True, use_reloader=False, host='0.0.0.0', port=5000)
ser.close()
| import serial
import schedule
import time
from flask import Flask, request
from threading import Thread
from influxdb import InfluxDBClient
COM_PORT = 2
BAUDRATE = 9600
READ_THREAD = 10
DB_HOST = 'localhost'
DB_HOST_PORT = 8086
DB_NAME = 'awarehouse'
DB_PASS = 'admin'
DB_USER = 'admin'
influxdb = InfluxDBClient(DB_HOST, DB_HOST_PORT, DB_USER, DB_PASS, DB_NAME)
ser = serial.Serial(COM_PORT, BAUDRATE)
app = Flask(__name__)
start_time = time.time()
def get_sensors():
ser.write('r')
json = ser.readline()
influxdb.write_points([json])
print json
def run_schedule():
while 1:
schedule.run_pending()
time.sleep(1)
@app.route('/', methods=['GET'])
def index():
return '<html>test</html>'
if __name__ == '__main__':
schedule.every(READ_THREAD).seconds.do(get_sensors)
t = Thread(target=run_schedule)
t.start()
app.run(debug=True, use_reloader=False, host='0.0.0.0', port=5000)
ser.close()
Add example points for InfluxDBimport serial
import schedule
import time
import json
from flask import Flask, request
from threading import Thread
from influxdb import InfluxDBClient
COM_PORT = 2
BAUDRATE = 9600
READ_SENSORS_TIMER = 1
DB_HOST = '192.168.1.73'
DB_PORT = 8086
DB_NAME = 'awarehouse'
DB_PASS = 'admin'
DB_USER = 'admin'
influxdb = InfluxDBClient(DB_HOST, DB_PORT, DB_USER, DB_PASS, DB_NAME)
ser = serial.Serial(COM_PORT, BAUDRATE)
app = Flask(__name__)
start_time = time.time()
def get_sensors():
# ser.write('r')
json = "fixme" # ser.readline()
influxdb.write_points([{
"points": [[20.44, 30, 231]],
"name": "sensors",
"columns": ["temp1", "light_sensor", "light"]
}])
print json
def run_schedule():
while 1:
schedule.run_pending()
time.sleep(1)
@app.route('/', methods=['GET'])
def index():
return '<html>test</html>'
if __name__ == '__main__':
schedule.every(READ_SENSORS_TIMER).seconds.do(get_sensors)
t = Thread(target=run_schedule)
t.start()
app.run(debug=True, use_reloader=False, host='0.0.0.0', port=5000)
ser.close()
| <commit_before>import serial
import schedule
import time
from flask import Flask, request
from threading import Thread
from influxdb import InfluxDBClient
COM_PORT = 2
BAUDRATE = 9600
READ_THREAD = 10
DB_HOST = 'localhost'
DB_HOST_PORT = 8086
DB_NAME = 'awarehouse'
DB_PASS = 'admin'
DB_USER = 'admin'
influxdb = InfluxDBClient(DB_HOST, DB_HOST_PORT, DB_USER, DB_PASS, DB_NAME)
ser = serial.Serial(COM_PORT, BAUDRATE)
app = Flask(__name__)
start_time = time.time()
def get_sensors():
ser.write('r')
json = ser.readline()
influxdb.write_points([json])
print json
def run_schedule():
while 1:
schedule.run_pending()
time.sleep(1)
@app.route('/', methods=['GET'])
def index():
return '<html>test</html>'
if __name__ == '__main__':
schedule.every(READ_THREAD).seconds.do(get_sensors)
t = Thread(target=run_schedule)
t.start()
app.run(debug=True, use_reloader=False, host='0.0.0.0', port=5000)
ser.close()
<commit_msg>Add example points for InfluxDB<commit_after>import serial
import schedule
import time
import json
from flask import Flask, request
from threading import Thread
from influxdb import InfluxDBClient
COM_PORT = 2
BAUDRATE = 9600
READ_SENSORS_TIMER = 1
DB_HOST = '192.168.1.73'
DB_PORT = 8086
DB_NAME = 'awarehouse'
DB_PASS = 'admin'
DB_USER = 'admin'
influxdb = InfluxDBClient(DB_HOST, DB_PORT, DB_USER, DB_PASS, DB_NAME)
ser = serial.Serial(COM_PORT, BAUDRATE)
app = Flask(__name__)
start_time = time.time()
def get_sensors():
# ser.write('r')
json = "fixme" # ser.readline()
influxdb.write_points([{
"points": [[20.44, 30, 231]],
"name": "sensors",
"columns": ["temp1", "light_sensor", "light"]
}])
print json
def run_schedule():
while 1:
schedule.run_pending()
time.sleep(1)
@app.route('/', methods=['GET'])
def index():
return '<html>test</html>'
if __name__ == '__main__':
schedule.every(READ_SENSORS_TIMER).seconds.do(get_sensors)
t = Thread(target=run_schedule)
t.start()
app.run(debug=True, use_reloader=False, host='0.0.0.0', port=5000)
ser.close()
|
4b93e5aa8c0ce90189fb852e75ee213d3be0d01a | flicks/base/urls.py | flicks/base/urls.py | from django.conf.urls.defaults import patterns, url
from flicks.base import views
urlpatterns = patterns('',
url(r'^/?$', views.home, name='flicks.base.home'),
url(r'^strings/?$', views.strings, name='flicks.base.strings'),
)
| from django.conf.urls.defaults import patterns, url
from flicks.base import views
urlpatterns = patterns('',
url(r'^/?$', views.home, name='flicks.base.home'),
url(r'^faq/?$', views.faq, name='flicks.base.faq'),
url(r'^strings/?$', views.strings, name='flicks.base.strings'),
)
| Add back in FAQ url that was removed accidentally. | Add back in FAQ url that was removed accidentally.
| Python | bsd-3-clause | mozilla/firefox-flicks,mozilla/firefox-flicks,mozilla/firefox-flicks,mozilla/firefox-flicks | from django.conf.urls.defaults import patterns, url
from flicks.base import views
urlpatterns = patterns('',
url(r'^/?$', views.home, name='flicks.base.home'),
url(r'^strings/?$', views.strings, name='flicks.base.strings'),
)
Add back in FAQ url that was removed accidentally. | from django.conf.urls.defaults import patterns, url
from flicks.base import views
urlpatterns = patterns('',
url(r'^/?$', views.home, name='flicks.base.home'),
url(r'^faq/?$', views.faq, name='flicks.base.faq'),
url(r'^strings/?$', views.strings, name='flicks.base.strings'),
)
| <commit_before>from django.conf.urls.defaults import patterns, url
from flicks.base import views
urlpatterns = patterns('',
url(r'^/?$', views.home, name='flicks.base.home'),
url(r'^strings/?$', views.strings, name='flicks.base.strings'),
)
<commit_msg>Add back in FAQ url that was removed accidentally.<commit_after> | from django.conf.urls.defaults import patterns, url
from flicks.base import views
urlpatterns = patterns('',
url(r'^/?$', views.home, name='flicks.base.home'),
url(r'^faq/?$', views.faq, name='flicks.base.faq'),
url(r'^strings/?$', views.strings, name='flicks.base.strings'),
)
| from django.conf.urls.defaults import patterns, url
from flicks.base import views
urlpatterns = patterns('',
url(r'^/?$', views.home, name='flicks.base.home'),
url(r'^strings/?$', views.strings, name='flicks.base.strings'),
)
Add back in FAQ url that was removed accidentally.from django.conf.urls.defaults import patterns, url
from flicks.base import views
urlpatterns = patterns('',
url(r'^/?$', views.home, name='flicks.base.home'),
url(r'^faq/?$', views.faq, name='flicks.base.faq'),
url(r'^strings/?$', views.strings, name='flicks.base.strings'),
)
| <commit_before>from django.conf.urls.defaults import patterns, url
from flicks.base import views
urlpatterns = patterns('',
url(r'^/?$', views.home, name='flicks.base.home'),
url(r'^strings/?$', views.strings, name='flicks.base.strings'),
)
<commit_msg>Add back in FAQ url that was removed accidentally.<commit_after>from django.conf.urls.defaults import patterns, url
from flicks.base import views
urlpatterns = patterns('',
url(r'^/?$', views.home, name='flicks.base.home'),
url(r'^faq/?$', views.faq, name='flicks.base.faq'),
url(r'^strings/?$', views.strings, name='flicks.base.strings'),
)
|
b236a7961dbc9930ed135602cbed783818bde16e | kolibri/utils/tests/test_cli_at_import.py | kolibri/utils/tests/test_cli_at_import.py | """
Tests for `kolibri.utils.cli` module.
These tests deliberately omit `@pytest.mark.django_db` from the tests,
so that any attempt to access the Django database during the running
of these cli methods will result in an error and test failure.
"""
from __future__ import absolute_import
from __future__ import print_function
from mock import patch
@patch("sqlalchemy.create_engine")
def test_status_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.status.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("sqlalchemy.create_engine")
def test_stop_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.stop.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
| """
Tests for `kolibri.utils.cli` module.
These tests deliberately omit `@pytest.mark.django_db` from the tests,
so that any attempt to access the Django database during the running
of these cli methods will result in an error and test failure.
"""
from __future__ import absolute_import
from __future__ import print_function
from mock import patch
@patch("sqlalchemy.create_engine")
def test_status_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.status.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("sqlalchemy.create_engine")
def test_stop_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.stop.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("kolibri.utils.options.read_options_file")
def test_import_no_options_evaluation(read_options_mock):
from kolibri.utils import cli # noqa F401
read_options_mock.assert_not_called()
| Add regression test against options evaluation during import in cli. | Add regression test against options evaluation during import in cli.
| Python | mit | learningequality/kolibri,learningequality/kolibri,learningequality/kolibri,indirectlylit/kolibri,indirectlylit/kolibri,learningequality/kolibri,indirectlylit/kolibri,indirectlylit/kolibri | """
Tests for `kolibri.utils.cli` module.
These tests deliberately omit `@pytest.mark.django_db` from the tests,
so that any attempt to access the Django database during the running
of these cli methods will result in an error and test failure.
"""
from __future__ import absolute_import
from __future__ import print_function
from mock import patch
@patch("sqlalchemy.create_engine")
def test_status_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.status.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("sqlalchemy.create_engine")
def test_stop_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.stop.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
Add regression test against options evaluation during import in cli. | """
Tests for `kolibri.utils.cli` module.
These tests deliberately omit `@pytest.mark.django_db` from the tests,
so that any attempt to access the Django database during the running
of these cli methods will result in an error and test failure.
"""
from __future__ import absolute_import
from __future__ import print_function
from mock import patch
@patch("sqlalchemy.create_engine")
def test_status_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.status.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("sqlalchemy.create_engine")
def test_stop_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.stop.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("kolibri.utils.options.read_options_file")
def test_import_no_options_evaluation(read_options_mock):
from kolibri.utils import cli # noqa F401
read_options_mock.assert_not_called()
| <commit_before>"""
Tests for `kolibri.utils.cli` module.
These tests deliberately omit `@pytest.mark.django_db` from the tests,
so that any attempt to access the Django database during the running
of these cli methods will result in an error and test failure.
"""
from __future__ import absolute_import
from __future__ import print_function
from mock import patch
@patch("sqlalchemy.create_engine")
def test_status_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.status.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("sqlalchemy.create_engine")
def test_stop_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.stop.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
<commit_msg>Add regression test against options evaluation during import in cli.<commit_after> | """
Tests for `kolibri.utils.cli` module.
These tests deliberately omit `@pytest.mark.django_db` from the tests,
so that any attempt to access the Django database during the running
of these cli methods will result in an error and test failure.
"""
from __future__ import absolute_import
from __future__ import print_function
from mock import patch
@patch("sqlalchemy.create_engine")
def test_status_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.status.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("sqlalchemy.create_engine")
def test_stop_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.stop.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("kolibri.utils.options.read_options_file")
def test_import_no_options_evaluation(read_options_mock):
from kolibri.utils import cli # noqa F401
read_options_mock.assert_not_called()
| """
Tests for `kolibri.utils.cli` module.
These tests deliberately omit `@pytest.mark.django_db` from the tests,
so that any attempt to access the Django database during the running
of these cli methods will result in an error and test failure.
"""
from __future__ import absolute_import
from __future__ import print_function
from mock import patch
@patch("sqlalchemy.create_engine")
def test_status_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.status.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("sqlalchemy.create_engine")
def test_stop_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.stop.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
Add regression test against options evaluation during import in cli."""
Tests for `kolibri.utils.cli` module.
These tests deliberately omit `@pytest.mark.django_db` from the tests,
so that any attempt to access the Django database during the running
of these cli methods will result in an error and test failure.
"""
from __future__ import absolute_import
from __future__ import print_function
from mock import patch
@patch("sqlalchemy.create_engine")
def test_status_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.status.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("sqlalchemy.create_engine")
def test_stop_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.stop.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("kolibri.utils.options.read_options_file")
def test_import_no_options_evaluation(read_options_mock):
from kolibri.utils import cli # noqa F401
read_options_mock.assert_not_called()
| <commit_before>"""
Tests for `kolibri.utils.cli` module.
These tests deliberately omit `@pytest.mark.django_db` from the tests,
so that any attempt to access the Django database during the running
of these cli methods will result in an error and test failure.
"""
from __future__ import absolute_import
from __future__ import print_function
from mock import patch
@patch("sqlalchemy.create_engine")
def test_status_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.status.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("sqlalchemy.create_engine")
def test_stop_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.stop.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
<commit_msg>Add regression test against options evaluation during import in cli.<commit_after>"""
Tests for `kolibri.utils.cli` module.
These tests deliberately omit `@pytest.mark.django_db` from the tests,
so that any attempt to access the Django database during the running
of these cli methods will result in an error and test failure.
"""
from __future__ import absolute_import
from __future__ import print_function
from mock import patch
@patch("sqlalchemy.create_engine")
def test_status_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.status.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("sqlalchemy.create_engine")
def test_stop_no_db_access(create_engine_mock):
"""
Tests that status does not try to access the database
"""
try:
from kolibri.utils import cli
cli.stop.callback()
except SystemExit:
pass
create_engine_mock.assert_not_called()
@patch("kolibri.utils.options.read_options_file")
def test_import_no_options_evaluation(read_options_mock):
from kolibri.utils import cli # noqa F401
read_options_mock.assert_not_called()
|
013277886a3c9f5d4ab99f11da6a447562fe9e46 | benchexec/tools/cmaesfuzz.py | benchexec/tools/cmaesfuzz.py | # This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.util as util
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Fuzzing with stochastic optimization guided by CMA-ES
Hynsung Kim, Gidon Ernst
https://github.com/lazygrey/fuzzing_with_cmaes
"""
REQUIRED_PATHS = [
"fuzzer",
"fuzzer.py",
"cma",
"verifiers_bytes",
"verifiers_real",
]
def executable(self):
return util.find_executable("fuzzer")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "CMA-ES Fuzz"
def get_value_from_output(self, lines, identifier):
for line in reversed(lines):
pattern = identifier
if pattern[-1] != ":":
pattern += ":"
match = re.match("^" + pattern + "([^(]*)", line)
if match and match.group(1):
return match.group(1).strip()
return None
| # This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool2):
"""
Fuzzing with stochastic optimization guided by CMA-ES
Hynsung Kim, Gidon Ernst
https://github.com/lazygrey/fuzzing_with_cmaes
"""
REQUIRED_PATHS = [
"fuzzer",
"fuzzer.py",
"cma",
"verifiers_bytes",
"verifiers_real",
]
def executable(self, tool_locator):
return tool_locator.find_executable("fuzzer")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "CMA-ES Fuzz"
def get_value_from_output(self, output, identifier):
for line in reversed(output):
if line.startswith(identifier):
return line[len(identifier):]
return None
| Update module to version 2 | Update module to version 2
| Python | apache-2.0 | sosy-lab/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,ultimate-pa/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,sosy-lab/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,ultimate-pa/benchexec,dbeyer/benchexec,dbeyer/benchexec,sosy-lab/benchexec,dbeyer/benchexec,ultimate-pa/benchexec | # This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.util as util
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Fuzzing with stochastic optimization guided by CMA-ES
Hynsung Kim, Gidon Ernst
https://github.com/lazygrey/fuzzing_with_cmaes
"""
REQUIRED_PATHS = [
"fuzzer",
"fuzzer.py",
"cma",
"verifiers_bytes",
"verifiers_real",
]
def executable(self):
return util.find_executable("fuzzer")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "CMA-ES Fuzz"
def get_value_from_output(self, lines, identifier):
for line in reversed(lines):
pattern = identifier
if pattern[-1] != ":":
pattern += ":"
match = re.match("^" + pattern + "([^(]*)", line)
if match and match.group(1):
return match.group(1).strip()
return None
Update module to version 2 | # This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool2):
"""
Fuzzing with stochastic optimization guided by CMA-ES
Hynsung Kim, Gidon Ernst
https://github.com/lazygrey/fuzzing_with_cmaes
"""
REQUIRED_PATHS = [
"fuzzer",
"fuzzer.py",
"cma",
"verifiers_bytes",
"verifiers_real",
]
def executable(self, tool_locator):
return tool_locator.find_executable("fuzzer")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "CMA-ES Fuzz"
def get_value_from_output(self, output, identifier):
for line in reversed(output):
if line.startswith(identifier):
return line[len(identifier):]
return None
| <commit_before># This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.util as util
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Fuzzing with stochastic optimization guided by CMA-ES
Hynsung Kim, Gidon Ernst
https://github.com/lazygrey/fuzzing_with_cmaes
"""
REQUIRED_PATHS = [
"fuzzer",
"fuzzer.py",
"cma",
"verifiers_bytes",
"verifiers_real",
]
def executable(self):
return util.find_executable("fuzzer")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "CMA-ES Fuzz"
def get_value_from_output(self, lines, identifier):
for line in reversed(lines):
pattern = identifier
if pattern[-1] != ":":
pattern += ":"
match = re.match("^" + pattern + "([^(]*)", line)
if match and match.group(1):
return match.group(1).strip()
return None
<commit_msg>Update module to version 2<commit_after> | # This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool2):
"""
Fuzzing with stochastic optimization guided by CMA-ES
Hynsung Kim, Gidon Ernst
https://github.com/lazygrey/fuzzing_with_cmaes
"""
REQUIRED_PATHS = [
"fuzzer",
"fuzzer.py",
"cma",
"verifiers_bytes",
"verifiers_real",
]
def executable(self, tool_locator):
return tool_locator.find_executable("fuzzer")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "CMA-ES Fuzz"
def get_value_from_output(self, output, identifier):
for line in reversed(output):
if line.startswith(identifier):
return line[len(identifier):]
return None
| # This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.util as util
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Fuzzing with stochastic optimization guided by CMA-ES
Hynsung Kim, Gidon Ernst
https://github.com/lazygrey/fuzzing_with_cmaes
"""
REQUIRED_PATHS = [
"fuzzer",
"fuzzer.py",
"cma",
"verifiers_bytes",
"verifiers_real",
]
def executable(self):
return util.find_executable("fuzzer")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "CMA-ES Fuzz"
def get_value_from_output(self, lines, identifier):
for line in reversed(lines):
pattern = identifier
if pattern[-1] != ":":
pattern += ":"
match = re.match("^" + pattern + "([^(]*)", line)
if match and match.group(1):
return match.group(1).strip()
return None
Update module to version 2# This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool2):
"""
Fuzzing with stochastic optimization guided by CMA-ES
Hynsung Kim, Gidon Ernst
https://github.com/lazygrey/fuzzing_with_cmaes
"""
REQUIRED_PATHS = [
"fuzzer",
"fuzzer.py",
"cma",
"verifiers_bytes",
"verifiers_real",
]
def executable(self, tool_locator):
return tool_locator.find_executable("fuzzer")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "CMA-ES Fuzz"
def get_value_from_output(self, output, identifier):
for line in reversed(output):
if line.startswith(identifier):
return line[len(identifier):]
return None
| <commit_before># This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.util as util
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool):
"""
Fuzzing with stochastic optimization guided by CMA-ES
Hynsung Kim, Gidon Ernst
https://github.com/lazygrey/fuzzing_with_cmaes
"""
REQUIRED_PATHS = [
"fuzzer",
"fuzzer.py",
"cma",
"verifiers_bytes",
"verifiers_real",
]
def executable(self):
return util.find_executable("fuzzer")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "CMA-ES Fuzz"
def get_value_from_output(self, lines, identifier):
for line in reversed(lines):
pattern = identifier
if pattern[-1] != ":":
pattern += ":"
match = re.match("^" + pattern + "([^(]*)", line)
if match and match.group(1):
return match.group(1).strip()
return None
<commit_msg>Update module to version 2<commit_after># This file is part of BenchExec, a framework for reliable benchmarking:
# https://github.com/sosy-lab/benchexec
#
# SPDX-FileCopyrightText: 2007-2020 Dirk Beyer <https://www.sosy-lab.org>
#
# SPDX-License-Identifier: Apache-2.0
import re
import benchexec.tools.template
class Tool(benchexec.tools.template.BaseTool2):
"""
Fuzzing with stochastic optimization guided by CMA-ES
Hynsung Kim, Gidon Ernst
https://github.com/lazygrey/fuzzing_with_cmaes
"""
REQUIRED_PATHS = [
"fuzzer",
"fuzzer.py",
"cma",
"verifiers_bytes",
"verifiers_real",
]
def executable(self, tool_locator):
return tool_locator.find_executable("fuzzer")
def version(self, executable):
return self._version_from_tool(executable)
def name(self):
return "CMA-ES Fuzz"
def get_value_from_output(self, output, identifier):
for line in reversed(output):
if line.startswith(identifier):
return line[len(identifier):]
return None
|
cde8c1b4c89a2e0ef3765372b4838373d5729cdb | alg_topological_sort.py | alg_topological_sort.py | def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex]:
if neighbor_vertex not in visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': {}
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
| def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex] - visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': set()
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
| Revise for loop to find neighbor vertices | Revise for loop to find neighbor vertices
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex]:
if neighbor_vertex not in visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': {}
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
Revise for loop to find neighbor vertices | def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex] - visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': set()
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
| <commit_before>def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex]:
if neighbor_vertex not in visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': {}
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
<commit_msg>Revise for loop to find neighbor vertices<commit_after> | def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex] - visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': set()
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
| def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex]:
if neighbor_vertex not in visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': {}
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
Revise for loop to find neighbor verticesdef topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex] - visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': set()
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
| <commit_before>def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex]:
if neighbor_vertex not in visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': {}
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
<commit_msg>Revise for loop to find neighbor vertices<commit_after>def topological_sort_recur(adjacency_dict, start_vertex,
visited_set, finish_ls):
"""Topological Sorting by Recursion."""
visited_set.add(start_vertex)
for neighbor_vertex in adjacency_dict[start_vertex] - visited_set:
topological_sort_recur(
adjacency_dict, neighbor_vertex,
visited_set, finish_ls)
finish_ls.insert(0, start_vertex)
print(finish_ls)
def topological_sort(adjacency_dict):
"""Topological Sorting for Directed Acyclic Graph (DAG)."""
visited_set = set()
finish_ls = []
for vertex in adjacency_dict:
if vertex not in visited_set:
topological_sort_recur(
adjacency_dict, vertex,
visited_set, finish_ls)
print(finish_ls)
def main():
# DAG.
dag_adjacency_dict = {
'A': {'D'},
'B': {'D'},
'C': {'D'},
'D': {'G', 'E'},
'E': {'J'},
'F': {'G'},
'G': {'I'},
'I': {'J'},
'J': set()
}
topological_sort(dag_adjacency_dict)
if __name__ == '__main__':
main()
|
5496f501ff7da677ee76c442b6a5b544d595ce1d | epages/__init__.py | epages/__init__.py | # coding: utf-8
from epages.client import *
from epages.product_service import *
from epages.shop_service import *
from epages.shop import *
| # coding: utf-8
from epages.client import *
from epages.shop_service import *
from epages.shop import *
| Remove product_service from epages package | Remove product_service from epages package
| Python | mit | ooz/epages-rest-python,ooz/epages-rest-python | # coding: utf-8
from epages.client import *
from epages.product_service import *
from epages.shop_service import *
from epages.shop import *
Remove product_service from epages package | # coding: utf-8
from epages.client import *
from epages.shop_service import *
from epages.shop import *
| <commit_before># coding: utf-8
from epages.client import *
from epages.product_service import *
from epages.shop_service import *
from epages.shop import *
<commit_msg>Remove product_service from epages package<commit_after> | # coding: utf-8
from epages.client import *
from epages.shop_service import *
from epages.shop import *
| # coding: utf-8
from epages.client import *
from epages.product_service import *
from epages.shop_service import *
from epages.shop import *
Remove product_service from epages package# coding: utf-8
from epages.client import *
from epages.shop_service import *
from epages.shop import *
| <commit_before># coding: utf-8
from epages.client import *
from epages.product_service import *
from epages.shop_service import *
from epages.shop import *
<commit_msg>Remove product_service from epages package<commit_after># coding: utf-8
from epages.client import *
from epages.shop_service import *
from epages.shop import *
|
a52bf3cbd84a6e1c9b0a685d3267934ec0ec0036 | misc/decode-mirax.py | misc/decode-mirax.py | #!/usr/bin/python
import struct, sys
f = open(sys.argv[1])
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or int(possible_lineno) != possible_lineno:
print "%11d" % (n)
else:
print "%11d %10d" % (n, possible_lineno)
except:
pass
| #!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
filesize = os.stat(sys.argv[1]).st_size
num_items = (filesize - HEADER_OFFSET) / 4
skipped = False
i = 0
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or possible_lineno > num_items \
or int(possible_lineno) != possible_lineno:
s = "%7d %11d" % (i, n)
else:
s = "%7d %11d %10d -> %10s" % (i, n, possible_lineno, \
"%+d" % (possible_lineno-i))
i = i+1
if n == 0:
skipped = True
continue
if skipped:
skipped = False
print '%7s %11s %10s' % ('.','.','.')
print s
except:
pass
| Update decode mirax a bit | Update decode mirax a bit
| Python | lgpl-2.1 | openslide/openslide,openslide/openslide,openslide/openslide,openslide/openslide | #!/usr/bin/python
import struct, sys
f = open(sys.argv[1])
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or int(possible_lineno) != possible_lineno:
print "%11d" % (n)
else:
print "%11d %10d" % (n, possible_lineno)
except:
pass
Update decode mirax a bit | #!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
filesize = os.stat(sys.argv[1]).st_size
num_items = (filesize - HEADER_OFFSET) / 4
skipped = False
i = 0
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or possible_lineno > num_items \
or int(possible_lineno) != possible_lineno:
s = "%7d %11d" % (i, n)
else:
s = "%7d %11d %10d -> %10s" % (i, n, possible_lineno, \
"%+d" % (possible_lineno-i))
i = i+1
if n == 0:
skipped = True
continue
if skipped:
skipped = False
print '%7s %11s %10s' % ('.','.','.')
print s
except:
pass
| <commit_before>#!/usr/bin/python
import struct, sys
f = open(sys.argv[1])
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or int(possible_lineno) != possible_lineno:
print "%11d" % (n)
else:
print "%11d %10d" % (n, possible_lineno)
except:
pass
<commit_msg>Update decode mirax a bit<commit_after> | #!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
filesize = os.stat(sys.argv[1]).st_size
num_items = (filesize - HEADER_OFFSET) / 4
skipped = False
i = 0
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or possible_lineno > num_items \
or int(possible_lineno) != possible_lineno:
s = "%7d %11d" % (i, n)
else:
s = "%7d %11d %10d -> %10s" % (i, n, possible_lineno, \
"%+d" % (possible_lineno-i))
i = i+1
if n == 0:
skipped = True
continue
if skipped:
skipped = False
print '%7s %11s %10s' % ('.','.','.')
print s
except:
pass
| #!/usr/bin/python
import struct, sys
f = open(sys.argv[1])
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or int(possible_lineno) != possible_lineno:
print "%11d" % (n)
else:
print "%11d %10d" % (n, possible_lineno)
except:
pass
Update decode mirax a bit#!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
filesize = os.stat(sys.argv[1]).st_size
num_items = (filesize - HEADER_OFFSET) / 4
skipped = False
i = 0
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or possible_lineno > num_items \
or int(possible_lineno) != possible_lineno:
s = "%7d %11d" % (i, n)
else:
s = "%7d %11d %10d -> %10s" % (i, n, possible_lineno, \
"%+d" % (possible_lineno-i))
i = i+1
if n == 0:
skipped = True
continue
if skipped:
skipped = False
print '%7s %11s %10s' % ('.','.','.')
print s
except:
pass
| <commit_before>#!/usr/bin/python
import struct, sys
f = open(sys.argv[1])
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or int(possible_lineno) != possible_lineno:
print "%11d" % (n)
else:
print "%11d %10d" % (n, possible_lineno)
except:
pass
<commit_msg>Update decode mirax a bit<commit_after>#!/usr/bin/python
import struct, sys, os
f = open(sys.argv[1])
HEADER_OFFSET = 37
f.seek(HEADER_OFFSET)
filesize = os.stat(sys.argv[1]).st_size
num_items = (filesize - HEADER_OFFSET) / 4
skipped = False
i = 0
try:
while True:
n = struct.unpack("<i", f.read(4))[0]
possible_lineno = (n - HEADER_OFFSET) / 4.0
if possible_lineno < 0 or possible_lineno > num_items \
or int(possible_lineno) != possible_lineno:
s = "%7d %11d" % (i, n)
else:
s = "%7d %11d %10d -> %10s" % (i, n, possible_lineno, \
"%+d" % (possible_lineno-i))
i = i+1
if n == 0:
skipped = True
continue
if skipped:
skipped = False
print '%7s %11s %10s' % ('.','.','.')
print s
except:
pass
|
8dbf9d88be33537752f105cd8f8b60ec40de684a | docs/src/examples/over_play.py | docs/src/examples/over_play.py | import numpy as np
from scikits.audiolab import play
# output one second of stereo gaussian white noise at 48000 hz
play(0.05 * np.random.randn(2, 48000), rate=48000)
| import numpy as np
from scikits.audiolab import play
# output one second of stereo gaussian white noise at 48000 hz
play(0.05 * np.random.randn(2, 48000))
| Fix play example (thanks to Samuele CarCagno). | Fix play example (thanks to Samuele CarCagno).
| Python | lgpl-2.1 | cournape/audiolab,cournape/audiolab,cournape/audiolab | import numpy as np
from scikits.audiolab import play
# output one second of stereo gaussian white noise at 48000 hz
play(0.05 * np.random.randn(2, 48000), rate=48000)
Fix play example (thanks to Samuele CarCagno). | import numpy as np
from scikits.audiolab import play
# output one second of stereo gaussian white noise at 48000 hz
play(0.05 * np.random.randn(2, 48000))
| <commit_before>import numpy as np
from scikits.audiolab import play
# output one second of stereo gaussian white noise at 48000 hz
play(0.05 * np.random.randn(2, 48000), rate=48000)
<commit_msg>Fix play example (thanks to Samuele CarCagno).<commit_after> | import numpy as np
from scikits.audiolab import play
# output one second of stereo gaussian white noise at 48000 hz
play(0.05 * np.random.randn(2, 48000))
| import numpy as np
from scikits.audiolab import play
# output one second of stereo gaussian white noise at 48000 hz
play(0.05 * np.random.randn(2, 48000), rate=48000)
Fix play example (thanks to Samuele CarCagno).import numpy as np
from scikits.audiolab import play
# output one second of stereo gaussian white noise at 48000 hz
play(0.05 * np.random.randn(2, 48000))
| <commit_before>import numpy as np
from scikits.audiolab import play
# output one second of stereo gaussian white noise at 48000 hz
play(0.05 * np.random.randn(2, 48000), rate=48000)
<commit_msg>Fix play example (thanks to Samuele CarCagno).<commit_after>import numpy as np
from scikits.audiolab import play
# output one second of stereo gaussian white noise at 48000 hz
play(0.05 * np.random.randn(2, 48000))
|
e64d13486fe20c44dde0dea6a6fed5a95eddbbd1 | awx/main/notifications/email_backend.py | awx/main/notifications/email_backend.py | # Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
| # Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
| Remove Tower reference from email backend | Remove Tower reference from email backend
| Python | apache-2.0 | snahelou/awx,wwitzel3/awx,snahelou/awx,wwitzel3/awx,wwitzel3/awx,wwitzel3/awx,snahelou/awx,snahelou/awx | # Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
Remove Tower reference from email backend | # Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
| <commit_before># Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
<commit_msg>Remove Tower reference from email backend<commit_after> | # Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
| # Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
Remove Tower reference from email backend# Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
| <commit_before># Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {} on Ansible Tower, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
<commit_msg>Remove Tower reference from email backend<commit_after># Copyright (c) 2016 Ansible, Inc.
# All Rights Reserved.
import json
from django.utils.encoding import smart_text
from django.core.mail.backends.smtp import EmailBackend
from django.utils.translation import ugettext_lazy as _
class CustomEmailBackend(EmailBackend):
init_parameters = {"host": {"label": "Host", "type": "string"},
"port": {"label": "Port", "type": "int"},
"username": {"label": "Username", "type": "string"},
"password": {"label": "Password", "type": "password"},
"use_tls": {"label": "Use TLS", "type": "bool"},
"use_ssl": {"label": "Use SSL", "type": "bool"},
"sender": {"label": "Sender Email", "type": "string"},
"recipients": {"label": "Recipient List", "type": "list"}}
recipient_parameter = "recipients"
sender_parameter = "sender"
def format_body(self, body):
if "body" in body:
body_actual = body['body']
else:
body_actual = smart_text(_("{} #{} had status {}, view details at {}\n\n").format(
body['friendly_name'], body['id'], body['status'], body['url'])
)
body_actual += json.dumps(body, indent=4)
return body_actual
|
fc25a6c4796ad008570974a682037bc575f15018 | astroquery/lamda/tests/test_lamda.py | astroquery/lamda/tests/test_lamda.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from ... import lamda
def test_query():
Q = lamda.core.LAMDAQuery()
Q.lamda_query(mol='co', query_type='erg_levels')
Q.lamda_query(mol='co', query_type='rad_trans')
Q.lamda_query(mol='co', query_type='coll_rates')
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
from ... import lamda
def test_query():
lamda.print_mols()
lamda.query(mol='co', query_type='erg_levels')
lamda.query(mol='co', query_type='rad_trans')
lamda.query(mol='co', query_type='coll_rates', coll_partner_index=1)
| Update tests for new style | Update tests for new style
Also added test for printing molecule list and made the collisional rate
test more complicated.
| Python | bsd-3-clause | imbasimba/astroquery,imbasimba/astroquery,ceb8/astroquery,ceb8/astroquery | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from ... import lamda
def test_query():
Q = lamda.core.LAMDAQuery()
Q.lamda_query(mol='co', query_type='erg_levels')
Q.lamda_query(mol='co', query_type='rad_trans')
Q.lamda_query(mol='co', query_type='coll_rates')
Update tests for new style
Also added test for printing molecule list and made the collisional rate
test more complicated. | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from ... import lamda
def test_query():
lamda.print_mols()
lamda.query(mol='co', query_type='erg_levels')
lamda.query(mol='co', query_type='rad_trans')
lamda.query(mol='co', query_type='coll_rates', coll_partner_index=1)
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
from ... import lamda
def test_query():
Q = lamda.core.LAMDAQuery()
Q.lamda_query(mol='co', query_type='erg_levels')
Q.lamda_query(mol='co', query_type='rad_trans')
Q.lamda_query(mol='co', query_type='coll_rates')
<commit_msg>Update tests for new style
Also added test for printing molecule list and made the collisional rate
test more complicated.<commit_after> | # Licensed under a 3-clause BSD style license - see LICENSE.rst
from ... import lamda
def test_query():
lamda.print_mols()
lamda.query(mol='co', query_type='erg_levels')
lamda.query(mol='co', query_type='rad_trans')
lamda.query(mol='co', query_type='coll_rates', coll_partner_index=1)
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
from ... import lamda
def test_query():
Q = lamda.core.LAMDAQuery()
Q.lamda_query(mol='co', query_type='erg_levels')
Q.lamda_query(mol='co', query_type='rad_trans')
Q.lamda_query(mol='co', query_type='coll_rates')
Update tests for new style
Also added test for printing molecule list and made the collisional rate
test more complicated.# Licensed under a 3-clause BSD style license - see LICENSE.rst
from ... import lamda
def test_query():
lamda.print_mols()
lamda.query(mol='co', query_type='erg_levels')
lamda.query(mol='co', query_type='rad_trans')
lamda.query(mol='co', query_type='coll_rates', coll_partner_index=1)
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
from ... import lamda
def test_query():
Q = lamda.core.LAMDAQuery()
Q.lamda_query(mol='co', query_type='erg_levels')
Q.lamda_query(mol='co', query_type='rad_trans')
Q.lamda_query(mol='co', query_type='coll_rates')
<commit_msg>Update tests for new style
Also added test for printing molecule list and made the collisional rate
test more complicated.<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
from ... import lamda
def test_query():
lamda.print_mols()
lamda.query(mol='co', query_type='erg_levels')
lamda.query(mol='co', query_type='rad_trans')
lamda.query(mol='co', query_type='coll_rates', coll_partner_index=1)
|
0cfd376b02da6ebc70ad66c913e3ee4750a6a04c | functional_tests.py | functional_tests.py | from selenium import webdriver
browser = webdriver.Firefox()
browser.get('http://localhost:8000')
assert 'Django' in browser.title
| from selenium import webdriver
import pytest
@pytest.fixture(scope='function')
def browser(request):
browser_ = webdriver.Firefox()
def fin():
browser_.quit()
request.addfinalizer(fin)
return browser_
def test_can_show_a_relevant_code_snippet(browser):
browser.get('http://localhost:8000')
assert 'Django' in browser.title
| Refactor FTs to setup/teardown with pytest.fixture | Refactor FTs to setup/teardown with pytest.fixture
| Python | mit | jvanbrug/scout,jvanbrug/scout | from selenium import webdriver
browser = webdriver.Firefox()
browser.get('http://localhost:8000')
assert 'Django' in browser.title
Refactor FTs to setup/teardown with pytest.fixture | from selenium import webdriver
import pytest
@pytest.fixture(scope='function')
def browser(request):
browser_ = webdriver.Firefox()
def fin():
browser_.quit()
request.addfinalizer(fin)
return browser_
def test_can_show_a_relevant_code_snippet(browser):
browser.get('http://localhost:8000')
assert 'Django' in browser.title
| <commit_before>from selenium import webdriver
browser = webdriver.Firefox()
browser.get('http://localhost:8000')
assert 'Django' in browser.title
<commit_msg>Refactor FTs to setup/teardown with pytest.fixture<commit_after> | from selenium import webdriver
import pytest
@pytest.fixture(scope='function')
def browser(request):
browser_ = webdriver.Firefox()
def fin():
browser_.quit()
request.addfinalizer(fin)
return browser_
def test_can_show_a_relevant_code_snippet(browser):
browser.get('http://localhost:8000')
assert 'Django' in browser.title
| from selenium import webdriver
browser = webdriver.Firefox()
browser.get('http://localhost:8000')
assert 'Django' in browser.title
Refactor FTs to setup/teardown with pytest.fixturefrom selenium import webdriver
import pytest
@pytest.fixture(scope='function')
def browser(request):
browser_ = webdriver.Firefox()
def fin():
browser_.quit()
request.addfinalizer(fin)
return browser_
def test_can_show_a_relevant_code_snippet(browser):
browser.get('http://localhost:8000')
assert 'Django' in browser.title
| <commit_before>from selenium import webdriver
browser = webdriver.Firefox()
browser.get('http://localhost:8000')
assert 'Django' in browser.title
<commit_msg>Refactor FTs to setup/teardown with pytest.fixture<commit_after>from selenium import webdriver
import pytest
@pytest.fixture(scope='function')
def browser(request):
browser_ = webdriver.Firefox()
def fin():
browser_.quit()
request.addfinalizer(fin)
return browser_
def test_can_show_a_relevant_code_snippet(browser):
browser.get('http://localhost:8000')
assert 'Django' in browser.title
|
09eb16e94052cbf45708b20e783a602342a2b85b | photutils/__init__.py | photutils/__init__.py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Photutils is an Astropy affiliated package to provide tools for
detecting and performing photometry of astronomical sources. It also
has tools for background estimation, ePSF building, PSF matching,
centroiding, and morphological measurements.
"""
import os
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_: # noqa
from .aperture import * # noqa
from .background import * # noqa
from .centroids import * # noqa
from .detection import * # noqa
from .morphology import * # noqa
from .psf import * # noqa
from .segmentation import * # noqa
# Set the bibtex entry to the article referenced in CITATION.
def _get_bibtex():
citation_file = os.path.join(os.path.dirname(__file__), 'CITATION')
with open(citation_file, 'r') as citation:
refs = citation.read().split('@misc')[1:]
if len(refs) == 0: return ''
bibtexreference = "@misc{0}".format(refs[0])
return bibtexreference
__citation__ = __bibtex__ = _get_bibtex()
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Photutils is an Astropy affiliated package to provide tools for
detecting and performing photometry of astronomical sources. It also
has tools for background estimation, ePSF building, PSF matching,
centroiding, and morphological measurements.
"""
import os
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_: # noqa
from .aperture import * # noqa
from .background import * # noqa
from .centroids import * # noqa
from .detection import * # noqa
from .morphology import * # noqa
from .psf import * # noqa
from .segmentation import * # noqa
__all__ = ['test'] # the test runner is defined in _astropy_init
# Set the bibtex entry to the article referenced in CITATION.
def _get_bibtex():
citation_file = os.path.join(os.path.dirname(__file__), 'CITATION')
with open(citation_file, 'r') as citation:
refs = citation.read().split('@misc')[1:]
if len(refs) == 0: return ''
bibtexreference = "@misc{0}".format(refs[0])
return bibtexreference
__citation__ = __bibtex__ = _get_bibtex()
| Add __all__ in package init for the test runner | Add __all__ in package init for the test runner
| Python | bsd-3-clause | larrybradley/photutils,astropy/photutils | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Photutils is an Astropy affiliated package to provide tools for
detecting and performing photometry of astronomical sources. It also
has tools for background estimation, ePSF building, PSF matching,
centroiding, and morphological measurements.
"""
import os
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_: # noqa
from .aperture import * # noqa
from .background import * # noqa
from .centroids import * # noqa
from .detection import * # noqa
from .morphology import * # noqa
from .psf import * # noqa
from .segmentation import * # noqa
# Set the bibtex entry to the article referenced in CITATION.
def _get_bibtex():
citation_file = os.path.join(os.path.dirname(__file__), 'CITATION')
with open(citation_file, 'r') as citation:
refs = citation.read().split('@misc')[1:]
if len(refs) == 0: return ''
bibtexreference = "@misc{0}".format(refs[0])
return bibtexreference
__citation__ = __bibtex__ = _get_bibtex()
Add __all__ in package init for the test runner | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Photutils is an Astropy affiliated package to provide tools for
detecting and performing photometry of astronomical sources. It also
has tools for background estimation, ePSF building, PSF matching,
centroiding, and morphological measurements.
"""
import os
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_: # noqa
from .aperture import * # noqa
from .background import * # noqa
from .centroids import * # noqa
from .detection import * # noqa
from .morphology import * # noqa
from .psf import * # noqa
from .segmentation import * # noqa
__all__ = ['test'] # the test runner is defined in _astropy_init
# Set the bibtex entry to the article referenced in CITATION.
def _get_bibtex():
citation_file = os.path.join(os.path.dirname(__file__), 'CITATION')
with open(citation_file, 'r') as citation:
refs = citation.read().split('@misc')[1:]
if len(refs) == 0: return ''
bibtexreference = "@misc{0}".format(refs[0])
return bibtexreference
__citation__ = __bibtex__ = _get_bibtex()
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Photutils is an Astropy affiliated package to provide tools for
detecting and performing photometry of astronomical sources. It also
has tools for background estimation, ePSF building, PSF matching,
centroiding, and morphological measurements.
"""
import os
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_: # noqa
from .aperture import * # noqa
from .background import * # noqa
from .centroids import * # noqa
from .detection import * # noqa
from .morphology import * # noqa
from .psf import * # noqa
from .segmentation import * # noqa
# Set the bibtex entry to the article referenced in CITATION.
def _get_bibtex():
citation_file = os.path.join(os.path.dirname(__file__), 'CITATION')
with open(citation_file, 'r') as citation:
refs = citation.read().split('@misc')[1:]
if len(refs) == 0: return ''
bibtexreference = "@misc{0}".format(refs[0])
return bibtexreference
__citation__ = __bibtex__ = _get_bibtex()
<commit_msg>Add __all__ in package init for the test runner<commit_after> | # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Photutils is an Astropy affiliated package to provide tools for
detecting and performing photometry of astronomical sources. It also
has tools for background estimation, ePSF building, PSF matching,
centroiding, and morphological measurements.
"""
import os
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_: # noqa
from .aperture import * # noqa
from .background import * # noqa
from .centroids import * # noqa
from .detection import * # noqa
from .morphology import * # noqa
from .psf import * # noqa
from .segmentation import * # noqa
__all__ = ['test'] # the test runner is defined in _astropy_init
# Set the bibtex entry to the article referenced in CITATION.
def _get_bibtex():
citation_file = os.path.join(os.path.dirname(__file__), 'CITATION')
with open(citation_file, 'r') as citation:
refs = citation.read().split('@misc')[1:]
if len(refs) == 0: return ''
bibtexreference = "@misc{0}".format(refs[0])
return bibtexreference
__citation__ = __bibtex__ = _get_bibtex()
| # Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Photutils is an Astropy affiliated package to provide tools for
detecting and performing photometry of astronomical sources. It also
has tools for background estimation, ePSF building, PSF matching,
centroiding, and morphological measurements.
"""
import os
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_: # noqa
from .aperture import * # noqa
from .background import * # noqa
from .centroids import * # noqa
from .detection import * # noqa
from .morphology import * # noqa
from .psf import * # noqa
from .segmentation import * # noqa
# Set the bibtex entry to the article referenced in CITATION.
def _get_bibtex():
citation_file = os.path.join(os.path.dirname(__file__), 'CITATION')
with open(citation_file, 'r') as citation:
refs = citation.read().split('@misc')[1:]
if len(refs) == 0: return ''
bibtexreference = "@misc{0}".format(refs[0])
return bibtexreference
__citation__ = __bibtex__ = _get_bibtex()
Add __all__ in package init for the test runner# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Photutils is an Astropy affiliated package to provide tools for
detecting and performing photometry of astronomical sources. It also
has tools for background estimation, ePSF building, PSF matching,
centroiding, and morphological measurements.
"""
import os
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_: # noqa
from .aperture import * # noqa
from .background import * # noqa
from .centroids import * # noqa
from .detection import * # noqa
from .morphology import * # noqa
from .psf import * # noqa
from .segmentation import * # noqa
__all__ = ['test'] # the test runner is defined in _astropy_init
# Set the bibtex entry to the article referenced in CITATION.
def _get_bibtex():
citation_file = os.path.join(os.path.dirname(__file__), 'CITATION')
with open(citation_file, 'r') as citation:
refs = citation.read().split('@misc')[1:]
if len(refs) == 0: return ''
bibtexreference = "@misc{0}".format(refs[0])
return bibtexreference
__citation__ = __bibtex__ = _get_bibtex()
| <commit_before># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Photutils is an Astropy affiliated package to provide tools for
detecting and performing photometry of astronomical sources. It also
has tools for background estimation, ePSF building, PSF matching,
centroiding, and morphological measurements.
"""
import os
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_: # noqa
from .aperture import * # noqa
from .background import * # noqa
from .centroids import * # noqa
from .detection import * # noqa
from .morphology import * # noqa
from .psf import * # noqa
from .segmentation import * # noqa
# Set the bibtex entry to the article referenced in CITATION.
def _get_bibtex():
citation_file = os.path.join(os.path.dirname(__file__), 'CITATION')
with open(citation_file, 'r') as citation:
refs = citation.read().split('@misc')[1:]
if len(refs) == 0: return ''
bibtexreference = "@misc{0}".format(refs[0])
return bibtexreference
__citation__ = __bibtex__ = _get_bibtex()
<commit_msg>Add __all__ in package init for the test runner<commit_after># Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Photutils is an Astropy affiliated package to provide tools for
detecting and performing photometry of astronomical sources. It also
has tools for background estimation, ePSF building, PSF matching,
centroiding, and morphological measurements.
"""
import os
# Affiliated packages may add whatever they like to this file, but
# should keep this content at the top.
# ----------------------------------------------------------------------------
from ._astropy_init import * # noqa
# ----------------------------------------------------------------------------
if not _ASTROPY_SETUP_: # noqa
from .aperture import * # noqa
from .background import * # noqa
from .centroids import * # noqa
from .detection import * # noqa
from .morphology import * # noqa
from .psf import * # noqa
from .segmentation import * # noqa
__all__ = ['test'] # the test runner is defined in _astropy_init
# Set the bibtex entry to the article referenced in CITATION.
def _get_bibtex():
citation_file = os.path.join(os.path.dirname(__file__), 'CITATION')
with open(citation_file, 'r') as citation:
refs = citation.read().split('@misc')[1:]
if len(refs) == 0: return ''
bibtexreference = "@misc{0}".format(refs[0])
return bibtexreference
__citation__ = __bibtex__ = _get_bibtex()
|
8fa1cae882c0ff020c0b9c3c2fac9e4248d46ce4 | deploy/common/sqlite_wrapper.py | deploy/common/sqlite_wrapper.py | import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=OFF")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
| import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA page_size=4096")
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=NORMAL")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
| Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096. | Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096.
| Python | mit | mikispag/bitiodine | import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=OFF")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096. | import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA page_size=4096")
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=NORMAL")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
| <commit_before>import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=OFF")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
<commit_msg>Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096.<commit_after> | import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA page_size=4096")
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=NORMAL")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
| import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=OFF")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096.import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA page_size=4096")
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=NORMAL")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
| <commit_before>import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=OFF")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
<commit_msg>Use PRAGMA synchronous=NORMAL instead of OFF, and set page_size to 4096.<commit_after>import sqlite3
class SQLiteWrapper:
def __init__(self, db):
self.conn = sqlite3.connect(db)
self.cursor = self.conn.cursor()
self.cursor.execute("PRAGMA page_size=4096")
self.cursor.execute("PRAGMA cache_size=-16000")
self.cursor.execute("PRAGMA synchronous=NORMAL")
self.conn.commit()
def query(self, sql, params=None, iterator=False, fetch_one=False, multi=False, many_rows=None):
try:
with self.conn as conn:
cursor = conn.cursor()
if many_rows:
cursor.executemany(sql, many_rows)
return
if multi:
cursor.executescript(sql)
if params is None and not multi:
cursor.execute(sql)
if params and not multi:
cursor.execute(sql, params)
if iterator:
return cursor
if fetch_one:
return cursor.fetchone()[0]
if not multi:
return cursor.fetchall()
except Exception as e:
raise Exception('Error in executing query ' + sql + ': ' + format(e))
def close(self):
self.conn.close()
|
b3f60cfd4c1a4241c44fcd6738cbc59b9780af70 | Lib/test/outstanding_bugs.py | Lib/test/outstanding_bugs.py | #
# This file is for everybody to add tests for bugs that aren't
# fixed yet. Please add a test case and appropriate bug description.
#
# When you fix one of the bugs, please move the test to the correct
# test_ module.
#
import unittest
from test import test_support
class TestBug1385040(unittest.TestCase):
def testSyntaxError(self):
import compiler
# The following snippet gives a SyntaxError in the interpreter
#
# If you compile and exec it, the call foo(7) returns (7, 1)
self.assertRaises(SyntaxError, compiler.compile,
"def foo(a=1, b): return a, b\n\n", "<string>", "exec")
def test_main():
test_support.run_unittest(TestBug1385040)
if __name__ == "__main__":
test_main()
| #
# This file is for everybody to add tests for bugs that aren't
# fixed yet. Please add a test case and appropriate bug description.
#
# When you fix one of the bugs, please move the test to the correct
# test_ module.
#
import unittest
from test import test_support
#
# No test cases for outstanding bugs at the moment.
#
def test_main():
#test_support.run_unittest()
pass
if __name__ == "__main__":
test_main()
| Update outstanding bugs test file. | Update outstanding bugs test file.
| Python | mit | sk-/python2.7-type-annotator,sk-/python2.7-type-annotator,sk-/python2.7-type-annotator | #
# This file is for everybody to add tests for bugs that aren't
# fixed yet. Please add a test case and appropriate bug description.
#
# When you fix one of the bugs, please move the test to the correct
# test_ module.
#
import unittest
from test import test_support
class TestBug1385040(unittest.TestCase):
def testSyntaxError(self):
import compiler
# The following snippet gives a SyntaxError in the interpreter
#
# If you compile and exec it, the call foo(7) returns (7, 1)
self.assertRaises(SyntaxError, compiler.compile,
"def foo(a=1, b): return a, b\n\n", "<string>", "exec")
def test_main():
test_support.run_unittest(TestBug1385040)
if __name__ == "__main__":
test_main()
Update outstanding bugs test file. | #
# This file is for everybody to add tests for bugs that aren't
# fixed yet. Please add a test case and appropriate bug description.
#
# When you fix one of the bugs, please move the test to the correct
# test_ module.
#
import unittest
from test import test_support
#
# No test cases for outstanding bugs at the moment.
#
def test_main():
#test_support.run_unittest()
pass
if __name__ == "__main__":
test_main()
| <commit_before>#
# This file is for everybody to add tests for bugs that aren't
# fixed yet. Please add a test case and appropriate bug description.
#
# When you fix one of the bugs, please move the test to the correct
# test_ module.
#
import unittest
from test import test_support
class TestBug1385040(unittest.TestCase):
def testSyntaxError(self):
import compiler
# The following snippet gives a SyntaxError in the interpreter
#
# If you compile and exec it, the call foo(7) returns (7, 1)
self.assertRaises(SyntaxError, compiler.compile,
"def foo(a=1, b): return a, b\n\n", "<string>", "exec")
def test_main():
test_support.run_unittest(TestBug1385040)
if __name__ == "__main__":
test_main()
<commit_msg>Update outstanding bugs test file.<commit_after> | #
# This file is for everybody to add tests for bugs that aren't
# fixed yet. Please add a test case and appropriate bug description.
#
# When you fix one of the bugs, please move the test to the correct
# test_ module.
#
import unittest
from test import test_support
#
# No test cases for outstanding bugs at the moment.
#
def test_main():
#test_support.run_unittest()
pass
if __name__ == "__main__":
test_main()
| #
# This file is for everybody to add tests for bugs that aren't
# fixed yet. Please add a test case and appropriate bug description.
#
# When you fix one of the bugs, please move the test to the correct
# test_ module.
#
import unittest
from test import test_support
class TestBug1385040(unittest.TestCase):
def testSyntaxError(self):
import compiler
# The following snippet gives a SyntaxError in the interpreter
#
# If you compile and exec it, the call foo(7) returns (7, 1)
self.assertRaises(SyntaxError, compiler.compile,
"def foo(a=1, b): return a, b\n\n", "<string>", "exec")
def test_main():
test_support.run_unittest(TestBug1385040)
if __name__ == "__main__":
test_main()
Update outstanding bugs test file.#
# This file is for everybody to add tests for bugs that aren't
# fixed yet. Please add a test case and appropriate bug description.
#
# When you fix one of the bugs, please move the test to the correct
# test_ module.
#
import unittest
from test import test_support
#
# No test cases for outstanding bugs at the moment.
#
def test_main():
#test_support.run_unittest()
pass
if __name__ == "__main__":
test_main()
| <commit_before>#
# This file is for everybody to add tests for bugs that aren't
# fixed yet. Please add a test case and appropriate bug description.
#
# When you fix one of the bugs, please move the test to the correct
# test_ module.
#
import unittest
from test import test_support
class TestBug1385040(unittest.TestCase):
def testSyntaxError(self):
import compiler
# The following snippet gives a SyntaxError in the interpreter
#
# If you compile and exec it, the call foo(7) returns (7, 1)
self.assertRaises(SyntaxError, compiler.compile,
"def foo(a=1, b): return a, b\n\n", "<string>", "exec")
def test_main():
test_support.run_unittest(TestBug1385040)
if __name__ == "__main__":
test_main()
<commit_msg>Update outstanding bugs test file.<commit_after>#
# This file is for everybody to add tests for bugs that aren't
# fixed yet. Please add a test case and appropriate bug description.
#
# When you fix one of the bugs, please move the test to the correct
# test_ module.
#
import unittest
from test import test_support
#
# No test cases for outstanding bugs at the moment.
#
def test_main():
#test_support.run_unittest()
pass
if __name__ == "__main__":
test_main()
|
e2fa4b150546be4b4f0ae59f18ef6ba2b6180d1a | accounts/serializers.py | accounts/serializers.py | """Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
'id',
'username',
'email',
'website',
'avatar',
'steamid',
'is_staff',
)
| """Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
"id",
"username",
"email",
"website",
"avatar_url",
"steamid",
"is_staff",
)
| Change avatar to avatar_url in the user API | Change avatar to avatar_url in the user API
| Python | agpl-3.0 | lutris/website,lutris/website,lutris/website,lutris/website | """Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
'id',
'username',
'email',
'website',
'avatar',
'steamid',
'is_staff',
)
Change avatar to avatar_url in the user API | """Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
"id",
"username",
"email",
"website",
"avatar_url",
"steamid",
"is_staff",
)
| <commit_before>"""Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
'id',
'username',
'email',
'website',
'avatar',
'steamid',
'is_staff',
)
<commit_msg>Change avatar to avatar_url in the user API<commit_after> | """Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
"id",
"username",
"email",
"website",
"avatar_url",
"steamid",
"is_staff",
)
| """Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
'id',
'username',
'email',
'website',
'avatar',
'steamid',
'is_staff',
)
Change avatar to avatar_url in the user API"""Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
"id",
"username",
"email",
"website",
"avatar_url",
"steamid",
"is_staff",
)
| <commit_before>"""Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
'id',
'username',
'email',
'website',
'avatar',
'steamid',
'is_staff',
)
<commit_msg>Change avatar to avatar_url in the user API<commit_after>"""Serializers for account models"""
# pylint: disable=too-few-public-methods
from rest_framework import serializers
from accounts.models import User
class UserSerializer(serializers.ModelSerializer):
"""Serializer for Users"""
class Meta:
"""Model and field definitions"""
model = User
fields = (
"id",
"username",
"email",
"website",
"avatar_url",
"steamid",
"is_staff",
)
|
ce98707e94a772434eaca56791c1b4980da36bf0 | node/trust/global.py | node/trust/global.py | import obelisk
import sys
from twisted.internet import reactor
TESTNET = True
def build_output_info_list(unspent_rows):
unspent_infos = []
for row in unspent_rows:
assert len(row) == 4
outpoint = obelisk.OutPoint()
outpoint.hash = row[0]
outpoint.index = row[1]
value = row[3]
unspent_infos.append(
obelisk.OutputInfo(outpoint, value))
return unspent_infos
def address_from_guid(guid):
if TESTNET:
addr = "mopenbazaar%s" % guid[:17]
else:
addr = "1openbazaar%s" % guid[:17]
addr = obelisk.bitcoin.EncodeBase58Check(addr)
return addr
def get(guid, callback):
def history_fetched(ec, history):
if ec is not None:
print >> sys.stderr, "Error fetching history:", ec
return
unspent_rows = [row[:4] for row in history if row[4] is None]
unspent = build_output_info_list(unspent_rows)
print obelisk.select_outputs(unspent, 10000)
client = obelisk.ObeliskOfLightClient("tcp://85.25.198.97:9091")
client.fetch_history(address_from_guid(guid), history_fetched)
reactor.run()
| Add algorithm for provably unspendable address generation | Add algorithm for provably unspendable address generation
| Python | mit | saltduck/OpenBazaar,dionyziz/OpenBazaar,Renelvon/OpenBazaar,STRML/OpenBazaar,dionyziz/OpenBazaar,NolanZhao/OpenBazaar,mirrax/OpenBazaar,hoffmabc/OpenBazaar,atsuyim/OpenBazaar,bglassy/OpenBazaar,must-/OpenBazaar,NolanZhao/OpenBazaar,akhavr/OpenBazaar,rllola/OpenBazaar,im0rtel/OpenBazaar,hoffmabc/OpenBazaar,dlcorporation/openbazaar,Renelvon/OpenBazaar,bglassy/OpenBazaar,dlcorporation/openbazaar,must-/OpenBazaar,matiasbastos/OpenBazaar,rllola/OpenBazaar,must-/OpenBazaar,STRML/OpenBazaar,blakejakopovic/OpenBazaar,tortxof/OpenBazaar,must-/OpenBazaar,dlcorporation/openbazaar,Renelvon/OpenBazaar,hoffmabc/OpenBazaar,im0rtel/OpenBazaar,dionyziz/OpenBazaar,freebazaar/FreeBazaar,rllola/OpenBazaar,saltduck/OpenBazaar,freebazaar/FreeBazaar,habibmasuro/OpenBazaar,bankonme/OpenBazaar,matiasbastos/OpenBazaar,bankonme/OpenBazaar,dlcorporation/openbazaar,dlcorporation/openbazaar,rllola/OpenBazaar,bglassy/OpenBazaar,kordless/OpenBazaar,habibmasuro/OpenBazaar,mirrax/OpenBazaar,STRML/OpenBazaar,STRML/OpenBazaar,NolanZhao/OpenBazaar,bankonme/OpenBazaar,yagoulas/OpenBazaar,atsuyim/OpenBazaar,dlcorporation/openbazaar,blakejakopovic/OpenBazaar,kordless/OpenBazaar,bankonme/OpenBazaar,freebazaar/FreeBazaar,tortxof/OpenBazaar,freebazaar/FreeBazaar,Renelvon/OpenBazaar,akhavr/OpenBazaar,yagoulas/OpenBazaar,atsuyim/OpenBazaar,mirrax/OpenBazaar,blakejakopovic/OpenBazaar,yagoulas/OpenBazaar,habibmasuro/OpenBazaar,kordless/OpenBazaar,dionyziz/OpenBazaar,hoffmabc/OpenBazaar,blakejakopovic/OpenBazaar,akhavr/OpenBazaar,im0rtel/OpenBazaar,tortxof/OpenBazaar,tortxof/OpenBazaar,hoffmabc/OpenBazaar,akhavr/OpenBazaar,freebazaar/FreeBazaar,hoffmabc/OpenBazaar,saltduck/OpenBazaar,habibmasuro/OpenBazaar,kordless/OpenBazaar,atsuyim/OpenBazaar,matiasbastos/OpenBazaar,akhavr/OpenBazaar,bglassy/OpenBazaar,dionyziz/OpenBazaar,mirrax/OpenBazaar,im0rtel/OpenBazaar,yagoulas/OpenBazaar,saltduck/OpenBazaar,matiasbastos/OpenBazaar,NolanZhao/OpenBazaar | Add algorithm for provably unspendable address 
generation | import obelisk
import sys
from twisted.internet import reactor
TESTNET = True
def build_output_info_list(unspent_rows):
unspent_infos = []
for row in unspent_rows:
assert len(row) == 4
outpoint = obelisk.OutPoint()
outpoint.hash = row[0]
outpoint.index = row[1]
value = row[3]
unspent_infos.append(
obelisk.OutputInfo(outpoint, value))
return unspent_infos
def address_from_guid(guid):
if TESTNET:
addr = "mopenbazaar%s" % guid[:17]
else:
addr = "1openbazaar%s" % guid[:17]
addr = obelisk.bitcoin.EncodeBase58Check(addr)
return addr
def get(guid, callback):
def history_fetched(ec, history):
if ec is not None:
print >> sys.stderr, "Error fetching history:", ec
return
unspent_rows = [row[:4] for row in history if row[4] is None]
unspent = build_output_info_list(unspent_rows)
print obelisk.select_outputs(unspent, 10000)
client = obelisk.ObeliskOfLightClient("tcp://85.25.198.97:9091")
client.fetch_history(address_from_guid(guid), history_fetched)
reactor.run()
| <commit_before><commit_msg>Add algorithm for provably unspendable address generation<commit_after> | import obelisk
import sys
from twisted.internet import reactor
TESTNET = True
def build_output_info_list(unspent_rows):
unspent_infos = []
for row in unspent_rows:
assert len(row) == 4
outpoint = obelisk.OutPoint()
outpoint.hash = row[0]
outpoint.index = row[1]
value = row[3]
unspent_infos.append(
obelisk.OutputInfo(outpoint, value))
return unspent_infos
def address_from_guid(guid):
if TESTNET:
addr = "mopenbazaar%s" % guid[:17]
else:
addr = "1openbazaar%s" % guid[:17]
addr = obelisk.bitcoin.EncodeBase58Check(addr)
return addr
def get(guid, callback):
def history_fetched(ec, history):
if ec is not None:
print >> sys.stderr, "Error fetching history:", ec
return
unspent_rows = [row[:4] for row in history if row[4] is None]
unspent = build_output_info_list(unspent_rows)
print obelisk.select_outputs(unspent, 10000)
client = obelisk.ObeliskOfLightClient("tcp://85.25.198.97:9091")
client.fetch_history(address_from_guid(guid), history_fetched)
reactor.run()
| Add algorithm for provably unspendable address generationimport obelisk
import sys
from twisted.internet import reactor
TESTNET = True
def build_output_info_list(unspent_rows):
unspent_infos = []
for row in unspent_rows:
assert len(row) == 4
outpoint = obelisk.OutPoint()
outpoint.hash = row[0]
outpoint.index = row[1]
value = row[3]
unspent_infos.append(
obelisk.OutputInfo(outpoint, value))
return unspent_infos
def address_from_guid(guid):
if TESTNET:
addr = "mopenbazaar%s" % guid[:17]
else:
addr = "1openbazaar%s" % guid[:17]
addr = obelisk.bitcoin.EncodeBase58Check(addr)
return addr
def get(guid, callback):
def history_fetched(ec, history):
if ec is not None:
print >> sys.stderr, "Error fetching history:", ec
return
unspent_rows = [row[:4] for row in history if row[4] is None]
unspent = build_output_info_list(unspent_rows)
print obelisk.select_outputs(unspent, 10000)
client = obelisk.ObeliskOfLightClient("tcp://85.25.198.97:9091")
client.fetch_history(address_from_guid(guid), history_fetched)
reactor.run()
| <commit_before><commit_msg>Add algorithm for provably unspendable address generation<commit_after>import obelisk
import sys
from twisted.internet import reactor
TESTNET = True
def build_output_info_list(unspent_rows):
unspent_infos = []
for row in unspent_rows:
assert len(row) == 4
outpoint = obelisk.OutPoint()
outpoint.hash = row[0]
outpoint.index = row[1]
value = row[3]
unspent_infos.append(
obelisk.OutputInfo(outpoint, value))
return unspent_infos
def address_from_guid(guid):
if TESTNET:
addr = "mopenbazaar%s" % guid[:17]
else:
addr = "1openbazaar%s" % guid[:17]
addr = obelisk.bitcoin.EncodeBase58Check(addr)
return addr
def get(guid, callback):
def history_fetched(ec, history):
if ec is not None:
print >> sys.stderr, "Error fetching history:", ec
return
unspent_rows = [row[:4] for row in history if row[4] is None]
unspent = build_output_info_list(unspent_rows)
print obelisk.select_outputs(unspent, 10000)
client = obelisk.ObeliskOfLightClient("tcp://85.25.198.97:9091")
client.fetch_history(address_from_guid(guid), history_fetched)
reactor.run()
| |
9699d573fa459cb7ee90237d7fa64f7014c96db4 | scripts/update_centroid_reports.py | scripts/update_centroid_reports.py | #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import mica.centroid_dashboard
# Cheat. Needs entrypoint scripts
mica.centroid_dashboard.update_observed_metrics()
| #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import mica.centroid_dashboard
# Cheat. Needs entrypoint scripts
mica.centroid_dashboard.update_observed_metrics(save=True, make_plots=True)
| Fix script default to actually save plots | Fix script default to actually save plots
| Python | bsd-3-clause | sot/mica,sot/mica | #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import mica.centroid_dashboard
# Cheat. Needs entrypoint scripts
mica.centroid_dashboard.update_observed_metrics()
Fix script default to actually save plots | #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import mica.centroid_dashboard
# Cheat. Needs entrypoint scripts
mica.centroid_dashboard.update_observed_metrics(save=True, make_plots=True)
| <commit_before>#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import mica.centroid_dashboard
# Cheat. Needs entrypoint scripts
mica.centroid_dashboard.update_observed_metrics()
<commit_msg>Fix script default to actually save plots<commit_after> | #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import mica.centroid_dashboard
# Cheat. Needs entrypoint scripts
mica.centroid_dashboard.update_observed_metrics(save=True, make_plots=True)
| #!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import mica.centroid_dashboard
# Cheat. Needs entrypoint scripts
mica.centroid_dashboard.update_observed_metrics()
Fix script default to actually save plots#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import mica.centroid_dashboard
# Cheat. Needs entrypoint scripts
mica.centroid_dashboard.update_observed_metrics(save=True, make_plots=True)
| <commit_before>#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import mica.centroid_dashboard
# Cheat. Needs entrypoint scripts
mica.centroid_dashboard.update_observed_metrics()
<commit_msg>Fix script default to actually save plots<commit_after>#!/usr/bin/env python
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import mica.centroid_dashboard
# Cheat. Needs entrypoint scripts
mica.centroid_dashboard.update_observed_metrics(save=True, make_plots=True)
|
2828bdaaf15f1358211ecac376beb0072c0ef7bf | sentry/contrib/logbook/__init__.py | sentry/contrib/logbook/__init__.py | import logbook
import sys
class SentryLogbookHandler(logbook.Handler):
def emit(self, record):
from sentry import capture
# Avoid typical config issues by overriding loggers behavior
if record.name == 'sentry.errors':
print >> sys.stderr, "Recursive log message sent to SentryHandler"
print >> sys.stderr, record.message
return
kwargs = dict(
message=record.message,
level=record.level,
logger=record.channel,
data=record.extra,
)
if record.exc_info:
return capture('sentry.events.Exception', exc_inf=record.exc_info, **kwargs)
return capture('sentry.events.Exception', **kwargs)
| import logbook
from sentry import capture
class SentryLogbookHandler(logbook.Handler):
def emit(self, record):
# TODO: level should be a string
tags = (('level', record.level), ('logger', record.channel))
if record.exc_info:
return capture('Exception', exc_info=record.exc_info, tags=tags)
return capture('Message', message=record.mesage, tags=tags)
| Clean up logbook handler to use correct params for capture() | Clean up logbook handler to use correct params for capture()
| Python | bsd-3-clause | dcramer/sentry-old,dcramer/sentry-old,dcramer/sentry-old | import logbook
import sys
class SentryLogbookHandler(logbook.Handler):
def emit(self, record):
from sentry import capture
# Avoid typical config issues by overriding loggers behavior
if record.name == 'sentry.errors':
print >> sys.stderr, "Recursive log message sent to SentryHandler"
print >> sys.stderr, record.message
return
kwargs = dict(
message=record.message,
level=record.level,
logger=record.channel,
data=record.extra,
)
if record.exc_info:
return capture('sentry.events.Exception', exc_inf=record.exc_info, **kwargs)
return capture('sentry.events.Exception', **kwargs)
Clean up logbook handler to use correct params for capture() | import logbook
from sentry import capture
class SentryLogbookHandler(logbook.Handler):
def emit(self, record):
# TODO: level should be a string
tags = (('level', record.level), ('logger', record.channel))
if record.exc_info:
return capture('Exception', exc_info=record.exc_info, tags=tags)
return capture('Message', message=record.mesage, tags=tags)
| <commit_before>import logbook
import sys
class SentryLogbookHandler(logbook.Handler):
def emit(self, record):
from sentry import capture
# Avoid typical config issues by overriding loggers behavior
if record.name == 'sentry.errors':
print >> sys.stderr, "Recursive log message sent to SentryHandler"
print >> sys.stderr, record.message
return
kwargs = dict(
message=record.message,
level=record.level,
logger=record.channel,
data=record.extra,
)
if record.exc_info:
return capture('sentry.events.Exception', exc_inf=record.exc_info, **kwargs)
return capture('sentry.events.Exception', **kwargs)
<commit_msg>Clean up logbook handler to use correct params for capture()<commit_after> | import logbook
from sentry import capture
class SentryLogbookHandler(logbook.Handler):
def emit(self, record):
# TODO: level should be a string
tags = (('level', record.level), ('logger', record.channel))
if record.exc_info:
return capture('Exception', exc_info=record.exc_info, tags=tags)
return capture('Message', message=record.mesage, tags=tags)
| import logbook
import sys
class SentryLogbookHandler(logbook.Handler):
def emit(self, record):
from sentry import capture
# Avoid typical config issues by overriding loggers behavior
if record.name == 'sentry.errors':
print >> sys.stderr, "Recursive log message sent to SentryHandler"
print >> sys.stderr, record.message
return
kwargs = dict(
message=record.message,
level=record.level,
logger=record.channel,
data=record.extra,
)
if record.exc_info:
return capture('sentry.events.Exception', exc_inf=record.exc_info, **kwargs)
return capture('sentry.events.Exception', **kwargs)
Clean up logbook handler to use correct params for capture()import logbook
from sentry import capture
class SentryLogbookHandler(logbook.Handler):
def emit(self, record):
# TODO: level should be a string
tags = (('level', record.level), ('logger', record.channel))
if record.exc_info:
return capture('Exception', exc_info=record.exc_info, tags=tags)
return capture('Message', message=record.mesage, tags=tags)
| <commit_before>import logbook
import sys
class SentryLogbookHandler(logbook.Handler):
def emit(self, record):
from sentry import capture
# Avoid typical config issues by overriding loggers behavior
if record.name == 'sentry.errors':
print >> sys.stderr, "Recursive log message sent to SentryHandler"
print >> sys.stderr, record.message
return
kwargs = dict(
message=record.message,
level=record.level,
logger=record.channel,
data=record.extra,
)
if record.exc_info:
return capture('sentry.events.Exception', exc_inf=record.exc_info, **kwargs)
return capture('sentry.events.Exception', **kwargs)
<commit_msg>Clean up logbook handler to use correct params for capture()<commit_after>import logbook
from sentry import capture
class SentryLogbookHandler(logbook.Handler):
def emit(self, record):
# TODO: level should be a string
tags = (('level', record.level), ('logger', record.channel))
if record.exc_info:
return capture('Exception', exc_info=record.exc_info, tags=tags)
return capture('Message', message=record.mesage, tags=tags)
|
cc317eb3244edb0ef5424d45d333e255247c46cd | src/runApp.py | src/runApp.py | import sys
from PyQt5 import QtWidgets
from gui.mainWindow import MainWindow
app = QtWidgets.QApplication(sys.argv)
playerWindow = MainWindow()
playerWindow.show()
app.aboutToQuit.connect(playerWindow.exit)
sys.exit(app.exec_())
| import sys
from PyQt5 import QtWidgets
from gui.mainWindow import MainWindow
app = QtWidgets.QApplication(sys.argv)
playerWindow = MainWindow()
playerWindow.show()
app.aboutToQuit.connect(playerWindow.stop)
sys.exit(app.exec_())
| Revert "Do not write to the configuration file on each stop." | Revert "Do not write to the configuration file on each stop."
This reverts commit 486d2c3e6062f370c7be499ddef10d816fc3f85c.
| Python | mit | michael-stanin/Subtitles-Distributor | import sys
from PyQt5 import QtWidgets
from gui.mainWindow import MainWindow
app = QtWidgets.QApplication(sys.argv)
playerWindow = MainWindow()
playerWindow.show()
app.aboutToQuit.connect(playerWindow.exit)
sys.exit(app.exec_())
Revert "Do not write to the configuration file on each stop."
This reverts commit 486d2c3e6062f370c7be499ddef10d816fc3f85c. | import sys
from PyQt5 import QtWidgets
from gui.mainWindow import MainWindow
app = QtWidgets.QApplication(sys.argv)
playerWindow = MainWindow()
playerWindow.show()
app.aboutToQuit.connect(playerWindow.stop)
sys.exit(app.exec_())
| <commit_before>import sys
from PyQt5 import QtWidgets
from gui.mainWindow import MainWindow
app = QtWidgets.QApplication(sys.argv)
playerWindow = MainWindow()
playerWindow.show()
app.aboutToQuit.connect(playerWindow.exit)
sys.exit(app.exec_())
<commit_msg>Revert "Do not write to the configuration file on each stop."
This reverts commit 486d2c3e6062f370c7be499ddef10d816fc3f85c.<commit_after> | import sys
from PyQt5 import QtWidgets
from gui.mainWindow import MainWindow
app = QtWidgets.QApplication(sys.argv)
playerWindow = MainWindow()
playerWindow.show()
app.aboutToQuit.connect(playerWindow.stop)
sys.exit(app.exec_())
| import sys
from PyQt5 import QtWidgets
from gui.mainWindow import MainWindow
app = QtWidgets.QApplication(sys.argv)
playerWindow = MainWindow()
playerWindow.show()
app.aboutToQuit.connect(playerWindow.exit)
sys.exit(app.exec_())
Revert "Do not write to the configuration file on each stop."
This reverts commit 486d2c3e6062f370c7be499ddef10d816fc3f85c.import sys
from PyQt5 import QtWidgets
from gui.mainWindow import MainWindow
app = QtWidgets.QApplication(sys.argv)
playerWindow = MainWindow()
playerWindow.show()
app.aboutToQuit.connect(playerWindow.stop)
sys.exit(app.exec_())
| <commit_before>import sys
from PyQt5 import QtWidgets
from gui.mainWindow import MainWindow
app = QtWidgets.QApplication(sys.argv)
playerWindow = MainWindow()
playerWindow.show()
app.aboutToQuit.connect(playerWindow.exit)
sys.exit(app.exec_())
<commit_msg>Revert "Do not write to the configuration file on each stop."
This reverts commit 486d2c3e6062f370c7be499ddef10d816fc3f85c.<commit_after>import sys
from PyQt5 import QtWidgets
from gui.mainWindow import MainWindow
app = QtWidgets.QApplication(sys.argv)
playerWindow = MainWindow()
playerWindow.show()
app.aboutToQuit.connect(playerWindow.stop)
sys.exit(app.exec_())
|
733306f0953b9c80ead49529ba3e65b26a031426 | gaphor/diagram/classes/implementation.py | gaphor/diagram/classes/implementation.py | """
Implementation of interface.
"""
from gaphor import UML
from gaphor.diagram.diagramline import DiagramLine
class ImplementationItem(DiagramLine):
__uml__ = UML.Implementation
def __init__(self, id=None, model=None):
DiagramLine.__init__(self, id, model)
self._solid = False
def draw_head(self, context):
cr = context.cairo
cr.move_to(0, 0)
if not self._solid:
cr.set_dash((), 0)
cr.line_to(15, -10)
cr.line_to(15, 10)
cr.close_path()
cr.stroke()
cr.move_to(15, 0)
def draw(self, context):
if not self._solid:
context.cairo.set_dash((7.0, 5.0), 0)
super(ImplementationItem, self).draw(context)
# vim:sw=4
| """
Implementation of interface.
"""
from gaphor import UML
from gaphor.UML.modelfactory import stereotypes_str
from gaphor.diagram.presentation import LinePresentation
from gaphor.diagram.shapes import Box, Text
from gaphor.diagram.support import represents
@represents(UML.Implementation)
class ImplementationItem(LinePresentation):
def __init__(self, id=None, model=None):
super().__init__(id, model, style={"dash-style": (7.0, 5.0)})
self._solid = False
self.shape_middle = Box(
Text(
text=lambda: stereotypes_str(self.subject),
style={"min-width": 0, "min-height": 0},
)
)
self.watch("subject.appliedStereotype.classifier.name")
def draw_head(self, context):
cr = context.cairo
cr.move_to(0, 0)
if not self._solid:
cr.set_dash((), 0)
cr.line_to(15, -10)
cr.line_to(15, 10)
cr.close_path()
cr.stroke()
cr.move_to(15, 0)
| Use new line style for Implementation item | Use new line style for Implementation item
| Python | lgpl-2.1 | amolenaar/gaphor,amolenaar/gaphor | """
Implementation of interface.
"""
from gaphor import UML
from gaphor.diagram.diagramline import DiagramLine
class ImplementationItem(DiagramLine):
__uml__ = UML.Implementation
def __init__(self, id=None, model=None):
DiagramLine.__init__(self, id, model)
self._solid = False
def draw_head(self, context):
cr = context.cairo
cr.move_to(0, 0)
if not self._solid:
cr.set_dash((), 0)
cr.line_to(15, -10)
cr.line_to(15, 10)
cr.close_path()
cr.stroke()
cr.move_to(15, 0)
def draw(self, context):
if not self._solid:
context.cairo.set_dash((7.0, 5.0), 0)
super(ImplementationItem, self).draw(context)
# vim:sw=4
Use new line style for Implementation item | """
Implementation of interface.
"""
from gaphor import UML
from gaphor.UML.modelfactory import stereotypes_str
from gaphor.diagram.presentation import LinePresentation
from gaphor.diagram.shapes import Box, Text
from gaphor.diagram.support import represents
@represents(UML.Implementation)
class ImplementationItem(LinePresentation):
def __init__(self, id=None, model=None):
super().__init__(id, model, style={"dash-style": (7.0, 5.0)})
self._solid = False
self.shape_middle = Box(
Text(
text=lambda: stereotypes_str(self.subject),
style={"min-width": 0, "min-height": 0},
)
)
self.watch("subject.appliedStereotype.classifier.name")
def draw_head(self, context):
cr = context.cairo
cr.move_to(0, 0)
if not self._solid:
cr.set_dash((), 0)
cr.line_to(15, -10)
cr.line_to(15, 10)
cr.close_path()
cr.stroke()
cr.move_to(15, 0)
| <commit_before>"""
Implementation of interface.
"""
from gaphor import UML
from gaphor.diagram.diagramline import DiagramLine
class ImplementationItem(DiagramLine):
__uml__ = UML.Implementation
def __init__(self, id=None, model=None):
DiagramLine.__init__(self, id, model)
self._solid = False
def draw_head(self, context):
cr = context.cairo
cr.move_to(0, 0)
if not self._solid:
cr.set_dash((), 0)
cr.line_to(15, -10)
cr.line_to(15, 10)
cr.close_path()
cr.stroke()
cr.move_to(15, 0)
def draw(self, context):
if not self._solid:
context.cairo.set_dash((7.0, 5.0), 0)
super(ImplementationItem, self).draw(context)
# vim:sw=4
<commit_msg>Use new line style for Implementation item<commit_after> | """
Implementation of interface.
"""
from gaphor import UML
from gaphor.UML.modelfactory import stereotypes_str
from gaphor.diagram.presentation import LinePresentation
from gaphor.diagram.shapes import Box, Text
from gaphor.diagram.support import represents
@represents(UML.Implementation)
class ImplementationItem(LinePresentation):
def __init__(self, id=None, model=None):
super().__init__(id, model, style={"dash-style": (7.0, 5.0)})
self._solid = False
self.shape_middle = Box(
Text(
text=lambda: stereotypes_str(self.subject),
style={"min-width": 0, "min-height": 0},
)
)
self.watch("subject.appliedStereotype.classifier.name")
def draw_head(self, context):
cr = context.cairo
cr.move_to(0, 0)
if not self._solid:
cr.set_dash((), 0)
cr.line_to(15, -10)
cr.line_to(15, 10)
cr.close_path()
cr.stroke()
cr.move_to(15, 0)
| """
Implementation of interface.
"""
from gaphor import UML
from gaphor.diagram.diagramline import DiagramLine
class ImplementationItem(DiagramLine):
__uml__ = UML.Implementation
def __init__(self, id=None, model=None):
DiagramLine.__init__(self, id, model)
self._solid = False
def draw_head(self, context):
cr = context.cairo
cr.move_to(0, 0)
if not self._solid:
cr.set_dash((), 0)
cr.line_to(15, -10)
cr.line_to(15, 10)
cr.close_path()
cr.stroke()
cr.move_to(15, 0)
def draw(self, context):
if not self._solid:
context.cairo.set_dash((7.0, 5.0), 0)
super(ImplementationItem, self).draw(context)
# vim:sw=4
Use new line style for Implementation item"""
Implementation of interface.
"""
from gaphor import UML
from gaphor.UML.modelfactory import stereotypes_str
from gaphor.diagram.presentation import LinePresentation
from gaphor.diagram.shapes import Box, Text
from gaphor.diagram.support import represents
@represents(UML.Implementation)
class ImplementationItem(LinePresentation):
def __init__(self, id=None, model=None):
super().__init__(id, model, style={"dash-style": (7.0, 5.0)})
self._solid = False
self.shape_middle = Box(
Text(
text=lambda: stereotypes_str(self.subject),
style={"min-width": 0, "min-height": 0},
)
)
self.watch("subject.appliedStereotype.classifier.name")
def draw_head(self, context):
cr = context.cairo
cr.move_to(0, 0)
if not self._solid:
cr.set_dash((), 0)
cr.line_to(15, -10)
cr.line_to(15, 10)
cr.close_path()
cr.stroke()
cr.move_to(15, 0)
| <commit_before>"""
Implementation of interface.
"""
from gaphor import UML
from gaphor.diagram.diagramline import DiagramLine
class ImplementationItem(DiagramLine):
__uml__ = UML.Implementation
def __init__(self, id=None, model=None):
DiagramLine.__init__(self, id, model)
self._solid = False
def draw_head(self, context):
cr = context.cairo
cr.move_to(0, 0)
if not self._solid:
cr.set_dash((), 0)
cr.line_to(15, -10)
cr.line_to(15, 10)
cr.close_path()
cr.stroke()
cr.move_to(15, 0)
def draw(self, context):
if not self._solid:
context.cairo.set_dash((7.0, 5.0), 0)
super(ImplementationItem, self).draw(context)
# vim:sw=4
<commit_msg>Use new line style for Implementation item<commit_after>"""
Implementation of interface.
"""
from gaphor import UML
from gaphor.UML.modelfactory import stereotypes_str
from gaphor.diagram.presentation import LinePresentation
from gaphor.diagram.shapes import Box, Text
from gaphor.diagram.support import represents
@represents(UML.Implementation)
class ImplementationItem(LinePresentation):
def __init__(self, id=None, model=None):
super().__init__(id, model, style={"dash-style": (7.0, 5.0)})
self._solid = False
self.shape_middle = Box(
Text(
text=lambda: stereotypes_str(self.subject),
style={"min-width": 0, "min-height": 0},
)
)
self.watch("subject.appliedStereotype.classifier.name")
def draw_head(self, context):
cr = context.cairo
cr.move_to(0, 0)
if not self._solid:
cr.set_dash((), 0)
cr.line_to(15, -10)
cr.line_to(15, 10)
cr.close_path()
cr.stroke()
cr.move_to(15, 0)
|
9ce26dfb42753570ad7a2c89e51638aa5d49df2b | fedora/__init__.py | fedora/__init__.py | # Copyright 2008 Red Hat, Inc.
# This file is part of python-fedora
#
# python-fedora is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# python-fedora is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with python-fedora; if not, see <http://www.gnu.org/licenses/>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
__all__ = ('_', 'release', '__version__',
'accounts', 'client', 'tg')
| # Copyright 2008 Red Hat, Inc.
# This file is part of python-fedora
#
# python-fedora is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# python-fedora is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with python-fedora; if not, see <http://www.gnu.org/licenses/>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import kitchen
# Setup gettext for all of kitchen.
# Remember -- _() is for marking most messages
# b_() is for marking messages that are used in exceptions
(_, N_) = kitchen.i18n.easy_gettext_setup('python-fedora')
(b_, bN_) = kitchen.i18n.eay_gettext_setup('python-fedora', use_unicode=False)
from fedora import release
__version__ = release.VERSION
__all__ = ('__version__', 'accounts', 'client', 'release', 'tg')
| Use kitchen.i18n to setup gettext. Setup b_() for exceptions. | Use kitchen.i18n to setup gettext. Setup b_() for exceptions.
| Python | lgpl-2.1 | fedora-infra/python-fedora | # Copyright 2008 Red Hat, Inc.
# This file is part of python-fedora
#
# python-fedora is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# python-fedora is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with python-fedora; if not, see <http://www.gnu.org/licenses/>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
__all__ = ('_', 'release', '__version__',
'accounts', 'client', 'tg')
Use kitchen.i18n to setup gettext. Setup b_() for exceptions. | # Copyright 2008 Red Hat, Inc.
# This file is part of python-fedora
#
# python-fedora is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# python-fedora is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with python-fedora; if not, see <http://www.gnu.org/licenses/>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import kitchen
# Setup gettext for all of kitchen.
# Remember -- _() is for marking most messages
# b_() is for marking messages that are used in exceptions
(_, N_) = kitchen.i18n.easy_gettext_setup('python-fedora')
(b_, bN_) = kitchen.i18n.eay_gettext_setup('python-fedora', use_unicode=False)
from fedora import release
__version__ = release.VERSION
__all__ = ('__version__', 'accounts', 'client', 'release', 'tg')
| <commit_before># Copyright 2008 Red Hat, Inc.
# This file is part of python-fedora
#
# python-fedora is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# python-fedora is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with python-fedora; if not, see <http://www.gnu.org/licenses/>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
__all__ = ('_', 'release', '__version__',
'accounts', 'client', 'tg')
<commit_msg>Use kitchen.i18n to setup gettext. Setup b_() for exceptions.<commit_after> | # Copyright 2008 Red Hat, Inc.
# This file is part of python-fedora
#
# python-fedora is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# python-fedora is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with python-fedora; if not, see <http://www.gnu.org/licenses/>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import kitchen
# Setup gettext for all of kitchen.
# Remember -- _() is for marking most messages
# b_() is for marking messages that are used in exceptions
(_, N_) = kitchen.i18n.easy_gettext_setup('python-fedora')
(b_, bN_) = kitchen.i18n.eay_gettext_setup('python-fedora', use_unicode=False)
from fedora import release
__version__ = release.VERSION
__all__ = ('__version__', 'accounts', 'client', 'release', 'tg')
| # Copyright 2008 Red Hat, Inc.
# This file is part of python-fedora
#
# python-fedora is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# python-fedora is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with python-fedora; if not, see <http://www.gnu.org/licenses/>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
__all__ = ('_', 'release', '__version__',
'accounts', 'client', 'tg')
Use kitchen.i18n to setup gettext. Setup b_() for exceptions.# Copyright 2008 Red Hat, Inc.
# This file is part of python-fedora
#
# python-fedora is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# python-fedora is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with python-fedora; if not, see <http://www.gnu.org/licenses/>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import kitchen
# Setup gettext for all of kitchen.
# Remember -- _() is for marking most messages
# b_() is for marking messages that are used in exceptions
(_, N_) = kitchen.i18n.easy_gettext_setup('python-fedora')
(b_, bN_) = kitchen.i18n.eay_gettext_setup('python-fedora', use_unicode=False)
from fedora import release
__version__ = release.VERSION
__all__ = ('__version__', 'accounts', 'client', 'release', 'tg')
| <commit_before># Copyright 2008 Red Hat, Inc.
# This file is part of python-fedora
#
# python-fedora is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# python-fedora is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with python-fedora; if not, see <http://www.gnu.org/licenses/>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import gettext
translation = gettext.translation('python-fedora', '/usr/share/locale',
fallback=True)
_ = translation.ugettext
from fedora import release
__version__ = release.VERSION
__all__ = ('_', 'release', '__version__',
'accounts', 'client', 'tg')
<commit_msg>Use kitchen.i18n to setup gettext. Setup b_() for exceptions.<commit_after># Copyright 2008 Red Hat, Inc.
# This file is part of python-fedora
#
# python-fedora is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# python-fedora is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with python-fedora; if not, see <http://www.gnu.org/licenses/>
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
'''
Python Fedora
Modules to communicate with and help implement Fedora Services.
'''
import kitchen
# Setup gettext for all of kitchen.
# Remember -- _() is for marking most messages
# b_() is for marking messages that are used in exceptions
(_, N_) = kitchen.i18n.easy_gettext_setup('python-fedora')
(b_, bN_) = kitchen.i18n.eay_gettext_setup('python-fedora', use_unicode=False)
from fedora import release
__version__ = release.VERSION
__all__ = ('__version__', 'accounts', 'client', 'release', 'tg')
|
d7fc29fb6e0c449617cf6aae1025fd5b718d8b70 | modules/token.py | modules/token.py | class Token(object):
def __init__( self, line ):
entries = line.split('\t')
self.form = entries[0].lower()
self.gold_pos = entries[1]
self.predicted_pos = entries [3]
def createFeatureVector(self, featvec, currentToken, previousToken, nextToken):
self.sparseFeatvec = {}
#if previousToken: self.sparseFeatvec[featvec["prev_pos_"+str(previousToken.gold_pos)]] = 1
self.sparseFeatvec[featvec["current_form_"+currentToken.form]] = 1
if previousToken: self.sparseFeatvec[featvec["prev_form_"+previousToken.form]] = 1
if nextToken: self.sparseFeatvec[featvec["next_form_"+nextToken.form]] = 1
if not previousToken: self.sparseFeatvec[featvec["initial_token"]] = 1
def sentences( filestream ):
sentence = []
for line in filestream:
line = line.rstrip()
if line:
sentence.append(Token(line))
elif sentence:
yield sentence
sentence = []
if sentence:
yield sentence
| class Token(object):
def __init__( self, line ):
# Splits line tab-wise, writes the values in parameters
entries = line.split('\t')
if len(entries) == 4:
self.form = entries[0].lower()
self.gold_pos = entries[1]
self.predicted_pos = entries [3]
elif len(entries) > 4: print "\tInput file not in expected format: Too many columns"
else: print "\tInput file not in expected format: Too many columns"
def createFeatureVector(self, featvec, currentToken, previousToken, nextToken):
# creates a sparse representation of the feature vector (featvec)
self.sparseFeatvec = {}
# The current token
self.sparseFeatvec[featvec["current_form_"+currentToken.form]] = 1
# If exists, the previous token; else it is the initial token of the phrase
if previousToken: self.sparseFeatvec[featvec["prev_form_"+previousToken.form]] = 1
else: self.sparseFeatvec[featvec["initial_token"]] = 1
# if exists, the next token
if nextToken: self.sparseFeatvec[featvec["next_form_"+nextToken.form]] = 1
def sentences( filestream ):
# A generator to read a file sentence-wise and generate a Token object for each line
# A list of Token objects of every sentence is yielded
sentence = []
for line in filestream:
line = line.rstrip()
if line:
sentence.append(Token(line))
elif sentence:
yield sentence
sentence = []
if sentence:
yield sentence
| Handle wrong formatted input, added comments, some cosmetics | Handle wrong formatted input, added comments, some cosmetics
| Python | mit | YNedderhoff/perceptron-pos-tagger,YNedderhoff/named-entity-recognizer,YNedderhoff/perceptron-pos-tagger,YNedderhoff/aspect-classifier,YNedderhoff/aspect-classifier,YNedderhoff/named-entity-recognizer | class Token(object):
def __init__( self, line ):
entries = line.split('\t')
self.form = entries[0].lower()
self.gold_pos = entries[1]
self.predicted_pos = entries [3]
def createFeatureVector(self, featvec, currentToken, previousToken, nextToken):
self.sparseFeatvec = {}
#if previousToken: self.sparseFeatvec[featvec["prev_pos_"+str(previousToken.gold_pos)]] = 1
self.sparseFeatvec[featvec["current_form_"+currentToken.form]] = 1
if previousToken: self.sparseFeatvec[featvec["prev_form_"+previousToken.form]] = 1
if nextToken: self.sparseFeatvec[featvec["next_form_"+nextToken.form]] = 1
if not previousToken: self.sparseFeatvec[featvec["initial_token"]] = 1
def sentences( filestream ):
sentence = []
for line in filestream:
line = line.rstrip()
if line:
sentence.append(Token(line))
elif sentence:
yield sentence
sentence = []
if sentence:
yield sentence
Handle wrong formatted input, added comments, some cosmetics | class Token(object):
def __init__( self, line ):
# Splits line tab-wise, writes the values in parameters
entries = line.split('\t')
if len(entries) == 4:
self.form = entries[0].lower()
self.gold_pos = entries[1]
self.predicted_pos = entries [3]
elif len(entries) > 4: print "\tInput file not in expected format: Too many columns"
else: print "\tInput file not in expected format: Too many columns"
def createFeatureVector(self, featvec, currentToken, previousToken, nextToken):
# creates a sparse representation of the feature vector (featvec)
self.sparseFeatvec = {}
# The current token
self.sparseFeatvec[featvec["current_form_"+currentToken.form]] = 1
# If exists, the previous token; else it is the initial token of the phrase
if previousToken: self.sparseFeatvec[featvec["prev_form_"+previousToken.form]] = 1
else: self.sparseFeatvec[featvec["initial_token"]] = 1
# if exists, the next token
if nextToken: self.sparseFeatvec[featvec["next_form_"+nextToken.form]] = 1
def sentences( filestream ):
# A generator to read a file sentence-wise and generate a Token object for each line
# A list of Token objects of every sentence is yielded
sentence = []
for line in filestream:
line = line.rstrip()
if line:
sentence.append(Token(line))
elif sentence:
yield sentence
sentence = []
if sentence:
yield sentence
| <commit_before>class Token(object):
def __init__( self, line ):
entries = line.split('\t')
self.form = entries[0].lower()
self.gold_pos = entries[1]
self.predicted_pos = entries [3]
def createFeatureVector(self, featvec, currentToken, previousToken, nextToken):
self.sparseFeatvec = {}
#if previousToken: self.sparseFeatvec[featvec["prev_pos_"+str(previousToken.gold_pos)]] = 1
self.sparseFeatvec[featvec["current_form_"+currentToken.form]] = 1
if previousToken: self.sparseFeatvec[featvec["prev_form_"+previousToken.form]] = 1
if nextToken: self.sparseFeatvec[featvec["next_form_"+nextToken.form]] = 1
if not previousToken: self.sparseFeatvec[featvec["initial_token"]] = 1
def sentences( filestream ):
sentence = []
for line in filestream:
line = line.rstrip()
if line:
sentence.append(Token(line))
elif sentence:
yield sentence
sentence = []
if sentence:
yield sentence
<commit_msg>Handle wrong formatted input, added comments, some cosmetics<commit_after> | class Token(object):
def __init__( self, line ):
# Splits line tab-wise, writes the values in parameters
entries = line.split('\t')
if len(entries) == 4:
self.form = entries[0].lower()
self.gold_pos = entries[1]
self.predicted_pos = entries [3]
elif len(entries) > 4: print "\tInput file not in expected format: Too many columns"
else: print "\tInput file not in expected format: Too many columns"
def createFeatureVector(self, featvec, currentToken, previousToken, nextToken):
# creates a sparse representation of the feature vector (featvec)
self.sparseFeatvec = {}
# The current token
self.sparseFeatvec[featvec["current_form_"+currentToken.form]] = 1
# If exists, the previous token; else it is the initial token of the phrase
if previousToken: self.sparseFeatvec[featvec["prev_form_"+previousToken.form]] = 1
else: self.sparseFeatvec[featvec["initial_token"]] = 1
# if exists, the next token
if nextToken: self.sparseFeatvec[featvec["next_form_"+nextToken.form]] = 1
def sentences( filestream ):
# A generator to read a file sentence-wise and generate a Token object for each line
# A list of Token objects of every sentence is yielded
sentence = []
for line in filestream:
line = line.rstrip()
if line:
sentence.append(Token(line))
elif sentence:
yield sentence
sentence = []
if sentence:
yield sentence
| class Token(object):
def __init__( self, line ):
entries = line.split('\t')
self.form = entries[0].lower()
self.gold_pos = entries[1]
self.predicted_pos = entries [3]
def createFeatureVector(self, featvec, currentToken, previousToken, nextToken):
self.sparseFeatvec = {}
#if previousToken: self.sparseFeatvec[featvec["prev_pos_"+str(previousToken.gold_pos)]] = 1
self.sparseFeatvec[featvec["current_form_"+currentToken.form]] = 1
if previousToken: self.sparseFeatvec[featvec["prev_form_"+previousToken.form]] = 1
if nextToken: self.sparseFeatvec[featvec["next_form_"+nextToken.form]] = 1
if not previousToken: self.sparseFeatvec[featvec["initial_token"]] = 1
def sentences( filestream ):
sentence = []
for line in filestream:
line = line.rstrip()
if line:
sentence.append(Token(line))
elif sentence:
yield sentence
sentence = []
if sentence:
yield sentence
Handle wrong formatted input, added comments, some cosmeticsclass Token(object):
def __init__( self, line ):
# Splits line tab-wise, writes the values in parameters
entries = line.split('\t')
if len(entries) == 4:
self.form = entries[0].lower()
self.gold_pos = entries[1]
self.predicted_pos = entries [3]
elif len(entries) > 4: print "\tInput file not in expected format: Too many columns"
else: print "\tInput file not in expected format: Too many columns"
def createFeatureVector(self, featvec, currentToken, previousToken, nextToken):
# creates a sparse representation of the feature vector (featvec)
self.sparseFeatvec = {}
# The current token
self.sparseFeatvec[featvec["current_form_"+currentToken.form]] = 1
# If exists, the previous token; else it is the initial token of the phrase
if previousToken: self.sparseFeatvec[featvec["prev_form_"+previousToken.form]] = 1
else: self.sparseFeatvec[featvec["initial_token"]] = 1
# if exists, the next token
if nextToken: self.sparseFeatvec[featvec["next_form_"+nextToken.form]] = 1
def sentences( filestream ):
# A generator to read a file sentence-wise and generate a Token object for each line
# A list of Token objects of every sentence is yielded
sentence = []
for line in filestream:
line = line.rstrip()
if line:
sentence.append(Token(line))
elif sentence:
yield sentence
sentence = []
if sentence:
yield sentence
| <commit_before>class Token(object):
def __init__( self, line ):
entries = line.split('\t')
self.form = entries[0].lower()
self.gold_pos = entries[1]
self.predicted_pos = entries [3]
def createFeatureVector(self, featvec, currentToken, previousToken, nextToken):
self.sparseFeatvec = {}
#if previousToken: self.sparseFeatvec[featvec["prev_pos_"+str(previousToken.gold_pos)]] = 1
self.sparseFeatvec[featvec["current_form_"+currentToken.form]] = 1
if previousToken: self.sparseFeatvec[featvec["prev_form_"+previousToken.form]] = 1
if nextToken: self.sparseFeatvec[featvec["next_form_"+nextToken.form]] = 1
if not previousToken: self.sparseFeatvec[featvec["initial_token"]] = 1
def sentences( filestream ):
sentence = []
for line in filestream:
line = line.rstrip()
if line:
sentence.append(Token(line))
elif sentence:
yield sentence
sentence = []
if sentence:
yield sentence
<commit_msg>Handle wrong formatted input, added comments, some cosmetics<commit_after>class Token(object):
def __init__( self, line ):
# Splits line tab-wise, writes the values in parameters
entries = line.split('\t')
if len(entries) == 4:
self.form = entries[0].lower()
self.gold_pos = entries[1]
self.predicted_pos = entries [3]
elif len(entries) > 4: print "\tInput file not in expected format: Too many columns"
else: print "\tInput file not in expected format: Too many columns"
def createFeatureVector(self, featvec, currentToken, previousToken, nextToken):
# creates a sparse representation of the feature vector (featvec)
self.sparseFeatvec = {}
# The current token
self.sparseFeatvec[featvec["current_form_"+currentToken.form]] = 1
# If exists, the previous token; else it is the initial token of the phrase
if previousToken: self.sparseFeatvec[featvec["prev_form_"+previousToken.form]] = 1
else: self.sparseFeatvec[featvec["initial_token"]] = 1
# if exists, the next token
if nextToken: self.sparseFeatvec[featvec["next_form_"+nextToken.form]] = 1
def sentences( filestream ):
# A generator to read a file sentence-wise and generate a Token object for each line
# A list of Token objects of every sentence is yielded
sentence = []
for line in filestream:
line = line.rstrip()
if line:
sentence.append(Token(line))
elif sentence:
yield sentence
sentence = []
if sentence:
yield sentence
|
7d08a71874dd7b1ab7ba4bb1cd345161d9118266 | src/extract.py | src/extract.py | import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff('/home/rajika/projects/react-scaffolder'):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff() | import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self, repo_path):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff(repo_path):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff('/home/rajika/projects/babel-bot') | Move repo name to arg | Move repo name to arg
| Python | mit | rajikaimal/emma,rajikaimal/emma | import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff('/home/rajika/projects/react-scaffolder'):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff()Move repo name to arg | import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self, repo_path):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff(repo_path):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff('/home/rajika/projects/babel-bot') | <commit_before>import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff('/home/rajika/projects/react-scaffolder'):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff()<commit_msg>Move repo name to arg<commit_after> | import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self, repo_path):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff(repo_path):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff('/home/rajika/projects/babel-bot') | import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff('/home/rajika/projects/react-scaffolder'):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff()Move repo name to argimport os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self, repo_path):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff(repo_path):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff('/home/rajika/projects/babel-bot') | <commit_before>import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff('/home/rajika/projects/react-scaffolder'):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff()<commit_msg>Move repo name to arg<commit_after>import os
import csv
from parser import Parser
class Extract:
"""
Extract data from .git repo and persist as a data set
"""
# repos name git: .git
# destination absolute path
def clone_repo(self, repo_name, destination):
Repo.clone_from(repo_name, destination)
# get parsed .diff file
def get_parsed_diff(self, repo_path):
prev_commiter = None
parser = Parser()
full_path = os.path.dirname(
os.path.realpath(__file__))
for diff_info in parser.parse_diff(repo_path):
print(diff_info)
with open(full_path + '/data/train_emma.csv', 'a') as csv_file:
writer = csv.writer(csv_file)
for key, value in diff_info.items():
if(key == 'file_names'):
for file_name in value:
print('writing')
if(prev_commiter == None):
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], diff_info['author']])
else:
writer.writerow([file_name, diff_info['lines'], diff_info['timestamp'], diff_info['author'], prev_commiter])
prev_commiter = diff_info['author']
ex = Extract()
ex.get_parsed_diff('/home/rajika/projects/babel-bot') |
8d04bb798648980a2fe29ee408bdcff099bfd2c1 | tk/urls.py | tk/urls.py | from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^material/', include('tk.material.urls')),
url(r'^admin/', admin.site.urls),
]
| from django.conf.urls import url, include
from django.contrib import admin
from django.views.generic.base import TemplateView
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='base.html'), name='frontpage'),
url(r'^material/', include('tk.material.urls')),
url(r'^admin/', admin.site.urls),
]
| Add a frontpage rendering view | Add a frontpage rendering view
| Python | agpl-3.0 | GISAElkartea/tresna-kutxa,GISAElkartea/tresna-kutxa,GISAElkartea/tresna-kutxa,GISAElkartea/tresna-kutxa | from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^material/', include('tk.material.urls')),
url(r'^admin/', admin.site.urls),
]
Add a frontpage rendering view | from django.conf.urls import url, include
from django.contrib import admin
from django.views.generic.base import TemplateView
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='base.html'), name='frontpage'),
url(r'^material/', include('tk.material.urls')),
url(r'^admin/', admin.site.urls),
]
| <commit_before>from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^material/', include('tk.material.urls')),
url(r'^admin/', admin.site.urls),
]
<commit_msg>Add a frontpage rendering view<commit_after> | from django.conf.urls import url, include
from django.contrib import admin
from django.views.generic.base import TemplateView
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='base.html'), name='frontpage'),
url(r'^material/', include('tk.material.urls')),
url(r'^admin/', admin.site.urls),
]
| from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^material/', include('tk.material.urls')),
url(r'^admin/', admin.site.urls),
]
Add a frontpage rendering viewfrom django.conf.urls import url, include
from django.contrib import admin
from django.views.generic.base import TemplateView
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='base.html'), name='frontpage'),
url(r'^material/', include('tk.material.urls')),
url(r'^admin/', admin.site.urls),
]
| <commit_before>from django.conf.urls import url, include
from django.contrib import admin
urlpatterns = [
url(r'^material/', include('tk.material.urls')),
url(r'^admin/', admin.site.urls),
]
<commit_msg>Add a frontpage rendering view<commit_after>from django.conf.urls import url, include
from django.contrib import admin
from django.views.generic.base import TemplateView
urlpatterns = [
url(r'^$', TemplateView.as_view(template_name='base.html'), name='frontpage'),
url(r'^material/', include('tk.material.urls')),
url(r'^admin/', admin.site.urls),
]
|
09498335615b7e770f5976b9749d68050966501d | models/timeandplace.py | models/timeandplace.py | #!/usr/bin/env python3
from .base import Serializable
from .locations import Platform
from datetime import datetime
class TimeAndPlace(Serializable):
def __init__(self, platform=None, arrival=None, departure=None):
super().__init__()
self.platform = platform
self.arrival = arrival
self.departure = departure
self.passthrough = False
@classmethod
def _validate(cls):
return {
'platform': (None, Platform),
'arrival': (None, datetime),
'departure': (None, datetime),
'passthrough': bool
}
@property
def stop(self):
return self.platform.stop
def __eq__(self, other):
assert isinstance(other, TimeAndPlace)
return (self.platform == other.platform and
self.arrival == other.arrival and
self.departure == other.departure)
def __repr__(self):
return ('<TimeAndPlace %s %s %s>' % (self.arrival, self.departure, self.platform))
| #!/usr/bin/env python3
from .base import Serializable
from .locations import Platform
from .realtime import RealtimeTime
class TimeAndPlace(Serializable):
def __init__(self, platform=None, arrival=None, departure=None):
super().__init__()
self.platform = platform
self.arrival = arrival
self.departure = departure
self.passthrough = False
@classmethod
def _validate(cls):
return {
'platform': (None, Platform),
'arrival': (None, RealtimeTime),
'departure': (None, RealtimeTime),
'passthrough': bool
}
@property
def stop(self):
return self.platform.stop
def __eq__(self, other):
assert isinstance(other, TimeAndPlace)
return (self.platform == other.platform and
self.arrival == other.arrival and
self.departure == other.departure)
def __repr__(self):
return ('<TimeAndPlace %s %s %s>' % (self.arrival, self.departure, self.platform))
| Revert "TimeAndPlace no longer refers to realtime data" | Revert "TimeAndPlace no longer refers to realtime data"
This reverts commit cf92e191e3748c67102f142b411937517c5051f4.
| Python | apache-2.0 | NoMoKeTo/choo,NoMoKeTo/transit | #!/usr/bin/env python3
from .base import Serializable
from .locations import Platform
from datetime import datetime
class TimeAndPlace(Serializable):
def __init__(self, platform=None, arrival=None, departure=None):
super().__init__()
self.platform = platform
self.arrival = arrival
self.departure = departure
self.passthrough = False
@classmethod
def _validate(cls):
return {
'platform': (None, Platform),
'arrival': (None, datetime),
'departure': (None, datetime),
'passthrough': bool
}
@property
def stop(self):
return self.platform.stop
def __eq__(self, other):
assert isinstance(other, TimeAndPlace)
return (self.platform == other.platform and
self.arrival == other.arrival and
self.departure == other.departure)
def __repr__(self):
return ('<TimeAndPlace %s %s %s>' % (self.arrival, self.departure, self.platform))
Revert "TimeAndPlace no longer refers to realtime data"
This reverts commit cf92e191e3748c67102f142b411937517c5051f4. | #!/usr/bin/env python3
from .base import Serializable
from .locations import Platform
from .realtime import RealtimeTime
class TimeAndPlace(Serializable):
def __init__(self, platform=None, arrival=None, departure=None):
super().__init__()
self.platform = platform
self.arrival = arrival
self.departure = departure
self.passthrough = False
@classmethod
def _validate(cls):
return {
'platform': (None, Platform),
'arrival': (None, RealtimeTime),
'departure': (None, RealtimeTime),
'passthrough': bool
}
@property
def stop(self):
return self.platform.stop
def __eq__(self, other):
assert isinstance(other, TimeAndPlace)
return (self.platform == other.platform and
self.arrival == other.arrival and
self.departure == other.departure)
def __repr__(self):
return ('<TimeAndPlace %s %s %s>' % (self.arrival, self.departure, self.platform))
| <commit_before>#!/usr/bin/env python3
from .base import Serializable
from .locations import Platform
from datetime import datetime
class TimeAndPlace(Serializable):
def __init__(self, platform=None, arrival=None, departure=None):
super().__init__()
self.platform = platform
self.arrival = arrival
self.departure = departure
self.passthrough = False
@classmethod
def _validate(cls):
return {
'platform': (None, Platform),
'arrival': (None, datetime),
'departure': (None, datetime),
'passthrough': bool
}
@property
def stop(self):
return self.platform.stop
def __eq__(self, other):
assert isinstance(other, TimeAndPlace)
return (self.platform == other.platform and
self.arrival == other.arrival and
self.departure == other.departure)
def __repr__(self):
return ('<TimeAndPlace %s %s %s>' % (self.arrival, self.departure, self.platform))
<commit_msg>Revert "TimeAndPlace no longer refers to realtime data"
This reverts commit cf92e191e3748c67102f142b411937517c5051f4.<commit_after> | #!/usr/bin/env python3
from .base import Serializable
from .locations import Platform
from .realtime import RealtimeTime
class TimeAndPlace(Serializable):
def __init__(self, platform=None, arrival=None, departure=None):
super().__init__()
self.platform = platform
self.arrival = arrival
self.departure = departure
self.passthrough = False
@classmethod
def _validate(cls):
return {
'platform': (None, Platform),
'arrival': (None, RealtimeTime),
'departure': (None, RealtimeTime),
'passthrough': bool
}
@property
def stop(self):
return self.platform.stop
def __eq__(self, other):
assert isinstance(other, TimeAndPlace)
return (self.platform == other.platform and
self.arrival == other.arrival and
self.departure == other.departure)
def __repr__(self):
return ('<TimeAndPlace %s %s %s>' % (self.arrival, self.departure, self.platform))
| #!/usr/bin/env python3
from .base import Serializable
from .locations import Platform
from datetime import datetime
class TimeAndPlace(Serializable):
def __init__(self, platform=None, arrival=None, departure=None):
super().__init__()
self.platform = platform
self.arrival = arrival
self.departure = departure
self.passthrough = False
@classmethod
def _validate(cls):
return {
'platform': (None, Platform),
'arrival': (None, datetime),
'departure': (None, datetime),
'passthrough': bool
}
@property
def stop(self):
return self.platform.stop
def __eq__(self, other):
assert isinstance(other, TimeAndPlace)
return (self.platform == other.platform and
self.arrival == other.arrival and
self.departure == other.departure)
def __repr__(self):
return ('<TimeAndPlace %s %s %s>' % (self.arrival, self.departure, self.platform))
Revert "TimeAndPlace no longer refers to realtime data"
This reverts commit cf92e191e3748c67102f142b411937517c5051f4.#!/usr/bin/env python3
from .base import Serializable
from .locations import Platform
from .realtime import RealtimeTime
class TimeAndPlace(Serializable):
def __init__(self, platform=None, arrival=None, departure=None):
super().__init__()
self.platform = platform
self.arrival = arrival
self.departure = departure
self.passthrough = False
@classmethod
def _validate(cls):
return {
'platform': (None, Platform),
'arrival': (None, RealtimeTime),
'departure': (None, RealtimeTime),
'passthrough': bool
}
@property
def stop(self):
return self.platform.stop
def __eq__(self, other):
assert isinstance(other, TimeAndPlace)
return (self.platform == other.platform and
self.arrival == other.arrival and
self.departure == other.departure)
def __repr__(self):
return ('<TimeAndPlace %s %s %s>' % (self.arrival, self.departure, self.platform))
| <commit_before>#!/usr/bin/env python3
from .base import Serializable
from .locations import Platform
from datetime import datetime
class TimeAndPlace(Serializable):
def __init__(self, platform=None, arrival=None, departure=None):
super().__init__()
self.platform = platform
self.arrival = arrival
self.departure = departure
self.passthrough = False
@classmethod
def _validate(cls):
return {
'platform': (None, Platform),
'arrival': (None, datetime),
'departure': (None, datetime),
'passthrough': bool
}
@property
def stop(self):
return self.platform.stop
def __eq__(self, other):
assert isinstance(other, TimeAndPlace)
return (self.platform == other.platform and
self.arrival == other.arrival and
self.departure == other.departure)
def __repr__(self):
return ('<TimeAndPlace %s %s %s>' % (self.arrival, self.departure, self.platform))
<commit_msg>Revert "TimeAndPlace no longer refers to realtime data"
This reverts commit cf92e191e3748c67102f142b411937517c5051f4.<commit_after>#!/usr/bin/env python3
from .base import Serializable
from .locations import Platform
from .realtime import RealtimeTime
class TimeAndPlace(Serializable):
def __init__(self, platform=None, arrival=None, departure=None):
super().__init__()
self.platform = platform
self.arrival = arrival
self.departure = departure
self.passthrough = False
@classmethod
def _validate(cls):
return {
'platform': (None, Platform),
'arrival': (None, RealtimeTime),
'departure': (None, RealtimeTime),
'passthrough': bool
}
@property
def stop(self):
return self.platform.stop
def __eq__(self, other):
assert isinstance(other, TimeAndPlace)
return (self.platform == other.platform and
self.arrival == other.arrival and
self.departure == other.departure)
def __repr__(self):
return ('<TimeAndPlace %s %s %s>' % (self.arrival, self.departure, self.platform))
|
cd471449edad56ef6c3a69d025130f4fb8ea1fea | plumbium/artefacts.py | plumbium/artefacts.py | import os.path
from utils import file_sha1sum
class Artefact(object):
def __init__(self, filename, extension):
if not filename.endswith(extension):
raise ValueError
self._filename = filename
self._ext_length = len(extension)
self._abspath = os.path.abspath(filename)
def checksum(self):
return file_sha1sum(self.filename)
def exists(self):
return os.path.exists(self.filename)
@property
def abspath(self):
return self._abspath
@property
def basename(self):
"""Return the filename without the extension"""
return self._filename[:-self._ext_length]
def dereference(self):
self._filename = os.path.basename(self._filename)
@property
def filename(self):
return self._filename
def __repr__(self):
return 'Artefact({0!r})'.format(self.filename)
class NiiGzImage(Artefact):
def __init__(self, filename):
super(NiiGzImage, self).__init__(filename, '.nii.gz')
def __repr__(self):
return '{0}({1!r})'.format(self.__clsname__, self.filename)
class TextFile(Artefact):
def __init__(self, filename):
super(TextFile, self).__init__(filename, '.txt')
def __repr__(self):
return '{0}({1!r})'.format(self.__clsname__, self.filename)
| import os.path
from utils import file_sha1sum
class Artefact(object):
def __init__(self, filename, extension):
if not filename.endswith(extension):
raise ValueError
self._filename = filename
self._ext_length = len(extension)
self._abspath = os.path.abspath(filename)
def checksum(self):
return file_sha1sum(self.filename)
def exists(self):
return os.path.exists(self.filename)
@property
def abspath(self):
return self._abspath
@property
def basename(self):
"""Return the filename without the extension"""
return self._filename[:-self._ext_length]
def dereference(self):
self._filename = os.path.basename(self._filename)
@property
def filename(self):
return self._filename
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
class NiiGzImage(Artefact):
def __init__(self, filename):
super(NiiGzImage, self).__init__(filename, '.nii.gz')
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
class TextFile(Artefact):
def __init__(self, filename):
super(TextFile, self).__init__(filename, '.txt')
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
| Use correct method to get classname of self | Use correct method to get classname of self
| Python | mit | jstutters/Plumbium | import os.path
from utils import file_sha1sum
class Artefact(object):
def __init__(self, filename, extension):
if not filename.endswith(extension):
raise ValueError
self._filename = filename
self._ext_length = len(extension)
self._abspath = os.path.abspath(filename)
def checksum(self):
return file_sha1sum(self.filename)
def exists(self):
return os.path.exists(self.filename)
@property
def abspath(self):
return self._abspath
@property
def basename(self):
"""Return the filename without the extension"""
return self._filename[:-self._ext_length]
def dereference(self):
self._filename = os.path.basename(self._filename)
@property
def filename(self):
return self._filename
def __repr__(self):
return 'Artefact({0!r})'.format(self.filename)
class NiiGzImage(Artefact):
def __init__(self, filename):
super(NiiGzImage, self).__init__(filename, '.nii.gz')
def __repr__(self):
return '{0}({1!r})'.format(self.__clsname__, self.filename)
class TextFile(Artefact):
def __init__(self, filename):
super(TextFile, self).__init__(filename, '.txt')
def __repr__(self):
return '{0}({1!r})'.format(self.__clsname__, self.filename)
Use correct method to get classname of self | import os.path
from utils import file_sha1sum
class Artefact(object):
def __init__(self, filename, extension):
if not filename.endswith(extension):
raise ValueError
self._filename = filename
self._ext_length = len(extension)
self._abspath = os.path.abspath(filename)
def checksum(self):
return file_sha1sum(self.filename)
def exists(self):
return os.path.exists(self.filename)
@property
def abspath(self):
return self._abspath
@property
def basename(self):
"""Return the filename without the extension"""
return self._filename[:-self._ext_length]
def dereference(self):
self._filename = os.path.basename(self._filename)
@property
def filename(self):
return self._filename
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
class NiiGzImage(Artefact):
def __init__(self, filename):
super(NiiGzImage, self).__init__(filename, '.nii.gz')
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
class TextFile(Artefact):
def __init__(self, filename):
super(TextFile, self).__init__(filename, '.txt')
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
| <commit_before>import os.path
from utils import file_sha1sum
class Artefact(object):
def __init__(self, filename, extension):
if not filename.endswith(extension):
raise ValueError
self._filename = filename
self._ext_length = len(extension)
self._abspath = os.path.abspath(filename)
def checksum(self):
return file_sha1sum(self.filename)
def exists(self):
return os.path.exists(self.filename)
@property
def abspath(self):
return self._abspath
@property
def basename(self):
"""Return the filename without the extension"""
return self._filename[:-self._ext_length]
def dereference(self):
self._filename = os.path.basename(self._filename)
@property
def filename(self):
return self._filename
def __repr__(self):
return 'Artefact({0!r})'.format(self.filename)
class NiiGzImage(Artefact):
def __init__(self, filename):
super(NiiGzImage, self).__init__(filename, '.nii.gz')
def __repr__(self):
return '{0}({1!r})'.format(self.__clsname__, self.filename)
class TextFile(Artefact):
def __init__(self, filename):
super(TextFile, self).__init__(filename, '.txt')
def __repr__(self):
return '{0}({1!r})'.format(self.__clsname__, self.filename)
<commit_msg>Use correct method to get classname of self<commit_after> | import os.path
from utils import file_sha1sum
class Artefact(object):
def __init__(self, filename, extension):
if not filename.endswith(extension):
raise ValueError
self._filename = filename
self._ext_length = len(extension)
self._abspath = os.path.abspath(filename)
def checksum(self):
return file_sha1sum(self.filename)
def exists(self):
return os.path.exists(self.filename)
@property
def abspath(self):
return self._abspath
@property
def basename(self):
"""Return the filename without the extension"""
return self._filename[:-self._ext_length]
def dereference(self):
self._filename = os.path.basename(self._filename)
@property
def filename(self):
return self._filename
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
class NiiGzImage(Artefact):
def __init__(self, filename):
super(NiiGzImage, self).__init__(filename, '.nii.gz')
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
class TextFile(Artefact):
def __init__(self, filename):
super(TextFile, self).__init__(filename, '.txt')
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
| import os.path
from utils import file_sha1sum
class Artefact(object):
def __init__(self, filename, extension):
if not filename.endswith(extension):
raise ValueError
self._filename = filename
self._ext_length = len(extension)
self._abspath = os.path.abspath(filename)
def checksum(self):
return file_sha1sum(self.filename)
def exists(self):
return os.path.exists(self.filename)
@property
def abspath(self):
return self._abspath
@property
def basename(self):
"""Return the filename without the extension"""
return self._filename[:-self._ext_length]
def dereference(self):
self._filename = os.path.basename(self._filename)
@property
def filename(self):
return self._filename
def __repr__(self):
return 'Artefact({0!r})'.format(self.filename)
class NiiGzImage(Artefact):
def __init__(self, filename):
super(NiiGzImage, self).__init__(filename, '.nii.gz')
def __repr__(self):
return '{0}({1!r})'.format(self.__clsname__, self.filename)
class TextFile(Artefact):
def __init__(self, filename):
super(TextFile, self).__init__(filename, '.txt')
def __repr__(self):
return '{0}({1!r})'.format(self.__clsname__, self.filename)
Use correct method to get classname of selfimport os.path
from utils import file_sha1sum
class Artefact(object):
def __init__(self, filename, extension):
if not filename.endswith(extension):
raise ValueError
self._filename = filename
self._ext_length = len(extension)
self._abspath = os.path.abspath(filename)
def checksum(self):
return file_sha1sum(self.filename)
def exists(self):
return os.path.exists(self.filename)
@property
def abspath(self):
return self._abspath
@property
def basename(self):
"""Return the filename without the extension"""
return self._filename[:-self._ext_length]
def dereference(self):
self._filename = os.path.basename(self._filename)
@property
def filename(self):
return self._filename
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
class NiiGzImage(Artefact):
def __init__(self, filename):
super(NiiGzImage, self).__init__(filename, '.nii.gz')
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
class TextFile(Artefact):
def __init__(self, filename):
super(TextFile, self).__init__(filename, '.txt')
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
| <commit_before>import os.path
from utils import file_sha1sum
class Artefact(object):
def __init__(self, filename, extension):
if not filename.endswith(extension):
raise ValueError
self._filename = filename
self._ext_length = len(extension)
self._abspath = os.path.abspath(filename)
def checksum(self):
return file_sha1sum(self.filename)
def exists(self):
return os.path.exists(self.filename)
@property
def abspath(self):
return self._abspath
@property
def basename(self):
"""Return the filename without the extension"""
return self._filename[:-self._ext_length]
def dereference(self):
self._filename = os.path.basename(self._filename)
@property
def filename(self):
return self._filename
def __repr__(self):
return 'Artefact({0!r})'.format(self.filename)
class NiiGzImage(Artefact):
def __init__(self, filename):
super(NiiGzImage, self).__init__(filename, '.nii.gz')
def __repr__(self):
return '{0}({1!r})'.format(self.__clsname__, self.filename)
class TextFile(Artefact):
def __init__(self, filename):
super(TextFile, self).__init__(filename, '.txt')
def __repr__(self):
return '{0}({1!r})'.format(self.__clsname__, self.filename)
<commit_msg>Use correct method to get classname of self<commit_after>import os.path
from utils import file_sha1sum
class Artefact(object):
def __init__(self, filename, extension):
if not filename.endswith(extension):
raise ValueError
self._filename = filename
self._ext_length = len(extension)
self._abspath = os.path.abspath(filename)
def checksum(self):
return file_sha1sum(self.filename)
def exists(self):
return os.path.exists(self.filename)
@property
def abspath(self):
return self._abspath
@property
def basename(self):
"""Return the filename without the extension"""
return self._filename[:-self._ext_length]
def dereference(self):
self._filename = os.path.basename(self._filename)
@property
def filename(self):
return self._filename
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
class NiiGzImage(Artefact):
def __init__(self, filename):
super(NiiGzImage, self).__init__(filename, '.nii.gz')
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
class TextFile(Artefact):
def __init__(self, filename):
super(TextFile, self).__init__(filename, '.txt')
def __repr__(self):
return '{0}({1!r})'.format(self.__class__.__name__, self.filename)
|
a5d61bee86c394cf6bc972020cbfe2f95463d1e2 | huxley/www/views.py | huxley/www/views.py | # Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from huxley.api.serializers import UserSerializer
from huxley.utils.shortcuts import render_template
def index(request):
user_dict = {};
if request.user.is_authenticated():
user_dict = UserSerializer(request.user).data
context = {'user_json': json.dumps(user_dict)}
return render_template(request, 'www.html', context)
| # Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from huxley.api.serializers import UserSerializer
from huxley.utils.shortcuts import render_template
def index(request):
user_dict = {};
if request.user.is_authenticated():
user_dict = UserSerializer(request.user).data
context = {'user_json': json.dumps(user_dict).replace('</', '<\\/')}
return render_template(request, 'www.html', context)
| Fix XSS vulnerability in current user bootstrapping. | Fix XSS vulnerability in current user bootstrapping.
| Python | bsd-3-clause | bmun/huxley,nathanielparke/huxley,ctmunwebmaster/huxley,ctmunwebmaster/huxley,nathanielparke/huxley,bmun/huxley,bmun/huxley,ctmunwebmaster/huxley,nathanielparke/huxley,ctmunwebmaster/huxley,bmun/huxley,nathanielparke/huxley | # Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from huxley.api.serializers import UserSerializer
from huxley.utils.shortcuts import render_template
def index(request):
user_dict = {};
if request.user.is_authenticated():
user_dict = UserSerializer(request.user).data
context = {'user_json': json.dumps(user_dict)}
return render_template(request, 'www.html', context)
Fix XSS vulnerability in current user bootstrapping. | # Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from huxley.api.serializers import UserSerializer
from huxley.utils.shortcuts import render_template
def index(request):
user_dict = {};
if request.user.is_authenticated():
user_dict = UserSerializer(request.user).data
context = {'user_json': json.dumps(user_dict).replace('</', '<\\/')}
return render_template(request, 'www.html', context)
| <commit_before># Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from huxley.api.serializers import UserSerializer
from huxley.utils.shortcuts import render_template
def index(request):
user_dict = {};
if request.user.is_authenticated():
user_dict = UserSerializer(request.user).data
context = {'user_json': json.dumps(user_dict)}
return render_template(request, 'www.html', context)
<commit_msg>Fix XSS vulnerability in current user bootstrapping.<commit_after> | # Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from huxley.api.serializers import UserSerializer
from huxley.utils.shortcuts import render_template
def index(request):
user_dict = {};
if request.user.is_authenticated():
user_dict = UserSerializer(request.user).data
context = {'user_json': json.dumps(user_dict).replace('</', '<\\/')}
return render_template(request, 'www.html', context)
| # Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from huxley.api.serializers import UserSerializer
from huxley.utils.shortcuts import render_template
def index(request):
user_dict = {};
if request.user.is_authenticated():
user_dict = UserSerializer(request.user).data
context = {'user_json': json.dumps(user_dict)}
return render_template(request, 'www.html', context)
Fix XSS vulnerability in current user bootstrapping.# Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from huxley.api.serializers import UserSerializer
from huxley.utils.shortcuts import render_template
def index(request):
user_dict = {};
if request.user.is_authenticated():
user_dict = UserSerializer(request.user).data
context = {'user_json': json.dumps(user_dict).replace('</', '<\\/')}
return render_template(request, 'www.html', context)
| <commit_before># Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from huxley.api.serializers import UserSerializer
from huxley.utils.shortcuts import render_template
def index(request):
user_dict = {};
if request.user.is_authenticated():
user_dict = UserSerializer(request.user).data
context = {'user_json': json.dumps(user_dict)}
return render_template(request, 'www.html', context)
<commit_msg>Fix XSS vulnerability in current user bootstrapping.<commit_after># Copyright (c) 2011-2014 Berkeley Model United Nations. All rights reserved.
# Use of this source code is governed by a BSD License (see LICENSE).
import json
from huxley.api.serializers import UserSerializer
from huxley.utils.shortcuts import render_template
def index(request):
user_dict = {};
if request.user.is_authenticated():
user_dict = UserSerializer(request.user).data
context = {'user_json': json.dumps(user_dict).replace('</', '<\\/')}
return render_template(request, 'www.html', context)
|
049e8c746b5f40b3e708dcd749052405e2246160 | lc0084_largest_rectangle_in_histogram.py | lc0084_largest_rectangle_in_histogram.py | """Leetcode 84. Largest Rectangle in Histogram
Hard
URL: https://leetcode.com/problems/largest-rectangle-in-histogram/
Given n non-negative integers representing the histogram's bar height where the
width of each bar is 1, find the area of largest rectangle in the histogram.
Above is a histogram where width of each bar is 1, given height = [2,1,5,6,2,3].
The largest rectangle is shown in the shaded area, which has area = 10 unit:
5 * 2 = 10.
Example:
Input: [2,1,5,6,2,3]
Output: 10
"""
class Solution(object):
def largestRectangleArea(self, heights):
"""
:type heights: List[int]
:rtype: int
"""
pass
def main():
pass
if __name__ == '__main__':
main()
| """Leetcode 84. Largest Rectangle in Histogram
Hard
URL: https://leetcode.com/problems/largest-rectangle-in-histogram/
Given n non-negative integers representing the histogram's bar height where the
width of each bar is 1, find the area of largest rectangle in the histogram.
Above is a histogram where width of each bar is 1, given height = [2,1,5,6,2,3].
The largest rectangle is shown in the shaded area, which has area = 10 unit:
5 * 2 = 10.
Example:
Input: [2,1,5,6,2,3]
Output: 10
"""
class SolutionIncreasingHeightIdxStack(object):
def largestRectangleArea(self, heights):
"""
:type heights: List[int]
:rtype: int
"""
# Use stack to collect idx of buildings with increasing heights.
# Boundary case handled by idx = -1 & height = 0.
idx_stack = [-1]
heights.append(0)
max_area = 0
for i in range(len(heights)):
# Before adding a new building, pop buildings taller than the new one.
while heights[i] < heights[idx_stack[-1]]:
# The building popped out represents the height.
h = heights[idx_stack.pop()]
# Last stack top & new building are the left & right boundaries.
w = i - idx_stack[-1] - 1
max_area = max(max_area, h * w)
idx_stack.append(i)
return max_area
def main():
    """Demo driver. Expected output: 10 = 5 * 2."""
    # Python 2 `print heights` statements replaced with py3-compatible calls.
    heights = [2, 1, 5, 6, 2, 3]
    print(heights)
    print(SolutionIncreasingHeightIdxStack().largestRectangleArea(heights))


if __name__ == '__main__':
    main()
| Complete increasing heights idx stack sol | Complete increasing heights idx stack sol
| Python | bsd-2-clause | bowen0701/algorithms_data_structures | """Leetcode 84. Largest Rectangle in Histogram
Hard
URL: https://leetcode.com/problems/largest-rectangle-in-histogram/
Given n non-negative integers representing the histogram's bar height where the
width of each bar is 1, find the area of largest rectangle in the histogram.
Above is a histogram where width of each bar is 1, given height = [2,1,5,6,2,3].
The largest rectangle is shown in the shaded area, which has area = 10 unit:
5 * 2 = 10.
Example:
Input: [2,1,5,6,2,3]
Output: 10
"""
class Solution(object):
def largestRectangleArea(self, heights):
"""
:type heights: List[int]
:rtype: int
"""
pass
def main():
pass
if __name__ == '__main__':
main()
Complete increasing heights idx stack sol | """Leetcode 84. Largest Rectangle in Histogram
Hard
URL: https://leetcode.com/problems/largest-rectangle-in-histogram/
Given n non-negative integers representing the histogram's bar height where the
width of each bar is 1, find the area of largest rectangle in the histogram.
Above is a histogram where width of each bar is 1, given height = [2,1,5,6,2,3].
The largest rectangle is shown in the shaded area, which has area = 10 unit:
5 * 2 = 10.
Example:
Input: [2,1,5,6,2,3]
Output: 10
"""
class SolutionIncreasingHeightIdxStack(object):
def largestRectangleArea(self, heights):
"""
:type heights: List[int]
:rtype: int
"""
# Use stack to collect idx of buildings with increasing heights.
# Boundary case handled by idx = -1 & height = 0.
idx_stack = [-1]
heights.append(0)
max_area = 0
for i in range(len(heights)):
# Before adding a new building, pop buildings taller than the new one.
while heights[i] < heights[idx_stack[-1]]:
# The building popped out represents the height.
h = heights[idx_stack.pop()]
# Last stack top & new building are the left & right boundaries.
w = i - idx_stack[-1] - 1
max_area = max(max_area, h * w)
idx_stack.append(i)
return max_area
def main():
# Output: 10 = 5 * 2.
heights = [2,1,5,6,2,3]
print heights
print SolutionIncreasingHeightIdxStack().largestRectangleArea(heights)
if __name__ == '__main__':
main()
| <commit_before>"""Leetcode 84. Largest Rectangle in Histogram
Hard
URL: https://leetcode.com/problems/largest-rectangle-in-histogram/
Given n non-negative integers representing the histogram's bar height where the
width of each bar is 1, find the area of largest rectangle in the histogram.
Above is a histogram where width of each bar is 1, given height = [2,1,5,6,2,3].
The largest rectangle is shown in the shaded area, which has area = 10 unit:
5 * 2 = 10.
Example:
Input: [2,1,5,6,2,3]
Output: 10
"""
class Solution(object):
def largestRectangleArea(self, heights):
"""
:type heights: List[int]
:rtype: int
"""
pass
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Complete increasing heights idx stack sol<commit_after> | """Leetcode 84. Largest Rectangle in Histogram
Hard
URL: https://leetcode.com/problems/largest-rectangle-in-histogram/
Given n non-negative integers representing the histogram's bar height where the
width of each bar is 1, find the area of largest rectangle in the histogram.
Above is a histogram where width of each bar is 1, given height = [2,1,5,6,2,3].
The largest rectangle is shown in the shaded area, which has area = 10 unit:
5 * 2 = 10.
Example:
Input: [2,1,5,6,2,3]
Output: 10
"""
class SolutionIncreasingHeightIdxStack(object):
def largestRectangleArea(self, heights):
"""
:type heights: List[int]
:rtype: int
"""
# Use stack to collect idx of buildings with increasing heights.
# Boundary case handled by idx = -1 & height = 0.
idx_stack = [-1]
heights.append(0)
max_area = 0
for i in range(len(heights)):
# Before adding a new building, pop buildings taller than the new one.
while heights[i] < heights[idx_stack[-1]]:
# The building popped out represents the height.
h = heights[idx_stack.pop()]
# Last stack top & new building are the left & right boundaries.
w = i - idx_stack[-1] - 1
max_area = max(max_area, h * w)
idx_stack.append(i)
return max_area
def main():
# Output: 10 = 5 * 2.
heights = [2,1,5,6,2,3]
print heights
print SolutionIncreasingHeightIdxStack().largestRectangleArea(heights)
if __name__ == '__main__':
main()
| """Leetcode 84. Largest Rectangle in Histogram
Hard
URL: https://leetcode.com/problems/largest-rectangle-in-histogram/
Given n non-negative integers representing the histogram's bar height where the
width of each bar is 1, find the area of largest rectangle in the histogram.
Above is a histogram where width of each bar is 1, given height = [2,1,5,6,2,3].
The largest rectangle is shown in the shaded area, which has area = 10 unit:
5 * 2 = 10.
Example:
Input: [2,1,5,6,2,3]
Output: 10
"""
class Solution(object):
def largestRectangleArea(self, heights):
"""
:type heights: List[int]
:rtype: int
"""
pass
def main():
pass
if __name__ == '__main__':
main()
Complete increasing heights idx stack sol"""Leetcode 84. Largest Rectangle in Histogram
Hard
URL: https://leetcode.com/problems/largest-rectangle-in-histogram/
Given n non-negative integers representing the histogram's bar height where the
width of each bar is 1, find the area of largest rectangle in the histogram.
Above is a histogram where width of each bar is 1, given height = [2,1,5,6,2,3].
The largest rectangle is shown in the shaded area, which has area = 10 unit:
5 * 2 = 10.
Example:
Input: [2,1,5,6,2,3]
Output: 10
"""
class SolutionIncreasingHeightIdxStack(object):
def largestRectangleArea(self, heights):
"""
:type heights: List[int]
:rtype: int
"""
# Use stack to collect idx of buildings with increasing heights.
# Boundary case handled by idx = -1 & height = 0.
idx_stack = [-1]
heights.append(0)
max_area = 0
for i in range(len(heights)):
# Before adding a new building, pop buildings taller than the new one.
while heights[i] < heights[idx_stack[-1]]:
# The building popped out represents the height.
h = heights[idx_stack.pop()]
# Last stack top & new building are the left & right boundaries.
w = i - idx_stack[-1] - 1
max_area = max(max_area, h * w)
idx_stack.append(i)
return max_area
def main():
# Output: 10 = 5 * 2.
heights = [2,1,5,6,2,3]
print heights
print SolutionIncreasingHeightIdxStack().largestRectangleArea(heights)
if __name__ == '__main__':
main()
| <commit_before>"""Leetcode 84. Largest Rectangle in Histogram
Hard
URL: https://leetcode.com/problems/largest-rectangle-in-histogram/
Given n non-negative integers representing the histogram's bar height where the
width of each bar is 1, find the area of largest rectangle in the histogram.
Above is a histogram where width of each bar is 1, given height = [2,1,5,6,2,3].
The largest rectangle is shown in the shaded area, which has area = 10 unit:
5 * 2 = 10.
Example:
Input: [2,1,5,6,2,3]
Output: 10
"""
class Solution(object):
def largestRectangleArea(self, heights):
"""
:type heights: List[int]
:rtype: int
"""
pass
def main():
pass
if __name__ == '__main__':
main()
<commit_msg>Complete increasing heights idx stack sol<commit_after>"""Leetcode 84. Largest Rectangle in Histogram
Hard
URL: https://leetcode.com/problems/largest-rectangle-in-histogram/
Given n non-negative integers representing the histogram's bar height where the
width of each bar is 1, find the area of largest rectangle in the histogram.
Above is a histogram where width of each bar is 1, given height = [2,1,5,6,2,3].
The largest rectangle is shown in the shaded area, which has area = 10 unit:
5 * 2 = 10.
Example:
Input: [2,1,5,6,2,3]
Output: 10
"""
class SolutionIncreasingHeightIdxStack(object):
def largestRectangleArea(self, heights):
"""
:type heights: List[int]
:rtype: int
"""
# Use stack to collect idx of buildings with increasing heights.
# Boundary case handled by idx = -1 & height = 0.
idx_stack = [-1]
heights.append(0)
max_area = 0
for i in range(len(heights)):
# Before adding a new building, pop buildings taller than the new one.
while heights[i] < heights[idx_stack[-1]]:
# The building popped out represents the height.
h = heights[idx_stack.pop()]
# Last stack top & new building are the left & right boundaries.
w = i - idx_stack[-1] - 1
max_area = max(max_area, h * w)
idx_stack.append(i)
return max_area
def main():
# Output: 10 = 5 * 2.
heights = [2,1,5,6,2,3]
print heights
print SolutionIncreasingHeightIdxStack().largestRectangleArea(heights)
if __name__ == '__main__':
main()
|
59bf9eca217ef8ef3011124d1ff9e1570e8ff76d | plugins/clue/clue.py | plugins/clue/clue.py | from __future__ import unicode_literals
# don't convert to ascii in py2.7 when creating string to return
crontable = []
outputs = []


def process_message(data):
    """Echo the received text back to its channel, labelled with the source.

    Appends a ``[channel, reply]`` pair to the module-level ``outputs``
    queue consumed by the bot framework.
    """
    channel = data['channel']
    reply = 'from repeat1 "{}" in channel {}'.format(data['text'], channel)
    outputs.append([channel, reply])
| from __future__ import unicode_literals
# don't convert to ascii in py2.7 when creating string to return
crontable = []
outputs = []


def process_message(data):
    """Queue the message text for reposting to its originating channel."""
    channel, text = data['channel'], data['text']
    outputs.append([channel, text])
| Simplify stony's response to exactly what the user sent | Simplify stony's response to exactly what the user sent
Rather than providing the "from repeat1" and "in channel D???????" noise.
| Python | mit | cworth-gh/stony | from __future__ import unicode_literals
# don't convert to ascii in py2.7 when creating string to return
crontable = []
outputs = []
def process_message(data):
outputs.append([data['channel'], "from repeat1 \"{}\" in channel {}".format(
data['text'], data['channel'])]
)
Simplify stony's response to exactly what the user sent
Rather than providing the "from repeat1" and "in channel D???????" noise. | from __future__ import unicode_literals
# don't convert to ascii in py2.7 when creating string to return
crontable = []
outputs = []
def process_message(data):
outputs.append([data['channel'], data['text']])
| <commit_before>from __future__ import unicode_literals
# don't convert to ascii in py2.7 when creating string to return
crontable = []
outputs = []
def process_message(data):
outputs.append([data['channel'], "from repeat1 \"{}\" in channel {}".format(
data['text'], data['channel'])]
)
<commit_msg>Simplify stony's response to exactly what the user sent
Rather than providing the "from repeat1" and "in channel D???????" noise.<commit_after> | from __future__ import unicode_literals
# don't convert to ascii in py2.7 when creating string to return
crontable = []
outputs = []
def process_message(data):
outputs.append([data['channel'], data['text']])
| from __future__ import unicode_literals
# don't convert to ascii in py2.7 when creating string to return
crontable = []
outputs = []
def process_message(data):
outputs.append([data['channel'], "from repeat1 \"{}\" in channel {}".format(
data['text'], data['channel'])]
)
Simplify stony's response to exactly what the user sent
Rather than providing the "from repeat1" and "in channel D???????" noise.from __future__ import unicode_literals
# don't convert to ascii in py2.7 when creating string to return
crontable = []
outputs = []
def process_message(data):
outputs.append([data['channel'], data['text']])
| <commit_before>from __future__ import unicode_literals
# don't convert to ascii in py2.7 when creating string to return
crontable = []
outputs = []
def process_message(data):
outputs.append([data['channel'], "from repeat1 \"{}\" in channel {}".format(
data['text'], data['channel'])]
)
<commit_msg>Simplify stony's response to exactly what the user sent
Rather than providing the "from repeat1" and "in channel D???????" noise.<commit_after>from __future__ import unicode_literals
# don't convert to ascii in py2.7 when creating string to return
crontable = []
outputs = []
def process_message(data):
outputs.append([data['channel'], data['text']])
|
6d76842d9f9394aa78cda55fff9c62a4db5da5c6 | common.py | common.py | from __future__ import print_function
import os, pyrax, sys
import pyrax.exceptions as pexc
from termcolor import colored
import ConfigParser
from subprocess import check_output
path = os.path.dirname(os.path.realpath(__file__))
config_file = path + "/config.ini"
def log(level, message):
    """Print *message* to stderr with a colored status tag for *level*.

    Known levels get a colored bracket tag ('OK' -> green, 'INFO' -> blue,
    'ERROR' -> red '[ FAIL ]'); any other level prints the bare message
    to stdout.
    """
    tags = {
        'OK': ('[ OK ]', 'green'),
        'INFO': ('[ INFO ]', 'blue'),
        'ERROR': ('[ FAIL ]', 'red'),
    }
    if level in tags:
        label, color = tags[level]
        print(colored(label, color), "\t", message, file=sys.stderr)
    else:
        print(message)
def authenticate(dc):
    """Authenticate against the Rackspace identity API for region *dc*.

    Reads credentials from the module-level ``config_file``. Logs the
    outcome via :func:`log`.

    Bug fix: the success message was previously emitted unconditionally,
    even after an ``AuthenticationFailed`` exception had been caught and
    logged as an error. It now only runs when authentication succeeds.
    """
    pyrax.set_setting("identity_type", "rackspace")
    log("INFO", "Authenticating")
    try:
        pyrax.set_credential_file(config_file, region=dc)
    except pexc.AuthenticationFailed:
        log('ERROR', 'Authentication Failure')
    else:
        log('OK', 'Authentication Successful')
def get_config(group):
    """Read config.ini and return the parser if it has section *group*.

    Raises:
        Exception: if *group* is not a section of the config file.
    """
    parser = ConfigParser.ConfigParser()
    parser.read(config_file)
    if not parser.has_section(group):
        raise Exception('Unknown config section')
    return parser
def get_machine_uuid():
    """Return this server's UUID as reported by xenstore.

    Strips whitespace and drops the first 9 characters of the name
    (presumably an 'instance-' prefix — TODO confirm the xenstore format).
    """
    raw = check_output('xenstore-read name', shell=True)
    return raw.strip()[9:]
| from __future__ import print_function
import os, pyrax, sys
import pyrax.exceptions as pexc
from termcolor import colored
import ConfigParser
import subprocess
path = os.path.dirname(os.path.realpath(__file__))
config_file = path + "/config.ini"
def log(level, message):
if level == 'OK':
print(colored('[ OK ]', 'green'), "\t", message, file=sys.stderr)
elif level == 'INFO':
print(colored('[ INFO ]', 'blue'), "\t", message, file=sys.stderr)
elif level == 'ERROR':
print(colored('[ FAIL ]', 'red'), "\t", message, file=sys.stderr)
else:
print(message)
def authenticate(dc):
pyrax.set_setting("identity_type", "rackspace")
log("INFO", "Authenticating")
try:
pyrax.set_credential_file(config_file, region=dc)
except pexc.AuthenticationFailed:
log('ERROR', 'Authentication Failure')
log('OK', 'Authentication Successful')
def get_config(group):
config = ConfigParser.ConfigParser()
config.read(config_file)
if config.has_section(group):
return config
else:
raise Exception('Unknown config section')
def get_machine_uuid():
    """Return this server's UUID by querying xenstore for the domain name.

    Uses subprocess.Popen for Python 2.6 compatibility (no check_output).
    Strips whitespace and drops the first 9 characters of the output
    (presumably an 'instance-' prefix — TODO confirm the xenstore format).
    """
    proc = subprocess.Popen(['xenstore-read name'], shell=True,
                            stdout=subprocess.PIPE)
    output = proc.communicate()[0]
    return output.strip()[9:]
| Fix for python 2.6 compatibility in subprocess | Fix for python 2.6 compatibility in subprocess
| Python | apache-2.0 | boxidau/rax-autoscaler,boxidau/rax-autoscaler,eljrax/rax-autoscaler,rackerlabs/rax-autoscaler | from __future__ import print_function
import os, pyrax, sys
import pyrax.exceptions as pexc
from termcolor import colored
import ConfigParser
from subprocess import check_output
path = os.path.dirname(os.path.realpath(__file__))
config_file = path + "/config.ini"
def log(level, message):
if level == 'OK':
print(colored('[ OK ]', 'green'), "\t", message, file=sys.stderr)
elif level == 'INFO':
print(colored('[ INFO ]', 'blue'), "\t", message, file=sys.stderr)
elif level == 'ERROR':
print(colored('[ FAIL ]', 'red'), "\t", message, file=sys.stderr)
else:
print(message)
def authenticate(dc):
pyrax.set_setting("identity_type", "rackspace")
log("INFO", "Authenticating")
try:
pyrax.set_credential_file(config_file, region=dc)
except pexc.AuthenticationFailed:
log('ERROR', 'Authentication Failure')
log('OK', 'Authentication Successful')
def get_config(group):
config = ConfigParser.ConfigParser()
config.read(config_file)
if config.has_section(group):
return config
else:
raise Exception('Unknown config section')
def get_machine_uuid():
name = check_output('xenstore-read name', shell=True)
id = name.strip()
return id[9:]
Fix for python 2.6 compatibility in subprocess | from __future__ import print_function
import os, pyrax, sys
import pyrax.exceptions as pexc
from termcolor import colored
import ConfigParser
import subprocess
path = os.path.dirname(os.path.realpath(__file__))
config_file = path + "/config.ini"
def log(level, message):
if level == 'OK':
print(colored('[ OK ]', 'green'), "\t", message, file=sys.stderr)
elif level == 'INFO':
print(colored('[ INFO ]', 'blue'), "\t", message, file=sys.stderr)
elif level == 'ERROR':
print(colored('[ FAIL ]', 'red'), "\t", message, file=sys.stderr)
else:
print(message)
def authenticate(dc):
pyrax.set_setting("identity_type", "rackspace")
log("INFO", "Authenticating")
try:
pyrax.set_credential_file(config_file, region=dc)
except pexc.AuthenticationFailed:
log('ERROR', 'Authentication Failure')
log('OK', 'Authentication Successful')
def get_config(group):
config = ConfigParser.ConfigParser()
config.read(config_file)
if config.has_section(group):
return config
else:
raise Exception('Unknown config section')
def get_machine_uuid():
name = subprocess.Popen(['xenstore-read name'], shell=True, stdout=subprocess.PIPE).communicate()[0]
id = name.strip()
return id[9:]
| <commit_before>from __future__ import print_function
import os, pyrax, sys
import pyrax.exceptions as pexc
from termcolor import colored
import ConfigParser
from subprocess import check_output
path = os.path.dirname(os.path.realpath(__file__))
config_file = path + "/config.ini"
def log(level, message):
if level == 'OK':
print(colored('[ OK ]', 'green'), "\t", message, file=sys.stderr)
elif level == 'INFO':
print(colored('[ INFO ]', 'blue'), "\t", message, file=sys.stderr)
elif level == 'ERROR':
print(colored('[ FAIL ]', 'red'), "\t", message, file=sys.stderr)
else:
print(message)
def authenticate(dc):
pyrax.set_setting("identity_type", "rackspace")
log("INFO", "Authenticating")
try:
pyrax.set_credential_file(config_file, region=dc)
except pexc.AuthenticationFailed:
log('ERROR', 'Authentication Failure')
log('OK', 'Authentication Successful')
def get_config(group):
config = ConfigParser.ConfigParser()
config.read(config_file)
if config.has_section(group):
return config
else:
raise Exception('Unknown config section')
def get_machine_uuid():
name = check_output('xenstore-read name', shell=True)
id = name.strip()
return id[9:]
<commit_msg>Fix for python 2.6 compatibility in subprocess<commit_after> | from __future__ import print_function
import os, pyrax, sys
import pyrax.exceptions as pexc
from termcolor import colored
import ConfigParser
import subprocess
path = os.path.dirname(os.path.realpath(__file__))
config_file = path + "/config.ini"
def log(level, message):
if level == 'OK':
print(colored('[ OK ]', 'green'), "\t", message, file=sys.stderr)
elif level == 'INFO':
print(colored('[ INFO ]', 'blue'), "\t", message, file=sys.stderr)
elif level == 'ERROR':
print(colored('[ FAIL ]', 'red'), "\t", message, file=sys.stderr)
else:
print(message)
def authenticate(dc):
pyrax.set_setting("identity_type", "rackspace")
log("INFO", "Authenticating")
try:
pyrax.set_credential_file(config_file, region=dc)
except pexc.AuthenticationFailed:
log('ERROR', 'Authentication Failure')
log('OK', 'Authentication Successful')
def get_config(group):
config = ConfigParser.ConfigParser()
config.read(config_file)
if config.has_section(group):
return config
else:
raise Exception('Unknown config section')
def get_machine_uuid():
name = subprocess.Popen(['xenstore-read name'], shell=True, stdout=subprocess.PIPE).communicate()[0]
id = name.strip()
return id[9:]
| from __future__ import print_function
import os, pyrax, sys
import pyrax.exceptions as pexc
from termcolor import colored
import ConfigParser
from subprocess import check_output
path = os.path.dirname(os.path.realpath(__file__))
config_file = path + "/config.ini"
def log(level, message):
if level == 'OK':
print(colored('[ OK ]', 'green'), "\t", message, file=sys.stderr)
elif level == 'INFO':
print(colored('[ INFO ]', 'blue'), "\t", message, file=sys.stderr)
elif level == 'ERROR':
print(colored('[ FAIL ]', 'red'), "\t", message, file=sys.stderr)
else:
print(message)
def authenticate(dc):
pyrax.set_setting("identity_type", "rackspace")
log("INFO", "Authenticating")
try:
pyrax.set_credential_file(config_file, region=dc)
except pexc.AuthenticationFailed:
log('ERROR', 'Authentication Failure')
log('OK', 'Authentication Successful')
def get_config(group):
config = ConfigParser.ConfigParser()
config.read(config_file)
if config.has_section(group):
return config
else:
raise Exception('Unknown config section')
def get_machine_uuid():
name = check_output('xenstore-read name', shell=True)
id = name.strip()
return id[9:]
Fix for python 2.6 compatibility in subprocessfrom __future__ import print_function
import os, pyrax, sys
import pyrax.exceptions as pexc
from termcolor import colored
import ConfigParser
import subprocess
path = os.path.dirname(os.path.realpath(__file__))
config_file = path + "/config.ini"
def log(level, message):
if level == 'OK':
print(colored('[ OK ]', 'green'), "\t", message, file=sys.stderr)
elif level == 'INFO':
print(colored('[ INFO ]', 'blue'), "\t", message, file=sys.stderr)
elif level == 'ERROR':
print(colored('[ FAIL ]', 'red'), "\t", message, file=sys.stderr)
else:
print(message)
def authenticate(dc):
pyrax.set_setting("identity_type", "rackspace")
log("INFO", "Authenticating")
try:
pyrax.set_credential_file(config_file, region=dc)
except pexc.AuthenticationFailed:
log('ERROR', 'Authentication Failure')
log('OK', 'Authentication Successful')
def get_config(group):
config = ConfigParser.ConfigParser()
config.read(config_file)
if config.has_section(group):
return config
else:
raise Exception('Unknown config section')
def get_machine_uuid():
name = subprocess.Popen(['xenstore-read name'], shell=True, stdout=subprocess.PIPE).communicate()[0]
id = name.strip()
return id[9:]
| <commit_before>from __future__ import print_function
import os, pyrax, sys
import pyrax.exceptions as pexc
from termcolor import colored
import ConfigParser
from subprocess import check_output
path = os.path.dirname(os.path.realpath(__file__))
config_file = path + "/config.ini"
def log(level, message):
if level == 'OK':
print(colored('[ OK ]', 'green'), "\t", message, file=sys.stderr)
elif level == 'INFO':
print(colored('[ INFO ]', 'blue'), "\t", message, file=sys.stderr)
elif level == 'ERROR':
print(colored('[ FAIL ]', 'red'), "\t", message, file=sys.stderr)
else:
print(message)
def authenticate(dc):
pyrax.set_setting("identity_type", "rackspace")
log("INFO", "Authenticating")
try:
pyrax.set_credential_file(config_file, region=dc)
except pexc.AuthenticationFailed:
log('ERROR', 'Authentication Failure')
log('OK', 'Authentication Successful')
def get_config(group):
config = ConfigParser.ConfigParser()
config.read(config_file)
if config.has_section(group):
return config
else:
raise Exception('Unknown config section')
def get_machine_uuid():
name = check_output('xenstore-read name', shell=True)
id = name.strip()
return id[9:]
<commit_msg>Fix for python 2.6 compatibility in subprocess<commit_after>from __future__ import print_function
import os, pyrax, sys
import pyrax.exceptions as pexc
from termcolor import colored
import ConfigParser
import subprocess
path = os.path.dirname(os.path.realpath(__file__))
config_file = path + "/config.ini"
def log(level, message):
if level == 'OK':
print(colored('[ OK ]', 'green'), "\t", message, file=sys.stderr)
elif level == 'INFO':
print(colored('[ INFO ]', 'blue'), "\t", message, file=sys.stderr)
elif level == 'ERROR':
print(colored('[ FAIL ]', 'red'), "\t", message, file=sys.stderr)
else:
print(message)
def authenticate(dc):
pyrax.set_setting("identity_type", "rackspace")
log("INFO", "Authenticating")
try:
pyrax.set_credential_file(config_file, region=dc)
except pexc.AuthenticationFailed:
log('ERROR', 'Authentication Failure')
log('OK', 'Authentication Successful')
def get_config(group):
config = ConfigParser.ConfigParser()
config.read(config_file)
if config.has_section(group):
return config
else:
raise Exception('Unknown config section')
def get_machine_uuid():
name = subprocess.Popen(['xenstore-read name'], shell=True, stdout=subprocess.PIPE).communicate()[0]
id = name.strip()
return id[9:]
|
9347f3bc4a9c37c7013e8666f86cceee1a7a17f9 | genome_designer/main/startup.py | genome_designer/main/startup.py | """Actions to run at server startup.
"""
from django.db import connection
from django.db import transaction
def run():
    """Call this from manage.py or tests.

    One-time server-startup hook.
    """
    # Ensure the custom Postgres aggregate exists before any query uses it.
    _add_custom_mult_agg_function()
def _add_custom_mult_agg_function():
    """Make sure the Postgresql database has a custom function array_agg_mult.

    NOTE: Figured out the raw sql query by running psql with -E flag
    and then calling \df to list functions. The -E flag causes the internal
    raw sql of the commands to be shown.
    """
    cursor = connection.cursor()
    # Check the system catalog first so the CREATE is only issued once.
    cursor.execute(
        'SELECT p.proname '
        'FROM pg_catalog.pg_proc p '
        'WHERE p.proname=\'array_agg_mult\''
    )
    mult_agg_exists = bool(cursor.fetchone())
    if not mult_agg_exists:
        cursor.execute(
            'CREATE AGGREGATE array_agg_mult (anyarray) ('
            '    SFUNC = array_cat'
            '    ,STYPE = anyarray'
            '    ,INITCOND = \'{}\''
            ');'
        )
        # Bug fix: without an explicit commit the CREATE AGGREGATE above is
        # rolled back when the transaction ends, so the function was never
        # actually created.
        transaction.commit_unless_managed()
| """Actions to run at server startup.
"""
from django.db import connection
from django.db import transaction
def run():
"""Call this from manage.py or tests.
"""
_add_custom_mult_agg_function()
def _add_custom_mult_agg_function():
    """Make sure the Postgresql database has a custom function array_agg_mult.

    NOTE: Figured out the raw sql query by running psql with -E flag
    and then calling \df to list functions. The -E flag causes the internal
    raw sql of the commands to be shown.
    """
    cursor = connection.cursor()
    # Check the system catalog first so the CREATE is only issued once.
    cursor.execute(
            'SELECT p.proname '
            'FROM pg_catalog.pg_proc p '
            'WHERE p.proname=\'array_agg_mult\''
    )
    mult_agg_exists = bool(cursor.fetchone())
    if not mult_agg_exists:
        cursor.execute(
            'CREATE AGGREGATE array_agg_mult (anyarray) ('
            '    SFUNC = array_cat'
            '    ,STYPE = anyarray'
            '    ,INITCOND = \'{}\''
            ');'
        )
        # Explicit commit so the DDL above survives the end of the request
        # transaction (otherwise the aggregate is rolled back).
        transaction.commit_unless_managed()
| Fix bug with array_agg_mult() function not actually being created. | Fix bug with array_agg_mult() function not actually being created.
| Python | mit | churchlab/millstone,woodymit/millstone_accidental_source,churchlab/millstone,woodymit/millstone,churchlab/millstone,woodymit/millstone_accidental_source,churchlab/millstone,woodymit/millstone,woodymit/millstone_accidental_source,woodymit/millstone,woodymit/millstone,woodymit/millstone_accidental_source | """Actions to run at server startup.
"""
from django.db import connection
def run():
"""Call this from manage.py or tests.
"""
_add_custom_mult_agg_function()
def _add_custom_mult_agg_function():
"""Make sure the Postgresql database has a custom function array_agg_mult.
NOTE: Figured out the raw sql query by running psql with -E flag
and then calling \df to list functions. The -E flag causes the internal
raw sql of the commands to be shown.
"""
cursor = connection.cursor()
cursor.execute(
'SELECT p.proname '
'FROM pg_catalog.pg_proc p '
'WHERE p.proname=\'array_agg_mult\''
)
mult_agg_exists = bool(cursor.fetchone())
if not mult_agg_exists:
cursor.execute(
'CREATE AGGREGATE array_agg_mult (anyarray) ('
' SFUNC = array_cat'
' ,STYPE = anyarray'
' ,INITCOND = \'{}\''
');'
)
Fix bug with array_agg_mult() function not actually being created. | """Actions to run at server startup.
"""
from django.db import connection
from django.db import transaction
def run():
"""Call this from manage.py or tests.
"""
_add_custom_mult_agg_function()
def _add_custom_mult_agg_function():
"""Make sure the Postgresql database has a custom function array_agg_mult.
NOTE: Figured out the raw sql query by running psql with -E flag
and then calling \df to list functions. The -E flag causes the internal
raw sql of the commands to be shown.
"""
cursor = connection.cursor()
cursor.execute(
'SELECT p.proname '
'FROM pg_catalog.pg_proc p '
'WHERE p.proname=\'array_agg_mult\''
)
mult_agg_exists = bool(cursor.fetchone())
if not mult_agg_exists:
cursor.execute(
'CREATE AGGREGATE array_agg_mult (anyarray) ('
' SFUNC = array_cat'
' ,STYPE = anyarray'
' ,INITCOND = \'{}\''
');'
)
transaction.commit_unless_managed()
| <commit_before>"""Actions to run at server startup.
"""
from django.db import connection
def run():
"""Call this from manage.py or tests.
"""
_add_custom_mult_agg_function()
def _add_custom_mult_agg_function():
"""Make sure the Postgresql database has a custom function array_agg_mult.
NOTE: Figured out the raw sql query by running psql with -E flag
and then calling \df to list functions. The -E flag causes the internal
raw sql of the commands to be shown.
"""
cursor = connection.cursor()
cursor.execute(
'SELECT p.proname '
'FROM pg_catalog.pg_proc p '
'WHERE p.proname=\'array_agg_mult\''
)
mult_agg_exists = bool(cursor.fetchone())
if not mult_agg_exists:
cursor.execute(
'CREATE AGGREGATE array_agg_mult (anyarray) ('
' SFUNC = array_cat'
' ,STYPE = anyarray'
' ,INITCOND = \'{}\''
');'
)
<commit_msg>Fix bug with array_agg_mult() function not actually being created.<commit_after> | """Actions to run at server startup.
"""
from django.db import connection
from django.db import transaction
def run():
"""Call this from manage.py or tests.
"""
_add_custom_mult_agg_function()
def _add_custom_mult_agg_function():
"""Make sure the Postgresql database has a custom function array_agg_mult.
NOTE: Figured out the raw sql query by running psql with -E flag
and then calling \df to list functions. The -E flag causes the internal
raw sql of the commands to be shown.
"""
cursor = connection.cursor()
cursor.execute(
'SELECT p.proname '
'FROM pg_catalog.pg_proc p '
'WHERE p.proname=\'array_agg_mult\''
)
mult_agg_exists = bool(cursor.fetchone())
if not mult_agg_exists:
cursor.execute(
'CREATE AGGREGATE array_agg_mult (anyarray) ('
' SFUNC = array_cat'
' ,STYPE = anyarray'
' ,INITCOND = \'{}\''
');'
)
transaction.commit_unless_managed()
| """Actions to run at server startup.
"""
from django.db import connection
def run():
"""Call this from manage.py or tests.
"""
_add_custom_mult_agg_function()
def _add_custom_mult_agg_function():
"""Make sure the Postgresql database has a custom function array_agg_mult.
NOTE: Figured out the raw sql query by running psql with -E flag
and then calling \df to list functions. The -E flag causes the internal
raw sql of the commands to be shown.
"""
cursor = connection.cursor()
cursor.execute(
'SELECT p.proname '
'FROM pg_catalog.pg_proc p '
'WHERE p.proname=\'array_agg_mult\''
)
mult_agg_exists = bool(cursor.fetchone())
if not mult_agg_exists:
cursor.execute(
'CREATE AGGREGATE array_agg_mult (anyarray) ('
' SFUNC = array_cat'
' ,STYPE = anyarray'
' ,INITCOND = \'{}\''
');'
)
Fix bug with array_agg_mult() function not actually being created."""Actions to run at server startup.
"""
from django.db import connection
from django.db import transaction
def run():
"""Call this from manage.py or tests.
"""
_add_custom_mult_agg_function()
def _add_custom_mult_agg_function():
"""Make sure the Postgresql database has a custom function array_agg_mult.
NOTE: Figured out the raw sql query by running psql with -E flag
and then calling \df to list functions. The -E flag causes the internal
raw sql of the commands to be shown.
"""
cursor = connection.cursor()
cursor.execute(
'SELECT p.proname '
'FROM pg_catalog.pg_proc p '
'WHERE p.proname=\'array_agg_mult\''
)
mult_agg_exists = bool(cursor.fetchone())
if not mult_agg_exists:
cursor.execute(
'CREATE AGGREGATE array_agg_mult (anyarray) ('
' SFUNC = array_cat'
' ,STYPE = anyarray'
' ,INITCOND = \'{}\''
');'
)
transaction.commit_unless_managed()
| <commit_before>"""Actions to run at server startup.
"""
from django.db import connection
def run():
"""Call this from manage.py or tests.
"""
_add_custom_mult_agg_function()
def _add_custom_mult_agg_function():
"""Make sure the Postgresql database has a custom function array_agg_mult.
NOTE: Figured out the raw sql query by running psql with -E flag
and then calling \df to list functions. The -E flag causes the internal
raw sql of the commands to be shown.
"""
cursor = connection.cursor()
cursor.execute(
'SELECT p.proname '
'FROM pg_catalog.pg_proc p '
'WHERE p.proname=\'array_agg_mult\''
)
mult_agg_exists = bool(cursor.fetchone())
if not mult_agg_exists:
cursor.execute(
'CREATE AGGREGATE array_agg_mult (anyarray) ('
' SFUNC = array_cat'
' ,STYPE = anyarray'
' ,INITCOND = \'{}\''
');'
)
<commit_msg>Fix bug with array_agg_mult() function not actually being created.<commit_after>"""Actions to run at server startup.
"""
from django.db import connection
from django.db import transaction
def run():
    """Call this from manage.py or tests.
    """
    _add_custom_mult_agg_function()


def _add_custom_mult_agg_function():
    """Make sure the Postgresql database has a custom function array_agg_mult.
    NOTE: Figured out the raw sql query by running psql with -E flag
    and then calling \df to list functions. The -E flag causes the internal
    raw sql of the commands to be shown.
    """
    # Probe the system catalog for an aggregate named 'array_agg_mult';
    # fetchone() returns a row iff it is already defined, so the CREATE
    # below runs at most once per database.
    cursor = connection.cursor()
    cursor.execute(
        'SELECT p.proname '
        'FROM pg_catalog.pg_proc p '
        'WHERE p.proname=\'array_agg_mult\''
    )
    mult_agg_exists = bool(cursor.fetchone())
    if not mult_agg_exists:
        # Aggregate that folds many arrays into one by repeated
        # array_cat, starting from the empty-array literal '{}'.
        cursor.execute(
            'CREATE AGGREGATE array_agg_mult (anyarray) ('
            ' SFUNC = array_cat'
            ' ,STYPE = anyarray'
            ' ,INITCOND = \'{}\''
            ');'
        )
    # Commit explicitly so the DDL is persisted even when Django is not
    # managing the transaction; without this the aggregate was discarded.
    transaction.commit_unless_managed()
|
3e2b06aa73488323600a5942588b556f2d78c2af | persons/tests.py | persons/tests.py | """
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    """Placeholder sanity test generated by Django's startapp template."""

    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        # failUnlessEqual() is a long-deprecated alias that was removed in
        # Python 3.12; assertEqual() is the supported spelling.
        self.assertEqual(1 + 1, 2)
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
| from datetime import datetime
from django.test import TestCase
from unittest import skip
from .models import Person
from mks.models import Member
class PersonTests(TestCase):
@skip
def test_member_person_sync(self):
"""
Test member/person sync on member save()
"""
birth = datetime.now()
defaults = {
'name': 'The MK',
'date_of_birth': birth,
'family_status': 'XYZ',
'place_of_residence': 'AAA',
'phone': '000-1234',
'fax': '999-8765',
'gender': 'F',
}
mk = Member.objects.create(**defaults)
self.assertGreater(mk.person.count(), 0)
person = Person.objects.filter(mk=mk)[0]
for field in defaults:
self.assertEqual(getattr(mk, field), getattr(person, field))
mk.delete()
| Test case for Member/Person sync | Test case for Member/Person sync
Skipped for now, to help with #4049
| Python | bsd-3-clause | navotsil/Open-Knesset,ofri/Open-Knesset,Shrulik/Open-Knesset,DanaOshri/Open-Knesset,jspan/Open-Knesset,navotsil/Open-Knesset,MeirKriheli/Open-Knesset,jspan/Open-Knesset,Shrulik/Open-Knesset,ofri/Open-Knesset,habeanf/Open-Knesset,jspan/Open-Knesset,noamelf/Open-Knesset,otadmor/Open-Knesset,alonisser/Open-Knesset,navotsil/Open-Knesset,alonisser/Open-Knesset,habeanf/Open-Knesset,OriHoch/Open-Knesset,daonb/Open-Knesset,navotsil/Open-Knesset,Shrulik/Open-Knesset,daonb/Open-Knesset,alonisser/Open-Knesset,daonb/Open-Knesset,noamelf/Open-Knesset,ofri/Open-Knesset,OriHoch/Open-Knesset,DanaOshri/Open-Knesset,MeirKriheli/Open-Knesset,jspan/Open-Knesset,Shrulik/Open-Knesset,noamelf/Open-Knesset,noamelf/Open-Knesset,OriHoch/Open-Knesset,DanaOshri/Open-Knesset,OriHoch/Open-Knesset,habeanf/Open-Knesset,daonb/Open-Knesset,habeanf/Open-Knesset,alonisser/Open-Knesset,otadmor/Open-Knesset,MeirKriheli/Open-Knesset,ofri/Open-Knesset,otadmor/Open-Knesset,DanaOshri/Open-Knesset,MeirKriheli/Open-Knesset,otadmor/Open-Knesset | """
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.failUnlessEqual(1 + 1, 2)
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
Test case for Member/Person sync
Skipped for now, to help with #4049 | from datetime import datetime
from django.test import TestCase
from unittest import skip
from .models import Person
from mks.models import Member
class PersonTests(TestCase):
    """Tests for the Member -> Person synchronisation."""

    # unittest.skip is a decorator factory that takes a reason string; a
    # bare ``@skip`` loses the reason (and on older unittest versions it
    # replaces the test method with the inner decorator, so the test is
    # never reported as skipped).  Record why it is disabled.
    @skip("Skipped for now, to help with #4049")
    def test_member_person_sync(self):
        """
        Test member/person sync on member save()
        """
        # NOTE(review): datetime.now() is naive; if Person stores an aware
        # datetime this comparison may need a timezone — TODO confirm.
        birth = datetime.now()
        defaults = {
            'name': 'The MK',
            'date_of_birth': birth,
            'family_status': 'XYZ',
            'place_of_residence': 'AAA',
            'phone': '000-1234',
            'fax': '999-8765',
            'gender': 'F',
        }
        # Creating a Member should also create at least one linked Person.
        mk = Member.objects.create(**defaults)
        self.assertGreater(mk.person.count(), 0)
        person = Person.objects.filter(mk=mk)[0]
        # Every field supplied to the Member must have been copied over.
        for field in defaults:
            self.assertEqual(getattr(mk, field), getattr(person, field))
        mk.delete()
| <commit_before>"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.failUnlessEqual(1 + 1, 2)
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
<commit_msg>Test case for Member/Person sync
Skipped for now, to help with #4049<commit_after> | from datetime import datetime
from django.test import TestCase
from unittest import skip
from .models import Person
from mks.models import Member
class PersonTests(TestCase):
@skip
def test_member_person_sync(self):
"""
Test member/person sync on member save()
"""
birth = datetime.now()
defaults = {
'name': 'The MK',
'date_of_birth': birth,
'family_status': 'XYZ',
'place_of_residence': 'AAA',
'phone': '000-1234',
'fax': '999-8765',
'gender': 'F',
}
mk = Member.objects.create(**defaults)
self.assertGreater(mk.person.count(), 0)
person = Person.objects.filter(mk=mk)[0]
for field in defaults:
self.assertEqual(getattr(mk, field), getattr(person, field))
mk.delete()
| """
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.failUnlessEqual(1 + 1, 2)
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
Test case for Member/Person sync
Skipped for now, to help with #4049from datetime import datetime
from django.test import TestCase
from unittest import skip
from .models import Person
from mks.models import Member
class PersonTests(TestCase):
@skip
def test_member_person_sync(self):
"""
Test member/person sync on member save()
"""
birth = datetime.now()
defaults = {
'name': 'The MK',
'date_of_birth': birth,
'family_status': 'XYZ',
'place_of_residence': 'AAA',
'phone': '000-1234',
'fax': '999-8765',
'gender': 'F',
}
mk = Member.objects.create(**defaults)
self.assertGreater(mk.person.count(), 0)
person = Person.objects.filter(mk=mk)[0]
for field in defaults:
self.assertEqual(getattr(mk, field), getattr(person, field))
mk.delete()
| <commit_before>"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
def test_basic_addition(self):
"""
Tests that 1 + 1 always equals 2.
"""
self.failUnlessEqual(1 + 1, 2)
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
<commit_msg>Test case for Member/Person sync
Skipped for now, to help with #4049<commit_after>from datetime import datetime
from django.test import TestCase
from unittest import skip
from .models import Person
from mks.models import Member
class PersonTests(TestCase):
@skip
def test_member_person_sync(self):
"""
Test member/person sync on member save()
"""
birth = datetime.now()
defaults = {
'name': 'The MK',
'date_of_birth': birth,
'family_status': 'XYZ',
'place_of_residence': 'AAA',
'phone': '000-1234',
'fax': '999-8765',
'gender': 'F',
}
mk = Member.objects.create(**defaults)
self.assertGreater(mk.person.count(), 0)
person = Person.objects.filter(mk=mk)[0]
for field in defaults:
self.assertEqual(getattr(mk, field), getattr(person, field))
mk.delete()
|
8377f3e61441a7f465feefba905cd3c82586e1a5 | geomdl/visualization/__init__.py | geomdl/visualization/__init__.py | """ NURBS-Python Visualization Component
.. moduleauthor:: Onur Rauf Bingol <orbingol@gmail.com>
"""
| """ NURBS-Python Visualization Component
.. moduleauthor:: Onur Rauf Bingol <orbingol@gmail.com>
"""
__author__ = "Onur Rauf Bingol"
__version__ = "1.0.0"
__license__ = "MIT"
| Add metadata to visualization module | Add metadata to visualization module
| Python | mit | orbingol/NURBS-Python,orbingol/NURBS-Python | """ NURBS-Python Visualization Component
.. moduleauthor:: Onur Rauf Bingol <orbingol@gmail.com>
"""
Add metadata to visualization module | """ NURBS-Python Visualization Component
.. moduleauthor:: Onur Rauf Bingol <orbingol@gmail.com>
"""
__author__ = "Onur Rauf Bingol"
__version__ = "1.0.0"
__license__ = "MIT"
| <commit_before>""" NURBS-Python Visualization Component
.. moduleauthor:: Onur Rauf Bingol <orbingol@gmail.com>
"""
<commit_msg>Add metadata to visualization module<commit_after> | """ NURBS-Python Visualization Component
.. moduleauthor:: Onur Rauf Bingol <orbingol@gmail.com>
"""
__author__ = "Onur Rauf Bingol"
__version__ = "1.0.0"
__license__ = "MIT"
| """ NURBS-Python Visualization Component
.. moduleauthor:: Onur Rauf Bingol <orbingol@gmail.com>
"""
Add metadata to visualization module""" NURBS-Python Visualization Component
.. moduleauthor:: Onur Rauf Bingol <orbingol@gmail.com>
"""
__author__ = "Onur Rauf Bingol"
__version__ = "1.0.0"
__license__ = "MIT"
| <commit_before>""" NURBS-Python Visualization Component
.. moduleauthor:: Onur Rauf Bingol <orbingol@gmail.com>
"""
<commit_msg>Add metadata to visualization module<commit_after>""" NURBS-Python Visualization Component
.. moduleauthor:: Onur Rauf Bingol <orbingol@gmail.com>
"""
__author__ = "Onur Rauf Bingol"
__version__ = "1.0.0"
__license__ = "MIT"
|
3358d47cc9bdad5abaa1e8a9358d49539e6256b1 | scuevals_api/resources/official_user_types.py | scuevals_api/resources/official_user_types.py | from flask_jwt_extended import current_user
from flask_restful import Resource
from marshmallow import fields, Schema
from scuevals_api.auth import auth_required
from scuevals_api.models import Permission, OfficialUserType, db
from scuevals_api.utils import use_args
class OfficialUserTypeSchema(Schema):
email = fields.Str(required=True)
type = fields.Str(required=True)
class Meta:
strict = True
class OfficialUserTypeResource(Resource):
@auth_required(Permission.UpdateOfficialUserTypes)
@use_args({'official_user_types': fields.List(fields.Nested(OfficialUserTypeSchema), required=True)},
locations=('json',))
def post(self, args):
for out in args['official_user_types']:
db.session.merge(OfficialUserType(
email=out['email'],
type=out['type'],
university_id=current_user.university_id
))
db.session.commit()
return {'result': 'success', 'updated_count': len(args['official_user_types'])}, 200
| from flask_jwt_extended import current_user
from flask_restful import Resource
from marshmallow import fields, Schema
from scuevals_api.auth import auth_required
from scuevals_api.models import Permission, OfficialUserType, db
from scuevals_api.utils import use_args
class OfficialUserTypeSchema(Schema):
email = fields.Str(required=True)
type = fields.Str(required=True)
class Meta:
strict = True
class OfficialUserTypeResource(Resource):
@auth_required(Permission.UpdateOfficialUserTypes)
@use_args({'official_user_types': fields.List(fields.Nested(OfficialUserTypeSchema), required=True)},
locations=('json',))
def post(self, args):
for out in args['official_user_types']:
db.session.merge(OfficialUserType(
email=out['email'].lower(),
type=out['type'],
university_id=current_user.university_id
))
db.session.commit()
return {'result': 'success', 'updated_count': len(args['official_user_types'])}, 200
| Make sure official user type emails are lower case | Make sure official user type emails are lower case
| Python | agpl-3.0 | SCUEvals/scuevals-api,SCUEvals/scuevals-api | from flask_jwt_extended import current_user
from flask_restful import Resource
from marshmallow import fields, Schema
from scuevals_api.auth import auth_required
from scuevals_api.models import Permission, OfficialUserType, db
from scuevals_api.utils import use_args
class OfficialUserTypeSchema(Schema):
email = fields.Str(required=True)
type = fields.Str(required=True)
class Meta:
strict = True
class OfficialUserTypeResource(Resource):
@auth_required(Permission.UpdateOfficialUserTypes)
@use_args({'official_user_types': fields.List(fields.Nested(OfficialUserTypeSchema), required=True)},
locations=('json',))
def post(self, args):
for out in args['official_user_types']:
db.session.merge(OfficialUserType(
email=out['email'],
type=out['type'],
university_id=current_user.university_id
))
db.session.commit()
return {'result': 'success', 'updated_count': len(args['official_user_types'])}, 200
Make sure official user type emails are lower case | from flask_jwt_extended import current_user
from flask_restful import Resource
from marshmallow import fields, Schema
from scuevals_api.auth import auth_required
from scuevals_api.models import Permission, OfficialUserType, db
from scuevals_api.utils import use_args
class OfficialUserTypeSchema(Schema):
email = fields.Str(required=True)
type = fields.Str(required=True)
class Meta:
strict = True
class OfficialUserTypeResource(Resource):
@auth_required(Permission.UpdateOfficialUserTypes)
@use_args({'official_user_types': fields.List(fields.Nested(OfficialUserTypeSchema), required=True)},
locations=('json',))
def post(self, args):
for out in args['official_user_types']:
db.session.merge(OfficialUserType(
email=out['email'].lower(),
type=out['type'],
university_id=current_user.university_id
))
db.session.commit()
return {'result': 'success', 'updated_count': len(args['official_user_types'])}, 200
| <commit_before>from flask_jwt_extended import current_user
from flask_restful import Resource
from marshmallow import fields, Schema
from scuevals_api.auth import auth_required
from scuevals_api.models import Permission, OfficialUserType, db
from scuevals_api.utils import use_args
class OfficialUserTypeSchema(Schema):
email = fields.Str(required=True)
type = fields.Str(required=True)
class Meta:
strict = True
class OfficialUserTypeResource(Resource):
@auth_required(Permission.UpdateOfficialUserTypes)
@use_args({'official_user_types': fields.List(fields.Nested(OfficialUserTypeSchema), required=True)},
locations=('json',))
def post(self, args):
for out in args['official_user_types']:
db.session.merge(OfficialUserType(
email=out['email'],
type=out['type'],
university_id=current_user.university_id
))
db.session.commit()
return {'result': 'success', 'updated_count': len(args['official_user_types'])}, 200
<commit_msg>Make sure official user type emails are lower case<commit_after> | from flask_jwt_extended import current_user
from flask_restful import Resource
from marshmallow import fields, Schema
from scuevals_api.auth import auth_required
from scuevals_api.models import Permission, OfficialUserType, db
from scuevals_api.utils import use_args
class OfficialUserTypeSchema(Schema):
email = fields.Str(required=True)
type = fields.Str(required=True)
class Meta:
strict = True
class OfficialUserTypeResource(Resource):
@auth_required(Permission.UpdateOfficialUserTypes)
@use_args({'official_user_types': fields.List(fields.Nested(OfficialUserTypeSchema), required=True)},
locations=('json',))
def post(self, args):
for out in args['official_user_types']:
db.session.merge(OfficialUserType(
email=out['email'].lower(),
type=out['type'],
university_id=current_user.university_id
))
db.session.commit()
return {'result': 'success', 'updated_count': len(args['official_user_types'])}, 200
| from flask_jwt_extended import current_user
from flask_restful import Resource
from marshmallow import fields, Schema
from scuevals_api.auth import auth_required
from scuevals_api.models import Permission, OfficialUserType, db
from scuevals_api.utils import use_args
class OfficialUserTypeSchema(Schema):
email = fields.Str(required=True)
type = fields.Str(required=True)
class Meta:
strict = True
class OfficialUserTypeResource(Resource):
@auth_required(Permission.UpdateOfficialUserTypes)
@use_args({'official_user_types': fields.List(fields.Nested(OfficialUserTypeSchema), required=True)},
locations=('json',))
def post(self, args):
for out in args['official_user_types']:
db.session.merge(OfficialUserType(
email=out['email'],
type=out['type'],
university_id=current_user.university_id
))
db.session.commit()
return {'result': 'success', 'updated_count': len(args['official_user_types'])}, 200
Make sure official user type emails are lower casefrom flask_jwt_extended import current_user
from flask_restful import Resource
from marshmallow import fields, Schema
from scuevals_api.auth import auth_required
from scuevals_api.models import Permission, OfficialUserType, db
from scuevals_api.utils import use_args
class OfficialUserTypeSchema(Schema):
email = fields.Str(required=True)
type = fields.Str(required=True)
class Meta:
strict = True
class OfficialUserTypeResource(Resource):
@auth_required(Permission.UpdateOfficialUserTypes)
@use_args({'official_user_types': fields.List(fields.Nested(OfficialUserTypeSchema), required=True)},
locations=('json',))
def post(self, args):
for out in args['official_user_types']:
db.session.merge(OfficialUserType(
email=out['email'].lower(),
type=out['type'],
university_id=current_user.university_id
))
db.session.commit()
return {'result': 'success', 'updated_count': len(args['official_user_types'])}, 200
| <commit_before>from flask_jwt_extended import current_user
from flask_restful import Resource
from marshmallow import fields, Schema
from scuevals_api.auth import auth_required
from scuevals_api.models import Permission, OfficialUserType, db
from scuevals_api.utils import use_args
class OfficialUserTypeSchema(Schema):
email = fields.Str(required=True)
type = fields.Str(required=True)
class Meta:
strict = True
class OfficialUserTypeResource(Resource):
@auth_required(Permission.UpdateOfficialUserTypes)
@use_args({'official_user_types': fields.List(fields.Nested(OfficialUserTypeSchema), required=True)},
locations=('json',))
def post(self, args):
for out in args['official_user_types']:
db.session.merge(OfficialUserType(
email=out['email'],
type=out['type'],
university_id=current_user.university_id
))
db.session.commit()
return {'result': 'success', 'updated_count': len(args['official_user_types'])}, 200
<commit_msg>Make sure official user type emails are lower case<commit_after>from flask_jwt_extended import current_user
from flask_restful import Resource
from marshmallow import fields, Schema
from scuevals_api.auth import auth_required
from scuevals_api.models import Permission, OfficialUserType, db
from scuevals_api.utils import use_args
class OfficialUserTypeSchema(Schema):
email = fields.Str(required=True)
type = fields.Str(required=True)
class Meta:
strict = True
class OfficialUserTypeResource(Resource):
@auth_required(Permission.UpdateOfficialUserTypes)
@use_args({'official_user_types': fields.List(fields.Nested(OfficialUserTypeSchema), required=True)},
locations=('json',))
def post(self, args):
for out in args['official_user_types']:
db.session.merge(OfficialUserType(
email=out['email'].lower(),
type=out['type'],
university_id=current_user.university_id
))
db.session.commit()
return {'result': 'success', 'updated_count': len(args['official_user_types'])}, 200
|
2fc3ee4edc4b7a1842fca369620c790244000f11 | flask_apidoc/commands.py | flask_apidoc/commands.py | # Copyright 2015 Vinicius Chiele. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import flask_script
except ImportError:
raise ImportError('Missing flask-script library (pip install flask-script)')
import subprocess
from flask_script import Command
class GenerateApiDoc(Command):
def __init__(self, input_path=None, output_path=None, template_path=None):
super().__init__()
self.input_path = input_path
self.output_path = output_path
self.template_path = template_path
def run(self):
cmd = ['apidoc']
if self.input_path:
cmd.append('--input')
cmd.append(self.input_path)
if self.output_path:
cmd.append('--output')
cmd.append(self.output_path)
if self.template_path:
cmd.append('--template')
cmd.append(self.template_path)
return subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
| # Copyright 2015 Vinicius Chiele. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import flask_script
except ImportError:
raise ImportError('Missing flask-script library (pip install flask-script)')
import subprocess
from flask_script import Command
class GenerateApiDoc(Command):
def __init__(self, input_path=None, output_path=None, template_path=None):
super().__init__()
self.input_path = input_path
self.output_path = output_path or 'static/docs'
self.template_path = template_path
def run(self):
cmd = ['apidoc']
if self.input_path:
cmd.append('--input')
cmd.append(self.input_path)
if self.output_path:
cmd.append('--output')
cmd.append(self.output_path)
if self.template_path:
cmd.append('--template')
cmd.append(self.template_path)
return subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
| Set static/docs as the default folder to generate the apidoc's files | Set static/docs as the default folder to generate the apidoc's files
| Python | mit | viniciuschiele/flask-apidoc | # Copyright 2015 Vinicius Chiele. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import flask_script
except ImportError:
raise ImportError('Missing flask-script library (pip install flask-script)')
import subprocess
from flask_script import Command
class GenerateApiDoc(Command):
def __init__(self, input_path=None, output_path=None, template_path=None):
super().__init__()
self.input_path = input_path
self.output_path = output_path
self.template_path = template_path
def run(self):
cmd = ['apidoc']
if self.input_path:
cmd.append('--input')
cmd.append(self.input_path)
if self.output_path:
cmd.append('--output')
cmd.append(self.output_path)
if self.template_path:
cmd.append('--template')
cmd.append(self.template_path)
return subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
Set static/docs as the default folder to generate the apidoc's files | # Copyright 2015 Vinicius Chiele. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import flask_script
except ImportError:
raise ImportError('Missing flask-script library (pip install flask-script)')
import subprocess
from flask_script import Command
class GenerateApiDoc(Command):
def __init__(self, input_path=None, output_path=None, template_path=None):
super().__init__()
self.input_path = input_path
self.output_path = output_path or 'static/docs'
self.template_path = template_path
def run(self):
cmd = ['apidoc']
if self.input_path:
cmd.append('--input')
cmd.append(self.input_path)
if self.output_path:
cmd.append('--output')
cmd.append(self.output_path)
if self.template_path:
cmd.append('--template')
cmd.append(self.template_path)
return subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
| <commit_before># Copyright 2015 Vinicius Chiele. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import flask_script
except ImportError:
raise ImportError('Missing flask-script library (pip install flask-script)')
import subprocess
from flask_script import Command
class GenerateApiDoc(Command):
def __init__(self, input_path=None, output_path=None, template_path=None):
super().__init__()
self.input_path = input_path
self.output_path = output_path
self.template_path = template_path
def run(self):
cmd = ['apidoc']
if self.input_path:
cmd.append('--input')
cmd.append(self.input_path)
if self.output_path:
cmd.append('--output')
cmd.append(self.output_path)
if self.template_path:
cmd.append('--template')
cmd.append(self.template_path)
return subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
<commit_msg>Set static/docs as the default folder to generate the apidoc's files<commit_after> | # Copyright 2015 Vinicius Chiele. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import flask_script
except ImportError:
raise ImportError('Missing flask-script library (pip install flask-script)')
import subprocess
from flask_script import Command
class GenerateApiDoc(Command):
    """Flask-Script command that shells out to the ``apidoc`` CLI.

    Paths may be given at construction time; ``output_path`` falls back
    to ``static/docs`` when not provided.
    """

    def __init__(self, input_path=None, output_path=None, template_path=None):
        super().__init__()
        self.input_path = input_path
        self.output_path = output_path or 'static/docs'
        self.template_path = template_path

    def run(self):
        """Invoke ``apidoc``, passing only the flags that were configured.

        Returns the subprocess exit code; all standard streams are piped
        so the command runs silently.
        """
        cmd = ['apidoc']
        # Append each optional flag/value pair in a fixed order, skipping
        # any path that was left unset (or empty).
        for flag, value in (('--input', self.input_path),
                            ('--output', self.output_path),
                            ('--template', self.template_path)):
            if value:
                cmd.extend((flag, value))
        return subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
| # Copyright 2015 Vinicius Chiele. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import flask_script
except ImportError:
raise ImportError('Missing flask-script library (pip install flask-script)')
import subprocess
from flask_script import Command
class GenerateApiDoc(Command):
def __init__(self, input_path=None, output_path=None, template_path=None):
super().__init__()
self.input_path = input_path
self.output_path = output_path
self.template_path = template_path
def run(self):
cmd = ['apidoc']
if self.input_path:
cmd.append('--input')
cmd.append(self.input_path)
if self.output_path:
cmd.append('--output')
cmd.append(self.output_path)
if self.template_path:
cmd.append('--template')
cmd.append(self.template_path)
return subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
Set static/docs as the default folder to generate the apidoc's files# Copyright 2015 Vinicius Chiele. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import flask_script
except ImportError:
raise ImportError('Missing flask-script library (pip install flask-script)')
import subprocess
from flask_script import Command
class GenerateApiDoc(Command):
def __init__(self, input_path=None, output_path=None, template_path=None):
super().__init__()
self.input_path = input_path
self.output_path = output_path or 'static/docs'
self.template_path = template_path
def run(self):
cmd = ['apidoc']
if self.input_path:
cmd.append('--input')
cmd.append(self.input_path)
if self.output_path:
cmd.append('--output')
cmd.append(self.output_path)
if self.template_path:
cmd.append('--template')
cmd.append(self.template_path)
return subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
| <commit_before># Copyright 2015 Vinicius Chiele. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import flask_script
except ImportError:
raise ImportError('Missing flask-script library (pip install flask-script)')
import subprocess
from flask_script import Command
class GenerateApiDoc(Command):
def __init__(self, input_path=None, output_path=None, template_path=None):
super().__init__()
self.input_path = input_path
self.output_path = output_path
self.template_path = template_path
def run(self):
cmd = ['apidoc']
if self.input_path:
cmd.append('--input')
cmd.append(self.input_path)
if self.output_path:
cmd.append('--output')
cmd.append(self.output_path)
if self.template_path:
cmd.append('--template')
cmd.append(self.template_path)
return subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
<commit_msg>Set static/docs as the default folder to generate the apidoc's files<commit_after># Copyright 2015 Vinicius Chiele. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import flask_script
except ImportError:
raise ImportError('Missing flask-script library (pip install flask-script)')
import subprocess
from flask_script import Command
class GenerateApiDoc(Command):
def __init__(self, input_path=None, output_path=None, template_path=None):
super().__init__()
self.input_path = input_path
self.output_path = output_path or 'static/docs'
self.template_path = template_path
def run(self):
cmd = ['apidoc']
if self.input_path:
cmd.append('--input')
cmd.append(self.input_path)
if self.output_path:
cmd.append('--output')
cmd.append(self.output_path)
if self.template_path:
cmd.append('--template')
cmd.append(self.template_path)
return subprocess.call(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE)
|
98771f6a7a96ccedf56e3619433e2451d5c3251f | exception/test1.py | exception/test1.py | #!/usr/local/bin/python
class MuffledCalculator:
muffled=False
def calc(self,expr):
try:
return eval(expr)
except ZeroDivisionError:
if self.muffled:
print "Can't divide zero"
else:
raise
a=MuffledCalculator()
print a.calc('2/1')
#print a.calc('1/0')
a.muffled=True
print a.calc('1/0')
| #!/usr/local/bin/python
#class MuffledCalculator:
# muffled=False
# def calc(self,expr):
# try:
# return eval(expr)
# except (ZeroDivisionError,TypeError):
# if self.muffled:
# print "There are errors."
# else:
# raise
#a=MuffledCalculator()
#print a.calc('2/1')
##print a.calc('2/"dsf"')
##print a.calc('1/0')
#a.muffled=True
#print a.calc('1/0')
#class Test:
# def init(self):
# try:
# x=1
# y='sg'
# print x/y
# except (ZeroDivisionError,TypeError),e:
# print e
#a=Test()
#a.init()
#class Test1:
# def init(self):
# try:
# x=1
# y='sg'
# print x/y
# except Exception,e:
# print e
#a=Test1()
#a.init()
#try:
# print 'Go!'
#except Exception,e:
# print e
#else:
# print 'Planned.'
x=1
try:
x=2
print x
x=1/0
except Exception,e:
x=3
print x
print e
finally:
x=4
print x
| Use finally and so on. | Use finally and so on.
| Python | apache-2.0 | Vayne-Lover/Python | #!/usr/local/bin/python
class MuffledCalculator:
muffled=False
def calc(self,expr):
try:
return eval(expr)
except ZeroDivisionError:
if self.muffled:
print "Can't divide zero"
else:
raise
a=MuffledCalculator()
print a.calc('2/1')
#print a.calc('1/0')
a.muffled=True
print a.calc('1/0')
Use finally and so on. | #!/usr/local/bin/python
#class MuffledCalculator:
# muffled=False
# def calc(self,expr):
# try:
# return eval(expr)
# except (ZeroDivisionError,TypeError):
# if self.muffled:
# print "There are errors."
# else:
# raise
#a=MuffledCalculator()
#print a.calc('2/1')
##print a.calc('2/"dsf"')
##print a.calc('1/0')
#a.muffled=True
#print a.calc('1/0')
#class Test:
# def init(self):
# try:
# x=1
# y='sg'
# print x/y
# except (ZeroDivisionError,TypeError),e:
# print e
#a=Test()
#a.init()
#class Test1:
# def init(self):
# try:
# x=1
# y='sg'
# print x/y
# except Exception,e:
# print e
#a=Test1()
#a.init()
#try:
# print 'Go!'
#except Exception,e:
# print e
#else:
# print 'Planned.'
x=1
try:
x=2
print x
x=1/0
except Exception,e:
x=3
print x
print e
finally:
x=4
print x
| <commit_before>#!/usr/local/bin/python
class MuffledCalculator:
muffled=False
def calc(self,expr):
try:
return eval(expr)
except ZeroDivisionError:
if self.muffled:
print "Can't divide zero"
else:
raise
a=MuffledCalculator()
print a.calc('2/1')
#print a.calc('1/0')
a.muffled=True
print a.calc('1/0')
<commit_msg>Use finally and so on.<commit_after> | #!/usr/local/bin/python
#class MuffledCalculator:
# muffled=False
# def calc(self,expr):
# try:
# return eval(expr)
# except (ZeroDivisionError,TypeError):
# if self.muffled:
# print "There are errors."
# else:
# raise
#a=MuffledCalculator()
#print a.calc('2/1')
##print a.calc('2/"dsf"')
##print a.calc('1/0')
#a.muffled=True
#print a.calc('1/0')
#class Test:
# def init(self):
# try:
# x=1
# y='sg'
# print x/y
# except (ZeroDivisionError,TypeError),e:
# print e
#a=Test()
#a.init()
#class Test1:
# def init(self):
# try:
# x=1
# y='sg'
# print x/y
# except Exception,e:
# print e
#a=Test1()
#a.init()
#try:
# print 'Go!'
#except Exception,e:
# print e
#else:
# print 'Planned.'
x=1
try:
x=2
print x
x=1/0
except Exception,e:
x=3
print x
print e
finally:
x=4
print x
| #!/usr/local/bin/python
class MuffledCalculator:
muffled=False
def calc(self,expr):
try:
return eval(expr)
except ZeroDivisionError:
if self.muffled:
print "Can't divide zero"
else:
raise
a=MuffledCalculator()
print a.calc('2/1')
#print a.calc('1/0')
a.muffled=True
print a.calc('1/0')
Use finally and so on.#!/usr/local/bin/python
#class MuffledCalculator:
# muffled=False
# def calc(self,expr):
# try:
# return eval(expr)
# except (ZeroDivisionError,TypeError):
# if self.muffled:
# print "There are errors."
# else:
# raise
#a=MuffledCalculator()
#print a.calc('2/1')
##print a.calc('2/"dsf"')
##print a.calc('1/0')
#a.muffled=True
#print a.calc('1/0')
#class Test:
# def init(self):
# try:
# x=1
# y='sg'
# print x/y
# except (ZeroDivisionError,TypeError),e:
# print e
#a=Test()
#a.init()
#class Test1:
# def init(self):
# try:
# x=1
# y='sg'
# print x/y
# except Exception,e:
# print e
#a=Test1()
#a.init()
#try:
# print 'Go!'
#except Exception,e:
# print e
#else:
# print 'Planned.'
x=1
try:
x=2
print x
x=1/0
except Exception,e:
x=3
print x
print e
finally:
x=4
print x
| <commit_before>#!/usr/local/bin/python
class MuffledCalculator:
muffled=False
def calc(self,expr):
try:
return eval(expr)
except ZeroDivisionError:
if self.muffled:
print "Can't divide zero"
else:
raise
a=MuffledCalculator()
print a.calc('2/1')
#print a.calc('1/0')
a.muffled=True
print a.calc('1/0')
<commit_msg>Use finally and so on.<commit_after>#!/usr/local/bin/python
#class MuffledCalculator:
# muffled=False
# def calc(self,expr):
# try:
# return eval(expr)
# except (ZeroDivisionError,TypeError):
# if self.muffled:
# print "There are errors."
# else:
# raise
#a=MuffledCalculator()
#print a.calc('2/1')
##print a.calc('2/"dsf"')
##print a.calc('1/0')
#a.muffled=True
#print a.calc('1/0')
#class Test:
# def init(self):
# try:
# x=1
# y='sg'
# print x/y
# except (ZeroDivisionError,TypeError),e:
# print e
#a=Test()
#a.init()
#class Test1:
# def init(self):
# try:
# x=1
# y='sg'
# print x/y
# except Exception,e:
# print e
#a=Test1()
#a.init()
#try:
# print 'Go!'
#except Exception,e:
# print e
#else:
# print 'Planned.'
x=1
try:
x=2
print x
x=1/0
except Exception,e:
x=3
print x
print e
finally:
x=4
print x
|
661d42468359836c0ce9ee4e267241a4aaf7a021 | lot/views.py | lot/views.py | import json
from django.conf import settings
from django.http import HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import get_object_or_404, resolve_url
from django.utils.http import is_safe_url
from django.views.generic import View
from django.contrib.auth import authenticate, login
from .models import LOT
class LOTLogin(View):
def get(self, request, uuid):
lot = get_object_or_404(LOT, uuid=uuid)
if not lot.verify():
lot.delete()
return HttpResponseNotFound()
user = authenticate(lot_uuid=uuid)
login(request, user)
try:
session_data = json.loads(lot.session_data)
request.session.update(session_data)
except Exception:
# If not correctly serialized not set the session_data
pass
if lot.is_one_time():
lot.delete()
redirect_to = request.GET.get('next')
if lot.next_url:
redirect_to = resolve_url(lot.next_url)
if not is_safe_url(url=redirect_to, host=request.get_host()):
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
return HttpResponseRedirect(redirect_to)
| import json
from django.conf import settings
from django.http import HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import get_object_or_404, resolve_url
from django.utils.http import is_safe_url
from django.views.generic import View
from django.contrib.auth import authenticate, login
from .models import LOT
class LOTLogin(View):
def get(self, request, uuid):
lot = get_object_or_404(LOT, uuid=uuid)
if not lot.verify():
lot.delete()
return HttpResponseNotFound()
user = authenticate(request, lot_uuid=uuid)
if user is not None:
login(request, user)
else:
raise RuntimeError('The authentication backend did not return a user')
try:
session_data = json.loads(lot.session_data)
request.session.update(session_data)
except Exception:
# If not correctly serialized not set the session_data
pass
if lot.is_one_time():
lot.delete()
redirect_to = request.GET.get('next')
if lot.next_url:
redirect_to = resolve_url(lot.next_url)
if not is_safe_url(url=redirect_to, host=request.get_host()):
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
return HttpResponseRedirect(redirect_to)
| Check for an empty user | Check for an empty user | Python | bsd-3-clause | ABASystems/django-lot | import json
from django.conf import settings
from django.http import HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import get_object_or_404, resolve_url
from django.utils.http import is_safe_url
from django.views.generic import View
from django.contrib.auth import authenticate, login
from .models import LOT
class LOTLogin(View):
def get(self, request, uuid):
lot = get_object_or_404(LOT, uuid=uuid)
if not lot.verify():
lot.delete()
return HttpResponseNotFound()
user = authenticate(lot_uuid=uuid)
login(request, user)
try:
session_data = json.loads(lot.session_data)
request.session.update(session_data)
except Exception:
# If not correctly serialized not set the session_data
pass
if lot.is_one_time():
lot.delete()
redirect_to = request.GET.get('next')
if lot.next_url:
redirect_to = resolve_url(lot.next_url)
if not is_safe_url(url=redirect_to, host=request.get_host()):
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
return HttpResponseRedirect(redirect_to)
Check for an empty user | import json
from django.conf import settings
from django.http import HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import get_object_or_404, resolve_url
from django.utils.http import is_safe_url
from django.views.generic import View
from django.contrib.auth import authenticate, login
from .models import LOT
class LOTLogin(View):
def get(self, request, uuid):
lot = get_object_or_404(LOT, uuid=uuid)
if not lot.verify():
lot.delete()
return HttpResponseNotFound()
user = authenticate(request, lot_uuid=uuid)
if user is not None:
login(request, user)
else:
raise RuntimeError('The authentication backend did not return a user')
try:
session_data = json.loads(lot.session_data)
request.session.update(session_data)
except Exception:
# If not correctly serialized not set the session_data
pass
if lot.is_one_time():
lot.delete()
redirect_to = request.GET.get('next')
if lot.next_url:
redirect_to = resolve_url(lot.next_url)
if not is_safe_url(url=redirect_to, host=request.get_host()):
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
return HttpResponseRedirect(redirect_to)
| <commit_before>import json
from django.conf import settings
from django.http import HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import get_object_or_404, resolve_url
from django.utils.http import is_safe_url
from django.views.generic import View
from django.contrib.auth import authenticate, login
from .models import LOT
class LOTLogin(View):
def get(self, request, uuid):
lot = get_object_or_404(LOT, uuid=uuid)
if not lot.verify():
lot.delete()
return HttpResponseNotFound()
user = authenticate(lot_uuid=uuid)
login(request, user)
try:
session_data = json.loads(lot.session_data)
request.session.update(session_data)
except Exception:
# If not correctly serialized not set the session_data
pass
if lot.is_one_time():
lot.delete()
redirect_to = request.GET.get('next')
if lot.next_url:
redirect_to = resolve_url(lot.next_url)
if not is_safe_url(url=redirect_to, host=request.get_host()):
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
return HttpResponseRedirect(redirect_to)
<commit_msg>Check for an empty user<commit_after> | import json
from django.conf import settings
from django.http import HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import get_object_or_404, resolve_url
from django.utils.http import is_safe_url
from django.views.generic import View
from django.contrib.auth import authenticate, login
from .models import LOT
class LOTLogin(View):
def get(self, request, uuid):
lot = get_object_or_404(LOT, uuid=uuid)
if not lot.verify():
lot.delete()
return HttpResponseNotFound()
user = authenticate(request, lot_uuid=uuid)
if user is not None:
login(request, user)
else:
raise RuntimeError('The authentication backend did not return a user')
try:
session_data = json.loads(lot.session_data)
request.session.update(session_data)
except Exception:
# If not correctly serialized not set the session_data
pass
if lot.is_one_time():
lot.delete()
redirect_to = request.GET.get('next')
if lot.next_url:
redirect_to = resolve_url(lot.next_url)
if not is_safe_url(url=redirect_to, host=request.get_host()):
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
return HttpResponseRedirect(redirect_to)
| import json
from django.conf import settings
from django.http import HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import get_object_or_404, resolve_url
from django.utils.http import is_safe_url
from django.views.generic import View
from django.contrib.auth import authenticate, login
from .models import LOT
class LOTLogin(View):
def get(self, request, uuid):
lot = get_object_or_404(LOT, uuid=uuid)
if not lot.verify():
lot.delete()
return HttpResponseNotFound()
user = authenticate(lot_uuid=uuid)
login(request, user)
try:
session_data = json.loads(lot.session_data)
request.session.update(session_data)
except Exception:
# If not correctly serialized not set the session_data
pass
if lot.is_one_time():
lot.delete()
redirect_to = request.GET.get('next')
if lot.next_url:
redirect_to = resolve_url(lot.next_url)
if not is_safe_url(url=redirect_to, host=request.get_host()):
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
return HttpResponseRedirect(redirect_to)
Check for an empty userimport json
from django.conf import settings
from django.http import HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import get_object_or_404, resolve_url
from django.utils.http import is_safe_url
from django.views.generic import View
from django.contrib.auth import authenticate, login
from .models import LOT
class LOTLogin(View):
def get(self, request, uuid):
lot = get_object_or_404(LOT, uuid=uuid)
if not lot.verify():
lot.delete()
return HttpResponseNotFound()
user = authenticate(request, lot_uuid=uuid)
if user is not None:
login(request, user)
else:
raise RuntimeError('The authentication backend did not return a user')
try:
session_data = json.loads(lot.session_data)
request.session.update(session_data)
except Exception:
# If not correctly serialized not set the session_data
pass
if lot.is_one_time():
lot.delete()
redirect_to = request.GET.get('next')
if lot.next_url:
redirect_to = resolve_url(lot.next_url)
if not is_safe_url(url=redirect_to, host=request.get_host()):
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
return HttpResponseRedirect(redirect_to)
| <commit_before>import json
from django.conf import settings
from django.http import HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import get_object_or_404, resolve_url
from django.utils.http import is_safe_url
from django.views.generic import View
from django.contrib.auth import authenticate, login
from .models import LOT
class LOTLogin(View):
def get(self, request, uuid):
lot = get_object_or_404(LOT, uuid=uuid)
if not lot.verify():
lot.delete()
return HttpResponseNotFound()
user = authenticate(lot_uuid=uuid)
login(request, user)
try:
session_data = json.loads(lot.session_data)
request.session.update(session_data)
except Exception:
# If not correctly serialized not set the session_data
pass
if lot.is_one_time():
lot.delete()
redirect_to = request.GET.get('next')
if lot.next_url:
redirect_to = resolve_url(lot.next_url)
if not is_safe_url(url=redirect_to, host=request.get_host()):
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
return HttpResponseRedirect(redirect_to)
<commit_msg>Check for an empty user<commit_after>import json
from django.conf import settings
from django.http import HttpResponseRedirect, HttpResponseNotFound
from django.shortcuts import get_object_or_404, resolve_url
from django.utils.http import is_safe_url
from django.views.generic import View
from django.contrib.auth import authenticate, login
from .models import LOT
class LOTLogin(View):
def get(self, request, uuid):
lot = get_object_or_404(LOT, uuid=uuid)
if not lot.verify():
lot.delete()
return HttpResponseNotFound()
user = authenticate(request, lot_uuid=uuid)
if user is not None:
login(request, user)
else:
raise RuntimeError('The authentication backend did not return a user')
try:
session_data = json.loads(lot.session_data)
request.session.update(session_data)
except Exception:
# If not correctly serialized not set the session_data
pass
if lot.is_one_time():
lot.delete()
redirect_to = request.GET.get('next')
if lot.next_url:
redirect_to = resolve_url(lot.next_url)
if not is_safe_url(url=redirect_to, host=request.get_host()):
redirect_to = resolve_url(settings.LOGIN_REDIRECT_URL)
return HttpResponseRedirect(redirect_to)
|
6fd0c91fd1bbc6ae1a8fae46503464ab63603d38 | pinry/api/api.py | pinry/api/api.py | from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import DjangoAuthorization
from django.contrib.auth.models import User
from pinry.pins.models import Pin
class PinResource(ModelResource): # pylint: disable-msg=R0904
thumbnail = fields.CharField(readonly=True)
class Meta:
queryset = Pin.objects.all()
resource_name = 'pin'
include_resource_uri = False
def dehydrate_thumbnail(self, bundle):
pin = Pin.objects.only('image').get(pk=bundle.data['id'])
return pin.image.url_200x1000
class UserResource(ModelResource):
class Meta:
queryset = User.objects.all()
resource_name = 'auth/user'
excludes = ['email', 'password', 'is_superuser']
# Add it here.
authentication = BasicAuthentication()
authorization = DjangoAuthorization()
| from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import DjangoAuthorization
from django.contrib.auth.models import User
from pinry.pins.models import Pin
class PinResource(ModelResource): # pylint: disable-msg=R0904
thumbnail = fields.CharField(readonly=True)
class Meta:
queryset = Pin.objects.all()
resource_name = 'pin'
include_resource_uri = False
filtering = {
'published': ['gt'],
}
def dehydrate_thumbnail(self, bundle):
pin = Pin.objects.only('image').get(pk=bundle.data['id'])
return pin.image.url_200x1000
class UserResource(ModelResource):
class Meta:
queryset = User.objects.all()
resource_name = 'auth/user'
excludes = ['email', 'password', 'is_superuser']
# Add it here.
authentication = BasicAuthentication()
authorization = DjangoAuthorization()
| Enable filtering over the published field | Enable filtering over the published field
Tastypie requires that we define a list of fields that can be used
for filtering; add the "published" to this list so we can query pinry
for the list of images created after some date.
| Python | bsd-2-clause | lapo-luchini/pinry,supervacuo/pinry,lapo-luchini/pinry,supervacuo/pinry,wangjun/pinry,QLGu/pinry,Stackato-Apps/pinry,dotcom900825/xishi,lapo-luchini/pinry,Stackato-Apps/pinry,MSylvia/pinry,pinry/pinry,MSylvia/pinry,pinry/pinry,pinry/pinry,Stackato-Apps/pinry,dotcom900825/xishi,QLGu/pinry,wangjun/pinry,QLGu/pinry,rafirosenberg/pinry,pinry/pinry,lapo-luchini/pinry,rafirosenberg/pinry,supervacuo/pinry,MSylvia/pinry,wangjun/pinry | from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import DjangoAuthorization
from django.contrib.auth.models import User
from pinry.pins.models import Pin
class PinResource(ModelResource): # pylint: disable-msg=R0904
thumbnail = fields.CharField(readonly=True)
class Meta:
queryset = Pin.objects.all()
resource_name = 'pin'
include_resource_uri = False
def dehydrate_thumbnail(self, bundle):
pin = Pin.objects.only('image').get(pk=bundle.data['id'])
return pin.image.url_200x1000
class UserResource(ModelResource):
class Meta:
queryset = User.objects.all()
resource_name = 'auth/user'
excludes = ['email', 'password', 'is_superuser']
# Add it here.
authentication = BasicAuthentication()
authorization = DjangoAuthorization()
Enable filtering over the published field
Tastypie requires that we define a list of fields that can be used
for filtering; add the "published" to this list so we can query pinry
for the list of images created after some date. | from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import DjangoAuthorization
from django.contrib.auth.models import User
from pinry.pins.models import Pin
class PinResource(ModelResource): # pylint: disable-msg=R0904
thumbnail = fields.CharField(readonly=True)
class Meta:
queryset = Pin.objects.all()
resource_name = 'pin'
include_resource_uri = False
filtering = {
'published': ['gt'],
}
def dehydrate_thumbnail(self, bundle):
pin = Pin.objects.only('image').get(pk=bundle.data['id'])
return pin.image.url_200x1000
class UserResource(ModelResource):
class Meta:
queryset = User.objects.all()
resource_name = 'auth/user'
excludes = ['email', 'password', 'is_superuser']
# Add it here.
authentication = BasicAuthentication()
authorization = DjangoAuthorization()
| <commit_before>from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import DjangoAuthorization
from django.contrib.auth.models import User
from pinry.pins.models import Pin
class PinResource(ModelResource): # pylint: disable-msg=R0904
thumbnail = fields.CharField(readonly=True)
class Meta:
queryset = Pin.objects.all()
resource_name = 'pin'
include_resource_uri = False
def dehydrate_thumbnail(self, bundle):
pin = Pin.objects.only('image').get(pk=bundle.data['id'])
return pin.image.url_200x1000
class UserResource(ModelResource):
class Meta:
queryset = User.objects.all()
resource_name = 'auth/user'
excludes = ['email', 'password', 'is_superuser']
# Add it here.
authentication = BasicAuthentication()
authorization = DjangoAuthorization()
<commit_msg>Enable filtering over the published field
Tastypie requires that we define a list of fields that can be used
for filtering; add the "published" to this list so we can query pinry
for the list of images created after some date.<commit_after> | from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import DjangoAuthorization
from django.contrib.auth.models import User
from pinry.pins.models import Pin
class PinResource(ModelResource): # pylint: disable-msg=R0904
thumbnail = fields.CharField(readonly=True)
class Meta:
queryset = Pin.objects.all()
resource_name = 'pin'
include_resource_uri = False
filtering = {
'published': ['gt'],
}
def dehydrate_thumbnail(self, bundle):
pin = Pin.objects.only('image').get(pk=bundle.data['id'])
return pin.image.url_200x1000
class UserResource(ModelResource):
class Meta:
queryset = User.objects.all()
resource_name = 'auth/user'
excludes = ['email', 'password', 'is_superuser']
# Add it here.
authentication = BasicAuthentication()
authorization = DjangoAuthorization()
| from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import DjangoAuthorization
from django.contrib.auth.models import User
from pinry.pins.models import Pin
class PinResource(ModelResource): # pylint: disable-msg=R0904
thumbnail = fields.CharField(readonly=True)
class Meta:
queryset = Pin.objects.all()
resource_name = 'pin'
include_resource_uri = False
def dehydrate_thumbnail(self, bundle):
pin = Pin.objects.only('image').get(pk=bundle.data['id'])
return pin.image.url_200x1000
class UserResource(ModelResource):
class Meta:
queryset = User.objects.all()
resource_name = 'auth/user'
excludes = ['email', 'password', 'is_superuser']
# Add it here.
authentication = BasicAuthentication()
authorization = DjangoAuthorization()
Enable filtering over the published field
Tastypie requires that we define a list of fields that can be used
for filtering; add the "published" to this list so we can query pinry
for the list of images created after some date.from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import DjangoAuthorization
from django.contrib.auth.models import User
from pinry.pins.models import Pin
class PinResource(ModelResource): # pylint: disable-msg=R0904
thumbnail = fields.CharField(readonly=True)
class Meta:
queryset = Pin.objects.all()
resource_name = 'pin'
include_resource_uri = False
filtering = {
'published': ['gt'],
}
def dehydrate_thumbnail(self, bundle):
pin = Pin.objects.only('image').get(pk=bundle.data['id'])
return pin.image.url_200x1000
class UserResource(ModelResource):
class Meta:
queryset = User.objects.all()
resource_name = 'auth/user'
excludes = ['email', 'password', 'is_superuser']
# Add it here.
authentication = BasicAuthentication()
authorization = DjangoAuthorization()
| <commit_before>from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import DjangoAuthorization
from django.contrib.auth.models import User
from pinry.pins.models import Pin
class PinResource(ModelResource): # pylint: disable-msg=R0904
thumbnail = fields.CharField(readonly=True)
class Meta:
queryset = Pin.objects.all()
resource_name = 'pin'
include_resource_uri = False
def dehydrate_thumbnail(self, bundle):
pin = Pin.objects.only('image').get(pk=bundle.data['id'])
return pin.image.url_200x1000
class UserResource(ModelResource):
class Meta:
queryset = User.objects.all()
resource_name = 'auth/user'
excludes = ['email', 'password', 'is_superuser']
# Add it here.
authentication = BasicAuthentication()
authorization = DjangoAuthorization()
<commit_msg>Enable filtering over the published field
Tastypie requires that we define a list of fields that can be used
for filtering; add the "published" to this list so we can query pinry
for the list of images created after some date.<commit_after>from tastypie.resources import ModelResource
from tastypie import fields
from tastypie.authentication import BasicAuthentication
from tastypie.authorization import DjangoAuthorization
from django.contrib.auth.models import User
from pinry.pins.models import Pin
class PinResource(ModelResource): # pylint: disable-msg=R0904
thumbnail = fields.CharField(readonly=True)
class Meta:
queryset = Pin.objects.all()
resource_name = 'pin'
include_resource_uri = False
filtering = {
'published': ['gt'],
}
def dehydrate_thumbnail(self, bundle):
pin = Pin.objects.only('image').get(pk=bundle.data['id'])
return pin.image.url_200x1000
class UserResource(ModelResource):
class Meta:
queryset = User.objects.all()
resource_name = 'auth/user'
excludes = ['email', 'password', 'is_superuser']
# Add it here.
authentication = BasicAuthentication()
authorization = DjangoAuthorization()
|
e23b146f613ed6e0090b0ef1f895ee1785e56f31 | plugins/brian.py | plugins/brian.py | """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return generate_phrase(phrases, cache)
| """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {}'.format(generate_phrase(phrases, cache))
| Use block quotes for Markov-chain plugin | Use block quotes for Markov-chain plugin
| Python | mit | kvchen/keffbot-py,kvchen/keffbot | """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return generate_phrase(phrases, cache)
Use block quotes for Markov-chain plugin | """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {}'.format(generate_phrase(phrases, cache))
| <commit_before>"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return generate_phrase(phrases, cache)
<commit_msg>Use block quotes for Markov-chain plugin<commit_after> | """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {}'.format(generate_phrase(phrases, cache))
| """Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return generate_phrase(phrases, cache)
Use block quotes for Markov-chain plugin"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {}'.format(generate_phrase(phrases, cache))
| <commit_before>"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return generate_phrase(phrases, cache)
<commit_msg>Use block quotes for Markov-chain plugin<commit_after>"""Displays a randomly generated witticism from Brian Chu himself."""
import json
import random
__match__ = r"!brian"
with open('plugins/brian_corpus/cache.json', 'r') as infile:
cache = json.load(infile)
with open('plugins/brian_corpus/phrases.json', 'r') as infile:
phrases = json.load(infile)
def generate_phrase(phrases, cache):
seed_phrase = []
while len(seed_phrase) < 3:
seed_phrase = random.choice(phrases).split()
w1, w2 = seed_phrase[:2]
chosen = [w1, w2]
while "{}|{}".format(w1, w2) in cache:
choice = random.choice(cache["{}|{}".format(w1, w2)])
w1, w2 = w2, choice
chosen.append(choice)
return ' '.join(chosen)
def on_message(bot, channel, user, message):
return '> {}'.format(generate_phrase(phrases, cache))
|
11a21f4ce70cbd99b9a1b369f0ca6cada9934450 | storage/utils.py | storage/utils.py | import hashlib
def hash256(hash_data):
return hashlib.sha256(hashlib.sha256(hash_data).digest()).digest() | import hashlib
def hash256(hash_data: bytes):
return hashlib.sha256(hashlib.sha256(hash_data).digest()).digest() | Move hash256 to separate function | Move hash256 to separate function
| Python | mpl-2.0 | DvA-leopold/CrAB,DvA-leopold/CrAB | import hashlib
def hash256(hash_data):
return hashlib.sha256(hashlib.sha256(hash_data).digest()).digest()Move hash256 to separate function | import hashlib
def hash256(hash_data: bytes):
return hashlib.sha256(hashlib.sha256(hash_data).digest()).digest() | <commit_before>import hashlib
def hash256(hash_data):
return hashlib.sha256(hashlib.sha256(hash_data).digest()).digest()<commit_msg>Move hash256 to separate function<commit_after> | import hashlib
def hash256(hash_data: bytes):
return hashlib.sha256(hashlib.sha256(hash_data).digest()).digest() | import hashlib
def hash256(hash_data):
return hashlib.sha256(hashlib.sha256(hash_data).digest()).digest()Move hash256 to separate functionimport hashlib
def hash256(hash_data: bytes):
return hashlib.sha256(hashlib.sha256(hash_data).digest()).digest() | <commit_before>import hashlib
def hash256(hash_data):
return hashlib.sha256(hashlib.sha256(hash_data).digest()).digest()<commit_msg>Move hash256 to separate function<commit_after>import hashlib
def hash256(hash_data: bytes):
return hashlib.sha256(hashlib.sha256(hash_data).digest()).digest() |
4cef3788a19b9ad7059184a39accd2b551407de4 | tests/test_benchmark.py | tests/test_benchmark.py | import os
from subprocess import check_call
import pytest
def run(command, problem, so, shape, nbpml, *extra):
args = ["python", "../benchmarks/user/benchmark.py", command]
args.extend(["-P", str(problem)])
args.extend(["-so", str(so)])
args.extend(["-d"] + [str(i) for i in shape])
args.extend(["--nbpml", str(nbpml)])
args.extend(extra)
check_call(args)
def test_test_tti():
run('test', 'tti', 4, [20, 20, 20], 5)
def test_test_acoustic():
run('test', 'acoustic', 4, [20, 20, 20], 5)
def test_run_acoustic_fixed_bs():
run('run', 'acoustic', 4, [20, 20, 20], 5, '-bs', '5', '5', '4')
| from subprocess import check_call
def run_cmd(command, problem, so, shape, nbpml, *extra):
args = ["python", "../benchmarks/user/benchmark.py", command]
args.extend(["-P", str(problem)])
args.extend(["-so", str(so)])
args.extend(["-d"] + [str(i) for i in shape])
args.extend(["--nbpml", str(nbpml)])
args.extend(extra)
check_call(args)
def test_test_tti():
run_cmd('test', 'tti', 4, [20, 20, 20], 5)
def test_test_acoustic():
run_cmd('test', 'acoustic', 4, [20, 20, 20], 5)
def test_run_acoustic_fixed_bs():
run_cmd('run', 'acoustic', 4, [20, 20, 20], 5, '-bs', '5', '5', '4')
| Check bench mode==run with fixed block shape | tests: Check bench mode==run with fixed block shape
| Python | mit | opesci/devito,opesci/devito | import os
from subprocess import check_call
import pytest
def run(command, problem, so, shape, nbpml, *extra):
args = ["python", "../benchmarks/user/benchmark.py", command]
args.extend(["-P", str(problem)])
args.extend(["-so", str(so)])
args.extend(["-d"] + [str(i) for i in shape])
args.extend(["--nbpml", str(nbpml)])
args.extend(extra)
check_call(args)
def test_test_tti():
run('test', 'tti', 4, [20, 20, 20], 5)
def test_test_acoustic():
run('test', 'acoustic', 4, [20, 20, 20], 5)
def test_run_acoustic_fixed_bs():
run('run', 'acoustic', 4, [20, 20, 20], 5, '-bs', '5', '5', '4')
tests: Check bench mode==run with fixed block shape | from subprocess import check_call
def run_cmd(command, problem, so, shape, nbpml, *extra):
args = ["python", "../benchmarks/user/benchmark.py", command]
args.extend(["-P", str(problem)])
args.extend(["-so", str(so)])
args.extend(["-d"] + [str(i) for i in shape])
args.extend(["--nbpml", str(nbpml)])
args.extend(extra)
check_call(args)
def test_test_tti():
run_cmd('test', 'tti', 4, [20, 20, 20], 5)
def test_test_acoustic():
run_cmd('test', 'acoustic', 4, [20, 20, 20], 5)
def test_run_acoustic_fixed_bs():
run_cmd('run', 'acoustic', 4, [20, 20, 20], 5, '-bs', '5', '5', '4')
| <commit_before>import os
from subprocess import check_call
import pytest
def run(command, problem, so, shape, nbpml, *extra):
args = ["python", "../benchmarks/user/benchmark.py", command]
args.extend(["-P", str(problem)])
args.extend(["-so", str(so)])
args.extend(["-d"] + [str(i) for i in shape])
args.extend(["--nbpml", str(nbpml)])
args.extend(extra)
check_call(args)
def test_test_tti():
run('test', 'tti', 4, [20, 20, 20], 5)
def test_test_acoustic():
run('test', 'acoustic', 4, [20, 20, 20], 5)
def test_run_acoustic_fixed_bs():
run('run', 'acoustic', 4, [20, 20, 20], 5, '-bs', '5', '5', '4')
<commit_msg>tests: Check bench mode==run with fixed block shape<commit_after> | from subprocess import check_call
def run_cmd(command, problem, so, shape, nbpml, *extra):
args = ["python", "../benchmarks/user/benchmark.py", command]
args.extend(["-P", str(problem)])
args.extend(["-so", str(so)])
args.extend(["-d"] + [str(i) for i in shape])
args.extend(["--nbpml", str(nbpml)])
args.extend(extra)
check_call(args)
def test_test_tti():
run_cmd('test', 'tti', 4, [20, 20, 20], 5)
def test_test_acoustic():
run_cmd('test', 'acoustic', 4, [20, 20, 20], 5)
def test_run_acoustic_fixed_bs():
run_cmd('run', 'acoustic', 4, [20, 20, 20], 5, '-bs', '5', '5', '4')
| import os
from subprocess import check_call
import pytest
def run(command, problem, so, shape, nbpml, *extra):
args = ["python", "../benchmarks/user/benchmark.py", command]
args.extend(["-P", str(problem)])
args.extend(["-so", str(so)])
args.extend(["-d"] + [str(i) for i in shape])
args.extend(["--nbpml", str(nbpml)])
args.extend(extra)
check_call(args)
def test_test_tti():
run('test', 'tti', 4, [20, 20, 20], 5)
def test_test_acoustic():
run('test', 'acoustic', 4, [20, 20, 20], 5)
def test_run_acoustic_fixed_bs():
run('run', 'acoustic', 4, [20, 20, 20], 5, '-bs', '5', '5', '4')
tests: Check bench mode==run with fixed block shapefrom subprocess import check_call
def run_cmd(command, problem, so, shape, nbpml, *extra):
args = ["python", "../benchmarks/user/benchmark.py", command]
args.extend(["-P", str(problem)])
args.extend(["-so", str(so)])
args.extend(["-d"] + [str(i) for i in shape])
args.extend(["--nbpml", str(nbpml)])
args.extend(extra)
check_call(args)
def test_test_tti():
run_cmd('test', 'tti', 4, [20, 20, 20], 5)
def test_test_acoustic():
run_cmd('test', 'acoustic', 4, [20, 20, 20], 5)
def test_run_acoustic_fixed_bs():
run_cmd('run', 'acoustic', 4, [20, 20, 20], 5, '-bs', '5', '5', '4')
| <commit_before>import os
from subprocess import check_call
import pytest
def run(command, problem, so, shape, nbpml, *extra):
args = ["python", "../benchmarks/user/benchmark.py", command]
args.extend(["-P", str(problem)])
args.extend(["-so", str(so)])
args.extend(["-d"] + [str(i) for i in shape])
args.extend(["--nbpml", str(nbpml)])
args.extend(extra)
check_call(args)
def test_test_tti():
run('test', 'tti', 4, [20, 20, 20], 5)
def test_test_acoustic():
run('test', 'acoustic', 4, [20, 20, 20], 5)
def test_run_acoustic_fixed_bs():
run('run', 'acoustic', 4, [20, 20, 20], 5, '-bs', '5', '5', '4')
<commit_msg>tests: Check bench mode==run with fixed block shape<commit_after>from subprocess import check_call
def run_cmd(command, problem, so, shape, nbpml, *extra):
args = ["python", "../benchmarks/user/benchmark.py", command]
args.extend(["-P", str(problem)])
args.extend(["-so", str(so)])
args.extend(["-d"] + [str(i) for i in shape])
args.extend(["--nbpml", str(nbpml)])
args.extend(extra)
check_call(args)
def test_test_tti():
run_cmd('test', 'tti', 4, [20, 20, 20], 5)
def test_test_acoustic():
run_cmd('test', 'acoustic', 4, [20, 20, 20], 5)
def test_run_acoustic_fixed_bs():
run_cmd('run', 'acoustic', 4, [20, 20, 20], 5, '-bs', '5', '5', '4')
|
9a14fec9a4bb931b41ab62988975e5688f16573d | cms/tests/test_permalinks.py | cms/tests/test_permalinks.py | from django.contrib.contenttypes.models import ContentType
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.test import TestCase
from ..permalinks import expand, resolve, PermalinkError
class TestPermalinkModel(models.Model):
def __unicode__(self):
return 'Foo'
def get_absolute_url(self):
return '/foo/'
class PermalinksTest(TestCase):
def test_resolve(self):
obj = TestPermalinkModel.objects.create()
url = resolve('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, obj)
with self.assertRaises(PermalinkError):
# A valid URL, but not a permalink.
resolve('/admin/')
with self.assertRaises(ImproperlyConfigured):
urlresolvers.set_urlconf('cms.tests.urls')
resolve('/r/')
def test_expand(self):
obj = TestPermalinkModel.objects.create()
self.assertEqual(obj.__unicode__(), 'Foo')
url = expand('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, '/foo/')
| from django.contrib.contenttypes.models import ContentType
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.test import TestCase
from ..permalinks import expand, resolve, PermalinkError
class TestPermalinkModel(models.Model):
def __unicode__(self):
return 'Foo'
def get_absolute_url(self):
return '/foo/'
class PermalinksTest(TestCase):
def test_resolve(self):
obj = TestPermalinkModel.objects.create()
url = resolve('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, obj)
with self.assertRaises(PermalinkError):
# A valid URL, but not a permalink.
resolve('/admin/')
original_urlconf = urlresolvers.get_urlconf()
with self.assertRaises(ImproperlyConfigured):
urlresolvers.set_urlconf('cms.tests.urls')
resolve('/r/')
urlresolvers.set_urlconf(original_urlconf)
def test_expand(self):
obj = TestPermalinkModel.objects.create()
self.assertEqual(obj.__unicode__(), 'Foo')
url = expand('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, '/foo/')
| Fix permalinks test which was breaking other tests. | Fix permalinks test which was breaking other tests.
| Python | bsd-3-clause | lewiscollard/cms,jamesfoley/cms,danielsamuels/cms,lewiscollard/cms,dan-gamble/cms,dan-gamble/cms,lewiscollard/cms,danielsamuels/cms,dan-gamble/cms,jamesfoley/cms,danielsamuels/cms,jamesfoley/cms,jamesfoley/cms | from django.contrib.contenttypes.models import ContentType
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.test import TestCase
from ..permalinks import expand, resolve, PermalinkError
class TestPermalinkModel(models.Model):
def __unicode__(self):
return 'Foo'
def get_absolute_url(self):
return '/foo/'
class PermalinksTest(TestCase):
def test_resolve(self):
obj = TestPermalinkModel.objects.create()
url = resolve('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, obj)
with self.assertRaises(PermalinkError):
# A valid URL, but not a permalink.
resolve('/admin/')
with self.assertRaises(ImproperlyConfigured):
urlresolvers.set_urlconf('cms.tests.urls')
resolve('/r/')
def test_expand(self):
obj = TestPermalinkModel.objects.create()
self.assertEqual(obj.__unicode__(), 'Foo')
url = expand('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, '/foo/')
Fix permalinks test which was breaking other tests. | from django.contrib.contenttypes.models import ContentType
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.test import TestCase
from ..permalinks import expand, resolve, PermalinkError
class TestPermalinkModel(models.Model):
def __unicode__(self):
return 'Foo'
def get_absolute_url(self):
return '/foo/'
class PermalinksTest(TestCase):
def test_resolve(self):
obj = TestPermalinkModel.objects.create()
url = resolve('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, obj)
with self.assertRaises(PermalinkError):
# A valid URL, but not a permalink.
resolve('/admin/')
original_urlconf = urlresolvers.get_urlconf()
with self.assertRaises(ImproperlyConfigured):
urlresolvers.set_urlconf('cms.tests.urls')
resolve('/r/')
urlresolvers.set_urlconf(original_urlconf)
def test_expand(self):
obj = TestPermalinkModel.objects.create()
self.assertEqual(obj.__unicode__(), 'Foo')
url = expand('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, '/foo/')
| <commit_before>from django.contrib.contenttypes.models import ContentType
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.test import TestCase
from ..permalinks import expand, resolve, PermalinkError
class TestPermalinkModel(models.Model):
def __unicode__(self):
return 'Foo'
def get_absolute_url(self):
return '/foo/'
class PermalinksTest(TestCase):
def test_resolve(self):
obj = TestPermalinkModel.objects.create()
url = resolve('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, obj)
with self.assertRaises(PermalinkError):
# A valid URL, but not a permalink.
resolve('/admin/')
with self.assertRaises(ImproperlyConfigured):
urlresolvers.set_urlconf('cms.tests.urls')
resolve('/r/')
def test_expand(self):
obj = TestPermalinkModel.objects.create()
self.assertEqual(obj.__unicode__(), 'Foo')
url = expand('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, '/foo/')
<commit_msg>Fix permalinks test which was breaking other tests.<commit_after> | from django.contrib.contenttypes.models import ContentType
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.test import TestCase
from ..permalinks import expand, resolve, PermalinkError
class TestPermalinkModel(models.Model):
def __unicode__(self):
return 'Foo'
def get_absolute_url(self):
return '/foo/'
class PermalinksTest(TestCase):
def test_resolve(self):
obj = TestPermalinkModel.objects.create()
url = resolve('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, obj)
with self.assertRaises(PermalinkError):
# A valid URL, but not a permalink.
resolve('/admin/')
original_urlconf = urlresolvers.get_urlconf()
with self.assertRaises(ImproperlyConfigured):
urlresolvers.set_urlconf('cms.tests.urls')
resolve('/r/')
urlresolvers.set_urlconf(original_urlconf)
def test_expand(self):
obj = TestPermalinkModel.objects.create()
self.assertEqual(obj.__unicode__(), 'Foo')
url = expand('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, '/foo/')
| from django.contrib.contenttypes.models import ContentType
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.test import TestCase
from ..permalinks import expand, resolve, PermalinkError
class TestPermalinkModel(models.Model):
def __unicode__(self):
return 'Foo'
def get_absolute_url(self):
return '/foo/'
class PermalinksTest(TestCase):
def test_resolve(self):
obj = TestPermalinkModel.objects.create()
url = resolve('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, obj)
with self.assertRaises(PermalinkError):
# A valid URL, but not a permalink.
resolve('/admin/')
with self.assertRaises(ImproperlyConfigured):
urlresolvers.set_urlconf('cms.tests.urls')
resolve('/r/')
def test_expand(self):
obj = TestPermalinkModel.objects.create()
self.assertEqual(obj.__unicode__(), 'Foo')
url = expand('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, '/foo/')
Fix permalinks test which was breaking other tests.from django.contrib.contenttypes.models import ContentType
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.test import TestCase
from ..permalinks import expand, resolve, PermalinkError
class TestPermalinkModel(models.Model):
def __unicode__(self):
return 'Foo'
def get_absolute_url(self):
return '/foo/'
class PermalinksTest(TestCase):
def test_resolve(self):
obj = TestPermalinkModel.objects.create()
url = resolve('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, obj)
with self.assertRaises(PermalinkError):
# A valid URL, but not a permalink.
resolve('/admin/')
original_urlconf = urlresolvers.get_urlconf()
with self.assertRaises(ImproperlyConfigured):
urlresolvers.set_urlconf('cms.tests.urls')
resolve('/r/')
urlresolvers.set_urlconf(original_urlconf)
def test_expand(self):
obj = TestPermalinkModel.objects.create()
self.assertEqual(obj.__unicode__(), 'Foo')
url = expand('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, '/foo/')
| <commit_before>from django.contrib.contenttypes.models import ContentType
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.test import TestCase
from ..permalinks import expand, resolve, PermalinkError
class TestPermalinkModel(models.Model):
def __unicode__(self):
return 'Foo'
def get_absolute_url(self):
return '/foo/'
class PermalinksTest(TestCase):
def test_resolve(self):
obj = TestPermalinkModel.objects.create()
url = resolve('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, obj)
with self.assertRaises(PermalinkError):
# A valid URL, but not a permalink.
resolve('/admin/')
with self.assertRaises(ImproperlyConfigured):
urlresolvers.set_urlconf('cms.tests.urls')
resolve('/r/')
def test_expand(self):
obj = TestPermalinkModel.objects.create()
self.assertEqual(obj.__unicode__(), 'Foo')
url = expand('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, '/foo/')
<commit_msg>Fix permalinks test which was breaking other tests.<commit_after>from django.contrib.contenttypes.models import ContentType
from django.core import urlresolvers
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.test import TestCase
from ..permalinks import expand, resolve, PermalinkError
class TestPermalinkModel(models.Model):
def __unicode__(self):
return 'Foo'
def get_absolute_url(self):
return '/foo/'
class PermalinksTest(TestCase):
def test_resolve(self):
obj = TestPermalinkModel.objects.create()
url = resolve('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, obj)
with self.assertRaises(PermalinkError):
# A valid URL, but not a permalink.
resolve('/admin/')
original_urlconf = urlresolvers.get_urlconf()
with self.assertRaises(ImproperlyConfigured):
urlresolvers.set_urlconf('cms.tests.urls')
resolve('/r/')
urlresolvers.set_urlconf(original_urlconf)
def test_expand(self):
obj = TestPermalinkModel.objects.create()
self.assertEqual(obj.__unicode__(), 'Foo')
url = expand('/r/{}-{}/'.format(
ContentType.objects.get_for_model(TestPermalinkModel).pk,
obj.pk
))
self.assertEqual(url, '/foo/')
|
24341ee3dabcbad751c849ef8007b669bdce5141 | tests/test_bountyxml.py | tests/test_bountyxml.py | from hearthstone import bountyxml
def test_bountyxml_load():
bounty_db, _ = bountyxml.load()
assert bounty_db
assert bounty_db[47].boss_name == "Cap'n Hogger"
assert bounty_db[58].region_name == "The Barrens"
| from hearthstone import bountyxml
def test_bountyxml_load():
bounty_db, _ = bountyxml.load()
assert bounty_db
assert bounty_db[68].boss_name == "The Anointed Blades"
assert bounty_db[58].region_name == "The Barrens"
| Fix broken Mercenaries bountyxml test | Fix broken Mercenaries bountyxml test
| Python | mit | HearthSim/python-hearthstone | from hearthstone import bountyxml
def test_bountyxml_load():
bounty_db, _ = bountyxml.load()
assert bounty_db
assert bounty_db[47].boss_name == "Cap'n Hogger"
assert bounty_db[58].region_name == "The Barrens"
Fix broken Mercenaries bountyxml test | from hearthstone import bountyxml
def test_bountyxml_load():
bounty_db, _ = bountyxml.load()
assert bounty_db
assert bounty_db[68].boss_name == "The Anointed Blades"
assert bounty_db[58].region_name == "The Barrens"
| <commit_before>from hearthstone import bountyxml
def test_bountyxml_load():
bounty_db, _ = bountyxml.load()
assert bounty_db
assert bounty_db[47].boss_name == "Cap'n Hogger"
assert bounty_db[58].region_name == "The Barrens"
<commit_msg>Fix broken Mercenaries bountyxml test<commit_after> | from hearthstone import bountyxml
def test_bountyxml_load():
bounty_db, _ = bountyxml.load()
assert bounty_db
assert bounty_db[68].boss_name == "The Anointed Blades"
assert bounty_db[58].region_name == "The Barrens"
| from hearthstone import bountyxml
def test_bountyxml_load():
bounty_db, _ = bountyxml.load()
assert bounty_db
assert bounty_db[47].boss_name == "Cap'n Hogger"
assert bounty_db[58].region_name == "The Barrens"
Fix broken Mercenaries bountyxml testfrom hearthstone import bountyxml
def test_bountyxml_load():
bounty_db, _ = bountyxml.load()
assert bounty_db
assert bounty_db[68].boss_name == "The Anointed Blades"
assert bounty_db[58].region_name == "The Barrens"
| <commit_before>from hearthstone import bountyxml
def test_bountyxml_load():
bounty_db, _ = bountyxml.load()
assert bounty_db
assert bounty_db[47].boss_name == "Cap'n Hogger"
assert bounty_db[58].region_name == "The Barrens"
<commit_msg>Fix broken Mercenaries bountyxml test<commit_after>from hearthstone import bountyxml
def test_bountyxml_load():
bounty_db, _ = bountyxml.load()
assert bounty_db
assert bounty_db[68].boss_name == "The Anointed Blades"
assert bounty_db[58].region_name == "The Barrens"
|
32efe7a0365738f982030f7c5be3b702dbac87c8 | tests/test.py | tests/test.py | from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
| from devicehive import Handler
from devicehive import DeviceHive
import pytest
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def only_http_implementation(self):
if self.http_transport():
return
pytest.skip('Implemented only for http transport')
def only_websocket_implementation(self):
if self.websocket_transport():
return
pytest.skip('Implemented only for websocket transport')
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
| Add only_http_implementation and only_websocket_implementation methods | Add only_http_implementation and only_websocket_implementation methods
| Python | apache-2.0 | devicehive/devicehive-python | from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
Add only_http_implementation and only_websocket_implementation methods | from devicehive import Handler
from devicehive import DeviceHive
import pytest
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def only_http_implementation(self):
if self.http_transport():
return
pytest.skip('Implemented only for http transport')
def only_websocket_implementation(self):
if self.websocket_transport():
return
pytest.skip('Implemented only for websocket transport')
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
| <commit_before>from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
<commit_msg>Add only_http_implementation and only_websocket_implementation methods<commit_after> | from devicehive import Handler
from devicehive import DeviceHive
import pytest
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def only_http_implementation(self):
if self.http_transport():
return
pytest.skip('Implemented only for http transport')
def only_websocket_implementation(self):
if self.websocket_transport():
return
pytest.skip('Implemented only for websocket transport')
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
| from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
Add only_http_implementation and only_websocket_implementation methodsfrom devicehive import Handler
from devicehive import DeviceHive
import pytest
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def only_http_implementation(self):
if self.http_transport():
return
pytest.skip('Implemented only for http transport')
def only_websocket_implementation(self):
if self.websocket_transport():
return
pytest.skip('Implemented only for websocket transport')
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
| <commit_before>from devicehive import Handler
from devicehive import DeviceHive
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
<commit_msg>Add only_http_implementation and only_websocket_implementation methods<commit_after>from devicehive import Handler
from devicehive import DeviceHive
import pytest
class TestHandler(Handler):
"""Test handler class."""
def handle_connect(self):
if not self.options['handle_connect'](self):
self.api.disconnect()
def handle_event(self, event):
pass
class Test(object):
"""Test class."""
def __init__(self, transport_url, refresh_token):
self._transport_url = transport_url
self._refresh_token = refresh_token
self._transport_name = DeviceHive.transport_name(self._transport_url)
def transport_name(self):
return self._transport_name
def http_transport(self):
return self._transport_name == 'http'
def websocket_transport(self):
return self._transport_name == 'websocket'
def only_http_implementation(self):
if self.http_transport():
return
pytest.skip('Implemented only for http transport')
def only_websocket_implementation(self):
if self.websocket_transport():
return
pytest.skip('Implemented only for websocket transport')
def run(self, handle_connect, handle_event=None):
handler_options = {'handle_connect': handle_connect,
'handle_event': handle_event}
device_hive = DeviceHive(self._transport_url, TestHandler,
handler_options)
device_hive.connect(refresh_token=self._refresh_token)
exception_info = device_hive.join(print_exception=False)
if not exception_info:
return
raise exception_info[1]
|
d1a43bb960d695ce74d38e9bd95218f655f057a0 | forth-like/demo.py | forth-like/demo.py | #!/usr/bin/python
"""
demo.py -- Experimenting with expressing this forth-like pattern in Python.
It appears it can be done with varargs and splatting.
"""
import sys
import time
def retry(*args):
n = args[0]
f = args[1]
a = args[2:]
for i in range(n):
f(*a)
def hello_sleep(*args):
print 'hello'
time.sleep(args[0])
def retry_demo():
retry(5, hello_sleep, 0.1)
def timeout(*args):
seconds = args[0]
f = args[1]
a = args[2:]
# TODO: Set SIGALARM or something
print 'Running %s with args %s with timeout of %f' % (f, a, seconds)
f(*a)
def timeout_retry_demo():
timeout(0.3, retry, 5, hello_sleep, 0.1)
def main(_):
hello_sleep(0.1)
print('--')
retry_demo()
print('--')
timeout_retry_demo()
if __name__ == '__main__':
try:
main(sys.argv)
except RuntimeError as e:
print >>sys.stderr, 'FATAL: %s' % e
sys.exit(1)
| #!/usr/bin/python
"""
demo.py -- Experimenting with expressing this forth-like pattern in Python.
It appears it can be done with varargs and splatting.
"""
import sys
import time
def retry(n, f, *args):
for i in range(n):
f(*args)
def hello_sleep(t):
print 'hello'
time.sleep(t)
def retry_demo():
retry(5, hello_sleep, 0.1)
def timeout(seconds, f, *a):
# TODO: Set SIGALARM or something
print 'Running %s with args %s with timeout of %f' % (f, a, seconds)
f(*a)
def timeout_retry_demo():
timeout(0.3, retry, 5, hello_sleep, 0.1)
def main(_):
hello_sleep(0.1)
print('--')
retry_demo()
print('--')
timeout_retry_demo()
if __name__ == '__main__':
try:
main(sys.argv)
except RuntimeError as e:
print >>sys.stderr, 'FATAL: %s' % e
sys.exit(1)
| Remove manual indexing of args | Remove manual indexing of args | Python | apache-2.0 | oilshell/blog-code,oilshell/blog-code,oilshell/blog-code,oilshell/blog-code,oilshell/blog-code,oilshell/blog-code,oilshell/blog-code | #!/usr/bin/python
"""
demo.py -- Experimenting with expressing this forth-like pattern in Python.
It appears it can be done with varargs and splatting.
"""
import sys
import time
def retry(*args):
n = args[0]
f = args[1]
a = args[2:]
for i in range(n):
f(*a)
def hello_sleep(*args):
print 'hello'
time.sleep(args[0])
def retry_demo():
retry(5, hello_sleep, 0.1)
def timeout(*args):
seconds = args[0]
f = args[1]
a = args[2:]
# TODO: Set SIGALARM or something
print 'Running %s with args %s with timeout of %f' % (f, a, seconds)
f(*a)
def timeout_retry_demo():
timeout(0.3, retry, 5, hello_sleep, 0.1)
def main(_):
hello_sleep(0.1)
print('--')
retry_demo()
print('--')
timeout_retry_demo()
if __name__ == '__main__':
try:
main(sys.argv)
except RuntimeError as e:
print >>sys.stderr, 'FATAL: %s' % e
sys.exit(1)
Remove manual indexing of args | #!/usr/bin/python
"""
demo.py -- Experimenting with expressing this forth-like pattern in Python.
It appears it can be done with varargs and splatting.
"""
import sys
import time
def retry(n, f, *args):
for i in range(n):
f(*args)
def hello_sleep(t):
print 'hello'
time.sleep(t)
def retry_demo():
retry(5, hello_sleep, 0.1)
def timeout(seconds, f, *a):
# TODO: Set SIGALARM or something
print 'Running %s with args %s with timeout of %f' % (f, a, seconds)
f(*a)
def timeout_retry_demo():
timeout(0.3, retry, 5, hello_sleep, 0.1)
def main(_):
hello_sleep(0.1)
print('--')
retry_demo()
print('--')
timeout_retry_demo()
if __name__ == '__main__':
try:
main(sys.argv)
except RuntimeError as e:
print >>sys.stderr, 'FATAL: %s' % e
sys.exit(1)
| <commit_before>#!/usr/bin/python
"""
demo.py -- Experimenting with expressing this forth-like pattern in Python.
It appears it can be done with varargs and splatting.
"""
import sys
import time
def retry(*args):
n = args[0]
f = args[1]
a = args[2:]
for i in range(n):
f(*a)
def hello_sleep(*args):
print 'hello'
time.sleep(args[0])
def retry_demo():
retry(5, hello_sleep, 0.1)
def timeout(*args):
seconds = args[0]
f = args[1]
a = args[2:]
# TODO: Set SIGALARM or something
print 'Running %s with args %s with timeout of %f' % (f, a, seconds)
f(*a)
def timeout_retry_demo():
timeout(0.3, retry, 5, hello_sleep, 0.1)
def main(_):
hello_sleep(0.1)
print('--')
retry_demo()
print('--')
timeout_retry_demo()
if __name__ == '__main__':
try:
main(sys.argv)
except RuntimeError as e:
print >>sys.stderr, 'FATAL: %s' % e
sys.exit(1)
<commit_msg>Remove manual indexing of args<commit_after> | #!/usr/bin/python
"""
demo.py -- Experimenting with expressing this forth-like pattern in Python.
It appears it can be done with varargs and splatting.
"""
import sys
import time
def retry(n, f, *args):
for i in range(n):
f(*args)
def hello_sleep(t):
print 'hello'
time.sleep(t)
def retry_demo():
retry(5, hello_sleep, 0.1)
def timeout(seconds, f, *a):
# TODO: Set SIGALARM or something
print 'Running %s with args %s with timeout of %f' % (f, a, seconds)
f(*a)
def timeout_retry_demo():
timeout(0.3, retry, 5, hello_sleep, 0.1)
def main(_):
hello_sleep(0.1)
print('--')
retry_demo()
print('--')
timeout_retry_demo()
if __name__ == '__main__':
try:
main(sys.argv)
except RuntimeError as e:
print >>sys.stderr, 'FATAL: %s' % e
sys.exit(1)
| #!/usr/bin/python
"""
demo.py -- Experimenting with expressing this forth-like pattern in Python.
It appears it can be done with varargs and splatting.
"""
import sys
import time
def retry(*args):
n = args[0]
f = args[1]
a = args[2:]
for i in range(n):
f(*a)
def hello_sleep(*args):
print 'hello'
time.sleep(args[0])
def retry_demo():
retry(5, hello_sleep, 0.1)
def timeout(*args):
seconds = args[0]
f = args[1]
a = args[2:]
# TODO: Set SIGALARM or something
print 'Running %s with args %s with timeout of %f' % (f, a, seconds)
f(*a)
def timeout_retry_demo():
timeout(0.3, retry, 5, hello_sleep, 0.1)
def main(_):
hello_sleep(0.1)
print('--')
retry_demo()
print('--')
timeout_retry_demo()
if __name__ == '__main__':
try:
main(sys.argv)
except RuntimeError as e:
print >>sys.stderr, 'FATAL: %s' % e
sys.exit(1)
Remove manual indexing of args#!/usr/bin/python
"""
demo.py -- Experimenting with expressing this forth-like pattern in Python.
It appears it can be done with varargs and splatting.
"""
import sys
import time
def retry(n, f, *args):
for i in range(n):
f(*args)
def hello_sleep(t):
print 'hello'
time.sleep(t)
def retry_demo():
retry(5, hello_sleep, 0.1)
def timeout(seconds, f, *a):
# TODO: Set SIGALARM or something
print 'Running %s with args %s with timeout of %f' % (f, a, seconds)
f(*a)
def timeout_retry_demo():
timeout(0.3, retry, 5, hello_sleep, 0.1)
def main(_):
hello_sleep(0.1)
print('--')
retry_demo()
print('--')
timeout_retry_demo()
if __name__ == '__main__':
try:
main(sys.argv)
except RuntimeError as e:
print >>sys.stderr, 'FATAL: %s' % e
sys.exit(1)
| <commit_before>#!/usr/bin/python
"""
demo.py -- Experimenting with expressing this forth-like pattern in Python.
It appears it can be done with varargs and splatting.
"""
import sys
import time
def retry(*args):
n = args[0]
f = args[1]
a = args[2:]
for i in range(n):
f(*a)
def hello_sleep(*args):
print 'hello'
time.sleep(args[0])
def retry_demo():
retry(5, hello_sleep, 0.1)
def timeout(*args):
seconds = args[0]
f = args[1]
a = args[2:]
# TODO: Set SIGALARM or something
print 'Running %s with args %s with timeout of %f' % (f, a, seconds)
f(*a)
def timeout_retry_demo():
timeout(0.3, retry, 5, hello_sleep, 0.1)
def main(_):
hello_sleep(0.1)
print('--')
retry_demo()
print('--')
timeout_retry_demo()
if __name__ == '__main__':
try:
main(sys.argv)
except RuntimeError as e:
print >>sys.stderr, 'FATAL: %s' % e
sys.exit(1)
<commit_msg>Remove manual indexing of args<commit_after>#!/usr/bin/python
"""
demo.py -- Experimenting with expressing this forth-like pattern in Python.
It appears it can be done with varargs and splatting.
"""
import sys
import time
def retry(n, f, *args):
for i in range(n):
f(*args)
def hello_sleep(t):
print 'hello'
time.sleep(t)
def retry_demo():
retry(5, hello_sleep, 0.1)
def timeout(seconds, f, *a):
# TODO: Set SIGALARM or something
print 'Running %s with args %s with timeout of %f' % (f, a, seconds)
f(*a)
def timeout_retry_demo():
timeout(0.3, retry, 5, hello_sleep, 0.1)
def main(_):
hello_sleep(0.1)
print('--')
retry_demo()
print('--')
timeout_retry_demo()
if __name__ == '__main__':
try:
main(sys.argv)
except RuntimeError as e:
print >>sys.stderr, 'FATAL: %s' % e
sys.exit(1)
|
0b15611eb0020bc2cdb4a4435756315b0bd97a21 | seria/cli.py | seria/cli.py | # -*- coding: utf-8 -*-
import click
from .compat import StringIO
import seria
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--xml', 'out_fmt', flag_value='xml')
@click.option('--yaml', 'out_fmt', flag_value='yaml')
@click.option('--json', 'out_fmt', flag_value='json')
@click.argument('input', type=click.File('rb'), default='-')
@click.argument('output', type=click.File('wb'), default='-')
def cli(out_fmt, input, output):
"""Converts text."""
_input = StringIO()
for l in input:
try:
_input.write(str(l))
except TypeError:
_input.write(bytes(l, 'utf-8'))
_serialized_obj = seria.load(_input)
output.write(_serialized_obj.dump(out_fmt))
if __name__ == '__main__':
cli(out_fmt, input, output)
| # -*- coding: utf-8 -*-
import click
from .compat import StringIO, str, builtin_str
import seria
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--xml', 'out_fmt', flag_value='xml')
@click.option('--yaml', 'out_fmt', flag_value='yaml')
@click.option('--json', 'out_fmt', flag_value='json')
@click.argument('input', type=click.File('r'), default='-')
@click.argument('output', type=click.File('w'), default='-')
def cli(out_fmt, input, output):
"""Converts text."""
_input = StringIO()
for l in input:
try:
_input.write(str(l))
except TypeError:
_input.write(bytes(l, 'utf-8'))
_input = seria.load(_input)
_out = (_input.dump(out_fmt))
output.write(_out)
if __name__ == '__main__':
cli(out_fmt, input, output) | Fix errors with 2/3 FLO support | Fix errors with 2/3 FLO support
| Python | mit | rtluckie/seria | # -*- coding: utf-8 -*-
import click
from .compat import StringIO
import seria
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--xml', 'out_fmt', flag_value='xml')
@click.option('--yaml', 'out_fmt', flag_value='yaml')
@click.option('--json', 'out_fmt', flag_value='json')
@click.argument('input', type=click.File('rb'), default='-')
@click.argument('output', type=click.File('wb'), default='-')
def cli(out_fmt, input, output):
"""Converts text."""
_input = StringIO()
for l in input:
try:
_input.write(str(l))
except TypeError:
_input.write(bytes(l, 'utf-8'))
_serialized_obj = seria.load(_input)
output.write(_serialized_obj.dump(out_fmt))
if __name__ == '__main__':
cli(out_fmt, input, output)
Fix errors with 2/3 FLO support | # -*- coding: utf-8 -*-
import click
from .compat import StringIO, str, builtin_str
import seria
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--xml', 'out_fmt', flag_value='xml')
@click.option('--yaml', 'out_fmt', flag_value='yaml')
@click.option('--json', 'out_fmt', flag_value='json')
@click.argument('input', type=click.File('r'), default='-')
@click.argument('output', type=click.File('w'), default='-')
def cli(out_fmt, input, output):
"""Converts text."""
_input = StringIO()
for l in input:
try:
_input.write(str(l))
except TypeError:
_input.write(bytes(l, 'utf-8'))
_input = seria.load(_input)
_out = (_input.dump(out_fmt))
output.write(_out)
if __name__ == '__main__':
cli(out_fmt, input, output) | <commit_before># -*- coding: utf-8 -*-
import click
from .compat import StringIO
import seria
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--xml', 'out_fmt', flag_value='xml')
@click.option('--yaml', 'out_fmt', flag_value='yaml')
@click.option('--json', 'out_fmt', flag_value='json')
@click.argument('input', type=click.File('rb'), default='-')
@click.argument('output', type=click.File('wb'), default='-')
def cli(out_fmt, input, output):
"""Converts text."""
_input = StringIO()
for l in input:
try:
_input.write(str(l))
except TypeError:
_input.write(bytes(l, 'utf-8'))
_serialized_obj = seria.load(_input)
output.write(_serialized_obj.dump(out_fmt))
if __name__ == '__main__':
cli(out_fmt, input, output)
<commit_msg>Fix errors with 2/3 FLO support<commit_after> | # -*- coding: utf-8 -*-
import click
from .compat import StringIO, str, builtin_str
import seria
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--xml', 'out_fmt', flag_value='xml')
@click.option('--yaml', 'out_fmt', flag_value='yaml')
@click.option('--json', 'out_fmt', flag_value='json')
@click.argument('input', type=click.File('r'), default='-')
@click.argument('output', type=click.File('w'), default='-')
def cli(out_fmt, input, output):
"""Converts text."""
_input = StringIO()
for l in input:
try:
_input.write(str(l))
except TypeError:
_input.write(bytes(l, 'utf-8'))
_input = seria.load(_input)
_out = (_input.dump(out_fmt))
output.write(_out)
if __name__ == '__main__':
cli(out_fmt, input, output) | # -*- coding: utf-8 -*-
import click
from .compat import StringIO
import seria
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--xml', 'out_fmt', flag_value='xml')
@click.option('--yaml', 'out_fmt', flag_value='yaml')
@click.option('--json', 'out_fmt', flag_value='json')
@click.argument('input', type=click.File('rb'), default='-')
@click.argument('output', type=click.File('wb'), default='-')
def cli(out_fmt, input, output):
"""Converts text."""
_input = StringIO()
for l in input:
try:
_input.write(str(l))
except TypeError:
_input.write(bytes(l, 'utf-8'))
_serialized_obj = seria.load(_input)
output.write(_serialized_obj.dump(out_fmt))
if __name__ == '__main__':
cli(out_fmt, input, output)
Fix errors with 2/3 FLO support# -*- coding: utf-8 -*-
import click
from .compat import StringIO, str, builtin_str
import seria
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--xml', 'out_fmt', flag_value='xml')
@click.option('--yaml', 'out_fmt', flag_value='yaml')
@click.option('--json', 'out_fmt', flag_value='json')
@click.argument('input', type=click.File('r'), default='-')
@click.argument('output', type=click.File('w'), default='-')
def cli(out_fmt, input, output):
"""Converts text."""
_input = StringIO()
for l in input:
try:
_input.write(str(l))
except TypeError:
_input.write(bytes(l, 'utf-8'))
_input = seria.load(_input)
_out = (_input.dump(out_fmt))
output.write(_out)
if __name__ == '__main__':
cli(out_fmt, input, output) | <commit_before># -*- coding: utf-8 -*-
import click
from .compat import StringIO
import seria
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--xml', 'out_fmt', flag_value='xml')
@click.option('--yaml', 'out_fmt', flag_value='yaml')
@click.option('--json', 'out_fmt', flag_value='json')
@click.argument('input', type=click.File('rb'), default='-')
@click.argument('output', type=click.File('wb'), default='-')
def cli(out_fmt, input, output):
"""Converts text."""
_input = StringIO()
for l in input:
try:
_input.write(str(l))
except TypeError:
_input.write(bytes(l, 'utf-8'))
_serialized_obj = seria.load(_input)
output.write(_serialized_obj.dump(out_fmt))
if __name__ == '__main__':
cli(out_fmt, input, output)
<commit_msg>Fix errors with 2/3 FLO support<commit_after># -*- coding: utf-8 -*-
import click
from .compat import StringIO, str, builtin_str
import seria
CONTEXT_SETTINGS = dict(help_option_names=['-h', '--help'])
@click.command(context_settings=CONTEXT_SETTINGS)
@click.option('--xml', 'out_fmt', flag_value='xml')
@click.option('--yaml', 'out_fmt', flag_value='yaml')
@click.option('--json', 'out_fmt', flag_value='json')
@click.argument('input', type=click.File('r'), default='-')
@click.argument('output', type=click.File('w'), default='-')
def cli(out_fmt, input, output):
"""Converts text."""
_input = StringIO()
for l in input:
try:
_input.write(str(l))
except TypeError:
_input.write(bytes(l, 'utf-8'))
_input = seria.load(_input)
_out = (_input.dump(out_fmt))
output.write(_out)
if __name__ == '__main__':
cli(out_fmt, input, output) |
b2d4bf11b293073c4410ca1be98f657a30c35762 | python/triple-sum.py | python/triple-sum.py |
# A special triplet is defined as: a <= b <= c for
# a in list_a, b in list_b, and c in list_c
def get_num_special_triplets(list_a, list_b, list_c):
# remove duplicates and sort lists
list_a = sorted(set(list_a))
list_b = sorted(set(list_b))
list_c = sorted(set(list_c))
num_special_triplets = 0
for b in list_b:
len_a_candidates = num_elements_less_than(b, list_a)
len_c_candidates = num_elements_less_than(b, list_c)
num_special_triplets += 1 * len_a_candidates * len_c_candidates
return num_special_triplets
def num_elements_less_than(target, sorted_list):
for index, candidate in enumerate(sorted_list):
if candidate > target:
return index
return len(sorted_list)
if __name__ == '__main__':
_ = input().split()
list_a = list(map(int, input().rstrip().split()))
list_b = list(map(int, input().rstrip().split()))
list_c = list(map(int, input().rstrip().split()))
num_special_triplets = get_num_special_triplets(list_a, list_b, list_c)
print(num_special_triplets)
| def get_num_special_triplets(list_a, list_b, list_c):
a_index = 0
c_index = 0
num_special_triplets = 0
for b in list_b:
while a_index < len(list_a) and list_a[a_index] <= b:
a_index += 1
while c_index < len(list_c) and list_c[c_index] <= b:
c_index += 1
num_special_triplets += a_index * c_index
return num_special_triplets
if __name__ == '__main__':
_ = input().split()
# Remove duplicates and sort lists
list_a = sorted(set(map(int, input().rstrip().split())))
list_b = sorted(set(map(int, input().rstrip().split())))
list_c = sorted(set(map(int, input().rstrip().split())))
result = get_num_special_triplets(list_a, list_b, list_c)
print(result)
| Improve time complexity of solution | Improve time complexity of solution
| Python | mit | rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank,rootulp/hackerrank |
# A special triplet is defined as: a <= b <= c for
# a in list_a, b in list_b, and c in list_c
def get_num_special_triplets(list_a, list_b, list_c):
# remove duplicates and sort lists
list_a = sorted(set(list_a))
list_b = sorted(set(list_b))
list_c = sorted(set(list_c))
num_special_triplets = 0
for b in list_b:
len_a_candidates = num_elements_less_than(b, list_a)
len_c_candidates = num_elements_less_than(b, list_c)
num_special_triplets += 1 * len_a_candidates * len_c_candidates
return num_special_triplets
def num_elements_less_than(target, sorted_list):
for index, candidate in enumerate(sorted_list):
if candidate > target:
return index
return len(sorted_list)
if __name__ == '__main__':
_ = input().split()
list_a = list(map(int, input().rstrip().split()))
list_b = list(map(int, input().rstrip().split()))
list_c = list(map(int, input().rstrip().split()))
num_special_triplets = get_num_special_triplets(list_a, list_b, list_c)
print(num_special_triplets)
Improve time complexity of solution | def get_num_special_triplets(list_a, list_b, list_c):
a_index = 0
c_index = 0
num_special_triplets = 0
for b in list_b:
while a_index < len(list_a) and list_a[a_index] <= b:
a_index += 1
while c_index < len(list_c) and list_c[c_index] <= b:
c_index += 1
num_special_triplets += a_index * c_index
return num_special_triplets
if __name__ == '__main__':
_ = input().split()
# Remove duplicates and sort lists
list_a = sorted(set(map(int, input().rstrip().split())))
list_b = sorted(set(map(int, input().rstrip().split())))
list_c = sorted(set(map(int, input().rstrip().split())))
result = get_num_special_triplets(list_a, list_b, list_c)
print(result)
| <commit_before>
# A special triplet is defined as: a <= b <= c for
# a in list_a, b in list_b, and c in list_c
def get_num_special_triplets(list_a, list_b, list_c):
# remove duplicates and sort lists
list_a = sorted(set(list_a))
list_b = sorted(set(list_b))
list_c = sorted(set(list_c))
num_special_triplets = 0
for b in list_b:
len_a_candidates = num_elements_less_than(b, list_a)
len_c_candidates = num_elements_less_than(b, list_c)
num_special_triplets += 1 * len_a_candidates * len_c_candidates
return num_special_triplets
def num_elements_less_than(target, sorted_list):
for index, candidate in enumerate(sorted_list):
if candidate > target:
return index
return len(sorted_list)
if __name__ == '__main__':
_ = input().split()
list_a = list(map(int, input().rstrip().split()))
list_b = list(map(int, input().rstrip().split()))
list_c = list(map(int, input().rstrip().split()))
num_special_triplets = get_num_special_triplets(list_a, list_b, list_c)
print(num_special_triplets)
<commit_msg>Improve time complexity of solution<commit_after> | def get_num_special_triplets(list_a, list_b, list_c):
a_index = 0
c_index = 0
num_special_triplets = 0
for b in list_b:
while a_index < len(list_a) and list_a[a_index] <= b:
a_index += 1
while c_index < len(list_c) and list_c[c_index] <= b:
c_index += 1
num_special_triplets += a_index * c_index
return num_special_triplets
if __name__ == '__main__':
_ = input().split()
# Remove duplicates and sort lists
list_a = sorted(set(map(int, input().rstrip().split())))
list_b = sorted(set(map(int, input().rstrip().split())))
list_c = sorted(set(map(int, input().rstrip().split())))
result = get_num_special_triplets(list_a, list_b, list_c)
print(result)
|
# A special triplet is defined as: a <= b <= c for
# a in list_a, b in list_b, and c in list_c
def get_num_special_triplets(list_a, list_b, list_c):
# remove duplicates and sort lists
list_a = sorted(set(list_a))
list_b = sorted(set(list_b))
list_c = sorted(set(list_c))
num_special_triplets = 0
for b in list_b:
len_a_candidates = num_elements_less_than(b, list_a)
len_c_candidates = num_elements_less_than(b, list_c)
num_special_triplets += 1 * len_a_candidates * len_c_candidates
return num_special_triplets
def num_elements_less_than(target, sorted_list):
for index, candidate in enumerate(sorted_list):
if candidate > target:
return index
return len(sorted_list)
if __name__ == '__main__':
_ = input().split()
list_a = list(map(int, input().rstrip().split()))
list_b = list(map(int, input().rstrip().split()))
list_c = list(map(int, input().rstrip().split()))
num_special_triplets = get_num_special_triplets(list_a, list_b, list_c)
print(num_special_triplets)
Improve time complexity of solutiondef get_num_special_triplets(list_a, list_b, list_c):
a_index = 0
c_index = 0
num_special_triplets = 0
for b in list_b:
while a_index < len(list_a) and list_a[a_index] <= b:
a_index += 1
while c_index < len(list_c) and list_c[c_index] <= b:
c_index += 1
num_special_triplets += a_index * c_index
return num_special_triplets
if __name__ == '__main__':
_ = input().split()
# Remove duplicates and sort lists
list_a = sorted(set(map(int, input().rstrip().split())))
list_b = sorted(set(map(int, input().rstrip().split())))
list_c = sorted(set(map(int, input().rstrip().split())))
result = get_num_special_triplets(list_a, list_b, list_c)
print(result)
| <commit_before>
# A special triplet is defined as: a <= b <= c for
# a in list_a, b in list_b, and c in list_c
def get_num_special_triplets(list_a, list_b, list_c):
# remove duplicates and sort lists
list_a = sorted(set(list_a))
list_b = sorted(set(list_b))
list_c = sorted(set(list_c))
num_special_triplets = 0
for b in list_b:
len_a_candidates = num_elements_less_than(b, list_a)
len_c_candidates = num_elements_less_than(b, list_c)
num_special_triplets += 1 * len_a_candidates * len_c_candidates
return num_special_triplets
def num_elements_less_than(target, sorted_list):
for index, candidate in enumerate(sorted_list):
if candidate > target:
return index
return len(sorted_list)
if __name__ == '__main__':
_ = input().split()
list_a = list(map(int, input().rstrip().split()))
list_b = list(map(int, input().rstrip().split()))
list_c = list(map(int, input().rstrip().split()))
num_special_triplets = get_num_special_triplets(list_a, list_b, list_c)
print(num_special_triplets)
<commit_msg>Improve time complexity of solution<commit_after>def get_num_special_triplets(list_a, list_b, list_c):
a_index = 0
c_index = 0
num_special_triplets = 0
for b in list_b:
while a_index < len(list_a) and list_a[a_index] <= b:
a_index += 1
while c_index < len(list_c) and list_c[c_index] <= b:
c_index += 1
num_special_triplets += a_index * c_index
return num_special_triplets
if __name__ == '__main__':
_ = input().split()
# Remove duplicates and sort lists
list_a = sorted(set(map(int, input().rstrip().split())))
list_b = sorted(set(map(int, input().rstrip().split())))
list_c = sorted(set(map(int, input().rstrip().split())))
result = get_num_special_triplets(list_a, list_b, list_c)
print(result)
|
74feef6094d884b0116fef895885aa47233801c1 | gitdir/__init__.py | gitdir/__init__.py | import os
import pathlib
GITDIR = pathlib.Path(os.environ.get('GITDIR', '/opt/git')) #TODO check permissions
| import os
import pathlib
GLOBAL_GITDIR = pathlib.Path('/opt/git')  # system-wide git directory
LOCAL_GITDIR = pathlib.Path.home() / 'git'  # per-user fallback

# Resolve GITDIR: an explicit $GITDIR environment override wins; otherwise
# prefer the user-local directory when the global one does not exist.
# BUG FIX: 'is.environ' was a syntax error ('is' is a keyword) -- os.environ.
if 'GITDIR' in os.environ:
    GITDIR = pathlib.Path(os.environ['GITDIR'])
elif LOCAL_GITDIR.exists() and not GLOBAL_GITDIR.exists(): #TODO check permissions
    GITDIR = LOCAL_GITDIR
else:
    GITDIR = GLOBAL_GITDIR
| Fix GITDIR constant to use local gitdir if global doesn't exist | Fix GITDIR constant to use local gitdir if global doesn't exist
| Python | mit | fenhl/gitdir | import os
import pathlib
GITDIR = pathlib.Path(os.environ.get('GITDIR', '/opt/git')) #TODO check permissions
Fix GITDIR constant to use local gitdir if global doesn't exist | import os
import pathlib
GLOBAL_GITDIR = pathlib.Path('/opt/git')  # system-wide git directory
LOCAL_GITDIR = pathlib.Path.home() / 'git'  # per-user fallback

# Resolve GITDIR: an explicit $GITDIR environment override wins; otherwise
# prefer the user-local directory when the global one does not exist.
# BUG FIX: 'is.environ' was a syntax error ('is' is a keyword) -- os.environ.
if 'GITDIR' in os.environ:
    GITDIR = pathlib.Path(os.environ['GITDIR'])
elif LOCAL_GITDIR.exists() and not GLOBAL_GITDIR.exists(): #TODO check permissions
    GITDIR = LOCAL_GITDIR
else:
    GITDIR = GLOBAL_GITDIR
| <commit_before>import os
import pathlib
GITDIR = pathlib.Path(os.environ.get('GITDIR', '/opt/git')) #TODO check permissions
<commit_msg>Fix GITDIR constant to use local gitdir if global doesn't exist<commit_after> | import os
import pathlib
GLOBAL_GITDIR = pathlib.Path('/opt/git')
LOCAL_GITDIR = pathlib.Path.home() / 'git'
if 'GITDIR' in is.environ:
GITDIR = pathlib.Path(os.environ['GITDIR'])
elif LOCAL_GITDIR.exists() and not GLOBAL_GITDIR.exists(): #TODO check permissions
GITDIR = LOCAL_GITDIR
else:
GITDIR = GLOBAL_GITDIR
| import os
import pathlib
GITDIR = pathlib.Path(os.environ.get('GITDIR', '/opt/git')) #TODO check permissions
Fix GITDIR constant to use local gitdir if global doesn't existimport os
import pathlib
GLOBAL_GITDIR = pathlib.Path('/opt/git')
LOCAL_GITDIR = pathlib.Path.home() / 'git'
if 'GITDIR' in is.environ:
GITDIR = pathlib.Path(os.environ['GITDIR'])
elif LOCAL_GITDIR.exists() and not GLOBAL_GITDIR.exists(): #TODO check permissions
GITDIR = LOCAL_GITDIR
else:
GITDIR = GLOBAL_GITDIR
| <commit_before>import os
import pathlib
GITDIR = pathlib.Path(os.environ.get('GITDIR', '/opt/git')) #TODO check permissions
<commit_msg>Fix GITDIR constant to use local gitdir if global doesn't exist<commit_after>import os
import pathlib
GLOBAL_GITDIR = pathlib.Path('/opt/git')
LOCAL_GITDIR = pathlib.Path.home() / 'git'
if 'GITDIR' in is.environ:
GITDIR = pathlib.Path(os.environ['GITDIR'])
elif LOCAL_GITDIR.exists() and not GLOBAL_GITDIR.exists(): #TODO check permissions
GITDIR = LOCAL_GITDIR
else:
GITDIR = GLOBAL_GITDIR
|
4947ebf9460c2cf2ba8338de92601804dec2148a | src/svg_icons/templatetags/svg_icons.py | src/svg_icons/templatetags/svg_icons.py | import json
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
register = Library()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
    """Render a SVG icon defined in a json file to our template.

    Looks up *name* in the JSON source file configured by
    ``SVG_ICONS_SOURCE_FILE``, caching the resolved path list.

    ..:json example (stripped)::

        {
            "icons": [
                {
                    "icon": {
                        "paths": [
                            "M365.339 474.828c-19.319-12.616-42.222-18.062....."
                        ]
                    },
                    "properties": {
                        "name": "tools"
                    }
                }
            ]
        }

    Raises ValueError when the source file setting is missing and
    TemplateSyntaxError when the requested icon is not in the file.
    """
    prefix = getattr(settings, 'SVG_ICONS_CACHE_KEY_PREFIX', 'svg-icons')
    cache_key = ':'.join([prefix, name])
    icon_paths = cache.get(cache_key)
    if not icon_paths:
        source_file = getattr(settings, 'SVG_ICONS_SOURCE_FILE', False)
        if not source_file:
            raise ValueError("SVG_ICONS_SOURCE_FILE needs to be set")
        # BUG FIX: the file handle was previously never closed.
        with open(source_file, 'r') as fh:
            data = json.load(fh)
        # Last matching entry wins, as in the original linear scan.
        for icon_data in data['icons']:
            if name != icon_data['properties']['name']:
                continue
            icon_paths = icon_data['icon']['paths']
        if not icon_paths:
            raise TemplateSyntaxError("Requested icon does not exist")
        cache.set(cache_key, icon_paths)

    width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
    height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
    return {
        'width': kwargs.get('size', width),
        'height': kwargs.get('size', height),
        'className': kwargs.get('className'),
        'paths': icon_paths,
    }
| import json
from importlib import import_module
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
# Resolve the icon reader class from its dotted path in settings.
reader_class = getattr(settings, 'SVG_ICONS_READER_CLASS', 'svg_icons.readers.icomoon.IcomoonReader')
try:
    module, cls = reader_class.rsplit('.', 1)
    module = import_module(module)
    Reader = getattr(module, cls)
# BUG FIX: a valid module with a misspelled class name raises
# AttributeError, which the ImportError-only handler let escape.
except (ImportError, AttributeError):
    raise ValueError("No valid icon reader class found.")

register = Library()
icons = Reader()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
    """Build the template context for one SVG icon.

    ``size`` overrides both ``width`` and ``height``; otherwise each falls
    back to its setting default. The path data comes from the configured
    icon reader.
    """
    fallback_width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
    fallback_height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
    return {
        'width': kwargs.get('size', fallback_width),
        'height': kwargs.get('size', fallback_height),
        'className': kwargs.get('className'),
        'paths': icons.get_svg_paths(name),
    }
| Use the new reader classes in the template tag | Use the new reader classes in the template tag
| Python | apache-2.0 | mikedingjan/django-svg-icons,mikedingjan/django-svg-icons | import json
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
register = Library()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
"""Render a SVG icon defined in a json file to our template.
..:json example (stripped)::
{
"icons": [
{
"icon": {
"paths": [
"M365.339 474.828c-19.319-12.616-42.222-18.062....."
]
},
"properties": {
"name": "tools"
}
}
]
}
"""
cache_key = ':'.join([
getattr(settings, 'SVG_ICONS_CACHE_KEY_PREFIX', 'svg-icons'), name])
icon_paths = cache.get(cache_key)
if not icon_paths:
source_file = getattr(settings, 'SVG_ICONS_SOURCE_FILE', False)
if not source_file:
raise ValueError("SVG_ICONS_SOURCE_FILE needs to be set")
data = json.load(open(source_file, 'r'))
for icon_data in data['icons']:
if name != icon_data['properties']['name']:
continue
icon_paths = icon_data['icon']['paths']
if not icon_paths:
raise TemplateSyntaxError("Requested icon does not exist")
cache.set(cache_key, icon_paths)
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
'paths': icon_paths,
}
Use the new reader classes in the template tag | import json
from importlib import import_module
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
reader_class = getattr(settings, 'SVG_ICONS_READER_CLASS', 'svg_icons.readers.icomoon.IcomoonReader')
try:
module, cls = reader_class.rsplit('.', 1)
module = import_module(module)
Reader = getattr(module, cls)
except ImportError:
raise ValueError("No valid icon reader class found.")
register = Library()
icons = Reader()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
"""Render the SVG icon paths returned by the
icon reader in the template.
"""
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
'paths': icons.get_svg_paths(name),
}
| <commit_before>import json
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
register = Library()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
"""Render a SVG icon defined in a json file to our template.
..:json example (stripped)::
{
"icons": [
{
"icon": {
"paths": [
"M365.339 474.828c-19.319-12.616-42.222-18.062....."
]
},
"properties": {
"name": "tools"
}
}
]
}
"""
cache_key = ':'.join([
getattr(settings, 'SVG_ICONS_CACHE_KEY_PREFIX', 'svg-icons'), name])
icon_paths = cache.get(cache_key)
if not icon_paths:
source_file = getattr(settings, 'SVG_ICONS_SOURCE_FILE', False)
if not source_file:
raise ValueError("SVG_ICONS_SOURCE_FILE needs to be set")
data = json.load(open(source_file, 'r'))
for icon_data in data['icons']:
if name != icon_data['properties']['name']:
continue
icon_paths = icon_data['icon']['paths']
if not icon_paths:
raise TemplateSyntaxError("Requested icon does not exist")
cache.set(cache_key, icon_paths)
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
'paths': icon_paths,
}
<commit_msg>Use the new reader classes in the template tag<commit_after> | import json
from importlib import import_module
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
reader_class = getattr(settings, 'SVG_ICONS_READER_CLASS', 'svg_icons.readers.icomoon.IcomoonReader')
try:
module, cls = reader_class.rsplit('.', 1)
module = import_module(module)
Reader = getattr(module, cls)
except ImportError:
raise ValueError("No valid icon reader class found.")
register = Library()
icons = Reader()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
"""Render the SVG icon paths returned by the
icon reader in the template.
"""
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
'paths': icons.get_svg_paths(name),
}
| import json
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
register = Library()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
"""Render a SVG icon defined in a json file to our template.
..:json example (stripped)::
{
"icons": [
{
"icon": {
"paths": [
"M365.339 474.828c-19.319-12.616-42.222-18.062....."
]
},
"properties": {
"name": "tools"
}
}
]
}
"""
cache_key = ':'.join([
getattr(settings, 'SVG_ICONS_CACHE_KEY_PREFIX', 'svg-icons'), name])
icon_paths = cache.get(cache_key)
if not icon_paths:
source_file = getattr(settings, 'SVG_ICONS_SOURCE_FILE', False)
if not source_file:
raise ValueError("SVG_ICONS_SOURCE_FILE needs to be set")
data = json.load(open(source_file, 'r'))
for icon_data in data['icons']:
if name != icon_data['properties']['name']:
continue
icon_paths = icon_data['icon']['paths']
if not icon_paths:
raise TemplateSyntaxError("Requested icon does not exist")
cache.set(cache_key, icon_paths)
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
'paths': icon_paths,
}
Use the new reader classes in the template tagimport json
from importlib import import_module
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
reader_class = getattr(settings, 'SVG_ICONS_READER_CLASS', 'svg_icons.readers.icomoon.IcomoonReader')
try:
module, cls = reader_class.rsplit('.', 1)
module = import_module(module)
Reader = getattr(module, cls)
except ImportError:
raise ValueError("No valid icon reader class found.")
register = Library()
icons = Reader()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
"""Render the SVG icon paths returned by the
icon reader in the template.
"""
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
'paths': icons.get_svg_paths(name),
}
| <commit_before>import json
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
register = Library()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
"""Render a SVG icon defined in a json file to our template.
..:json example (stripped)::
{
"icons": [
{
"icon": {
"paths": [
"M365.339 474.828c-19.319-12.616-42.222-18.062....."
]
},
"properties": {
"name": "tools"
}
}
]
}
"""
cache_key = ':'.join([
getattr(settings, 'SVG_ICONS_CACHE_KEY_PREFIX', 'svg-icons'), name])
icon_paths = cache.get(cache_key)
if not icon_paths:
source_file = getattr(settings, 'SVG_ICONS_SOURCE_FILE', False)
if not source_file:
raise ValueError("SVG_ICONS_SOURCE_FILE needs to be set")
data = json.load(open(source_file, 'r'))
for icon_data in data['icons']:
if name != icon_data['properties']['name']:
continue
icon_paths = icon_data['icon']['paths']
if not icon_paths:
raise TemplateSyntaxError("Requested icon does not exist")
cache.set(cache_key, icon_paths)
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
'paths': icon_paths,
}
<commit_msg>Use the new reader classes in the template tag<commit_after>import json
from importlib import import_module
from django.core.cache import cache
from django.conf import settings
from django.template import Library, TemplateSyntaxError
reader_class = getattr(settings, 'SVG_ICONS_READER_CLASS', 'svg_icons.readers.icomoon.IcomoonReader')
try:
module, cls = reader_class.rsplit('.', 1)
module = import_module(module)
Reader = getattr(module, cls)
except ImportError:
raise ValueError("No valid icon reader class found.")
register = Library()
icons = Reader()
@register.inclusion_tag('svg_icons/icon.html')
def icon(name, **kwargs):
"""Render the SVG icon paths returned by the
icon reader in the template.
"""
width = kwargs.get('width', settings.SVG_ICONS_DEFAULT_WIDTH)
height = kwargs.get('height', settings.SVG_ICONS_DEFAULT_HEIGHT)
return {
'width': kwargs.get('size', width),
'height': kwargs.get('size', height),
'className': kwargs.get('className'),
'paths': icons.get_svg_paths(name),
}
|
bcc40b08c59ba8fcb8efc9044c2ea6e11ed9df12 | tests/api/views/users/list_test.py | tests/api/views/users/list_test.py | from tests.data import add_fixtures, users
def test_list_users(db_session, client):
    """GET /users/ lists the single fixture user with a null club."""
    john = users.john()
    add_fixtures(db_session, john)

    response = client.get('/users/')

    assert response.status_code == 200
    expected = {
        u'users': [{
            u'id': john.id,
            u'name': u'John Doe',
            u'club': None,
        }]
    }
    assert response.json == expected
| from tests.data import add_fixtures, users, clubs
def test_list_users(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': None,
}]
}
def test_with_club(db_session, client):
    """GET /users serializes the user's club as a nested object."""
    john = users.john(club=clubs.lva())
    add_fixtures(db_session, john)

    response = client.get('/users')

    assert response.status_code == 200
    expected = {
        u'users': [{
            u'id': john.id,
            u'name': u'John Doe',
            u'club': {
                u'id': john.club.id,
                u'name': u'LV Aachen',
            },
        }]
    }
    assert response.json == expected
def test_with_club_parameter(db_session, client):
    """The ?club= query parameter filters users down to club members."""
    john = users.john(club=clubs.lva())
    add_fixtures(db_session, john, users.jane(), users.max())

    # Unfiltered request lists all three fixture users.
    response = client.get('/users')
    assert response.status_code == 200
    assert len(response.json['users']) == 3

    # Filtering by john's club leaves only john.
    response = client.get('/users?club={club}'.format(club=john.club.id))
    assert response.status_code == 200
    assert len(response.json['users']) == 1
    assert response.json == {
        u'users': [{
            u'id': john.id,
            u'name': u'John Doe',
        }]
    }
| Add more "GET /users" tests | tests/api: Add more "GET /users" tests
| Python | agpl-3.0 | RBE-Avionik/skylines,Harry-R/skylines,RBE-Avionik/skylines,Turbo87/skylines,Harry-R/skylines,shadowoneau/skylines,shadowoneau/skylines,RBE-Avionik/skylines,Turbo87/skylines,skylines-project/skylines,Turbo87/skylines,RBE-Avionik/skylines,shadowoneau/skylines,Harry-R/skylines,Turbo87/skylines,shadowoneau/skylines,skylines-project/skylines,skylines-project/skylines,skylines-project/skylines,Harry-R/skylines | from tests.data import add_fixtures, users
def test_list_users(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': None,
}]
}
tests/api: Add more "GET /users" tests | from tests.data import add_fixtures, users, clubs
def test_list_users(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': None,
}]
}
def test_with_club(db_session, client):
john = users.john(club=clubs.lva())
add_fixtures(db_session, john)
res = client.get('/users')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': {
u'id': john.club.id,
u'name': u'LV Aachen',
},
}]
}
def test_with_club_parameter(db_session, client):
john = users.john(club=clubs.lva())
add_fixtures(db_session, john, users.jane(), users.max())
res = client.get('/users')
assert res.status_code == 200
assert len(res.json['users']) == 3
res = client.get('/users?club={club}'.format(club=john.club.id))
assert res.status_code == 200
assert len(res.json['users']) == 1
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
}]
}
| <commit_before>from tests.data import add_fixtures, users
def test_list_users(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': None,
}]
}
<commit_msg>tests/api: Add more "GET /users" tests<commit_after> | from tests.data import add_fixtures, users, clubs
def test_list_users(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': None,
}]
}
def test_with_club(db_session, client):
john = users.john(club=clubs.lva())
add_fixtures(db_session, john)
res = client.get('/users')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': {
u'id': john.club.id,
u'name': u'LV Aachen',
},
}]
}
def test_with_club_parameter(db_session, client):
john = users.john(club=clubs.lva())
add_fixtures(db_session, john, users.jane(), users.max())
res = client.get('/users')
assert res.status_code == 200
assert len(res.json['users']) == 3
res = client.get('/users?club={club}'.format(club=john.club.id))
assert res.status_code == 200
assert len(res.json['users']) == 1
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
}]
}
| from tests.data import add_fixtures, users
def test_list_users(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': None,
}]
}
tests/api: Add more "GET /users" testsfrom tests.data import add_fixtures, users, clubs
def test_list_users(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': None,
}]
}
def test_with_club(db_session, client):
john = users.john(club=clubs.lva())
add_fixtures(db_session, john)
res = client.get('/users')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': {
u'id': john.club.id,
u'name': u'LV Aachen',
},
}]
}
def test_with_club_parameter(db_session, client):
john = users.john(club=clubs.lva())
add_fixtures(db_session, john, users.jane(), users.max())
res = client.get('/users')
assert res.status_code == 200
assert len(res.json['users']) == 3
res = client.get('/users?club={club}'.format(club=john.club.id))
assert res.status_code == 200
assert len(res.json['users']) == 1
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
}]
}
| <commit_before>from tests.data import add_fixtures, users
def test_list_users(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': None,
}]
}
<commit_msg>tests/api: Add more "GET /users" tests<commit_after>from tests.data import add_fixtures, users, clubs
def test_list_users(db_session, client):
john = users.john()
add_fixtures(db_session, john)
res = client.get('/users/')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': None,
}]
}
def test_with_club(db_session, client):
john = users.john(club=clubs.lva())
add_fixtures(db_session, john)
res = client.get('/users')
assert res.status_code == 200
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
u'club': {
u'id': john.club.id,
u'name': u'LV Aachen',
},
}]
}
def test_with_club_parameter(db_session, client):
john = users.john(club=clubs.lva())
add_fixtures(db_session, john, users.jane(), users.max())
res = client.get('/users')
assert res.status_code == 200
assert len(res.json['users']) == 3
res = client.get('/users?club={club}'.format(club=john.club.id))
assert res.status_code == 200
assert len(res.json['users']) == 1
assert res.json == {
u'users': [{
u'id': john.id,
u'name': u'John Doe',
}]
}
|
e593306092292f72009e13bafe1cbb83f85d7937 | indra/bel/ndex_client.py | indra/bel/ndex_client.py | import requests
import json
import time
ndex_base_url = 'http://services.bigmech.ndexbio.org'
def send_request(url_suffix, params):
    """POST *params* as JSON to the NDEx service and return the result text."""
    payload = json.dumps(params)
    response = requests.post(ndex_base_url + url_suffix, data=payload)
    return get_result(response)
def get_result(res):
status = res.status_code
if status == 200:
return res.text
task_id = res.json()['task_id']
print 'NDEx services task submitted...' % task_id
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx services task complete.' % task_id
return res.text
| import requests
import json
import time
ndex_base_url = 'http://services.bigmech.ndexbio.org'
def send_request(url_suffix, params):
    """POST *params* as JSON to the NDEx service and return the result text."""
    payload = json.dumps(params)
    response = requests.post(ndex_base_url + url_suffix, data=payload)
    return get_result(response)
def get_result(res):
    """Return the response text, polling the NDEx task queue when async.

    A 200 response already carries the payload; any other status means the
    service queued a task whose /task/<id> endpoint is polled every 5
    seconds until it reports 200.
    """
    status = res.status_code
    if status == 200:
        # Synchronous result: payload is already in the response body.
        return res.text
    # Otherwise the service queued an async task to poll.
    task_id = res.json()['task_id']
    print 'NDEx task submitted...'
    time_used = 0
    try:
        while status != 200:
            res = requests.get(ndex_base_url + '/task/' + task_id)
            status = res.status_code
            if status != 200:
                time.sleep(5)
                time_used += 5
    except KeyError:
        # NOTE(review): no statement in the try block appears to raise
        # KeyError, so this handler looks unreachable; 'next' below is a
        # no-op expression, not a loop control -- confirm intent.
        next
        return None
    print 'NDEx task complete.'
    return res.text
| Fix messages in NDEx client | Fix messages in NDEx client
| Python | bsd-2-clause | jmuhlich/indra,pvtodorov/indra,pvtodorov/indra,bgyori/indra,sorgerlab/belpy,bgyori/indra,johnbachman/belpy,johnbachman/belpy,johnbachman/indra,sorgerlab/indra,johnbachman/indra,bgyori/indra,johnbachman/belpy,sorgerlab/indra,pvtodorov/indra,sorgerlab/indra,johnbachman/indra,jmuhlich/indra,jmuhlich/indra,sorgerlab/belpy,sorgerlab/belpy,pvtodorov/indra | import requests
import json
import time
ndex_base_url = 'http://services.bigmech.ndexbio.org'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
if status == 200:
return res.text
task_id = res.json()['task_id']
print 'NDEx services task submitted...' % task_id
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx services task complete.' % task_id
return res.text
Fix messages in NDEx client | import requests
import json
import time
ndex_base_url = 'http://services.bigmech.ndexbio.org'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
if status == 200:
return res.text
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
| <commit_before>import requests
import json
import time
ndex_base_url = 'http://services.bigmech.ndexbio.org'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
if status == 200:
return res.text
task_id = res.json()['task_id']
print 'NDEx services task submitted...' % task_id
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx services task complete.' % task_id
return res.text
<commit_msg>Fix messages in NDEx client<commit_after> | import requests
import json
import time
ndex_base_url = 'http://services.bigmech.ndexbio.org'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
if status == 200:
return res.text
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
| import requests
import json
import time
ndex_base_url = 'http://services.bigmech.ndexbio.org'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
if status == 200:
return res.text
task_id = res.json()['task_id']
print 'NDEx services task submitted...' % task_id
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx services task complete.' % task_id
return res.text
Fix messages in NDEx clientimport requests
import json
import time
ndex_base_url = 'http://services.bigmech.ndexbio.org'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
if status == 200:
return res.text
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
| <commit_before>import requests
import json
import time
ndex_base_url = 'http://services.bigmech.ndexbio.org'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
if status == 200:
return res.text
task_id = res.json()['task_id']
print 'NDEx services task submitted...' % task_id
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx services task complete.' % task_id
return res.text
<commit_msg>Fix messages in NDEx client<commit_after>import requests
import json
import time
ndex_base_url = 'http://services.bigmech.ndexbio.org'
def send_request(url_suffix, params):
res = requests.post(ndex_base_url + url_suffix, data=json.dumps(params))
res_json = get_result(res)
return res_json
def get_result(res):
status = res.status_code
if status == 200:
return res.text
task_id = res.json()['task_id']
print 'NDEx task submitted...'
time_used = 0
try:
while status != 200:
res = requests.get(ndex_base_url + '/task/' + task_id)
status = res.status_code
if status != 200:
time.sleep(5)
time_used += 5
except KeyError:
next
return None
print 'NDEx task complete.'
return res.text
|
e57c2cefa9e8403aa9a2f27d791604a179c8b998 | quickfileopen.py | quickfileopen.py | import sublime
import sublime_plugin
class QuickFileOpenCommand(sublime_plugin.WindowCommand):
    """Show a quick panel of preconfigured files and open the chosen one.

    The file list comes from the 'files' key of
    QuickFileOpen.sublime-settings.
    """

    def run(self):
        settings = sublime.load_settings('QuickFileOpen.sublime-settings')
        files = settings.get('files')
        # isinstance is the idiomatic check; type() == list also rejects
        # list subclasses.
        if isinstance(files, list):
            self.window.show_quick_panel(files, self.on_done)
        elif files is None:
            self.window.show_quick_panel(['Set the \'files\' setting to use QuickFileOpen'], None)
        else:
            sublime.error_message('The \'files\' setting must be a list')

    def on_done(self, selected):
        # BUG FIX: selected is -1 when the panel is dismissed without a
        # choice; without this guard files[-1] wrongly opened the last file.
        if selected == -1:
            return
        settings = sublime.load_settings('QuickFileOpen.sublime-settings')
        files = settings.get('files')
        fileName = files[selected]
        self.window.open_file(fileName)
| import sublime
import sublime_plugin
class QuickFileOpenCommand(sublime_plugin.WindowCommand):
    """Show a quick panel of preconfigured files and open the chosen one.

    The file list comes from the 'files' key of
    QuickFileOpen.sublime-settings.
    """

    def run(self):
        settings = sublime.load_settings('QuickFileOpen.sublime-settings')
        files = settings.get('files')
        # isinstance is the idiomatic check; type() == list also rejects
        # list subclasses.
        if isinstance(files, list):
            self.window.show_quick_panel(files, self.on_done)
        elif files is None:
            self.window.show_quick_panel(['Set the \'files\' setting to use QuickFileOpen'], None)
        else:
            sublime.error_message('The \'files\' setting must be a list')

    def on_done(self, selected):
        # selected is -1 when the panel is dismissed without a choice.
        if selected == -1:
            return
        settings = sublime.load_settings('QuickFileOpen.sublime-settings')
        files = settings.get('files')
        fileName = files[selected]
        self.window.open_file(fileName)
| Handle the case where the user exits the panel | Handle the case where the user exits the panel
| Python | mit | gsingh93/sublime-quick-file-open | import sublime
import sublime_plugin
class QuickFileOpenCommand(sublime_plugin.WindowCommand):
def run(self):
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
if type(files) == list:
self.window.show_quick_panel(files, self.on_done)
elif files is None:
self.window.show_quick_panel(['Set the \'files\' setting to use QuickFileOpen'], None)
else:
sublime.error_message('The \'files\' setting must be a list')
def on_done(self, selected):
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
fileName = files[selected]
self.window.open_file(fileName)
Handle the case where the user exits the panel | import sublime
import sublime_plugin
class QuickFileOpenCommand(sublime_plugin.WindowCommand):
def run(self):
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
if type(files) == list:
self.window.show_quick_panel(files, self.on_done)
elif files is None:
self.window.show_quick_panel(['Set the \'files\' setting to use QuickFileOpen'], None)
else:
sublime.error_message('The \'files\' setting must be a list')
def on_done(self, selected):
if selected == -1:
return
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
fileName = files[selected]
self.window.open_file(fileName)
| <commit_before>import sublime
import sublime_plugin
class QuickFileOpenCommand(sublime_plugin.WindowCommand):
def run(self):
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
if type(files) == list:
self.window.show_quick_panel(files, self.on_done)
elif files is None:
self.window.show_quick_panel(['Set the \'files\' setting to use QuickFileOpen'], None)
else:
sublime.error_message('The \'files\' setting must be a list')
def on_done(self, selected):
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
fileName = files[selected]
self.window.open_file(fileName)
<commit_msg>Handle the case where the user exits the panel<commit_after> | import sublime
import sublime_plugin
class QuickFileOpenCommand(sublime_plugin.WindowCommand):
def run(self):
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
if type(files) == list:
self.window.show_quick_panel(files, self.on_done)
elif files is None:
self.window.show_quick_panel(['Set the \'files\' setting to use QuickFileOpen'], None)
else:
sublime.error_message('The \'files\' setting must be a list')
def on_done(self, selected):
if selected == -1:
return
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
fileName = files[selected]
self.window.open_file(fileName)
| import sublime
import sublime_plugin
class QuickFileOpenCommand(sublime_plugin.WindowCommand):
def run(self):
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
if type(files) == list:
self.window.show_quick_panel(files, self.on_done)
elif files is None:
self.window.show_quick_panel(['Set the \'files\' setting to use QuickFileOpen'], None)
else:
sublime.error_message('The \'files\' setting must be a list')
def on_done(self, selected):
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
fileName = files[selected]
self.window.open_file(fileName)
Handle the case where the user exits the panelimport sublime
import sublime_plugin
class QuickFileOpenCommand(sublime_plugin.WindowCommand):
def run(self):
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
if type(files) == list:
self.window.show_quick_panel(files, self.on_done)
elif files is None:
self.window.show_quick_panel(['Set the \'files\' setting to use QuickFileOpen'], None)
else:
sublime.error_message('The \'files\' setting must be a list')
def on_done(self, selected):
if selected == -1:
return
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
fileName = files[selected]
self.window.open_file(fileName)
| <commit_before>import sublime
import sublime_plugin
class QuickFileOpenCommand(sublime_plugin.WindowCommand):
def run(self):
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
if type(files) == list:
self.window.show_quick_panel(files, self.on_done)
elif files is None:
self.window.show_quick_panel(['Set the \'files\' setting to use QuickFileOpen'], None)
else:
sublime.error_message('The \'files\' setting must be a list')
def on_done(self, selected):
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
fileName = files[selected]
self.window.open_file(fileName)
<commit_msg>Handle the case where the user exits the panel<commit_after>import sublime
import sublime_plugin
class QuickFileOpenCommand(sublime_plugin.WindowCommand):
def run(self):
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
if type(files) == list:
self.window.show_quick_panel(files, self.on_done)
elif files is None:
self.window.show_quick_panel(['Set the \'files\' setting to use QuickFileOpen'], None)
else:
sublime.error_message('The \'files\' setting must be a list')
def on_done(self, selected):
if selected == -1:
return
settings = sublime.load_settings('QuickFileOpen.sublime-settings')
files = settings.get('files')
fileName = files[selected]
self.window.open_file(fileName)
|
d5ad324355e0abdf0a6bdcb41e1f07224742b537 | src/main.py | src/main.py | #!/usr/bin/env python3
# card-fight-thingy - Simplistic battle card game... thingy
#
# The MIT License (MIT)
#
# Copyright (c) 2015 The Underscores
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import game
import menu
menu.init()
menu.chooseOption()
| #!/usr/bin/env python3
# card-fight-thingy - Simplistic battle card game... thingy
#
# The MIT License (MIT)
#
# Copyright (c) 2015 The Underscores
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import game
import menu
menu.init()
menu.chooseOption()
| Remove needless import of sys module | Remove needless import of sys module
| Python | mit | TheUnderscores/card-fight-thingy | #!/usr/bin/env python3
# card-fight-thingy - Simplistic battle card game... thingy
#
# The MIT License (MIT)
#
# Copyright (c) 2015 The Underscores
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import game
import menu
menu.init()
menu.chooseOption()
Remove needless import of sys module | #!/usr/bin/env python3
# card-fight-thingy - Simplistic battle card game... thingy
#
# The MIT License (MIT)
#
# Copyright (c) 2015 The Underscores
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import game
import menu
menu.init()
menu.chooseOption()
| <commit_before>#!/usr/bin/env python3
# card-fight-thingy - Simplistic battle card game... thingy
#
# The MIT License (MIT)
#
# Copyright (c) 2015 The Underscores
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import game
import menu
menu.init()
menu.chooseOption()
<commit_msg>Remove needless import of sys module<commit_after> | #!/usr/bin/env python3
# card-fight-thingy - Simplistic battle card game... thingy
#
# The MIT License (MIT)
#
# Copyright (c) 2015 The Underscores
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import game
import menu
menu.init()
menu.chooseOption()
| #!/usr/bin/env python3
# card-fight-thingy - Simplistic battle card game... thingy
#
# The MIT License (MIT)
#
# Copyright (c) 2015 The Underscores
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import game
import menu
menu.init()
menu.chooseOption()
Remove needless import of sys module#!/usr/bin/env python3
# card-fight-thingy - Simplistic battle card game... thingy
#
# The MIT License (MIT)
#
# Copyright (c) 2015 The Underscores
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import game
import menu
menu.init()
menu.chooseOption()
| <commit_before>#!/usr/bin/env python3
# card-fight-thingy - Simplistic battle card game... thingy
#
# The MIT License (MIT)
#
# Copyright (c) 2015 The Underscores
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import game
import menu
menu.init()
menu.chooseOption()
<commit_msg>Remove needless import of sys module<commit_after>#!/usr/bin/env python3
# card-fight-thingy - Simplistic battle card game... thingy
#
# The MIT License (MIT)
#
# Copyright (c) 2015 The Underscores
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import game
import menu
menu.init()
menu.chooseOption()
|
5bdfb968d6a05fcb727866bee063f996232bf9b8 | tests/matchers/test_called.py | tests/matchers/test_called.py | from unittest.case import TestCase
from mock import Mock
from robber import expect
from robber.matchers.called import Called
class TestCalled(TestCase):
def test_matches(self):
mock = Mock()
mock()
expect(Called(mock).matches()) == True
def test_failure_message(self):
mock = Mock()
called = Called(mock)
message = called.failure_message()
expect(message) == 'Expected {function} to be called'.format(function=mock)
def test_register(self):
expect(expect.matcher('called')) == Called
expect(expect.matcher('__called__')) == Called
def test_not_a_mock(self):
with self.assertRaises(TypeError):
expect(Called("a").matches()) == True
with self.assertRaises(TypeError):
expect(Called(1).matches()) == True
| from unittest import TestCase
from mock import Mock
from robber import expect
from robber.matchers.called import Called
class TestCalled(TestCase):
def test_matches(self):
mock = Mock()
mock()
expect(Called(mock).matches()) == True
def test_failure_message(self):
mock = Mock()
called = Called(mock)
message = called.failure_message()
expect(message) == 'Expected {function} to be called'.format(function=mock)
def test_register(self):
expect(expect.matcher('called')) == Called
expect(expect.matcher('__called__')) == Called
def test_not_a_mock(self):
with self.assertRaises(TypeError):
expect(Called("a").matches()) == True
with self.assertRaises(TypeError):
expect(Called(1).matches()) == True
| Make import compatible with python 2.6 | [r] Make import compatible with python 2.6
| Python | mit | vesln/robber.py | from unittest.case import TestCase
from mock import Mock
from robber import expect
from robber.matchers.called import Called
class TestCalled(TestCase):
def test_matches(self):
mock = Mock()
mock()
expect(Called(mock).matches()) == True
def test_failure_message(self):
mock = Mock()
called = Called(mock)
message = called.failure_message()
expect(message) == 'Expected {function} to be called'.format(function=mock)
def test_register(self):
expect(expect.matcher('called')) == Called
expect(expect.matcher('__called__')) == Called
def test_not_a_mock(self):
with self.assertRaises(TypeError):
expect(Called("a").matches()) == True
with self.assertRaises(TypeError):
expect(Called(1).matches()) == True
[r] Make import compatible with python 2.6 | from unittest import TestCase
from mock import Mock
from robber import expect
from robber.matchers.called import Called
class TestCalled(TestCase):
def test_matches(self):
mock = Mock()
mock()
expect(Called(mock).matches()) == True
def test_failure_message(self):
mock = Mock()
called = Called(mock)
message = called.failure_message()
expect(message) == 'Expected {function} to be called'.format(function=mock)
def test_register(self):
expect(expect.matcher('called')) == Called
expect(expect.matcher('__called__')) == Called
def test_not_a_mock(self):
with self.assertRaises(TypeError):
expect(Called("a").matches()) == True
with self.assertRaises(TypeError):
expect(Called(1).matches()) == True
| <commit_before>from unittest.case import TestCase
from mock import Mock
from robber import expect
from robber.matchers.called import Called
class TestCalled(TestCase):
def test_matches(self):
mock = Mock()
mock()
expect(Called(mock).matches()) == True
def test_failure_message(self):
mock = Mock()
called = Called(mock)
message = called.failure_message()
expect(message) == 'Expected {function} to be called'.format(function=mock)
def test_register(self):
expect(expect.matcher('called')) == Called
expect(expect.matcher('__called__')) == Called
def test_not_a_mock(self):
with self.assertRaises(TypeError):
expect(Called("a").matches()) == True
with self.assertRaises(TypeError):
expect(Called(1).matches()) == True
<commit_msg>[r] Make import compatible with python 2.6<commit_after> | from unittest import TestCase
from mock import Mock
from robber import expect
from robber.matchers.called import Called
class TestCalled(TestCase):
def test_matches(self):
mock = Mock()
mock()
expect(Called(mock).matches()) == True
def test_failure_message(self):
mock = Mock()
called = Called(mock)
message = called.failure_message()
expect(message) == 'Expected {function} to be called'.format(function=mock)
def test_register(self):
expect(expect.matcher('called')) == Called
expect(expect.matcher('__called__')) == Called
def test_not_a_mock(self):
with self.assertRaises(TypeError):
expect(Called("a").matches()) == True
with self.assertRaises(TypeError):
expect(Called(1).matches()) == True
| from unittest.case import TestCase
from mock import Mock
from robber import expect
from robber.matchers.called import Called
class TestCalled(TestCase):
def test_matches(self):
mock = Mock()
mock()
expect(Called(mock).matches()) == True
def test_failure_message(self):
mock = Mock()
called = Called(mock)
message = called.failure_message()
expect(message) == 'Expected {function} to be called'.format(function=mock)
def test_register(self):
expect(expect.matcher('called')) == Called
expect(expect.matcher('__called__')) == Called
def test_not_a_mock(self):
with self.assertRaises(TypeError):
expect(Called("a").matches()) == True
with self.assertRaises(TypeError):
expect(Called(1).matches()) == True
[r] Make import compatible with python 2.6from unittest import TestCase
from mock import Mock
from robber import expect
from robber.matchers.called import Called
class TestCalled(TestCase):
def test_matches(self):
mock = Mock()
mock()
expect(Called(mock).matches()) == True
def test_failure_message(self):
mock = Mock()
called = Called(mock)
message = called.failure_message()
expect(message) == 'Expected {function} to be called'.format(function=mock)
def test_register(self):
expect(expect.matcher('called')) == Called
expect(expect.matcher('__called__')) == Called
def test_not_a_mock(self):
with self.assertRaises(TypeError):
expect(Called("a").matches()) == True
with self.assertRaises(TypeError):
expect(Called(1).matches()) == True
| <commit_before>from unittest.case import TestCase
from mock import Mock
from robber import expect
from robber.matchers.called import Called
class TestCalled(TestCase):
def test_matches(self):
mock = Mock()
mock()
expect(Called(mock).matches()) == True
def test_failure_message(self):
mock = Mock()
called = Called(mock)
message = called.failure_message()
expect(message) == 'Expected {function} to be called'.format(function=mock)
def test_register(self):
expect(expect.matcher('called')) == Called
expect(expect.matcher('__called__')) == Called
def test_not_a_mock(self):
with self.assertRaises(TypeError):
expect(Called("a").matches()) == True
with self.assertRaises(TypeError):
expect(Called(1).matches()) == True
<commit_msg>[r] Make import compatible with python 2.6<commit_after>from unittest import TestCase
from mock import Mock
from robber import expect
from robber.matchers.called import Called
class TestCalled(TestCase):
def test_matches(self):
mock = Mock()
mock()
expect(Called(mock).matches()) == True
def test_failure_message(self):
mock = Mock()
called = Called(mock)
message = called.failure_message()
expect(message) == 'Expected {function} to be called'.format(function=mock)
def test_register(self):
expect(expect.matcher('called')) == Called
expect(expect.matcher('__called__')) == Called
def test_not_a_mock(self):
with self.assertRaises(TypeError):
expect(Called("a").matches()) == True
with self.assertRaises(TypeError):
expect(Called(1).matches()) == True
|
d3b8b948dac6ccce68ccf21311397ce6792fddc6 | tests/test_modules/os_test.py | tests/test_modules/os_test.py | from subprocess import call, check_output
class TestOs:
""" Contains test methods to test
if the vagrant OS got installed properly """
| from subprocess import call, check_output
class TestOs:
""" Contains test methods to test
if the vagrant OS got installed properly """
def uname_kernel(self):
""" returns output of uname -s """
output = check_output(["uname", "-s"]).decode("utf-8").lstrip().rstrip()
return output
def uname_os(self):
""" returns the output of uname -o """
output = check_output(["uname", "-o"]).decode("utf-8").lstrip().rstrip()
return output
def test_uname_kernel(self):
""" tests the output of uname_kernel() """
assert self.uname_kernel() == "Linux"
def test_uname_os(self):
""" tests the output of uname_os() """
assert self.uname_os() == "GNU/Linux"
| Add tests for vagrant guest OS | Add tests for vagrant guest OS
| Python | bsd-3-clause | DevBlend/DevBlend,DevBlend/zenias,byteknacker/fcc-python-vagrant,DevBlend/DevBlend,DevBlend/zenias,DevBlend/zenias,byteknacker/fcc-python-vagrant,DevBlend/DevBlend,DevBlend/zenias,DevBlend/zenias,DevBlend/DevBlend,DevBlend/DevBlend,DevBlend/zenias,DevBlend/DevBlend | from subprocess import call, check_output
class TestOs:
""" Contains test methods to test
if the vagrant OS got installed properly """
Add tests for vagrant guest OS | from subprocess import call, check_output
class TestOs:
""" Contains test methods to test
if the vagrant OS got installed properly """
def uname_kernel(self):
""" returns output of uname -s """
output = check_output(["uname", "-s"]).decode("utf-8").lstrip().rstrip()
return output
def uname_os(self):
""" returns the output of uname -o """
output = check_output(["uname", "-o"]).decode("utf-8").lstrip().rstrip()
return output
def test_uname_kernel(self):
""" tests the output of uname_kernel() """
assert self.uname_kernel() == "Linux"
def test_uname_os(self):
""" tests the output of uname_os() """
assert self.uname_os() == "GNU/Linux"
| <commit_before>from subprocess import call, check_output
class TestOs:
""" Contains test methods to test
if the vagrant OS got installed properly """
<commit_msg>Add tests for vagrant guest OS<commit_after> | from subprocess import call, check_output
class TestOs:
""" Contains test methods to test
if the vagrant OS got installed properly """
def uname_kernel(self):
""" returns output of uname -s """
output = check_output(["uname", "-s"]).decode("utf-8").lstrip().rstrip()
return output
def uname_os(self):
""" returns the output of uname -o """
output = check_output(["uname", "-o"]).decode("utf-8").lstrip().rstrip()
return output
def test_uname_kernel(self):
""" tests the output of uname_kernel() """
assert self.uname_kernel() == "Linux"
def test_uname_os(self):
""" tests the output of uname_os() """
assert self.uname_os() == "GNU/Linux"
| from subprocess import call, check_output
class TestOs:
""" Contains test methods to test
if the vagrant OS got installed properly """
Add tests for vagrant guest OSfrom subprocess import call, check_output
class TestOs:
""" Contains test methods to test
if the vagrant OS got installed properly """
def uname_kernel(self):
""" returns output of uname -s """
output = check_output(["uname", "-s"]).decode("utf-8").lstrip().rstrip()
return output
def uname_os(self):
""" returns the output of uname -o """
output = check_output(["uname", "-o"]).decode("utf-8").lstrip().rstrip()
return output
def test_uname_kernel(self):
""" tests the output of uname_kernel() """
assert self.uname_kernel() == "Linux"
def test_uname_os(self):
""" tests the output of uname_os() """
assert self.uname_os() == "GNU/Linux"
| <commit_before>from subprocess import call, check_output
class TestOs:
""" Contains test methods to test
if the vagrant OS got installed properly """
<commit_msg>Add tests for vagrant guest OS<commit_after>from subprocess import call, check_output
class TestOs:
""" Contains test methods to test
if the vagrant OS got installed properly """
def uname_kernel(self):
""" returns output of uname -s """
output = check_output(["uname", "-s"]).decode("utf-8").lstrip().rstrip()
return output
def uname_os(self):
""" returns the output of uname -o """
output = check_output(["uname", "-o"]).decode("utf-8").lstrip().rstrip()
return output
def test_uname_kernel(self):
""" tests the output of uname_kernel() """
assert self.uname_kernel() == "Linux"
def test_uname_os(self):
""" tests the output of uname_os() """
assert self.uname_os() == "GNU/Linux"
|
83fdf3051786806486f4ff9e4b05616603f7211a | thinc/about.py | thinc/about.py | # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = "thinc"
__version__ = "8.0.0.dev0"
__summary__ = "Practical Machine Learning for NLP"
__uri__ = "https://github.com/explosion/thinc"
__author__ = "Matthew Honnibal"
__email__ = "matt@explosion.ai"
__license__ = "MIT"
__title__ = "thinc"
__release__ = True
| # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = "thinc"
__version__ = "7.4.0.dev2"
__summary__ = "Practical Machine Learning for NLP"
__uri__ = "https://github.com/explosion/thinc"
__author__ = "Matthew Honnibal"
__email__ = "matt@explosion.ai"
__license__ = "MIT"
__title__ = "thinc"
__release__ = True
| Revert "Set version to v8.0.0.dev0" | Revert "Set version to v8.0.0.dev0"
This reverts commit 69732c4f9f7ff1a89b85e9c211991154dcce59f4.
| Python | mit | explosion/thinc,spacy-io/thinc,explosion/thinc,explosion/thinc,explosion/thinc,spacy-io/thinc,spacy-io/thinc | # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = "thinc"
__version__ = "8.0.0.dev0"
__summary__ = "Practical Machine Learning for NLP"
__uri__ = "https://github.com/explosion/thinc"
__author__ = "Matthew Honnibal"
__email__ = "matt@explosion.ai"
__license__ = "MIT"
__title__ = "thinc"
__release__ = True
Revert "Set version to v8.0.0.dev0"
This reverts commit 69732c4f9f7ff1a89b85e9c211991154dcce59f4. | # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = "thinc"
__version__ = "7.4.0.dev2"
__summary__ = "Practical Machine Learning for NLP"
__uri__ = "https://github.com/explosion/thinc"
__author__ = "Matthew Honnibal"
__email__ = "matt@explosion.ai"
__license__ = "MIT"
__title__ = "thinc"
__release__ = True
| <commit_before># inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = "thinc"
__version__ = "8.0.0.dev0"
__summary__ = "Practical Machine Learning for NLP"
__uri__ = "https://github.com/explosion/thinc"
__author__ = "Matthew Honnibal"
__email__ = "matt@explosion.ai"
__license__ = "MIT"
__title__ = "thinc"
__release__ = True
<commit_msg>Revert "Set version to v8.0.0.dev0"
This reverts commit 69732c4f9f7ff1a89b85e9c211991154dcce59f4.<commit_after> | # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = "thinc"
__version__ = "7.4.0.dev2"
__summary__ = "Practical Machine Learning for NLP"
__uri__ = "https://github.com/explosion/thinc"
__author__ = "Matthew Honnibal"
__email__ = "matt@explosion.ai"
__license__ = "MIT"
__title__ = "thinc"
__release__ = True
| # inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = "thinc"
__version__ = "8.0.0.dev0"
__summary__ = "Practical Machine Learning for NLP"
__uri__ = "https://github.com/explosion/thinc"
__author__ = "Matthew Honnibal"
__email__ = "matt@explosion.ai"
__license__ = "MIT"
__title__ = "thinc"
__release__ = True
Revert "Set version to v8.0.0.dev0"
This reverts commit 69732c4f9f7ff1a89b85e9c211991154dcce59f4.# inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = "thinc"
__version__ = "7.4.0.dev2"
__summary__ = "Practical Machine Learning for NLP"
__uri__ = "https://github.com/explosion/thinc"
__author__ = "Matthew Honnibal"
__email__ = "matt@explosion.ai"
__license__ = "MIT"
__title__ = "thinc"
__release__ = True
| <commit_before># inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = "thinc"
__version__ = "8.0.0.dev0"
__summary__ = "Practical Machine Learning for NLP"
__uri__ = "https://github.com/explosion/thinc"
__author__ = "Matthew Honnibal"
__email__ = "matt@explosion.ai"
__license__ = "MIT"
__title__ = "thinc"
__release__ = True
<commit_msg>Revert "Set version to v8.0.0.dev0"
This reverts commit 69732c4f9f7ff1a89b85e9c211991154dcce59f4.<commit_after># inspired from:
# https://python-packaging-user-guide.readthedocs.org/en/latest/single_source_version/
# https://github.com/pypa/warehouse/blob/master/warehouse/__about__.py
__name__ = "thinc"
__version__ = "7.4.0.dev2"
__summary__ = "Practical Machine Learning for NLP"
__uri__ = "https://github.com/explosion/thinc"
__author__ = "Matthew Honnibal"
__email__ = "matt@explosion.ai"
__license__ = "MIT"
__title__ = "thinc"
__release__ = True
|
1126c0e795d94e004fae03edda3e299b2d3b764c | todo/models.py | todo/models.py | from django.db import models
from django.utils.text import slugify
from common.models import TimeStampedModel
class List(TimeStampedModel):
    """A named to-do list; the URL slug is re-derived from ``name`` on save."""

    name = models.CharField(max_length=50)
    # Regenerated from ``name`` in save(), so it is hidden from forms/admin.
    slug = models.CharField(max_length=50, editable=False)

    def __unicode__(self):
        # Python 2 string representation (shown e.g. in the Django admin).
        return self.name

    def save(self, *args, **kwargs):
        # Keep the slug in sync with the (possibly renamed) list on every save.
        self.slug = slugify(self.name)
        super(List, self).save(*args, **kwargs)
class Item(TimeStampedModel):
    """A single entry belonging to a List."""

    # NOTE(review): field name shadows the builtin ``list``; kept as-is since
    # renaming would change the DB column and the public model interface.
    list = models.ForeignKey(List)
    name = models.CharField(max_length=100)
    completed = models.BooleanField(default=False)

    def __unicode__(self):
        # Python 2 string representation (shown e.g. in the Django admin).
        return self.name
| from django.contrib.auth.models import User
from django.db import models
from django.utils.text import slugify
from common.models import DeleteSafeTimeStampedMixin
class List(DeleteSafeTimeStampedMixin):
    """A named to-do list owned by a user; slug is re-derived from ``name`` on save."""

    name = models.CharField(max_length=50)
    # Regenerated from ``name`` in save(), so it is hidden from forms/admin.
    slug = models.CharField(max_length=50, editable=False)
    # Owner of the list; reverse accessor: user.lists_authored.
    author = models.ForeignKey(User, related_name="lists_authored")

    def __unicode__(self):
        # Python 2 string representation (shown e.g. in the Django admin).
        return self.name

    def save(self, *args, **kwargs):
        # Keep the slug in sync with the (possibly renamed) list on every save.
        self.slug = slugify(self.name)
        super(List, self).save(*args, **kwargs)
class Item(DeleteSafeTimeStampedMixin):
    """A single entry in a List."""

    # NOTE(review): field name shadows the builtin ``list``; kept as-is since
    # renaming would change the DB column and the public model interface.
    list = models.ForeignKey(List, related_name="items")
    name = models.CharField(max_length=100)
    completed = models.BooleanField(default=False)
    # Owner of the item; reverse accessor: user.items_authored.
    author = models.ForeignKey(User, related_name="items_authored")

    def __unicode__(self):
        # Python 2 string representation (shown e.g. in the Django admin).
        return self.name
| Attach user with List and Item | Attach user with List and Item
| Python | mit | ajoyoommen/zerrenda,ajoyoommen/zerrenda | from django.db import models
from django.utils.text import slugify
from common.models import TimeStampedModel
class List(TimeStampedModel):
name = models.CharField(max_length=50)
slug = models.CharField(max_length=50, editable=False)
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(List, self).save(*args, **kwargs)
class Item(TimeStampedModel):
list = models.ForeignKey(List)
name = models.CharField(max_length=100)
completed = models.BooleanField(default=False)
def __unicode__(self):
return self.name
Attach user with List and Item | from django.contrib.auth.models import User
from django.db import models
from django.utils.text import slugify
from common.models import DeleteSafeTimeStampedMixin
class List(DeleteSafeTimeStampedMixin):
name = models.CharField(max_length=50)
slug = models.CharField(max_length=50, editable=False)
author = models.ForeignKey(User, related_name="lists_authored")
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(List, self).save(*args, **kwargs)
class Item(DeleteSafeTimeStampedMixin):
list = models.ForeignKey(List, related_name="items")
name = models.CharField(max_length=100)
completed = models.BooleanField(default=False)
author = models.ForeignKey(User, related_name="items_authored")
def __unicode__(self):
return self.name
| <commit_before>from django.db import models
from django.utils.text import slugify
from common.models import TimeStampedModel
class List(TimeStampedModel):
name = models.CharField(max_length=50)
slug = models.CharField(max_length=50, editable=False)
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(List, self).save(*args, **kwargs)
class Item(TimeStampedModel):
list = models.ForeignKey(List)
name = models.CharField(max_length=100)
completed = models.BooleanField(default=False)
def __unicode__(self):
return self.name
<commit_msg>Attach user with List and Item<commit_after> | from django.contrib.auth.models import User
from django.db import models
from django.utils.text import slugify
from common.models import DeleteSafeTimeStampedMixin
class List(DeleteSafeTimeStampedMixin):
name = models.CharField(max_length=50)
slug = models.CharField(max_length=50, editable=False)
author = models.ForeignKey(User, related_name="lists_authored")
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(List, self).save(*args, **kwargs)
class Item(DeleteSafeTimeStampedMixin):
list = models.ForeignKey(List, related_name="items")
name = models.CharField(max_length=100)
completed = models.BooleanField(default=False)
author = models.ForeignKey(User, related_name="items_authored")
def __unicode__(self):
return self.name
| from django.db import models
from django.utils.text import slugify
from common.models import TimeStampedModel
class List(TimeStampedModel):
name = models.CharField(max_length=50)
slug = models.CharField(max_length=50, editable=False)
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(List, self).save(*args, **kwargs)
class Item(TimeStampedModel):
list = models.ForeignKey(List)
name = models.CharField(max_length=100)
completed = models.BooleanField(default=False)
def __unicode__(self):
return self.name
Attach user with List and Itemfrom django.contrib.auth.models import User
from django.db import models
from django.utils.text import slugify
from common.models import DeleteSafeTimeStampedMixin
class List(DeleteSafeTimeStampedMixin):
name = models.CharField(max_length=50)
slug = models.CharField(max_length=50, editable=False)
author = models.ForeignKey(User, related_name="lists_authored")
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(List, self).save(*args, **kwargs)
class Item(DeleteSafeTimeStampedMixin):
list = models.ForeignKey(List, related_name="items")
name = models.CharField(max_length=100)
completed = models.BooleanField(default=False)
author = models.ForeignKey(User, related_name="items_authored")
def __unicode__(self):
return self.name
| <commit_before>from django.db import models
from django.utils.text import slugify
from common.models import TimeStampedModel
class List(TimeStampedModel):
name = models.CharField(max_length=50)
slug = models.CharField(max_length=50, editable=False)
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(List, self).save(*args, **kwargs)
class Item(TimeStampedModel):
list = models.ForeignKey(List)
name = models.CharField(max_length=100)
completed = models.BooleanField(default=False)
def __unicode__(self):
return self.name
<commit_msg>Attach user with List and Item<commit_after>from django.contrib.auth.models import User
from django.db import models
from django.utils.text import slugify
from common.models import DeleteSafeTimeStampedMixin
class List(DeleteSafeTimeStampedMixin):
name = models.CharField(max_length=50)
slug = models.CharField(max_length=50, editable=False)
author = models.ForeignKey(User, related_name="lists_authored")
def __unicode__(self):
return self.name
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
super(List, self).save(*args, **kwargs)
class Item(DeleteSafeTimeStampedMixin):
list = models.ForeignKey(List, related_name="items")
name = models.CharField(max_length=100)
completed = models.BooleanField(default=False)
author = models.ForeignKey(User, related_name="items_authored")
def __unicode__(self):
return self.name
|
5a6a96435b7cf45cbbc5f2b81a7be84cd986b456 | haas/__main__.py | haas/__main__.py | # -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
import sys # pragma: no cover
from .main import main # pragma: no cover
if __name__ == '__main__':
    # Delegate to the package's main() and propagate its exit status to the shell.
    sys.exit(main())
| # -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
import sys # pragma: no cover
from haas.main import main # pragma: no cover
if __name__ == '__main__':
    # Forward the runner's exit status to the operating system.
    # (sys.exit(code) is exactly `raise SystemExit(code)`.)
    raise SystemExit(main())
| Use absolute import for main entry point. | Use absolute import for main entry point.
| Python | bsd-3-clause | itziakos/haas,sjagoe/haas,scalative/haas,sjagoe/haas,itziakos/haas,scalative/haas | # -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
import sys # pragma: no cover
from .main import main # pragma: no cover
if __name__ == '__main__':
sys.exit(main())
Use absolute import for main entry point. | # -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
import sys # pragma: no cover
from haas.main import main # pragma: no cover
if __name__ == '__main__':
sys.exit(main())
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
import sys # pragma: no cover
from .main import main # pragma: no cover
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Use absolute import for main entry point.<commit_after> | # -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
import sys # pragma: no cover
from haas.main import main # pragma: no cover
if __name__ == '__main__':
sys.exit(main())
| # -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
import sys # pragma: no cover
from .main import main # pragma: no cover
if __name__ == '__main__':
sys.exit(main())
Use absolute import for main entry point.# -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
import sys # pragma: no cover
from haas.main import main # pragma: no cover
if __name__ == '__main__':
sys.exit(main())
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
import sys # pragma: no cover
from .main import main # pragma: no cover
if __name__ == '__main__':
sys.exit(main())
<commit_msg>Use absolute import for main entry point.<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2013-2014 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
import sys # pragma: no cover
from haas.main import main # pragma: no cover
if __name__ == '__main__':
sys.exit(main())
|
e84d6dc5be2e2ac2d95b81e4df18bc0ad939916a | __init__.py | __init__.py | import os
from flask import Flask, redirect, request, render_template
from flask_mail import Mail, Message
# WSGI application object; Flask uses __name__ to locate templates and static files.
# (Fixed PEP 8 E211: no space between callable and parentheses.)
app = Flask(__name__)

# Upload file extensions accepted by the site; set literal instead of set([...]).
ALLOWED_EXTENSIONS = {'txt', 'png', 'jpg', 'jpeg'}

# Flask-Mail helper bound to the app (reads its settings from app.config).
mail = Mail(app)
@app.route("/")
def index():
return render_template('in-development.html.j2')
if __name__ == "__main__":
app.run(host='0.0.0.0')
| import os
from flask import Flask, redirect, request, render_template
from flask_mail import Mail, Message
# WSGI application object; Flask uses __name__ to locate templates and static files.
# (Fixed PEP 8 E211: no space between callable and parentheses.)
app = Flask(__name__)

# Upload file extensions accepted by the site; set literal instead of set([...]).
ALLOWED_EXTENSIONS = {'txt', 'png', 'jpg', 'jpeg'}

# Flask-Mail helper bound to the app (reads its settings from app.config).
mail = Mail(app)
@app.route("/")
def index():
return render_template('index.html.j2')
if __name__ == "__main__":
app.run()
| Split into new branch for development of the main site | Split into new branch for development of the main site
| Python | mit | JonathanPeterCole/Tech-Support-Site,JonathanPeterCole/Tech-Support-Site | import os
from flask import Flask, redirect, request, render_template
from flask_mail import Mail, Message
app = Flask (__name__)
ALLOWED_EXTENSIONS = set(['txt', 'png', 'jpg', 'jpeg'])
mail = Mail(app)
@app.route("/")
def index():
return render_template('in-development.html.j2')
if __name__ == "__main__":
app.run(host='0.0.0.0')
Split into new branch for development of the main site | import os
from flask import Flask, redirect, request, render_template
from flask_mail import Mail, Message
app = Flask (__name__)
ALLOWED_EXTENSIONS = set(['txt', 'png', 'jpg', 'jpeg'])
mail = Mail(app)
@app.route("/")
def index():
return render_template('index.html.j2')
if __name__ == "__main__":
app.run()
| <commit_before>import os
from flask import Flask, redirect, request, render_template
from flask_mail import Mail, Message
app = Flask (__name__)
ALLOWED_EXTENSIONS = set(['txt', 'png', 'jpg', 'jpeg'])
mail = Mail(app)
@app.route("/")
def index():
return render_template('in-development.html.j2')
if __name__ == "__main__":
app.run(host='0.0.0.0')
<commit_msg>Split into new branch for development of the main site<commit_after> | import os
from flask import Flask, redirect, request, render_template
from flask_mail import Mail, Message
app = Flask (__name__)
ALLOWED_EXTENSIONS = set(['txt', 'png', 'jpg', 'jpeg'])
mail = Mail(app)
@app.route("/")
def index():
return render_template('index.html.j2')
if __name__ == "__main__":
app.run()
| import os
from flask import Flask, redirect, request, render_template
from flask_mail import Mail, Message
app = Flask (__name__)
ALLOWED_EXTENSIONS = set(['txt', 'png', 'jpg', 'jpeg'])
mail = Mail(app)
@app.route("/")
def index():
return render_template('in-development.html.j2')
if __name__ == "__main__":
app.run(host='0.0.0.0')
Split into new branch for development of the main siteimport os
from flask import Flask, redirect, request, render_template
from flask_mail import Mail, Message
app = Flask (__name__)
ALLOWED_EXTENSIONS = set(['txt', 'png', 'jpg', 'jpeg'])
mail = Mail(app)
@app.route("/")
def index():
return render_template('index.html.j2')
if __name__ == "__main__":
app.run()
| <commit_before>import os
from flask import Flask, redirect, request, render_template
from flask_mail import Mail, Message
app = Flask (__name__)
ALLOWED_EXTENSIONS = set(['txt', 'png', 'jpg', 'jpeg'])
mail = Mail(app)
@app.route("/")
def index():
return render_template('in-development.html.j2')
if __name__ == "__main__":
app.run(host='0.0.0.0')
<commit_msg>Split into new branch for development of the main site<commit_after>import os
from flask import Flask, redirect, request, render_template
from flask_mail import Mail, Message
app = Flask (__name__)
ALLOWED_EXTENSIONS = set(['txt', 'png', 'jpg', 'jpeg'])
mail = Mail(app)
@app.route("/")
def index():
return render_template('index.html.j2')
if __name__ == "__main__":
app.run()
|
30609236e703123c464c2e3927417ae677f37c4e | nimp/base_commands/__init__.py | nimp/base_commands/__init__.py | # -*- coding: utf-8 -*-
# Copyright (c) 2014-2019 Dontnod Entertainment
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''' Nimp subcommands declarations '''
__all__ = [
    # Submodules exported by this package; keep alphabetically sorted.
    'build',
    'check',
    'commandlet',
    'dev',
    'download_fileset',
    'fileset',
    'p4',
    'package',
    'run',
    'run_hook',
    'symbol_server',
    'update_symbol_server',
    'upload',
    'upload_fileset',
]
| # -*- coding: utf-8 -*-
# Copyright (c) 2014-2019 Dontnod Entertainment
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''' Nimp subcommands declarations '''
__all__ = [
    # Submodules exported by this package; keep alphabetically sorted.
    'build',
    'check',
    'commandlet',
    'dev',
    'download_fileset',
    'fileset',
    'p4',
    'package',
    'run',
    'symbol_server',
    'update_symbol_server',
    'upload',
    'upload_fileset',
]
| Remove run_hook from list of imported commands. | Remove run_hook from list of imported commands.
| Python | mit | dontnod/nimp | # -*- coding: utf-8 -*-
# Copyright (c) 2014-2019 Dontnod Entertainment
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''' Nimp subcommands declarations '''
__all__ = [
'build',
'check',
'commandlet',
'dev',
'download_fileset',
'fileset',
'p4',
'package',
'run',
'run_hook',
'symbol_server',
'update_symbol_server',
'upload',
'upload_fileset',
]
Remove run_hook from list of imported commands. | # -*- coding: utf-8 -*-
# Copyright (c) 2014-2019 Dontnod Entertainment
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''' Nimp subcommands declarations '''
__all__ = [
'build',
'check',
'commandlet',
'dev',
'download_fileset',
'fileset',
'p4',
'package',
'run',
'symbol_server',
'update_symbol_server',
'upload',
'upload_fileset',
]
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2014-2019 Dontnod Entertainment
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''' Nimp subcommands declarations '''
__all__ = [
'build',
'check',
'commandlet',
'dev',
'download_fileset',
'fileset',
'p4',
'package',
'run',
'run_hook',
'symbol_server',
'update_symbol_server',
'upload',
'upload_fileset',
]
<commit_msg>Remove run_hook from list of imported commands.<commit_after> | # -*- coding: utf-8 -*-
# Copyright (c) 2014-2019 Dontnod Entertainment
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''' Nimp subcommands declarations '''
__all__ = [
'build',
'check',
'commandlet',
'dev',
'download_fileset',
'fileset',
'p4',
'package',
'run',
'symbol_server',
'update_symbol_server',
'upload',
'upload_fileset',
]
| # -*- coding: utf-8 -*-
# Copyright (c) 2014-2019 Dontnod Entertainment
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''' Nimp subcommands declarations '''
__all__ = [
'build',
'check',
'commandlet',
'dev',
'download_fileset',
'fileset',
'p4',
'package',
'run',
'run_hook',
'symbol_server',
'update_symbol_server',
'upload',
'upload_fileset',
]
Remove run_hook from list of imported commands.# -*- coding: utf-8 -*-
# Copyright (c) 2014-2019 Dontnod Entertainment
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''' Nimp subcommands declarations '''
__all__ = [
'build',
'check',
'commandlet',
'dev',
'download_fileset',
'fileset',
'p4',
'package',
'run',
'symbol_server',
'update_symbol_server',
'upload',
'upload_fileset',
]
| <commit_before># -*- coding: utf-8 -*-
# Copyright (c) 2014-2019 Dontnod Entertainment
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''' Nimp subcommands declarations '''
__all__ = [
'build',
'check',
'commandlet',
'dev',
'download_fileset',
'fileset',
'p4',
'package',
'run',
'run_hook',
'symbol_server',
'update_symbol_server',
'upload',
'upload_fileset',
]
<commit_msg>Remove run_hook from list of imported commands.<commit_after># -*- coding: utf-8 -*-
# Copyright (c) 2014-2019 Dontnod Entertainment
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
''' Nimp subcommands declarations '''
__all__ = [
'build',
'check',
'commandlet',
'dev',
'download_fileset',
'fileset',
'p4',
'package',
'run',
'symbol_server',
'update_symbol_server',
'upload',
'upload_fileset',
]
|
1cff28b9612c156363ed87cdde1718ee83b65776 | real_estate_agency/resale/serializers.py | real_estate_agency/resale/serializers.py | from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
    """Serializes every model field of a ResaleApartmentImage."""

    class Meta:
        model = ResaleApartmentImage
        fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
    """Read-oriented API representation of a resale apartment listing."""

    # images = ResaleApartmentImageSerializer(source='photos', many=True)
    # Model helpers exposed as read-only serializer fields.
    get_building_type_display = serializers.ReadOnlyField()
    price_per_square_meter = serializers.ReadOnlyField()
    neighbourhood = serializers.StringRelatedField()

    class Meta:
        model = ResaleApartment
        # Fix: 'number_of_storeys' was listed twice; the duplicate is removed
        # (DRF builds a dict of fields, so the repeat was redundant).
        fields = (
            'total_area',
            'address',
            'floor',
            'number_of_storeys',
            # 'images',
            'full_price',
            'old_price',
            'price_per_square_meter',
            'neighbourhood',
            'get_building_type_display',
            'date_of_construction',
            # NOTE(review): 'celling_height' mirrors the model field's spelling.
            'celling_height',
            'decoration',
            'kitchen_area',
            'balcony_area',
            'id',
        )
| from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
    """Serializes every model field of a ResaleApartmentImage."""

    class Meta:
        model = ResaleApartmentImage
        fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
    """Read-oriented API representation of a resale apartment listing."""

    # images = ResaleApartmentImageSerializer(source='photos', many=True)
    # Model helpers exposed as read-only serializer fields.
    get_building_type_display = serializers.ReadOnlyField()
    price_per_square_meter = serializers.ReadOnlyField()
    neighbourhood = serializers.StringRelatedField()
    # Human-readable decoration name instead of the FK's primary key.
    decoration = serializers.ReadOnlyField(source='decoration.name')

    class Meta:
        model = ResaleApartment
        # Fix: 'number_of_storeys' was listed twice; the duplicate is removed
        # (DRF builds a dict of fields, so the repeat was redundant).
        fields = (
            'total_area',
            'address',
            'floor',
            'number_of_storeys',
            # 'images',
            'full_price',
            'old_price',
            'price_per_square_meter',
            'neighbourhood',
            'get_building_type_display',
            'date_of_construction',
            # NOTE(review): 'celling_height' mirrors the model field's spelling.
            'celling_height',
            'decoration',
            'kitchen_area',
            'balcony_area',
            'id',
        )
| Make ResaleApartmentSerializer return Decoration.name on decoration field. | Make ResaleApartmentSerializer return Decoration.name on decoration field.
It allows to show readable value at resale detailed page.
| Python | mit | Dybov/real_estate_agency,Dybov/real_estate_agency,Dybov/real_estate_agency | from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
Make ResaleApartmentSerializer return Decoration.name on decoration field.
It allows to show readable value at resale detailed page. | from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
decoration = serializers.ReadOnlyField(source='decoration.name')
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
| <commit_before>from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
<commit_msg>Make ResaleApartmentSerializer return Decoration.name on decoration field.
It allows to show readable value at resale detailed page.<commit_after> | from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
decoration = serializers.ReadOnlyField(source='decoration.name')
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
| from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
Make ResaleApartmentSerializer return Decoration.name on decoration field.
It allows to show readable value at resale detailed page.from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
decoration = serializers.ReadOnlyField(source='decoration.name')
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
| <commit_before>from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
<commit_msg>Make ResaleApartmentSerializer return Decoration.name on decoration field.
It allows to show readable value at resale detailed page.<commit_after>from rest_framework import serializers
from .models import ResaleApartment, ResaleApartmentImage
class ResaleApartmentImageSerializer(serializers.ModelSerializer):
class Meta:
model = ResaleApartmentImage
fields = '__all__'
class ResaleApartmentSerializer(serializers.ModelSerializer):
# images = ResaleApartmentImageSerializer(source='photos', many=True)
get_building_type_display = serializers.ReadOnlyField()
price_per_square_meter = serializers.ReadOnlyField()
neighbourhood = serializers.StringRelatedField()
decoration = serializers.ReadOnlyField(source='decoration.name')
class Meta:
model = ResaleApartment
fields = (
'total_area',
'address',
'floor',
'number_of_storeys',
# 'images',
'full_price',
'old_price',
'price_per_square_meter',
'neighbourhood',
'get_building_type_display',
'number_of_storeys',
'date_of_construction',
'celling_height',
'decoration',
'kitchen_area',
'balcony_area',
'id',
)
|
1e513d901dfef9135a62c8f99633b10d3900ecb8 | orator/schema/mysql_builder.py | orator/schema/mysql_builder.py | # -*- coding: utf-8 -*-
from .builder import SchemaBuilder
class MySQLSchemaBuilder(SchemaBuilder):
def has_table(self, table):
"""
Determine if the given table exists.
:param table: The table
:type table: str
:rtype: bool
"""
sql = self._grammar.compile_table_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
return len(self._connection.select(sql, [database, table])) > 0
def get_column_listing(self, table):
"""
Get the column listing for a given table.
:param table: The table
:type table: str
:rtype: list
"""
sql = self._grammar.compile_column_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
results = self._connection.select(sql, [database, table])
return self._connection.get_post_processor().process_column_listing(results)
| # -*- coding: utf-8 -*-
from .builder import SchemaBuilder
class MySQLSchemaBuilder(SchemaBuilder):
def has_table(self, table):
"""
Determine if the given table exists.
:param table: The table
:type table: str
:rtype: bool
"""
sql = self._grammar.compile_table_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
return len(self._connection.select(sql, [database, table])) > 0
def get_column_listing(self, table):
"""
Get the column listing for a given table.
:param table: The table
:type table: str
:rtype: list
"""
sql = self._grammar.compile_column_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
results = []
for result in self._connection.select(sql, [database, table]):
new_result = {}
for key, value in result.items():
new_result[key.lower()] = value
results.append(new_result)
return self._connection.get_post_processor().process_column_listing(results)
| Fix case when processing column names for MySQL | Fix case when processing column names for MySQL
| Python | mit | sdispater/orator | # -*- coding: utf-8 -*-
from .builder import SchemaBuilder
class MySQLSchemaBuilder(SchemaBuilder):
def has_table(self, table):
"""
Determine if the given table exists.
:param table: The table
:type table: str
:rtype: bool
"""
sql = self._grammar.compile_table_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
return len(self._connection.select(sql, [database, table])) > 0
def get_column_listing(self, table):
"""
Get the column listing for a given table.
:param table: The table
:type table: str
:rtype: list
"""
sql = self._grammar.compile_column_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
results = self._connection.select(sql, [database, table])
return self._connection.get_post_processor().process_column_listing(results)
Fix case when processing column names for MySQL | # -*- coding: utf-8 -*-
from .builder import SchemaBuilder
class MySQLSchemaBuilder(SchemaBuilder):
def has_table(self, table):
"""
Determine if the given table exists.
:param table: The table
:type table: str
:rtype: bool
"""
sql = self._grammar.compile_table_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
return len(self._connection.select(sql, [database, table])) > 0
def get_column_listing(self, table):
"""
Get the column listing for a given table.
:param table: The table
:type table: str
:rtype: list
"""
sql = self._grammar.compile_column_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
results = []
for result in self._connection.select(sql, [database, table]):
new_result = {}
for key, value in result.items():
new_result[key.lower()] = value
results.append(new_result)
return self._connection.get_post_processor().process_column_listing(results)
| <commit_before># -*- coding: utf-8 -*-
from .builder import SchemaBuilder
class MySQLSchemaBuilder(SchemaBuilder):
def has_table(self, table):
"""
Determine if the given table exists.
:param table: The table
:type table: str
:rtype: bool
"""
sql = self._grammar.compile_table_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
return len(self._connection.select(sql, [database, table])) > 0
def get_column_listing(self, table):
"""
Get the column listing for a given table.
:param table: The table
:type table: str
:rtype: list
"""
sql = self._grammar.compile_column_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
results = self._connection.select(sql, [database, table])
return self._connection.get_post_processor().process_column_listing(results)
<commit_msg>Fix case when processing column names for MySQL<commit_after> | # -*- coding: utf-8 -*-
from .builder import SchemaBuilder
class MySQLSchemaBuilder(SchemaBuilder):
def has_table(self, table):
"""
Determine if the given table exists.
:param table: The table
:type table: str
:rtype: bool
"""
sql = self._grammar.compile_table_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
return len(self._connection.select(sql, [database, table])) > 0
def get_column_listing(self, table):
"""
Get the column listing for a given table.
:param table: The table
:type table: str
:rtype: list
"""
sql = self._grammar.compile_column_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
results = []
for result in self._connection.select(sql, [database, table]):
new_result = {}
for key, value in result.items():
new_result[key.lower()] = value
results.append(new_result)
return self._connection.get_post_processor().process_column_listing(results)
| # -*- coding: utf-8 -*-
from .builder import SchemaBuilder
class MySQLSchemaBuilder(SchemaBuilder):
def has_table(self, table):
"""
Determine if the given table exists.
:param table: The table
:type table: str
:rtype: bool
"""
sql = self._grammar.compile_table_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
return len(self._connection.select(sql, [database, table])) > 0
def get_column_listing(self, table):
"""
Get the column listing for a given table.
:param table: The table
:type table: str
:rtype: list
"""
sql = self._grammar.compile_column_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
results = self._connection.select(sql, [database, table])
return self._connection.get_post_processor().process_column_listing(results)
Fix case when processing column names for MySQL# -*- coding: utf-8 -*-
from .builder import SchemaBuilder
class MySQLSchemaBuilder(SchemaBuilder):
def has_table(self, table):
"""
Determine if the given table exists.
:param table: The table
:type table: str
:rtype: bool
"""
sql = self._grammar.compile_table_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
return len(self._connection.select(sql, [database, table])) > 0
def get_column_listing(self, table):
"""
Get the column listing for a given table.
:param table: The table
:type table: str
:rtype: list
"""
sql = self._grammar.compile_column_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
results = []
for result in self._connection.select(sql, [database, table]):
new_result = {}
for key, value in result.items():
new_result[key.lower()] = value
results.append(new_result)
return self._connection.get_post_processor().process_column_listing(results)
| <commit_before># -*- coding: utf-8 -*-
from .builder import SchemaBuilder
class MySQLSchemaBuilder(SchemaBuilder):
def has_table(self, table):
"""
Determine if the given table exists.
:param table: The table
:type table: str
:rtype: bool
"""
sql = self._grammar.compile_table_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
return len(self._connection.select(sql, [database, table])) > 0
def get_column_listing(self, table):
"""
Get the column listing for a given table.
:param table: The table
:type table: str
:rtype: list
"""
sql = self._grammar.compile_column_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
results = self._connection.select(sql, [database, table])
return self._connection.get_post_processor().process_column_listing(results)
<commit_msg>Fix case when processing column names for MySQL<commit_after># -*- coding: utf-8 -*-
from .builder import SchemaBuilder
class MySQLSchemaBuilder(SchemaBuilder):
def has_table(self, table):
"""
Determine if the given table exists.
:param table: The table
:type table: str
:rtype: bool
"""
sql = self._grammar.compile_table_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
return len(self._connection.select(sql, [database, table])) > 0
def get_column_listing(self, table):
"""
Get the column listing for a given table.
:param table: The table
:type table: str
:rtype: list
"""
sql = self._grammar.compile_column_exists()
database = self._connection.get_database_name()
table = self._connection.get_table_prefix() + table
results = []
for result in self._connection.select(sql, [database, table]):
new_result = {}
for key, value in result.items():
new_result[key.lower()] = value
results.append(new_result)
return self._connection.get_post_processor().process_column_listing(results)
|
0a6078f5d0537cea9f36894b736fa274c3fa3e47 | molo/core/cookiecutter/scaffold/{{cookiecutter.directory}}/{{cookiecutter.app_name}}/forms.py | molo/core/cookiecutter/scaffold/{{cookiecutter.directory}}/{{cookiecutter.app_name}}/forms.py | from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_data_file(self):
file = self.cleaned_data['data_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
'''
def process_data(self):
print("I am processing the form")
file = self.cleaned_data['data_file'].file
# delete root media file
# unzip file in root folder (make sure it's called 'media')
'''
| from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_zip_file(self):
file = self.cleaned_data['zip_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
| Fix upload form to only accept .zip files | Fix upload form to only accept .zip files
| Python | bsd-2-clause | praekelt/molo,praekelt/molo,praekelt/molo,praekelt/molo | from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_data_file(self):
file = self.cleaned_data['data_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
'''
def process_data(self):
print("I am processing the form")
file = self.cleaned_data['data_file'].file
# delete root media file
# unzip file in root folder (make sure it's called 'media')
'''
Fix upload form to only accept .zip files | from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_zip_file(self):
file = self.cleaned_data['zip_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
| <commit_before>from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_data_file(self):
file = self.cleaned_data['data_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
'''
def process_data(self):
print("I am processing the form")
file = self.cleaned_data['data_file'].file
# delete root media file
# unzip file in root folder (make sure it's called 'media')
'''
<commit_msg>Fix upload form to only accept .zip files<commit_after> | from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_zip_file(self):
file = self.cleaned_data['zip_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
| from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_data_file(self):
file = self.cleaned_data['data_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
'''
def process_data(self):
print("I am processing the form")
file = self.cleaned_data['data_file'].file
# delete root media file
# unzip file in root folder (make sure it's called 'media')
'''
Fix upload form to only accept .zip filesfrom django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_zip_file(self):
file = self.cleaned_data['zip_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
| <commit_before>from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_data_file(self):
file = self.cleaned_data['data_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
'''
def process_data(self):
print("I am processing the form")
file = self.cleaned_data['data_file'].file
# delete root media file
# unzip file in root folder (make sure it's called 'media')
'''
<commit_msg>Fix upload form to only accept .zip files<commit_after>from django import forms
class MediaForm(forms.Form):
zip_file = forms.FileField(label="Zipped Media File")
def clean_zip_file(self):
file = self.cleaned_data['zip_file']
if file:
extension = file.name.split('.')[-1]
if extension != 'zip':
raise forms.ValidationError('File Type Is Not .zip')
return file
|
8a55e9fac0885c5b6eb21cd8f3da54a105de8010 | sktracker/__init__.py | sktracker/__init__.py | """Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
Utility Functions
-----------------
img_as_float
Convert an image to floating point format, with values in [0, 1].
"""
try:
from .version import __version__
except ImportError:
__version__ = "dev"
| """Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
"""
try:
from .version import __version__
except ImportError:
__version__ = "dev"
import utils
| Fix utils module import mechanism | Fix utils module import mechanism
| Python | bsd-3-clause | bnoi/scikit-tracker,bnoi/scikit-tracker,bnoi/scikit-tracker | """Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
Utility Functions
-----------------
img_as_float
Convert an image to floating point format, with values in [0, 1].
"""
try:
from .version import __version__
except ImportError:
__version__ = "dev"
Fix utils module import mechanism | """Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
"""
try:
from .version import __version__
except ImportError:
__version__ = "dev"
import utils
| <commit_before>"""Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
Utility Functions
-----------------
img_as_float
Convert an image to floating point format, with values in [0, 1].
"""
try:
from .version import __version__
except ImportError:
__version__ = "dev"
<commit_msg>Fix utils module import mechanism<commit_after> | """Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
"""
try:
from .version import __version__
except ImportError:
__version__ = "dev"
import utils
| """Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
Utility Functions
-----------------
img_as_float
Convert an image to floating point format, with values in [0, 1].
"""
try:
from .version import __version__
except ImportError:
__version__ = "dev"
Fix utils module import mechanism"""Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
"""
try:
from .version import __version__
except ImportError:
__version__ = "dev"
import utils
| <commit_before>"""Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
Utility Functions
-----------------
img_as_float
Convert an image to floating point format, with values in [0, 1].
"""
try:
from .version import __version__
except ImportError:
__version__ = "dev"
<commit_msg>Fix utils module import mechanism<commit_after>"""Object detection and tracking for cell biology
`scikit-learn` is bla bla bla.
Subpackages
-----------
color
Color space conversion.
"""
try:
from .version import __version__
except ImportError:
__version__ = "dev"
import utils
|
e92fa763729ce68e86da3664ae1a1ed37e3200a5 | ynr/apps/uk_results/serializers.py | ynr/apps/uk_results/serializers.py | from __future__ import unicode_literals
from rest_framework import serializers
from candidates.serializers import MembershipSerializer
from .models import CandidateResult, ResultSet
class CandidateResultSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = CandidateResult
fields = (
'id', 'url',
'membership',
'result_set',
'num_ballots', 'is_winner',
)
membership = MembershipSerializer(read_only=True)
# result_set = ResultSetSerializer(read_only=True)
class ResultSetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ResultSet
fields = (
'id', 'url',
'candidate_results',
'ip_address',
'num_turnout_reported', 'num_spoilt_ballots',
# 'post_result',
'user', 'user_id',
)
# post_result = PostResultSerializer()
user = serializers.ReadOnlyField(source='user.username')
user_id = serializers.ReadOnlyField(source='user.id')
candidate_results = CandidateResultSerializer(many=True, read_only=True)
| from __future__ import unicode_literals
from rest_framework import serializers
from candidates.serializers import MembershipSerializer
from .models import CandidateResult, ResultSet
class CandidateResultSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = CandidateResult
fields = (
'id', 'url',
'membership',
'result_set',
'num_ballots', 'is_winner',
)
membership = MembershipSerializer(read_only=True)
# result_set = ResultSetSerializer(read_only=True)
class ResultSetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ResultSet
fields = (
'id',
'url',
'candidate_results',
'ip_address',
'num_turnout_reported',
'num_spoilt_ballots',
'user',
'user_id',
'ballot_paper_id'
)
# post_result = PostResultSerializer()
user = serializers.ReadOnlyField(source='user.username')
ballot_paper_id = serializers.ReadOnlyField(
source='post_election.ballot_paper_id')
user_id = serializers.ReadOnlyField(source='user.id')
candidate_results = CandidateResultSerializer(many=True, read_only=True)
| Add ballot paper ID to API | Add ballot paper ID to API
| Python | agpl-3.0 | DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative,DemocracyClub/yournextrepresentative | from __future__ import unicode_literals
from rest_framework import serializers
from candidates.serializers import MembershipSerializer
from .models import CandidateResult, ResultSet
class CandidateResultSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = CandidateResult
fields = (
'id', 'url',
'membership',
'result_set',
'num_ballots', 'is_winner',
)
membership = MembershipSerializer(read_only=True)
# result_set = ResultSetSerializer(read_only=True)
class ResultSetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ResultSet
fields = (
'id', 'url',
'candidate_results',
'ip_address',
'num_turnout_reported', 'num_spoilt_ballots',
# 'post_result',
'user', 'user_id',
)
# post_result = PostResultSerializer()
user = serializers.ReadOnlyField(source='user.username')
user_id = serializers.ReadOnlyField(source='user.id')
candidate_results = CandidateResultSerializer(many=True, read_only=True)
Add ballot paper ID to API | from __future__ import unicode_literals
from rest_framework import serializers
from candidates.serializers import MembershipSerializer
from .models import CandidateResult, ResultSet
class CandidateResultSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = CandidateResult
fields = (
'id', 'url',
'membership',
'result_set',
'num_ballots', 'is_winner',
)
membership = MembershipSerializer(read_only=True)
# result_set = ResultSetSerializer(read_only=True)
class ResultSetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ResultSet
fields = (
'id',
'url',
'candidate_results',
'ip_address',
'num_turnout_reported',
'num_spoilt_ballots',
'user',
'user_id',
'ballot_paper_id'
)
# post_result = PostResultSerializer()
user = serializers.ReadOnlyField(source='user.username')
ballot_paper_id = serializers.ReadOnlyField(
source='post_election.ballot_paper_id')
user_id = serializers.ReadOnlyField(source='user.id')
candidate_results = CandidateResultSerializer(many=True, read_only=True)
| <commit_before>from __future__ import unicode_literals
from rest_framework import serializers
from candidates.serializers import MembershipSerializer
from .models import CandidateResult, ResultSet
class CandidateResultSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = CandidateResult
fields = (
'id', 'url',
'membership',
'result_set',
'num_ballots', 'is_winner',
)
membership = MembershipSerializer(read_only=True)
# result_set = ResultSetSerializer(read_only=True)
class ResultSetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ResultSet
fields = (
'id', 'url',
'candidate_results',
'ip_address',
'num_turnout_reported', 'num_spoilt_ballots',
# 'post_result',
'user', 'user_id',
)
# post_result = PostResultSerializer()
user = serializers.ReadOnlyField(source='user.username')
user_id = serializers.ReadOnlyField(source='user.id')
candidate_results = CandidateResultSerializer(many=True, read_only=True)
<commit_msg>Add ballot paper ID to API<commit_after> | from __future__ import unicode_literals
from rest_framework import serializers
from candidates.serializers import MembershipSerializer
from .models import CandidateResult, ResultSet
class CandidateResultSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = CandidateResult
fields = (
'id', 'url',
'membership',
'result_set',
'num_ballots', 'is_winner',
)
membership = MembershipSerializer(read_only=True)
# result_set = ResultSetSerializer(read_only=True)
class ResultSetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ResultSet
fields = (
'id',
'url',
'candidate_results',
'ip_address',
'num_turnout_reported',
'num_spoilt_ballots',
'user',
'user_id',
'ballot_paper_id'
)
# post_result = PostResultSerializer()
user = serializers.ReadOnlyField(source='user.username')
ballot_paper_id = serializers.ReadOnlyField(
source='post_election.ballot_paper_id')
user_id = serializers.ReadOnlyField(source='user.id')
candidate_results = CandidateResultSerializer(many=True, read_only=True)
| from __future__ import unicode_literals
from rest_framework import serializers
from candidates.serializers import MembershipSerializer
from .models import CandidateResult, ResultSet
class CandidateResultSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = CandidateResult
fields = (
'id', 'url',
'membership',
'result_set',
'num_ballots', 'is_winner',
)
membership = MembershipSerializer(read_only=True)
# result_set = ResultSetSerializer(read_only=True)
class ResultSetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ResultSet
fields = (
'id', 'url',
'candidate_results',
'ip_address',
'num_turnout_reported', 'num_spoilt_ballots',
# 'post_result',
'user', 'user_id',
)
# post_result = PostResultSerializer()
user = serializers.ReadOnlyField(source='user.username')
user_id = serializers.ReadOnlyField(source='user.id')
candidate_results = CandidateResultSerializer(many=True, read_only=True)
Add ballot paper ID to APIfrom __future__ import unicode_literals
from rest_framework import serializers
from candidates.serializers import MembershipSerializer
from .models import CandidateResult, ResultSet
class CandidateResultSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = CandidateResult
fields = (
'id', 'url',
'membership',
'result_set',
'num_ballots', 'is_winner',
)
membership = MembershipSerializer(read_only=True)
# result_set = ResultSetSerializer(read_only=True)
class ResultSetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ResultSet
fields = (
'id',
'url',
'candidate_results',
'ip_address',
'num_turnout_reported',
'num_spoilt_ballots',
'user',
'user_id',
'ballot_paper_id'
)
# post_result = PostResultSerializer()
user = serializers.ReadOnlyField(source='user.username')
ballot_paper_id = serializers.ReadOnlyField(
source='post_election.ballot_paper_id')
user_id = serializers.ReadOnlyField(source='user.id')
candidate_results = CandidateResultSerializer(many=True, read_only=True)
| <commit_before>from __future__ import unicode_literals
from rest_framework import serializers
from candidates.serializers import MembershipSerializer
from .models import CandidateResult, ResultSet
class CandidateResultSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = CandidateResult
fields = (
'id', 'url',
'membership',
'result_set',
'num_ballots', 'is_winner',
)
membership = MembershipSerializer(read_only=True)
# result_set = ResultSetSerializer(read_only=True)
class ResultSetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ResultSet
fields = (
'id', 'url',
'candidate_results',
'ip_address',
'num_turnout_reported', 'num_spoilt_ballots',
# 'post_result',
'user', 'user_id',
)
# post_result = PostResultSerializer()
user = serializers.ReadOnlyField(source='user.username')
user_id = serializers.ReadOnlyField(source='user.id')
candidate_results = CandidateResultSerializer(many=True, read_only=True)
<commit_msg>Add ballot paper ID to API<commit_after>from __future__ import unicode_literals
from rest_framework import serializers
from candidates.serializers import MembershipSerializer
from .models import CandidateResult, ResultSet
class CandidateResultSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = CandidateResult
fields = (
'id', 'url',
'membership',
'result_set',
'num_ballots', 'is_winner',
)
membership = MembershipSerializer(read_only=True)
# result_set = ResultSetSerializer(read_only=True)
class ResultSetSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
model = ResultSet
fields = (
'id',
'url',
'candidate_results',
'ip_address',
'num_turnout_reported',
'num_spoilt_ballots',
'user',
'user_id',
'ballot_paper_id'
)
# post_result = PostResultSerializer()
user = serializers.ReadOnlyField(source='user.username')
ballot_paper_id = serializers.ReadOnlyField(
source='post_election.ballot_paper_id')
user_id = serializers.ReadOnlyField(source='user.id')
candidate_results = CandidateResultSerializer(many=True, read_only=True)
|
3c8b3a24978cf757fb3a8cc8660eb4554f183e0f | social_auth/fields.py | social_auth/fields.py | from django.core.exceptions import ValidationError
from django.db import models
from django.utils import simplejson
class JSONField(models.TextField):
"""Simple JSON field that stores python structures as JSON strings
on database.
"""
__metaclass__ = models.SubfieldBase
def to_python(self, value):
"""
Convert the input JSON value into python structures, raises
django.core.exceptions.ValidationError if the data can't be converted.
"""
if self.blank and not value:
return None
if isinstance(value, basestring):
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
else:
return value
def validate(self, value, model_instance):
"""Check value is a valid JSON string, raise ValidationError on
error."""
super(JSONField, self).validate(value, model_instance)
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
def get_db_prep_save(self, value):
"""Convert value to JSON string before save"""
try:
value = simplejson.dumps(value)
except Exception, e:
raise ValidationError(str(e))
return super(JSONField, self).get_db_prep_save(value)
| from django.core.exceptions import ValidationError
from django.db import models
from django.utils import simplejson
class JSONField(models.TextField):
"""Simple JSON field that stores python structures as JSON strings
on database.
"""
__metaclass__ = models.SubfieldBase
def to_python(self, value):
"""
Convert the input JSON value into python structures, raises
django.core.exceptions.ValidationError if the data can't be converted.
"""
if self.blank and not value:
return None
if isinstance(value, basestring):
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
else:
return value
def validate(self, value, model_instance):
"""Check value is a valid JSON string, raise ValidationError on
error."""
super(JSONField, self).validate(value, model_instance)
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
def get_db_prep_value(self, value, connection, prepared=False):
"""Convert value to JSON string before save"""
try:
return simplejson.dumps(value)
except Exception, e:
raise ValidationError(str(e))
| Use get_db_prep_value instead of get_db_prep_save. Closes gh-42 | Use get_db_prep_value instead of get_db_prep_save. Closes gh-42
| Python | bsd-3-clause | WW-Digital/django-social-auth,1st/django-social-auth,vxvinh1511/django-social-auth,limdauto/django-social-auth,michael-borisov/django-social-auth,caktus/django-social-auth,thesealion/django-social-auth,lovehhf/django-social-auth,dongguangming/django-social-auth,VishvajitP/django-social-auth,MjAbuz/django-social-auth,duoduo369/django-social-auth,MjAbuz/django-social-auth,mayankcu/Django-social,omab/django-social-auth,antoviaque/django-social-auth-norel,qas612820704/django-social-auth,vuchau/django-social-auth,adw0rd/django-social-auth,vuchau/django-social-auth,beswarm/django-social-auth,czpython/django-social-auth,VishvajitP/django-social-auth,qas612820704/django-social-auth,getsentry/django-social-auth,beswarm/django-social-auth,limdauto/django-social-auth,omab/django-social-auth,sk7/django-social-auth,krvss/django-social-auth,vxvinh1511/django-social-auth,michael-borisov/django-social-auth,lovehhf/django-social-auth,caktus/django-social-auth,gustavoam/django-social-auth,brianmckinneyrocks/django-social-auth,brianmckinneyrocks/django-social-auth,thesealion/django-social-auth,gustavoam/django-social-auth,dongguangming/django-social-auth | from django.core.exceptions import ValidationError
from django.db import models
from django.utils import simplejson
class JSONField(models.TextField):
"""Simple JSON field that stores python structures as JSON strings
on database.
"""
__metaclass__ = models.SubfieldBase
def to_python(self, value):
"""
Convert the input JSON value into python structures, raises
django.core.exceptions.ValidationError if the data can't be converted.
"""
if self.blank and not value:
return None
if isinstance(value, basestring):
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
else:
return value
def validate(self, value, model_instance):
"""Check value is a valid JSON string, raise ValidationError on
error."""
super(JSONField, self).validate(value, model_instance)
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
def get_db_prep_save(self, value):
"""Convert value to JSON string before save"""
try:
value = simplejson.dumps(value)
except Exception, e:
raise ValidationError(str(e))
return super(JSONField, self).get_db_prep_save(value)
Use get_db_prep_value instead of get_db_prep_save. Closes gh-42 | from django.core.exceptions import ValidationError
from django.db import models
from django.utils import simplejson
class JSONField(models.TextField):
"""Simple JSON field that stores python structures as JSON strings
on database.
"""
__metaclass__ = models.SubfieldBase
def to_python(self, value):
"""
Convert the input JSON value into python structures, raises
django.core.exceptions.ValidationError if the data can't be converted.
"""
if self.blank and not value:
return None
if isinstance(value, basestring):
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
else:
return value
def validate(self, value, model_instance):
"""Check value is a valid JSON string, raise ValidationError on
error."""
super(JSONField, self).validate(value, model_instance)
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
def get_db_prep_value(self, value, connection, prepared=False):
"""Convert value to JSON string before save"""
try:
return simplejson.dumps(value)
except Exception, e:
raise ValidationError(str(e))
| <commit_before>from django.core.exceptions import ValidationError
from django.db import models
from django.utils import simplejson
class JSONField(models.TextField):
"""Simple JSON field that stores python structures as JSON strings
on database.
"""
__metaclass__ = models.SubfieldBase
def to_python(self, value):
"""
Convert the input JSON value into python structures, raises
django.core.exceptions.ValidationError if the data can't be converted.
"""
if self.blank and not value:
return None
if isinstance(value, basestring):
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
else:
return value
def validate(self, value, model_instance):
"""Check value is a valid JSON string, raise ValidationError on
error."""
super(JSONField, self).validate(value, model_instance)
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
def get_db_prep_save(self, value):
"""Convert value to JSON string before save"""
try:
value = simplejson.dumps(value)
except Exception, e:
raise ValidationError(str(e))
return super(JSONField, self).get_db_prep_save(value)
<commit_msg>Use get_db_prep_value instead of get_db_prep_save. Closes gh-42<commit_after> | from django.core.exceptions import ValidationError
from django.db import models
from django.utils import simplejson
class JSONField(models.TextField):
"""Simple JSON field that stores python structures as JSON strings
on database.
"""
__metaclass__ = models.SubfieldBase
def to_python(self, value):
"""
Convert the input JSON value into python structures, raises
django.core.exceptions.ValidationError if the data can't be converted.
"""
if self.blank and not value:
return None
if isinstance(value, basestring):
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
else:
return value
def validate(self, value, model_instance):
"""Check value is a valid JSON string, raise ValidationError on
error."""
super(JSONField, self).validate(value, model_instance)
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
def get_db_prep_value(self, value, connection, prepared=False):
"""Convert value to JSON string before save"""
try:
return simplejson.dumps(value)
except Exception, e:
raise ValidationError(str(e))
| from django.core.exceptions import ValidationError
from django.db import models
from django.utils import simplejson
class JSONField(models.TextField):
"""Simple JSON field that stores python structures as JSON strings
on database.
"""
__metaclass__ = models.SubfieldBase
def to_python(self, value):
"""
Convert the input JSON value into python structures, raises
django.core.exceptions.ValidationError if the data can't be converted.
"""
if self.blank and not value:
return None
if isinstance(value, basestring):
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
else:
return value
def validate(self, value, model_instance):
"""Check value is a valid JSON string, raise ValidationError on
error."""
super(JSONField, self).validate(value, model_instance)
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
def get_db_prep_save(self, value):
"""Convert value to JSON string before save"""
try:
value = simplejson.dumps(value)
except Exception, e:
raise ValidationError(str(e))
return super(JSONField, self).get_db_prep_save(value)
Use get_db_prep_value instead of get_db_prep_save. Closes gh-42from django.core.exceptions import ValidationError
from django.db import models
from django.utils import simplejson
class JSONField(models.TextField):
"""Simple JSON field that stores python structures as JSON strings
on database.
"""
__metaclass__ = models.SubfieldBase
def to_python(self, value):
"""
Convert the input JSON value into python structures, raises
django.core.exceptions.ValidationError if the data can't be converted.
"""
if self.blank and not value:
return None
if isinstance(value, basestring):
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
else:
return value
def validate(self, value, model_instance):
"""Check value is a valid JSON string, raise ValidationError on
error."""
super(JSONField, self).validate(value, model_instance)
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
def get_db_prep_value(self, value, connection, prepared=False):
"""Convert value to JSON string before save"""
try:
return simplejson.dumps(value)
except Exception, e:
raise ValidationError(str(e))
| <commit_before>from django.core.exceptions import ValidationError
from django.db import models
from django.utils import simplejson
class JSONField(models.TextField):
"""Simple JSON field that stores python structures as JSON strings
on database.
"""
__metaclass__ = models.SubfieldBase
def to_python(self, value):
"""
Convert the input JSON value into python structures, raises
django.core.exceptions.ValidationError if the data can't be converted.
"""
if self.blank and not value:
return None
if isinstance(value, basestring):
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
else:
return value
def validate(self, value, model_instance):
"""Check value is a valid JSON string, raise ValidationError on
error."""
super(JSONField, self).validate(value, model_instance)
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
def get_db_prep_save(self, value):
"""Convert value to JSON string before save"""
try:
value = simplejson.dumps(value)
except Exception, e:
raise ValidationError(str(e))
return super(JSONField, self).get_db_prep_save(value)
<commit_msg>Use get_db_prep_value instead of get_db_prep_save. Closes gh-42<commit_after>from django.core.exceptions import ValidationError
from django.db import models
from django.utils import simplejson
class JSONField(models.TextField):
"""Simple JSON field that stores python structures as JSON strings
on database.
"""
__metaclass__ = models.SubfieldBase
def to_python(self, value):
"""
Convert the input JSON value into python structures, raises
django.core.exceptions.ValidationError if the data can't be converted.
"""
if self.blank and not value:
return None
if isinstance(value, basestring):
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
else:
return value
def validate(self, value, model_instance):
"""Check value is a valid JSON string, raise ValidationError on
error."""
super(JSONField, self).validate(value, model_instance)
try:
return simplejson.loads(value)
except Exception, e:
raise ValidationError(str(e))
def get_db_prep_value(self, value, connection, prepared=False):
"""Convert value to JSON string before save"""
try:
return simplejson.dumps(value)
except Exception, e:
raise ValidationError(str(e))
|
49b3fe4e334e25c6afaa9ccace72f5c985e761cb | wcontrol/src/models.py | wcontrol/src/models.py | from app import db
from flask_login import UserMixin
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), index=True, unique=True)
name = db.Column(db.String(64), index=True)
email = db.Column(db.String(64), index=True, unique=True)
date = db.Column(db.DateTime)
height = db.Column(db.Float(Precision=2))
age = db.Column(db.Integer)
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def __repr__(self):
return '<User %r>' % (self.nickname)
def get_id(self):
try:
return unicode(self.id) # python2
except NameError:
return str(self.id) # python3
@staticmethod
def make_unique_nickname(nickname):
if User.query.filter_by(nickname=nickname).first() is None:
return nickname
version = 2
while True:
new_nickname = nickname + str(version)
if User.query.filter_by(nickname=new_nickname).first() is None:
break
version += 1
return new_nickname
| from app import db
from flask_login import UserMixin
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), index=True, unique=True)
name = db.Column(db.String(64), index=True)
email = db.Column(db.String(64), index=True, unique=True)
date = db.Column(db.DateTime)
height = db.Column(db.Float(Precision=2))
age = db.Column(db.Integer)
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def __repr__(self):
return '<User %r>' % (self.nickname)
def get_id(self):
try:
return unicode(self.id)
except NameError:
return str(self.id)
@staticmethod
def make_unique_nickname(nickname):
if User.query.filter_by(nickname=nickname).first() is None:
return nickname
version = 2
while True:
new_nickname = nickname + str(version)
if User.query.filter_by(nickname=new_nickname).first() is None:
break
version += 1
return new_nickname
| Modify to fit with PEP8 standard | Modify to fit with PEP8 standard
| Python | mit | pahumadad/weight-control,pahumadad/weight-control,pahumadad/weight-control,pahumadad/weight-control | from app import db
from flask_login import UserMixin
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), index=True, unique=True)
name = db.Column(db.String(64), index=True)
email = db.Column(db.String(64), index=True, unique=True)
date = db.Column(db.DateTime)
height = db.Column(db.Float(Precision=2))
age = db.Column(db.Integer)
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def __repr__(self):
return '<User %r>' % (self.nickname)
def get_id(self):
try:
return unicode(self.id) # python2
except NameError:
return str(self.id) # python3
@staticmethod
def make_unique_nickname(nickname):
if User.query.filter_by(nickname=nickname).first() is None:
return nickname
version = 2
while True:
new_nickname = nickname + str(version)
if User.query.filter_by(nickname=new_nickname).first() is None:
break
version += 1
return new_nickname
Modify to fit with PEP8 standard | from app import db
from flask_login import UserMixin
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), index=True, unique=True)
name = db.Column(db.String(64), index=True)
email = db.Column(db.String(64), index=True, unique=True)
date = db.Column(db.DateTime)
height = db.Column(db.Float(Precision=2))
age = db.Column(db.Integer)
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def __repr__(self):
return '<User %r>' % (self.nickname)
def get_id(self):
try:
return unicode(self.id)
except NameError:
return str(self.id)
@staticmethod
def make_unique_nickname(nickname):
if User.query.filter_by(nickname=nickname).first() is None:
return nickname
version = 2
while True:
new_nickname = nickname + str(version)
if User.query.filter_by(nickname=new_nickname).first() is None:
break
version += 1
return new_nickname
| <commit_before>from app import db
from flask_login import UserMixin
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), index=True, unique=True)
name = db.Column(db.String(64), index=True)
email = db.Column(db.String(64), index=True, unique=True)
date = db.Column(db.DateTime)
height = db.Column(db.Float(Precision=2))
age = db.Column(db.Integer)
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def __repr__(self):
return '<User %r>' % (self.nickname)
def get_id(self):
try:
return unicode(self.id) # python2
except NameError:
return str(self.id) # python3
@staticmethod
def make_unique_nickname(nickname):
if User.query.filter_by(nickname=nickname).first() is None:
return nickname
version = 2
while True:
new_nickname = nickname + str(version)
if User.query.filter_by(nickname=new_nickname).first() is None:
break
version += 1
return new_nickname
<commit_msg>Modify to fit with PEP8 standard<commit_after> | from app import db
from flask_login import UserMixin
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), index=True, unique=True)
name = db.Column(db.String(64), index=True)
email = db.Column(db.String(64), index=True, unique=True)
date = db.Column(db.DateTime)
height = db.Column(db.Float(Precision=2))
age = db.Column(db.Integer)
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def __repr__(self):
return '<User %r>' % (self.nickname)
def get_id(self):
try:
return unicode(self.id)
except NameError:
return str(self.id)
@staticmethod
def make_unique_nickname(nickname):
if User.query.filter_by(nickname=nickname).first() is None:
return nickname
version = 2
while True:
new_nickname = nickname + str(version)
if User.query.filter_by(nickname=new_nickname).first() is None:
break
version += 1
return new_nickname
| from app import db
from flask_login import UserMixin
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), index=True, unique=True)
name = db.Column(db.String(64), index=True)
email = db.Column(db.String(64), index=True, unique=True)
date = db.Column(db.DateTime)
height = db.Column(db.Float(Precision=2))
age = db.Column(db.Integer)
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def __repr__(self):
return '<User %r>' % (self.nickname)
def get_id(self):
try:
return unicode(self.id) # python2
except NameError:
return str(self.id) # python3
@staticmethod
def make_unique_nickname(nickname):
if User.query.filter_by(nickname=nickname).first() is None:
return nickname
version = 2
while True:
new_nickname = nickname + str(version)
if User.query.filter_by(nickname=new_nickname).first() is None:
break
version += 1
return new_nickname
Modify to fit with PEP8 standardfrom app import db
from flask_login import UserMixin
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), index=True, unique=True)
name = db.Column(db.String(64), index=True)
email = db.Column(db.String(64), index=True, unique=True)
date = db.Column(db.DateTime)
height = db.Column(db.Float(Precision=2))
age = db.Column(db.Integer)
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def __repr__(self):
return '<User %r>' % (self.nickname)
def get_id(self):
try:
return unicode(self.id)
except NameError:
return str(self.id)
@staticmethod
def make_unique_nickname(nickname):
if User.query.filter_by(nickname=nickname).first() is None:
return nickname
version = 2
while True:
new_nickname = nickname + str(version)
if User.query.filter_by(nickname=new_nickname).first() is None:
break
version += 1
return new_nickname
| <commit_before>from app import db
from flask_login import UserMixin
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), index=True, unique=True)
name = db.Column(db.String(64), index=True)
email = db.Column(db.String(64), index=True, unique=True)
date = db.Column(db.DateTime)
height = db.Column(db.Float(Precision=2))
age = db.Column(db.Integer)
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def __repr__(self):
return '<User %r>' % (self.nickname)
def get_id(self):
try:
return unicode(self.id) # python2
except NameError:
return str(self.id) # python3
@staticmethod
def make_unique_nickname(nickname):
if User.query.filter_by(nickname=nickname).first() is None:
return nickname
version = 2
while True:
new_nickname = nickname + str(version)
if User.query.filter_by(nickname=new_nickname).first() is None:
break
version += 1
return new_nickname
<commit_msg>Modify to fit with PEP8 standard<commit_after>from app import db
from flask_login import UserMixin
class User(UserMixin, db.Model):
__tablename__ = 'users'
id = db.Column(db.Integer, primary_key=True)
nickname = db.Column(db.String(64), index=True, unique=True)
name = db.Column(db.String(64), index=True)
email = db.Column(db.String(64), index=True, unique=True)
date = db.Column(db.DateTime)
height = db.Column(db.Float(Precision=2))
age = db.Column(db.Integer)
@property
def is_authenticated(self):
return True
@property
def is_active(self):
return True
@property
def is_anonymous(self):
return False
def __repr__(self):
return '<User %r>' % (self.nickname)
def get_id(self):
try:
return unicode(self.id)
except NameError:
return str(self.id)
@staticmethod
def make_unique_nickname(nickname):
if User.query.filter_by(nickname=nickname).first() is None:
return nickname
version = 2
while True:
new_nickname = nickname + str(version)
if User.query.filter_by(nickname=new_nickname).first() is None:
break
version += 1
return new_nickname
|
b54a6353a746d54869a7cadca1bdcfb1e1cd3d51 | moviemanager/models.py | moviemanager/models.py | from django.db import models
class Movie(models.Model):
tmdb_id = models.IntegerField() | from django.contrib.auth.models import User
from django.db import models
class Movie(models.Model):
tmdb_id = models.IntegerField()
score = models.IntegerField()
submitter = models.ForeignKey(User) | Add some extra fields to movie model | Add some extra fields to movie model
| Python | mit | simon-andrews/movieman2,simon-andrews/movieman2 | from django.db import models
class Movie(models.Model):
tmdb_id = models.IntegerField()Add some extra fields to movie model | from django.contrib.auth.models import User
from django.db import models
class Movie(models.Model):
tmdb_id = models.IntegerField()
score = models.IntegerField()
submitter = models.ForeignKey(User) | <commit_before>from django.db import models
class Movie(models.Model):
tmdb_id = models.IntegerField()<commit_msg>Add some extra fields to movie model<commit_after> | from django.contrib.auth.models import User
from django.db import models
class Movie(models.Model):
tmdb_id = models.IntegerField()
score = models.IntegerField()
submitter = models.ForeignKey(User) | from django.db import models
class Movie(models.Model):
tmdb_id = models.IntegerField()Add some extra fields to movie modelfrom django.contrib.auth.models import User
from django.db import models
class Movie(models.Model):
tmdb_id = models.IntegerField()
score = models.IntegerField()
submitter = models.ForeignKey(User) | <commit_before>from django.db import models
class Movie(models.Model):
tmdb_id = models.IntegerField()<commit_msg>Add some extra fields to movie model<commit_after>from django.contrib.auth.models import User
from django.db import models
class Movie(models.Model):
tmdb_id = models.IntegerField()
score = models.IntegerField()
submitter = models.ForeignKey(User) |
43b077050cd0e914fbe7398f9077e787c496afe4 | runtests.py | runtests.py | #!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django_pandas',
'django_pandas.tests',
),
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"USER": "",
"PASSWORD": "",
"HOST": "",
"PORT": "",
}
},
MIDDLEWARE_CLASSES = ()
)
settings.configure(**settings_dict)
if django.VERSION >= (1, 7):
django.setup()
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(
verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
| #!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django_pandas',
'django_pandas.tests',
),
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"USER": "",
"PASSWORD": "",
"HOST": "",
"PORT": "",
}
},
MIDDLEWARE_CLASSES = ()
)
settings.configure(**settings_dict)
if django.VERSION >= (1, 7):
django.setup()
def runtests(*test_args):
if not test_args:
test_args = ['django_pandas']
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
if django.VERSION < (1, 8):
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
else:
from django.test.runner import DiscoverRunner
failures = DiscoverRunner(
verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
| Add support for django 1.8 by using test runner | Add support for django 1.8 by using test runner
| Python | bsd-3-clause | sternb0t/django-pandas,perpetua1/django-pandas,arcticshores/django-pandas,chrisdev/django-pandas | #!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django_pandas',
'django_pandas.tests',
),
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"USER": "",
"PASSWORD": "",
"HOST": "",
"PORT": "",
}
},
MIDDLEWARE_CLASSES = ()
)
settings.configure(**settings_dict)
if django.VERSION >= (1, 7):
django.setup()
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(
verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
Add support for django 1.8 by using test runner | #!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django_pandas',
'django_pandas.tests',
),
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"USER": "",
"PASSWORD": "",
"HOST": "",
"PORT": "",
}
},
MIDDLEWARE_CLASSES = ()
)
settings.configure(**settings_dict)
if django.VERSION >= (1, 7):
django.setup()
def runtests(*test_args):
if not test_args:
test_args = ['django_pandas']
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
if django.VERSION < (1, 8):
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
else:
from django.test.runner import DiscoverRunner
failures = DiscoverRunner(
verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
| <commit_before>#!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django_pandas',
'django_pandas.tests',
),
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"USER": "",
"PASSWORD": "",
"HOST": "",
"PORT": "",
}
},
MIDDLEWARE_CLASSES = ()
)
settings.configure(**settings_dict)
if django.VERSION >= (1, 7):
django.setup()
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(
verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
<commit_msg>Add support for django 1.8 by using test runner<commit_after> | #!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django_pandas',
'django_pandas.tests',
),
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"USER": "",
"PASSWORD": "",
"HOST": "",
"PORT": "",
}
},
MIDDLEWARE_CLASSES = ()
)
settings.configure(**settings_dict)
if django.VERSION >= (1, 7):
django.setup()
def runtests(*test_args):
if not test_args:
test_args = ['django_pandas']
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
if django.VERSION < (1, 8):
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
else:
from django.test.runner import DiscoverRunner
failures = DiscoverRunner(
verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
| #!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django_pandas',
'django_pandas.tests',
),
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"USER": "",
"PASSWORD": "",
"HOST": "",
"PORT": "",
}
},
MIDDLEWARE_CLASSES = ()
)
settings.configure(**settings_dict)
if django.VERSION >= (1, 7):
django.setup()
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(
verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
Add support for django 1.8 by using test runner#!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django_pandas',
'django_pandas.tests',
),
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"USER": "",
"PASSWORD": "",
"HOST": "",
"PORT": "",
}
},
MIDDLEWARE_CLASSES = ()
)
settings.configure(**settings_dict)
if django.VERSION >= (1, 7):
django.setup()
def runtests(*test_args):
if not test_args:
test_args = ['django_pandas']
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
if django.VERSION < (1, 8):
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
else:
from django.test.runner import DiscoverRunner
failures = DiscoverRunner(
verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
| <commit_before>#!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django_pandas',
'django_pandas.tests',
),
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"USER": "",
"PASSWORD": "",
"HOST": "",
"PORT": "",
}
},
MIDDLEWARE_CLASSES = ()
)
settings.configure(**settings_dict)
if django.VERSION >= (1, 7):
django.setup()
def runtests(*test_args):
if not test_args:
test_args = ['tests']
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(
verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
<commit_msg>Add support for django 1.8 by using test runner<commit_after>#!/usr/bin/env python
import os
import sys
import django
from django.conf import settings
if not settings.configured:
settings_dict = dict(
INSTALLED_APPS=(
'django.contrib.contenttypes',
'django_pandas',
'django_pandas.tests',
),
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"USER": "",
"PASSWORD": "",
"HOST": "",
"PORT": "",
}
},
MIDDLEWARE_CLASSES = ()
)
settings.configure(**settings_dict)
if django.VERSION >= (1, 7):
django.setup()
def runtests(*test_args):
if not test_args:
test_args = ['django_pandas']
parent = os.path.dirname(os.path.abspath(__file__))
sys.path.insert(0, parent)
if django.VERSION < (1, 8):
from django.test.simple import DjangoTestSuiteRunner
failures = DjangoTestSuiteRunner(
verbosity=1, interactive=True, failfast=False).run_tests(['tests'])
sys.exit(failures)
else:
from django.test.runner import DiscoverRunner
failures = DiscoverRunner(
verbosity=1, interactive=True, failfast=False).run_tests(test_args)
sys.exit(failures)
if __name__ == '__main__':
runtests()
|
894086bda2545fc7d094def6f925b96905920992 | isso/utils/http.py | isso/utils/http.py | # -*- encoding: utf-8 -*-
import socket
try:
import httplib
except ImportError:
import http.client as httplib
from isso.wsgi import urlsplit
class curl(object):
"""Easy to use wrapper around :module:`httplib`. Use as context-manager
so we can close the response properly.
.. code-block:: python
with http.curl('GET', 'http://localhost:8080', '/') as resp:
if resp: # may be None if request failed
return resp.status
"""
def __init__(self, method, host, path, timeout=3):
self.method = method
self.host = host
self.path = path
self.timeout = timeout
def __enter__(self):
host, port, ssl = urlsplit(self.host)
http = httplib.HTTPSConnection if ssl else httplib.HTTPConnection
self.con = http(host, port, timeout=self.timeout)
try:
self.con.request(self.method, self.path)
except (httplib.HTTPException, socket.error):
return None
return self.con.getresponse()
def __exit__(self, exc_type, exc_value, traceback):
self.con.close()
| # -*- encoding: utf-8 -*-
import socket
try:
import httplib
except ImportError:
import http.client as httplib
from isso.wsgi import urlsplit
class curl(object):
"""Easy to use wrapper around :module:`httplib`. Use as context-manager
so we can close the response properly.
.. code-block:: python
with http.curl('GET', 'http://localhost:8080', '/') as resp:
if resp: # may be None if request failed
return resp.status
"""
def __init__(self, method, host, path, timeout=3):
self.method = method
self.host = host
self.path = path
self.timeout = timeout
def __enter__(self):
host, port, ssl = urlsplit(self.host)
http = httplib.HTTPSConnection if ssl else httplib.HTTPConnection
self.con = http(host, port, timeout=self.timeout)
try:
self.con.request(self.method, self.path)
except (httplib.HTTPException, socket.error):
return None
try:
return self.con.getresponse()
except (socket.timeout, socket.error):
return None
def __exit__(self, exc_type, exc_value, traceback):
self.con.close()
| Fix catch socket timeout and error exceptions | Fix catch socket timeout and error exceptions | Python | mit | jiumx60rus/isso,janusnic/isso,WQuanfeng/isso,mathstuf/isso,janusnic/isso,Mushiyo/isso,posativ/isso,xuhdev/isso,jelmer/isso,princesuke/isso,mathstuf/isso,princesuke/isso,WQuanfeng/isso,jelmer/isso,jelmer/isso,xuhdev/isso,posativ/isso,Mushiyo/isso,jelmer/isso,xuhdev/isso,princesuke/isso,posativ/isso,jiumx60rus/isso,mathstuf/isso,Mushiyo/isso,WQuanfeng/isso,Mushiyo/isso,jiumx60rus/isso,janusnic/isso,posativ/isso | # -*- encoding: utf-8 -*-
import socket
try:
import httplib
except ImportError:
import http.client as httplib
from isso.wsgi import urlsplit
class curl(object):
"""Easy to use wrapper around :module:`httplib`. Use as context-manager
so we can close the response properly.
.. code-block:: python
with http.curl('GET', 'http://localhost:8080', '/') as resp:
if resp: # may be None if request failed
return resp.status
"""
def __init__(self, method, host, path, timeout=3):
self.method = method
self.host = host
self.path = path
self.timeout = timeout
def __enter__(self):
host, port, ssl = urlsplit(self.host)
http = httplib.HTTPSConnection if ssl else httplib.HTTPConnection
self.con = http(host, port, timeout=self.timeout)
try:
self.con.request(self.method, self.path)
except (httplib.HTTPException, socket.error):
return None
return self.con.getresponse()
def __exit__(self, exc_type, exc_value, traceback):
self.con.close()
Fix catch socket timeout and error exceptions | # -*- encoding: utf-8 -*-
import socket
try:
import httplib
except ImportError:
import http.client as httplib
from isso.wsgi import urlsplit
class curl(object):
"""Easy to use wrapper around :module:`httplib`. Use as context-manager
so we can close the response properly.
.. code-block:: python
with http.curl('GET', 'http://localhost:8080', '/') as resp:
if resp: # may be None if request failed
return resp.status
"""
def __init__(self, method, host, path, timeout=3):
self.method = method
self.host = host
self.path = path
self.timeout = timeout
def __enter__(self):
host, port, ssl = urlsplit(self.host)
http = httplib.HTTPSConnection if ssl else httplib.HTTPConnection
self.con = http(host, port, timeout=self.timeout)
try:
self.con.request(self.method, self.path)
except (httplib.HTTPException, socket.error):
return None
try:
return self.con.getresponse()
except (socket.timeout, socket.error):
return None
def __exit__(self, exc_type, exc_value, traceback):
self.con.close()
| <commit_before># -*- encoding: utf-8 -*-
import socket
try:
import httplib
except ImportError:
import http.client as httplib
from isso.wsgi import urlsplit
class curl(object):
"""Easy to use wrapper around :module:`httplib`. Use as context-manager
so we can close the response properly.
.. code-block:: python
with http.curl('GET', 'http://localhost:8080', '/') as resp:
if resp: # may be None if request failed
return resp.status
"""
def __init__(self, method, host, path, timeout=3):
self.method = method
self.host = host
self.path = path
self.timeout = timeout
def __enter__(self):
host, port, ssl = urlsplit(self.host)
http = httplib.HTTPSConnection if ssl else httplib.HTTPConnection
self.con = http(host, port, timeout=self.timeout)
try:
self.con.request(self.method, self.path)
except (httplib.HTTPException, socket.error):
return None
return self.con.getresponse()
def __exit__(self, exc_type, exc_value, traceback):
self.con.close()
<commit_msg>Fix catch socket timeout and error exceptions<commit_after> | # -*- encoding: utf-8 -*-
import socket
try:
import httplib
except ImportError:
import http.client as httplib
from isso.wsgi import urlsplit
class curl(object):
"""Easy to use wrapper around :module:`httplib`. Use as context-manager
so we can close the response properly.
.. code-block:: python
with http.curl('GET', 'http://localhost:8080', '/') as resp:
if resp: # may be None if request failed
return resp.status
"""
def __init__(self, method, host, path, timeout=3):
self.method = method
self.host = host
self.path = path
self.timeout = timeout
def __enter__(self):
host, port, ssl = urlsplit(self.host)
http = httplib.HTTPSConnection if ssl else httplib.HTTPConnection
self.con = http(host, port, timeout=self.timeout)
try:
self.con.request(self.method, self.path)
except (httplib.HTTPException, socket.error):
return None
try:
return self.con.getresponse()
except (socket.timeout, socket.error):
return None
def __exit__(self, exc_type, exc_value, traceback):
self.con.close()
| # -*- encoding: utf-8 -*-
import socket
try:
import httplib
except ImportError:
import http.client as httplib
from isso.wsgi import urlsplit
class curl(object):
"""Easy to use wrapper around :module:`httplib`. Use as context-manager
so we can close the response properly.
.. code-block:: python
with http.curl('GET', 'http://localhost:8080', '/') as resp:
if resp: # may be None if request failed
return resp.status
"""
def __init__(self, method, host, path, timeout=3):
self.method = method
self.host = host
self.path = path
self.timeout = timeout
def __enter__(self):
host, port, ssl = urlsplit(self.host)
http = httplib.HTTPSConnection if ssl else httplib.HTTPConnection
self.con = http(host, port, timeout=self.timeout)
try:
self.con.request(self.method, self.path)
except (httplib.HTTPException, socket.error):
return None
return self.con.getresponse()
def __exit__(self, exc_type, exc_value, traceback):
self.con.close()
Fix catch socket timeout and error exceptions# -*- encoding: utf-8 -*-
import socket
try:
import httplib
except ImportError:
import http.client as httplib
from isso.wsgi import urlsplit
class curl(object):
"""Easy to use wrapper around :module:`httplib`. Use as context-manager
so we can close the response properly.
.. code-block:: python
with http.curl('GET', 'http://localhost:8080', '/') as resp:
if resp: # may be None if request failed
return resp.status
"""
def __init__(self, method, host, path, timeout=3):
self.method = method
self.host = host
self.path = path
self.timeout = timeout
def __enter__(self):
host, port, ssl = urlsplit(self.host)
http = httplib.HTTPSConnection if ssl else httplib.HTTPConnection
self.con = http(host, port, timeout=self.timeout)
try:
self.con.request(self.method, self.path)
except (httplib.HTTPException, socket.error):
return None
try:
return self.con.getresponse()
except (socket.timeout, socket.error):
return None
def __exit__(self, exc_type, exc_value, traceback):
self.con.close()
| <commit_before># -*- encoding: utf-8 -*-
import socket
try:
import httplib
except ImportError:
import http.client as httplib
from isso.wsgi import urlsplit
class curl(object):
"""Easy to use wrapper around :module:`httplib`. Use as context-manager
so we can close the response properly.
.. code-block:: python
with http.curl('GET', 'http://localhost:8080', '/') as resp:
if resp: # may be None if request failed
return resp.status
"""
def __init__(self, method, host, path, timeout=3):
self.method = method
self.host = host
self.path = path
self.timeout = timeout
def __enter__(self):
host, port, ssl = urlsplit(self.host)
http = httplib.HTTPSConnection if ssl else httplib.HTTPConnection
self.con = http(host, port, timeout=self.timeout)
try:
self.con.request(self.method, self.path)
except (httplib.HTTPException, socket.error):
return None
return self.con.getresponse()
def __exit__(self, exc_type, exc_value, traceback):
self.con.close()
<commit_msg>Fix catch socket timeout and error exceptions<commit_after># -*- encoding: utf-8 -*-
import socket
try:
import httplib
except ImportError:
import http.client as httplib
from isso.wsgi import urlsplit
class curl(object):
"""Easy to use wrapper around :module:`httplib`. Use as context-manager
so we can close the response properly.
.. code-block:: python
with http.curl('GET', 'http://localhost:8080', '/') as resp:
if resp: # may be None if request failed
return resp.status
"""
def __init__(self, method, host, path, timeout=3):
self.method = method
self.host = host
self.path = path
self.timeout = timeout
def __enter__(self):
host, port, ssl = urlsplit(self.host)
http = httplib.HTTPSConnection if ssl else httplib.HTTPConnection
self.con = http(host, port, timeout=self.timeout)
try:
self.con.request(self.method, self.path)
except (httplib.HTTPException, socket.error):
return None
try:
return self.con.getresponse()
except (socket.timeout, socket.error):
return None
def __exit__(self, exc_type, exc_value, traceback):
self.con.close()
|
6913ef43184eda1166541b2c59e0a82c3981d643 | spreadflow_pdf/test/test_savepdfpages.py | spreadflow_pdf/test/test_savepdfpages.py | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
from twisted.internet import defer
from mock import Mock
from testtools import ExpectedException, TestCase, run_test_with
from testtools.twistedsupport import AsynchronousDeferredRunTest
from spreadflow_core.scheduler import Scheduler
from spreadflow_delta.test.matchers import MatchesSendDeltaItemInvocation
from spreadflow_pdf.proc import SavePdfPages
class SavePdfPagesTestCase(TestCase):
pass
| from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
import pdfrw
from twisted.internet import defer
from mock import Mock, patch, mock_open, call
from testtools import ExpectedException, TestCase, run_test_with
from testtools.twistedsupport import AsynchronousDeferredRunTest
from spreadflow_core.scheduler import Scheduler
from spreadflow_delta.test.matchers import MatchesSendDeltaItemInvocation
from spreadflow_pdf.proc import SavePdfPages
class SavePdfPagesTestCase(TestCase):
def test_load_all(self):
sut = SavePdfPages(key='test_content', destkey='test_savepath')
insert = {
'inserts': ['a'],
'deletes': [],
'data': {
'a': {
'test_content': [
'pdf content of page 1',
'pdf content of page 2',
'pdf content of page 3'
],
'test_savepath': '/path/to/some/dumpfile.pdf'
}
}
}
expected = copy.deepcopy(insert)
matches = MatchesSendDeltaItemInvocation(expected, sut)
send = Mock(spec=Scheduler.send)
open_mock = mock_open()
open_mock.return_value.name = '/path/to/some/tmpXYZabc'
# FIXME, patch os.rename, os.unlink
with patch('tempfile.NamedTemporaryFile', open_mock):
with patch('pdfrw.PdfWriter', spec=pdfrw.PdfWriter) as writer_mock:
with patch('os.rename') as rename_mock:
with patch('os.unlink') as unlink_mock:
sut(insert, send)
self.assertEquals(send.call_count, 1)
self.assertThat(send.call_args, matches)
open_mock.assert_called_once_with(dir='/path/to/some', delete=False)
writer_mock.assert_called_with(version='1.3', compress=False)
writer_mock.return_value.assert_has_calls([
call.addpage(u'pdf content of page 1'),
call.addpage(u'pdf content of page 2'),
call.addpage(u'pdf content of page 3'),
call.write(open_mock.return_value)
])
rename_mock.called_once_with('/path/to/some/tmpXYZabc', '/path/to/some/dumpfile.pdf')
unlink_mock.assert_not_called()
| Add test coverage for save pages (happy path) | Add test coverage for save pages (happy path)
| Python | mit | znerol/spreadflow-pdf | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
from twisted.internet import defer
from mock import Mock
from testtools import ExpectedException, TestCase, run_test_with
from testtools.twistedsupport import AsynchronousDeferredRunTest
from spreadflow_core.scheduler import Scheduler
from spreadflow_delta.test.matchers import MatchesSendDeltaItemInvocation
from spreadflow_pdf.proc import SavePdfPages
class SavePdfPagesTestCase(TestCase):
pass
Add test coverage for save pages (happy path) | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
import pdfrw
from twisted.internet import defer
from mock import Mock, patch, mock_open, call
from testtools import ExpectedException, TestCase, run_test_with
from testtools.twistedsupport import AsynchronousDeferredRunTest
from spreadflow_core.scheduler import Scheduler
from spreadflow_delta.test.matchers import MatchesSendDeltaItemInvocation
from spreadflow_pdf.proc import SavePdfPages
class SavePdfPagesTestCase(TestCase):
def test_load_all(self):
sut = SavePdfPages(key='test_content', destkey='test_savepath')
insert = {
'inserts': ['a'],
'deletes': [],
'data': {
'a': {
'test_content': [
'pdf content of page 1',
'pdf content of page 2',
'pdf content of page 3'
],
'test_savepath': '/path/to/some/dumpfile.pdf'
}
}
}
expected = copy.deepcopy(insert)
matches = MatchesSendDeltaItemInvocation(expected, sut)
send = Mock(spec=Scheduler.send)
open_mock = mock_open()
open_mock.return_value.name = '/path/to/some/tmpXYZabc'
# FIXME, patch os.rename, os.unlink
with patch('tempfile.NamedTemporaryFile', open_mock):
with patch('pdfrw.PdfWriter', spec=pdfrw.PdfWriter) as writer_mock:
with patch('os.rename') as rename_mock:
with patch('os.unlink') as unlink_mock:
sut(insert, send)
self.assertEquals(send.call_count, 1)
self.assertThat(send.call_args, matches)
open_mock.assert_called_once_with(dir='/path/to/some', delete=False)
writer_mock.assert_called_with(version='1.3', compress=False)
writer_mock.return_value.assert_has_calls([
call.addpage(u'pdf content of page 1'),
call.addpage(u'pdf content of page 2'),
call.addpage(u'pdf content of page 3'),
call.write(open_mock.return_value)
])
rename_mock.called_once_with('/path/to/some/tmpXYZabc', '/path/to/some/dumpfile.pdf')
unlink_mock.assert_not_called()
| <commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
from twisted.internet import defer
from mock import Mock
from testtools import ExpectedException, TestCase, run_test_with
from testtools.twistedsupport import AsynchronousDeferredRunTest
from spreadflow_core.scheduler import Scheduler
from spreadflow_delta.test.matchers import MatchesSendDeltaItemInvocation
from spreadflow_pdf.proc import SavePdfPages
class SavePdfPagesTestCase(TestCase):
pass
<commit_msg>Add test coverage for save pages (happy path)<commit_after> | from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
import pdfrw
from twisted.internet import defer
from mock import Mock, patch, mock_open, call
from testtools import ExpectedException, TestCase, run_test_with
from testtools.twistedsupport import AsynchronousDeferredRunTest
from spreadflow_core.scheduler import Scheduler
from spreadflow_delta.test.matchers import MatchesSendDeltaItemInvocation
from spreadflow_pdf.proc import SavePdfPages
class SavePdfPagesTestCase(TestCase):
def test_load_all(self):
sut = SavePdfPages(key='test_content', destkey='test_savepath')
insert = {
'inserts': ['a'],
'deletes': [],
'data': {
'a': {
'test_content': [
'pdf content of page 1',
'pdf content of page 2',
'pdf content of page 3'
],
'test_savepath': '/path/to/some/dumpfile.pdf'
}
}
}
expected = copy.deepcopy(insert)
matches = MatchesSendDeltaItemInvocation(expected, sut)
send = Mock(spec=Scheduler.send)
open_mock = mock_open()
open_mock.return_value.name = '/path/to/some/tmpXYZabc'
# FIXME, patch os.rename, os.unlink
with patch('tempfile.NamedTemporaryFile', open_mock):
with patch('pdfrw.PdfWriter', spec=pdfrw.PdfWriter) as writer_mock:
with patch('os.rename') as rename_mock:
with patch('os.unlink') as unlink_mock:
sut(insert, send)
self.assertEquals(send.call_count, 1)
self.assertThat(send.call_args, matches)
open_mock.assert_called_once_with(dir='/path/to/some', delete=False)
writer_mock.assert_called_with(version='1.3', compress=False)
writer_mock.return_value.assert_has_calls([
call.addpage(u'pdf content of page 1'),
call.addpage(u'pdf content of page 2'),
call.addpage(u'pdf content of page 3'),
call.write(open_mock.return_value)
])
rename_mock.called_once_with('/path/to/some/tmpXYZabc', '/path/to/some/dumpfile.pdf')
unlink_mock.assert_not_called()
| from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
from twisted.internet import defer
from mock import Mock
from testtools import ExpectedException, TestCase, run_test_with
from testtools.twistedsupport import AsynchronousDeferredRunTest
from spreadflow_core.scheduler import Scheduler
from spreadflow_delta.test.matchers import MatchesSendDeltaItemInvocation
from spreadflow_pdf.proc import SavePdfPages
class SavePdfPagesTestCase(TestCase):
pass
Add test coverage for save pages (happy path)from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
import pdfrw
from twisted.internet import defer
from mock import Mock, patch, mock_open, call
from testtools import ExpectedException, TestCase, run_test_with
from testtools.twistedsupport import AsynchronousDeferredRunTest
from spreadflow_core.scheduler import Scheduler
from spreadflow_delta.test.matchers import MatchesSendDeltaItemInvocation
from spreadflow_pdf.proc import SavePdfPages
class SavePdfPagesTestCase(TestCase):
def test_load_all(self):
sut = SavePdfPages(key='test_content', destkey='test_savepath')
insert = {
'inserts': ['a'],
'deletes': [],
'data': {
'a': {
'test_content': [
'pdf content of page 1',
'pdf content of page 2',
'pdf content of page 3'
],
'test_savepath': '/path/to/some/dumpfile.pdf'
}
}
}
expected = copy.deepcopy(insert)
matches = MatchesSendDeltaItemInvocation(expected, sut)
send = Mock(spec=Scheduler.send)
open_mock = mock_open()
open_mock.return_value.name = '/path/to/some/tmpXYZabc'
# FIXME, patch os.rename, os.unlink
with patch('tempfile.NamedTemporaryFile', open_mock):
with patch('pdfrw.PdfWriter', spec=pdfrw.PdfWriter) as writer_mock:
with patch('os.rename') as rename_mock:
with patch('os.unlink') as unlink_mock:
sut(insert, send)
self.assertEquals(send.call_count, 1)
self.assertThat(send.call_args, matches)
open_mock.assert_called_once_with(dir='/path/to/some', delete=False)
writer_mock.assert_called_with(version='1.3', compress=False)
writer_mock.return_value.assert_has_calls([
call.addpage(u'pdf content of page 1'),
call.addpage(u'pdf content of page 2'),
call.addpage(u'pdf content of page 3'),
call.write(open_mock.return_value)
])
rename_mock.called_once_with('/path/to/some/tmpXYZabc', '/path/to/some/dumpfile.pdf')
unlink_mock.assert_not_called()
| <commit_before>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
from twisted.internet import defer
from mock import Mock
from testtools import ExpectedException, TestCase, run_test_with
from testtools.twistedsupport import AsynchronousDeferredRunTest
from spreadflow_core.scheduler import Scheduler
from spreadflow_delta.test.matchers import MatchesSendDeltaItemInvocation
from spreadflow_pdf.proc import SavePdfPages
class SavePdfPagesTestCase(TestCase):
pass
<commit_msg>Add test coverage for save pages (happy path)<commit_after>from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import copy
import pdfrw
from twisted.internet import defer
from mock import Mock, patch, mock_open, call
from testtools import ExpectedException, TestCase, run_test_with
from testtools.twistedsupport import AsynchronousDeferredRunTest
from spreadflow_core.scheduler import Scheduler
from spreadflow_delta.test.matchers import MatchesSendDeltaItemInvocation
from spreadflow_pdf.proc import SavePdfPages
class SavePdfPagesTestCase(TestCase):
def test_load_all(self):
sut = SavePdfPages(key='test_content', destkey='test_savepath')
insert = {
'inserts': ['a'],
'deletes': [],
'data': {
'a': {
'test_content': [
'pdf content of page 1',
'pdf content of page 2',
'pdf content of page 3'
],
'test_savepath': '/path/to/some/dumpfile.pdf'
}
}
}
expected = copy.deepcopy(insert)
matches = MatchesSendDeltaItemInvocation(expected, sut)
send = Mock(spec=Scheduler.send)
open_mock = mock_open()
open_mock.return_value.name = '/path/to/some/tmpXYZabc'
# FIXME, patch os.rename, os.unlink
with patch('tempfile.NamedTemporaryFile', open_mock):
with patch('pdfrw.PdfWriter', spec=pdfrw.PdfWriter) as writer_mock:
with patch('os.rename') as rename_mock:
with patch('os.unlink') as unlink_mock:
sut(insert, send)
self.assertEquals(send.call_count, 1)
self.assertThat(send.call_args, matches)
open_mock.assert_called_once_with(dir='/path/to/some', delete=False)
writer_mock.assert_called_with(version='1.3', compress=False)
writer_mock.return_value.assert_has_calls([
call.addpage(u'pdf content of page 1'),
call.addpage(u'pdf content of page 2'),
call.addpage(u'pdf content of page 3'),
call.write(open_mock.return_value)
])
rename_mock.called_once_with('/path/to/some/tmpXYZabc', '/path/to/some/dumpfile.pdf')
unlink_mock.assert_not_called()
|
0eb20c8025a838d93a5854442640550d5bf05b0b | settings.py | settings.py | #!/usr/bin/env python
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = 'replace with Android client ID'
IOS_CLIENT_ID = 'replace with iOS client ID'
ANDROID_AUDIENCE = WEB_CLIENT_ID
| #!/usr/bin/env python
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = '757224007118-dpqfa375ra8rgbpslig7beh4jb6qd03s.apps.googleusercontent.com'
IOS_CLIENT_ID = '757224007118-nfgr65ic7dpiv5inbvta8a2b4j2h7d09.apps.googleusercontent.com'
ANDROID_AUDIENCE = WEB_CLIENT_ID
| Add android and ios client IDs | Add android and ios client IDs
| Python | apache-2.0 | elbernante/conference-central,elbernante/conference-central,elbernante/conference-central | #!/usr/bin/env python
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = 'replace with Android client ID'
IOS_CLIENT_ID = 'replace with iOS client ID'
ANDROID_AUDIENCE = WEB_CLIENT_ID
Add android and ios client IDs | #!/usr/bin/env python
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = '757224007118-dpqfa375ra8rgbpslig7beh4jb6qd03s.apps.googleusercontent.com'
IOS_CLIENT_ID = '757224007118-nfgr65ic7dpiv5inbvta8a2b4j2h7d09.apps.googleusercontent.com'
ANDROID_AUDIENCE = WEB_CLIENT_ID
| <commit_before>#!/usr/bin/env python
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = 'replace with Android client ID'
IOS_CLIENT_ID = 'replace with iOS client ID'
ANDROID_AUDIENCE = WEB_CLIENT_ID
<commit_msg>Add android and ios client IDs<commit_after> | #!/usr/bin/env python
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = '757224007118-dpqfa375ra8rgbpslig7beh4jb6qd03s.apps.googleusercontent.com'
IOS_CLIENT_ID = '757224007118-nfgr65ic7dpiv5inbvta8a2b4j2h7d09.apps.googleusercontent.com'
ANDROID_AUDIENCE = WEB_CLIENT_ID
| #!/usr/bin/env python
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = 'replace with Android client ID'
IOS_CLIENT_ID = 'replace with iOS client ID'
ANDROID_AUDIENCE = WEB_CLIENT_ID
Add android and ios client IDs#!/usr/bin/env python
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = '757224007118-dpqfa375ra8rgbpslig7beh4jb6qd03s.apps.googleusercontent.com'
IOS_CLIENT_ID = '757224007118-nfgr65ic7dpiv5inbvta8a2b4j2h7d09.apps.googleusercontent.com'
ANDROID_AUDIENCE = WEB_CLIENT_ID
| <commit_before>#!/usr/bin/env python
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = 'replace with Android client ID'
IOS_CLIENT_ID = 'replace with iOS client ID'
ANDROID_AUDIENCE = WEB_CLIENT_ID
<commit_msg>Add android and ios client IDs<commit_after>#!/usr/bin/env python
"""settings.py
Udacity conference server-side Python App Engine app user settings
$Id$
created/forked from conference.py by wesc on 2014 may 24
"""
# Replace the following lines with client IDs obtained from the APIs
# Console or Cloud Console.
WEB_CLIENT_ID = '757224007118-0lblpo8abqeantp8mvckmabupik9edk4.apps.googleusercontent.com'
ANDROID_CLIENT_ID = '757224007118-dpqfa375ra8rgbpslig7beh4jb6qd03s.apps.googleusercontent.com'
IOS_CLIENT_ID = '757224007118-nfgr65ic7dpiv5inbvta8a2b4j2h7d09.apps.googleusercontent.com'
ANDROID_AUDIENCE = WEB_CLIENT_ID
|
0b56e5d8b1da9c5b76a39cead7f4642384750c0a | utils/http.py | utils/http.py | # Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import requests
from django.conf import settings
AUTH = getattr(settings, 'BASIC_AUTH_DOMAINS', None)
def url_exists(url):
"""Check that a url (when following redirection) exists.
This is needed because Django's validators rely on Python's urllib2
which in verions < 2.6 won't follow redirects.
"""
try:
# This AUTH stuff is a hack to get around the HTTP Basic Auth on dev
# and staging to prevent partner stuff from going public.
if AUTH:
for domain, auth in AUTH.items():
if domain in url:
return 200 <= requests.head(url, auth=auth).status_code < 400
return 200 <= requests.head(url).status_code < 400
except requests.ConnectionError:
return False
| # Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import requests
def url_exists(url):
"""Check that a url (when following redirection) exists.
This is needed because Django's validators rely on Python's urllib2 which in
verions < 2.6 won't follow redirects.
"""
try:
return 200 <= requests.head(url).status_code < 400
except requests.ConnectionError:
return False
| Remove the unnecessary and never-used basic auth hack. | Remove the unnecessary and never-used basic auth hack.
| Python | agpl-3.0 | ReachingOut/unisubs,ofer43211/unisubs,ReachingOut/unisubs,ujdhesa/unisubs,eloquence/unisubs,pculture/unisubs,ujdhesa/unisubs,wevoice/wesub,ReachingOut/unisubs,wevoice/wesub,pculture/unisubs,eloquence/unisubs,norayr/unisubs,norayr/unisubs,eloquence/unisubs,pculture/unisubs,wevoice/wesub,ReachingOut/unisubs,pculture/unisubs,norayr/unisubs,ujdhesa/unisubs,ofer43211/unisubs,ofer43211/unisubs,wevoice/wesub,ofer43211/unisubs,ujdhesa/unisubs,eloquence/unisubs,norayr/unisubs | # Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import requests
from django.conf import settings
AUTH = getattr(settings, 'BASIC_AUTH_DOMAINS', None)
def url_exists(url):
"""Check that a url (when following redirection) exists.
This is needed because Django's validators rely on Python's urllib2
which in verions < 2.6 won't follow redirects.
"""
try:
# This AUTH stuff is a hack to get around the HTTP Basic Auth on dev
# and staging to prevent partner stuff from going public.
if AUTH:
for domain, auth in AUTH.items():
if domain in url:
return 200 <= requests.head(url, auth=auth).status_code < 400
return 200 <= requests.head(url).status_code < 400
except requests.ConnectionError:
return False
Remove the unnecessary and never-used basic auth hack. | # Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import requests
def url_exists(url):
"""Check that a url (when following redirection) exists.
This is needed because Django's validators rely on Python's urllib2 which in
verions < 2.6 won't follow redirects.
"""
try:
return 200 <= requests.head(url).status_code < 400
except requests.ConnectionError:
return False
| <commit_before># Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import requests
from django.conf import settings
AUTH = getattr(settings, 'BASIC_AUTH_DOMAINS', None)
def url_exists(url):
"""Check that a url (when following redirection) exists.
This is needed because Django's validators rely on Python's urllib2
which in verions < 2.6 won't follow redirects.
"""
try:
# This AUTH stuff is a hack to get around the HTTP Basic Auth on dev
# and staging to prevent partner stuff from going public.
if AUTH:
for domain, auth in AUTH.items():
if domain in url:
return 200 <= requests.head(url, auth=auth).status_code < 400
return 200 <= requests.head(url).status_code < 400
except requests.ConnectionError:
return False
<commit_msg>Remove the unnecessary and never-used basic auth hack.<commit_after> | # Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import requests
def url_exists(url):
"""Check that a url (when following redirection) exists.
This is needed because Django's validators rely on Python's urllib2 which in
verions < 2.6 won't follow redirects.
"""
try:
return 200 <= requests.head(url).status_code < 400
except requests.ConnectionError:
return False
| # Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import requests
from django.conf import settings
AUTH = getattr(settings, 'BASIC_AUTH_DOMAINS', None)
def url_exists(url):
"""Check that a url (when following redirection) exists.
This is needed because Django's validators rely on Python's urllib2
which in verions < 2.6 won't follow redirects.
"""
try:
# This AUTH stuff is a hack to get around the HTTP Basic Auth on dev
# and staging to prevent partner stuff from going public.
if AUTH:
for domain, auth in AUTH.items():
if domain in url:
return 200 <= requests.head(url, auth=auth).status_code < 400
return 200 <= requests.head(url).status_code < 400
except requests.ConnectionError:
return False
Remove the unnecessary and never-used basic auth hack.# Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import requests
def url_exists(url):
"""Check that a url (when following redirection) exists.
This is needed because Django's validators rely on Python's urllib2 which in
verions < 2.6 won't follow redirects.
"""
try:
return 200 <= requests.head(url).status_code < 400
except requests.ConnectionError:
return False
| <commit_before># Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import requests
from django.conf import settings
AUTH = getattr(settings, 'BASIC_AUTH_DOMAINS', None)
def url_exists(url):
"""Check that a url (when following redirection) exists.
This is needed because Django's validators rely on Python's urllib2
which in verions < 2.6 won't follow redirects.
"""
try:
# This AUTH stuff is a hack to get around the HTTP Basic Auth on dev
# and staging to prevent partner stuff from going public.
if AUTH:
for domain, auth in AUTH.items():
if domain in url:
return 200 <= requests.head(url, auth=auth).status_code < 400
return 200 <= requests.head(url).status_code < 400
except requests.ConnectionError:
return False
<commit_msg>Remove the unnecessary and never-used basic auth hack.<commit_after># Universal Subtitles, universalsubtitles.org
#
# Copyright (C) 2012 Participatory Culture Foundation
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see
# http://www.gnu.org/licenses/agpl-3.0.html.
import requests
def url_exists(url):
"""Check that a url (when following redirection) exists.
This is needed because Django's validators rely on Python's urllib2 which in
verions < 2.6 won't follow redirects.
"""
try:
return 200 <= requests.head(url).status_code < 400
except requests.ConnectionError:
return False
|
5dc69c3e88ab4df897c9a1d632a47de42e5bc9c0 | app.py | app.py | """Module containing factory function for the application"""
from flask import Flask, redirect
from flask_restplus import Api
from config import app_config
from api_v1 import Blueprint_apiV1
from api_v1.models import db
Api_V1 = Api(
app=Blueprint_apiV1,
title="Shopping List Api",
description="An API for a Shopping List Application",
contact="machariamarigi@gmail.com"
)
def create_app(environment):
"""Factory function for the application"""
app = Flask(__name__)
app.config.from_object(app_config[environment])
db.init_app(app)
app.register_blueprint(Blueprint_apiV1)
# add namespaces here
from api_v1 import authenticate
Api_V1.add_namespace(authenticate.auth)
from api_v1 import endpoints
Api_V1.add_namespace(endpoints.sh_ns)
@app.route('/')
def reroute():
"""Method to route root path to /api/v1"""
return redirect('/api/v1')
return app
| """Module containing factory function for the application"""
from flask import Flask, redirect
from flask_restplus import Api
from flask_cors import CORS
from config import app_config
from api_v1 import Blueprint_apiV1
from api_v1.models import db
Api_V1 = Api(
app=Blueprint_apiV1,
title="Shopping List Api",
description="An API for a Shopping List Application",
contact="machariamarigi@gmail.com"
)
def create_app(environment):
"""Factory function for the application"""
app = Flask(__name__)
app.config.from_object(app_config[environment])
db.init_app(app)
CORS(app)
app.register_blueprint(Blueprint_apiV1)
# add namespaces here
from api_v1 import authenticate
Api_V1.add_namespace(authenticate.auth)
from api_v1 import endpoints
Api_V1.add_namespace(endpoints.sh_ns)
@app.route('/')
def reroute():
"""Method to route root path to /api/v1"""
return redirect('/api/v1')
return app
| Enable Cross Origin Resource Sharing | [Update] Enable Cross Origin Resource Sharing
| Python | mit | machariamarigi/shopping_list_api | """Module containing factory function for the application"""
from flask import Flask, redirect
from flask_restplus import Api
from config import app_config
from api_v1 import Blueprint_apiV1
from api_v1.models import db
Api_V1 = Api(
app=Blueprint_apiV1,
title="Shopping List Api",
description="An API for a Shopping List Application",
contact="machariamarigi@gmail.com"
)
def create_app(environment):
"""Factory function for the application"""
app = Flask(__name__)
app.config.from_object(app_config[environment])
db.init_app(app)
app.register_blueprint(Blueprint_apiV1)
# add namespaces here
from api_v1 import authenticate
Api_V1.add_namespace(authenticate.auth)
from api_v1 import endpoints
Api_V1.add_namespace(endpoints.sh_ns)
@app.route('/')
def reroute():
"""Method to route root path to /api/v1"""
return redirect('/api/v1')
return app
[Update] Enable Cross Origin Resource Sharing | """Module containing factory function for the application"""
from flask import Flask, redirect
from flask_restplus import Api
from flask_cors import CORS
from config import app_config
from api_v1 import Blueprint_apiV1
from api_v1.models import db
Api_V1 = Api(
app=Blueprint_apiV1,
title="Shopping List Api",
description="An API for a Shopping List Application",
contact="machariamarigi@gmail.com"
)
def create_app(environment):
"""Factory function for the application"""
app = Flask(__name__)
app.config.from_object(app_config[environment])
db.init_app(app)
CORS(app)
app.register_blueprint(Blueprint_apiV1)
# add namespaces here
from api_v1 import authenticate
Api_V1.add_namespace(authenticate.auth)
from api_v1 import endpoints
Api_V1.add_namespace(endpoints.sh_ns)
@app.route('/')
def reroute():
"""Method to route root path to /api/v1"""
return redirect('/api/v1')
return app
| <commit_before>"""Module containing factory function for the application"""
from flask import Flask, redirect
from flask_restplus import Api
from config import app_config
from api_v1 import Blueprint_apiV1
from api_v1.models import db
Api_V1 = Api(
app=Blueprint_apiV1,
title="Shopping List Api",
description="An API for a Shopping List Application",
contact="machariamarigi@gmail.com"
)
def create_app(environment):
"""Factory function for the application"""
app = Flask(__name__)
app.config.from_object(app_config[environment])
db.init_app(app)
app.register_blueprint(Blueprint_apiV1)
# add namespaces here
from api_v1 import authenticate
Api_V1.add_namespace(authenticate.auth)
from api_v1 import endpoints
Api_V1.add_namespace(endpoints.sh_ns)
@app.route('/')
def reroute():
"""Method to route root path to /api/v1"""
return redirect('/api/v1')
return app
<commit_msg>[Update] Enable Cross Origin Resource Sharing<commit_after> | """Module containing factory function for the application"""
from flask import Flask, redirect
from flask_restplus import Api
from flask_cors import CORS
from config import app_config
from api_v1 import Blueprint_apiV1
from api_v1.models import db
Api_V1 = Api(
app=Blueprint_apiV1,
title="Shopping List Api",
description="An API for a Shopping List Application",
contact="machariamarigi@gmail.com"
)
def create_app(environment):
"""Factory function for the application"""
app = Flask(__name__)
app.config.from_object(app_config[environment])
db.init_app(app)
CORS(app)
app.register_blueprint(Blueprint_apiV1)
# add namespaces here
from api_v1 import authenticate
Api_V1.add_namespace(authenticate.auth)
from api_v1 import endpoints
Api_V1.add_namespace(endpoints.sh_ns)
@app.route('/')
def reroute():
"""Method to route root path to /api/v1"""
return redirect('/api/v1')
return app
| """Module containing factory function for the application"""
from flask import Flask, redirect
from flask_restplus import Api
from config import app_config
from api_v1 import Blueprint_apiV1
from api_v1.models import db
Api_V1 = Api(
app=Blueprint_apiV1,
title="Shopping List Api",
description="An API for a Shopping List Application",
contact="machariamarigi@gmail.com"
)
def create_app(environment):
"""Factory function for the application"""
app = Flask(__name__)
app.config.from_object(app_config[environment])
db.init_app(app)
app.register_blueprint(Blueprint_apiV1)
# add namespaces here
from api_v1 import authenticate
Api_V1.add_namespace(authenticate.auth)
from api_v1 import endpoints
Api_V1.add_namespace(endpoints.sh_ns)
@app.route('/')
def reroute():
"""Method to route root path to /api/v1"""
return redirect('/api/v1')
return app
[Update] Enable Cross Origin Resource Sharing"""Module containing factory function for the application"""
from flask import Flask, redirect
from flask_restplus import Api
from flask_cors import CORS
from config import app_config
from api_v1 import Blueprint_apiV1
from api_v1.models import db
Api_V1 = Api(
app=Blueprint_apiV1,
title="Shopping List Api",
description="An API for a Shopping List Application",
contact="machariamarigi@gmail.com"
)
def create_app(environment):
"""Factory function for the application"""
app = Flask(__name__)
app.config.from_object(app_config[environment])
db.init_app(app)
CORS(app)
app.register_blueprint(Blueprint_apiV1)
# add namespaces here
from api_v1 import authenticate
Api_V1.add_namespace(authenticate.auth)
from api_v1 import endpoints
Api_V1.add_namespace(endpoints.sh_ns)
@app.route('/')
def reroute():
"""Method to route root path to /api/v1"""
return redirect('/api/v1')
return app
| <commit_before>"""Module containing factory function for the application"""
from flask import Flask, redirect
from flask_restplus import Api
from config import app_config
from api_v1 import Blueprint_apiV1
from api_v1.models import db
Api_V1 = Api(
app=Blueprint_apiV1,
title="Shopping List Api",
description="An API for a Shopping List Application",
contact="machariamarigi@gmail.com"
)
def create_app(environment):
"""Factory function for the application"""
app = Flask(__name__)
app.config.from_object(app_config[environment])
db.init_app(app)
app.register_blueprint(Blueprint_apiV1)
# add namespaces here
from api_v1 import authenticate
Api_V1.add_namespace(authenticate.auth)
from api_v1 import endpoints
Api_V1.add_namespace(endpoints.sh_ns)
@app.route('/')
def reroute():
"""Method to route root path to /api/v1"""
return redirect('/api/v1')
return app
<commit_msg>[Update] Enable Cross Origin Resource Sharing<commit_after>"""Module containing factory function for the application"""
from flask import Flask, redirect
from flask_restplus import Api
from flask_cors import CORS
from config import app_config
from api_v1 import Blueprint_apiV1
from api_v1.models import db
Api_V1 = Api(
app=Blueprint_apiV1,
title="Shopping List Api",
description="An API for a Shopping List Application",
contact="machariamarigi@gmail.com"
)
def create_app(environment):
"""Factory function for the application"""
app = Flask(__name__)
app.config.from_object(app_config[environment])
db.init_app(app)
CORS(app)
app.register_blueprint(Blueprint_apiV1)
# add namespaces here
from api_v1 import authenticate
Api_V1.add_namespace(authenticate.auth)
from api_v1 import endpoints
Api_V1.add_namespace(endpoints.sh_ns)
@app.route('/')
def reroute():
"""Method to route root path to /api/v1"""
return redirect('/api/v1')
return app
|
2adc021a520baa356c46ad1316893c1cd96f3147 | knights/lexer.py | knights/lexer.py | from enum import Enum
import re
Token = Enum('Token', 'load comment text var block',)
tag_re = re.compile(
'|'.join([
r'{\!\s*(?P<load>.+?)\s*\!}',
r'{%\s*(?P<tag>.+?)\s*%}',
r'{{\s*(?P<var>.+?)\s*}}',
r'{#\s*(?P<comment>.+?)\s*#}'
]),
re.DOTALL
)
def tokenise(template):
'''A generator which yields (type, content) pairs'''
upto = 0
# XXX Track line numbers and update nodes, so we can annotate the code
for m in tag_re.finditer(template):
start, end = m.span()
if upto < start:
yield (Token.text, template[upto:start])
upto = end
load, tag, var, comment = m.groups()
if load is not None:
yield (Token.load, load)
elif tag is not None:
yield (Token.block, tag)
elif var is not None:
yield (Token.var, var)
else:
yield (Token.comment, comment)
if upto < len(template):
yield (Token.text, template[upto:])
| from enum import Enum
import re
TokenType = Enum('Token', 'load comment text var block',)
tag_re = re.compile(
'|'.join([
r'{\!\s*(?P<load>.+?)\s*\!}',
r'{%\s*(?P<tag>.+?)\s*%}',
r'{{\s*(?P<var>.+?)\s*}}',
r'{#\s*(?P<comment>.+?)\s*#}'
]),
re.DOTALL
)
class Token:
def __init__(self, mode, token, lineno=None):
self.mode = mode
self.token = token
self.lineno = lineno
def tokenise(template):
'''A generator which yields Token instances'''
upto = 0
# XXX Track line numbers and update nodes, so we can annotate the code
for m in tag_re.finditer(template):
start, end = m.span()
lineno = template.count('\n', 0, start)
if upto < start:
yield Token(TokenType.text, template[upto:start], lineno)
upto = end
load, tag, var, comment = m.groups()
if load is not None:
yield Token(TokenType.load, load, lineno)
elif tag is not None:
yield Token(TokenType.block, tag, lineno)
elif var is not None:
yield Token(TokenType.var, var, lineno)
else:
yield Token(TokenType.comment, comment, lineno)
if upto < len(template):
yield Token(TokenType.text, template[upto:], lineno)
| Rework Lexer to use Token object | Rework Lexer to use Token object
| Python | mit | funkybob/knights-templater,funkybob/knights-templater | from enum import Enum
import re
Token = Enum('Token', 'load comment text var block',)
tag_re = re.compile(
'|'.join([
r'{\!\s*(?P<load>.+?)\s*\!}',
r'{%\s*(?P<tag>.+?)\s*%}',
r'{{\s*(?P<var>.+?)\s*}}',
r'{#\s*(?P<comment>.+?)\s*#}'
]),
re.DOTALL
)
def tokenise(template):
'''A generator which yields (type, content) pairs'''
upto = 0
# XXX Track line numbers and update nodes, so we can annotate the code
for m in tag_re.finditer(template):
start, end = m.span()
if upto < start:
yield (Token.text, template[upto:start])
upto = end
load, tag, var, comment = m.groups()
if load is not None:
yield (Token.load, load)
elif tag is not None:
yield (Token.block, tag)
elif var is not None:
yield (Token.var, var)
else:
yield (Token.comment, comment)
if upto < len(template):
yield (Token.text, template[upto:])
Rework Lexer to use Token object | from enum import Enum
import re
TokenType = Enum('Token', 'load comment text var block',)
tag_re = re.compile(
'|'.join([
r'{\!\s*(?P<load>.+?)\s*\!}',
r'{%\s*(?P<tag>.+?)\s*%}',
r'{{\s*(?P<var>.+?)\s*}}',
r'{#\s*(?P<comment>.+?)\s*#}'
]),
re.DOTALL
)
class Token:
def __init__(self, mode, token, lineno=None):
self.mode = mode
self.token = token
self.lineno = lineno
def tokenise(template):
'''A generator which yields Token instances'''
upto = 0
# XXX Track line numbers and update nodes, so we can annotate the code
for m in tag_re.finditer(template):
start, end = m.span()
lineno = template.count('\n', 0, start)
if upto < start:
yield Token(TokenType.text, template[upto:start], lineno)
upto = end
load, tag, var, comment = m.groups()
if load is not None:
yield Token(TokenType.load, load, lineno)
elif tag is not None:
yield Token(TokenType.block, tag, lineno)
elif var is not None:
yield Token(TokenType.var, var, lineno)
else:
yield Token(TokenType.comment, comment, lineno)
if upto < len(template):
yield Token(TokenType.text, template[upto:], lineno)
| <commit_before>from enum import Enum
import re
Token = Enum('Token', 'load comment text var block',)
tag_re = re.compile(
'|'.join([
r'{\!\s*(?P<load>.+?)\s*\!}',
r'{%\s*(?P<tag>.+?)\s*%}',
r'{{\s*(?P<var>.+?)\s*}}',
r'{#\s*(?P<comment>.+?)\s*#}'
]),
re.DOTALL
)
def tokenise(template):
'''A generator which yields (type, content) pairs'''
upto = 0
# XXX Track line numbers and update nodes, so we can annotate the code
for m in tag_re.finditer(template):
start, end = m.span()
if upto < start:
yield (Token.text, template[upto:start])
upto = end
load, tag, var, comment = m.groups()
if load is not None:
yield (Token.load, load)
elif tag is not None:
yield (Token.block, tag)
elif var is not None:
yield (Token.var, var)
else:
yield (Token.comment, comment)
if upto < len(template):
yield (Token.text, template[upto:])
<commit_msg>Rework Lexer to use Token object<commit_after> | from enum import Enum
import re
TokenType = Enum('Token', 'load comment text var block',)
tag_re = re.compile(
'|'.join([
r'{\!\s*(?P<load>.+?)\s*\!}',
r'{%\s*(?P<tag>.+?)\s*%}',
r'{{\s*(?P<var>.+?)\s*}}',
r'{#\s*(?P<comment>.+?)\s*#}'
]),
re.DOTALL
)
class Token:
def __init__(self, mode, token, lineno=None):
self.mode = mode
self.token = token
self.lineno = lineno
def tokenise(template):
'''A generator which yields Token instances'''
upto = 0
# XXX Track line numbers and update nodes, so we can annotate the code
for m in tag_re.finditer(template):
start, end = m.span()
lineno = template.count('\n', 0, start)
if upto < start:
yield Token(TokenType.text, template[upto:start], lineno)
upto = end
load, tag, var, comment = m.groups()
if load is not None:
yield Token(TokenType.load, load, lineno)
elif tag is not None:
yield Token(TokenType.block, tag, lineno)
elif var is not None:
yield Token(TokenType.var, var, lineno)
else:
yield Token(TokenType.comment, comment, lineno)
if upto < len(template):
yield Token(TokenType.text, template[upto:], lineno)
| from enum import Enum
import re
Token = Enum('Token', 'load comment text var block',)
tag_re = re.compile(
'|'.join([
r'{\!\s*(?P<load>.+?)\s*\!}',
r'{%\s*(?P<tag>.+?)\s*%}',
r'{{\s*(?P<var>.+?)\s*}}',
r'{#\s*(?P<comment>.+?)\s*#}'
]),
re.DOTALL
)
def tokenise(template):
'''A generator which yields (type, content) pairs'''
upto = 0
# XXX Track line numbers and update nodes, so we can annotate the code
for m in tag_re.finditer(template):
start, end = m.span()
if upto < start:
yield (Token.text, template[upto:start])
upto = end
load, tag, var, comment = m.groups()
if load is not None:
yield (Token.load, load)
elif tag is not None:
yield (Token.block, tag)
elif var is not None:
yield (Token.var, var)
else:
yield (Token.comment, comment)
if upto < len(template):
yield (Token.text, template[upto:])
Rework Lexer to use Token objectfrom enum import Enum
import re
TokenType = Enum('Token', 'load comment text var block',)
tag_re = re.compile(
'|'.join([
r'{\!\s*(?P<load>.+?)\s*\!}',
r'{%\s*(?P<tag>.+?)\s*%}',
r'{{\s*(?P<var>.+?)\s*}}',
r'{#\s*(?P<comment>.+?)\s*#}'
]),
re.DOTALL
)
class Token:
def __init__(self, mode, token, lineno=None):
self.mode = mode
self.token = token
self.lineno = lineno
def tokenise(template):
'''A generator which yields Token instances'''
upto = 0
# XXX Track line numbers and update nodes, so we can annotate the code
for m in tag_re.finditer(template):
start, end = m.span()
lineno = template.count('\n', 0, start)
if upto < start:
yield Token(TokenType.text, template[upto:start], lineno)
upto = end
load, tag, var, comment = m.groups()
if load is not None:
yield Token(TokenType.load, load, lineno)
elif tag is not None:
yield Token(TokenType.block, tag, lineno)
elif var is not None:
yield Token(TokenType.var, var, lineno)
else:
yield Token(TokenType.comment, comment, lineno)
if upto < len(template):
yield Token(TokenType.text, template[upto:], lineno)
| <commit_before>from enum import Enum
import re
Token = Enum('Token', 'load comment text var block',)
tag_re = re.compile(
'|'.join([
r'{\!\s*(?P<load>.+?)\s*\!}',
r'{%\s*(?P<tag>.+?)\s*%}',
r'{{\s*(?P<var>.+?)\s*}}',
r'{#\s*(?P<comment>.+?)\s*#}'
]),
re.DOTALL
)
def tokenise(template):
'''A generator which yields (type, content) pairs'''
upto = 0
# XXX Track line numbers and update nodes, so we can annotate the code
for m in tag_re.finditer(template):
start, end = m.span()
if upto < start:
yield (Token.text, template[upto:start])
upto = end
load, tag, var, comment = m.groups()
if load is not None:
yield (Token.load, load)
elif tag is not None:
yield (Token.block, tag)
elif var is not None:
yield (Token.var, var)
else:
yield (Token.comment, comment)
if upto < len(template):
yield (Token.text, template[upto:])
<commit_msg>Rework Lexer to use Token object<commit_after>from enum import Enum
import re
TokenType = Enum('Token', 'load comment text var block',)
tag_re = re.compile(
'|'.join([
r'{\!\s*(?P<load>.+?)\s*\!}',
r'{%\s*(?P<tag>.+?)\s*%}',
r'{{\s*(?P<var>.+?)\s*}}',
r'{#\s*(?P<comment>.+?)\s*#}'
]),
re.DOTALL
)
class Token:
def __init__(self, mode, token, lineno=None):
self.mode = mode
self.token = token
self.lineno = lineno
def tokenise(template):
'''A generator which yields Token instances'''
upto = 0
# XXX Track line numbers and update nodes, so we can annotate the code
for m in tag_re.finditer(template):
start, end = m.span()
lineno = template.count('\n', 0, start)
if upto < start:
yield Token(TokenType.text, template[upto:start], lineno)
upto = end
load, tag, var, comment = m.groups()
if load is not None:
yield Token(TokenType.load, load, lineno)
elif tag is not None:
yield Token(TokenType.block, tag, lineno)
elif var is not None:
yield Token(TokenType.var, var, lineno)
else:
yield Token(TokenType.comment, comment, lineno)
if upto < len(template):
yield Token(TokenType.text, template[upto:], lineno)
|
890860f89e353e6afb45dec06e3617f0f70e1ad7 | examples/basic_datalogger.py | examples/basic_datalogger.py | from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_defaults()
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(0.8)
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print "Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print "Error occured: %s" % e
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print e
finally:
m.close()
| from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_defaults()
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print "Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print "Error occured: %s" % e
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print e
finally:
m.close()
| Remove time.sleep from example, no longer required | Datalogger: Remove time.sleep from example, no longer required
| Python | mit | liquidinstruments/pymoku,benizl/pymoku | from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_defaults()
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(0.8)
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print "Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print "Error occured: %s" % e
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print e
finally:
m.close()
Datalogger: Remove time.sleep from example, no longer required | from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_defaults()
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print "Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print "Error occured: %s" % e
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print e
finally:
m.close()
| <commit_before>from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_defaults()
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(0.8)
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print "Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print "Error occured: %s" % e
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print e
finally:
m.close()
<commit_msg>Datalogger: Remove time.sleep from example, no longer required<commit_after> | from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_defaults()
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print "Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print "Error occured: %s" % e
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print e
finally:
m.close()
| from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_defaults()
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(0.8)
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print "Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print "Error occured: %s" % e
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print e
finally:
m.close()
Datalogger: Remove time.sleep from example, no longer requiredfrom pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_defaults()
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print "Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print "Error occured: %s" % e
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print e
finally:
m.close()
| <commit_before>from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_defaults()
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
time.sleep(0.8)
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print "Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print "Error occured: %s" % e
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print e
finally:
m.close()
<commit_msg>Datalogger: Remove time.sleep from example, no longer required<commit_after>from pymoku import Moku
from pymoku.instruments import *
import time, logging
logging.basicConfig(format='%(asctime)s:%(name)s:%(levelname)s::%(message)s')
logging.getLogger('pymoku').setLevel(logging.INFO)
# Use Moku.get_by_serial() or get_by_name() if you don't know the IP
m = Moku.get_by_name('example')
i = Oscilloscope()
m.attach_instrument(i)
try:
i.set_defaults()
i.set_samplerate(10)
i.set_xmode(OSC_ROLL)
i.commit()
i.datalogger_stop()
i.datalogger_start(start=0, duration=10, use_sd=True, ch1=True, ch2=True, filetype='bin')
while True:
time.sleep(1)
trems, treme = i.datalogger_remaining()
samples = i.datalogger_samples()
print "Captured (%d samples); %d seconds from start, %d from end" % (samples, trems, treme)
if i.datalogger_completed():
break
e = i.datalogger_error()
if e:
print "Error occured: %s" % e
i.datalogger_stop()
i.datalogger_upload()
except Exception as e:
print e
finally:
m.close()
|
5bb6e416cf7c59b7a5f85b1b0b037df7667d5d88 | staticbuilder/conf.py | staticbuilder/conf.py | from appconf import AppConf
class StaticBuilderConf(AppConf):
BUILD_COMMANDS = []
COLLECT_BUILT = True
INCLUDE_FILES = ['*.css', '*.js']
class Meta:
required = [
'BUILT_ROOT',
]
| from appconf import AppConf
class StaticBuilderConf(AppConf):
BUILD_COMMANDS = []
COLLECT_BUILT = True
INCLUDE_FILES = ['*']
class Meta:
required = [
'BUILT_ROOT',
]
| Include all static files in build by default | Include all static files in build by default
| Python | mit | hzdg/django-ecstatic,hzdg/django-staticbuilder | from appconf import AppConf
class StaticBuilderConf(AppConf):
BUILD_COMMANDS = []
COLLECT_BUILT = True
INCLUDE_FILES = ['*.css', '*.js']
class Meta:
required = [
'BUILT_ROOT',
]
Include all static files in build by default | from appconf import AppConf
class StaticBuilderConf(AppConf):
BUILD_COMMANDS = []
COLLECT_BUILT = True
INCLUDE_FILES = ['*']
class Meta:
required = [
'BUILT_ROOT',
]
| <commit_before>from appconf import AppConf
class StaticBuilderConf(AppConf):
BUILD_COMMANDS = []
COLLECT_BUILT = True
INCLUDE_FILES = ['*.css', '*.js']
class Meta:
required = [
'BUILT_ROOT',
]
<commit_msg>Include all static files in build by default<commit_after> | from appconf import AppConf
class StaticBuilderConf(AppConf):
BUILD_COMMANDS = []
COLLECT_BUILT = True
INCLUDE_FILES = ['*']
class Meta:
required = [
'BUILT_ROOT',
]
| from appconf import AppConf
class StaticBuilderConf(AppConf):
BUILD_COMMANDS = []
COLLECT_BUILT = True
INCLUDE_FILES = ['*.css', '*.js']
class Meta:
required = [
'BUILT_ROOT',
]
Include all static files in build by defaultfrom appconf import AppConf
class StaticBuilderConf(AppConf):
BUILD_COMMANDS = []
COLLECT_BUILT = True
INCLUDE_FILES = ['*']
class Meta:
required = [
'BUILT_ROOT',
]
| <commit_before>from appconf import AppConf
class StaticBuilderConf(AppConf):
BUILD_COMMANDS = []
COLLECT_BUILT = True
INCLUDE_FILES = ['*.css', '*.js']
class Meta:
required = [
'BUILT_ROOT',
]
<commit_msg>Include all static files in build by default<commit_after>from appconf import AppConf
class StaticBuilderConf(AppConf):
BUILD_COMMANDS = []
COLLECT_BUILT = True
INCLUDE_FILES = ['*']
class Meta:
required = [
'BUILT_ROOT',
]
|
a97b557146edfb340ad83fd95838dc2a627ce32f | src/urls.py | src/urls.py |
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import *
urlpatterns = [
url(r'^view_all/?$', 'argcache.views.view_all', name='view_all'),
url(r'^flush/([0-9]+)/?$', 'argcache.views.flush', name='flush')
]
|
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import url
from .views import view_all, flush
urlpatterns = [
url(r'^view_all/?$', view_all, name='view_all'),
url(r'^flush/([0-9]+)/?$', flush, name='flush')
]
| Fix urlconf to avoid string view arguments to url() | Fix urlconf to avoid string view arguments to url()
| Python | agpl-3.0 | luac/django-argcache,luac/django-argcache |
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import *
urlpatterns = [
url(r'^view_all/?$', 'argcache.views.view_all', name='view_all'),
url(r'^flush/([0-9]+)/?$', 'argcache.views.flush', name='flush')
]
Fix urlconf to avoid string view arguments to url() |
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import url
from .views import view_all, flush
urlpatterns = [
url(r'^view_all/?$', view_all, name='view_all'),
url(r'^flush/([0-9]+)/?$', flush, name='flush')
]
| <commit_before>
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import *
urlpatterns = [
url(r'^view_all/?$', 'argcache.views.view_all', name='view_all'),
url(r'^flush/([0-9]+)/?$', 'argcache.views.flush', name='flush')
]
<commit_msg>Fix urlconf to avoid string view arguments to url()<commit_after> |
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import url
from .views import view_all, flush
urlpatterns = [
url(r'^view_all/?$', view_all, name='view_all'),
url(r'^flush/([0-9]+)/?$', flush, name='flush')
]
|
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import *
urlpatterns = [
url(r'^view_all/?$', 'argcache.views.view_all', name='view_all'),
url(r'^flush/([0-9]+)/?$', 'argcache.views.flush', name='flush')
]
Fix urlconf to avoid string view arguments to url()
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import url
from .views import view_all, flush
urlpatterns = [
url(r'^view_all/?$', view_all, name='view_all'),
url(r'^flush/([0-9]+)/?$', flush, name='flush')
]
| <commit_before>
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import *
urlpatterns = [
url(r'^view_all/?$', 'argcache.views.view_all', name='view_all'),
url(r'^flush/([0-9]+)/?$', 'argcache.views.flush', name='flush')
]
<commit_msg>Fix urlconf to avoid string view arguments to url()<commit_after>
__author__ = "Individual contributors (see AUTHORS file)"
__date__ = "$DATE$"
__rev__ = "$REV$"
__license__ = "AGPL v.3"
__copyright__ = """
This file is part of ArgCache.
Copyright (c) 2015 by the individual contributors
(see AUTHORS file)
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from django.conf.urls import url
from .views import view_all, flush
urlpatterns = [
url(r'^view_all/?$', view_all, name='view_all'),
url(r'^flush/([0-9]+)/?$', flush, name='flush')
]
|
1ea6a0b43e4b05bdb743b0e2be86174062581d03 | errors.py | errors.py | class Errors:
'''enum-like values to use as exit codes'''
USAGE_ERROR = 1
DEPENDENCY_NOT_FOUND = 2
VERSION_ERROR = 3
GIT_CONFIG_ERROR = 4
STRANGE_ENVIRONMENT = 5
EATING_WITH_STAGED_CHANGES = 6
BAD_GIT_DIRECTORY = 7
BRANCH_EXISTS_ON_INIT = 8
NO_SUCH_BRANCH = 9
REPOSITORY_NOT_INITIALIZED = 10
GIT_DIRECTORY_RELATIVE = 11
FINDING_HEAD = 12
BAD_TREE = 13
GIT_DIRECTORY_MISSING = 14
| class Errors:
'''enum-like values to use as exit codes'''
USAGE_ERROR = 1
DEPENDENCY_NOT_FOUND = 2
VERSION_ERROR = 3
GIT_CONFIG_ERROR = 4
STRANGE_ENVIRONMENT = 5
EATING_WITH_STAGED_CHANGES = 6
BAD_GIT_DIRECTORY = 7
BRANCH_EXISTS_ON_INIT = 8
NO_SUCH_BRANCH = 9
REPOSITORY_NOT_INITIALIZED = 10
GIT_DIRECTORY_RELATIVE = 11
FINDING_HEAD = 12
BAD_TREE = 13
GIT_DIRECTORY_MISSING = 14
DIRECTORY_TO_BACKUP_MISSING = 15
| Add a new error code | Add a new error code
| Python | lgpl-2.1 | mhl/gib,mhl/gib | class Errors:
'''enum-like values to use as exit codes'''
USAGE_ERROR = 1
DEPENDENCY_NOT_FOUND = 2
VERSION_ERROR = 3
GIT_CONFIG_ERROR = 4
STRANGE_ENVIRONMENT = 5
EATING_WITH_STAGED_CHANGES = 6
BAD_GIT_DIRECTORY = 7
BRANCH_EXISTS_ON_INIT = 8
NO_SUCH_BRANCH = 9
REPOSITORY_NOT_INITIALIZED = 10
GIT_DIRECTORY_RELATIVE = 11
FINDING_HEAD = 12
BAD_TREE = 13
GIT_DIRECTORY_MISSING = 14
Add a new error code | class Errors:
'''enum-like values to use as exit codes'''
USAGE_ERROR = 1
DEPENDENCY_NOT_FOUND = 2
VERSION_ERROR = 3
GIT_CONFIG_ERROR = 4
STRANGE_ENVIRONMENT = 5
EATING_WITH_STAGED_CHANGES = 6
BAD_GIT_DIRECTORY = 7
BRANCH_EXISTS_ON_INIT = 8
NO_SUCH_BRANCH = 9
REPOSITORY_NOT_INITIALIZED = 10
GIT_DIRECTORY_RELATIVE = 11
FINDING_HEAD = 12
BAD_TREE = 13
GIT_DIRECTORY_MISSING = 14
DIRECTORY_TO_BACKUP_MISSING = 15
| <commit_before>class Errors:
'''enum-like values to use as exit codes'''
USAGE_ERROR = 1
DEPENDENCY_NOT_FOUND = 2
VERSION_ERROR = 3
GIT_CONFIG_ERROR = 4
STRANGE_ENVIRONMENT = 5
EATING_WITH_STAGED_CHANGES = 6
BAD_GIT_DIRECTORY = 7
BRANCH_EXISTS_ON_INIT = 8
NO_SUCH_BRANCH = 9
REPOSITORY_NOT_INITIALIZED = 10
GIT_DIRECTORY_RELATIVE = 11
FINDING_HEAD = 12
BAD_TREE = 13
GIT_DIRECTORY_MISSING = 14
<commit_msg>Add a new error code<commit_after> | class Errors:
'''enum-like values to use as exit codes'''
USAGE_ERROR = 1
DEPENDENCY_NOT_FOUND = 2
VERSION_ERROR = 3
GIT_CONFIG_ERROR = 4
STRANGE_ENVIRONMENT = 5
EATING_WITH_STAGED_CHANGES = 6
BAD_GIT_DIRECTORY = 7
BRANCH_EXISTS_ON_INIT = 8
NO_SUCH_BRANCH = 9
REPOSITORY_NOT_INITIALIZED = 10
GIT_DIRECTORY_RELATIVE = 11
FINDING_HEAD = 12
BAD_TREE = 13
GIT_DIRECTORY_MISSING = 14
DIRECTORY_TO_BACKUP_MISSING = 15
| class Errors:
'''enum-like values to use as exit codes'''
USAGE_ERROR = 1
DEPENDENCY_NOT_FOUND = 2
VERSION_ERROR = 3
GIT_CONFIG_ERROR = 4
STRANGE_ENVIRONMENT = 5
EATING_WITH_STAGED_CHANGES = 6
BAD_GIT_DIRECTORY = 7
BRANCH_EXISTS_ON_INIT = 8
NO_SUCH_BRANCH = 9
REPOSITORY_NOT_INITIALIZED = 10
GIT_DIRECTORY_RELATIVE = 11
FINDING_HEAD = 12
BAD_TREE = 13
GIT_DIRECTORY_MISSING = 14
Add a new error codeclass Errors:
'''enum-like values to use as exit codes'''
USAGE_ERROR = 1
DEPENDENCY_NOT_FOUND = 2
VERSION_ERROR = 3
GIT_CONFIG_ERROR = 4
STRANGE_ENVIRONMENT = 5
EATING_WITH_STAGED_CHANGES = 6
BAD_GIT_DIRECTORY = 7
BRANCH_EXISTS_ON_INIT = 8
NO_SUCH_BRANCH = 9
REPOSITORY_NOT_INITIALIZED = 10
GIT_DIRECTORY_RELATIVE = 11
FINDING_HEAD = 12
BAD_TREE = 13
GIT_DIRECTORY_MISSING = 14
DIRECTORY_TO_BACKUP_MISSING = 15
| <commit_before>class Errors:
'''enum-like values to use as exit codes'''
USAGE_ERROR = 1
DEPENDENCY_NOT_FOUND = 2
VERSION_ERROR = 3
GIT_CONFIG_ERROR = 4
STRANGE_ENVIRONMENT = 5
EATING_WITH_STAGED_CHANGES = 6
BAD_GIT_DIRECTORY = 7
BRANCH_EXISTS_ON_INIT = 8
NO_SUCH_BRANCH = 9
REPOSITORY_NOT_INITIALIZED = 10
GIT_DIRECTORY_RELATIVE = 11
FINDING_HEAD = 12
BAD_TREE = 13
GIT_DIRECTORY_MISSING = 14
<commit_msg>Add a new error code<commit_after>class Errors:
'''enum-like values to use as exit codes'''
USAGE_ERROR = 1
DEPENDENCY_NOT_FOUND = 2
VERSION_ERROR = 3
GIT_CONFIG_ERROR = 4
STRANGE_ENVIRONMENT = 5
EATING_WITH_STAGED_CHANGES = 6
BAD_GIT_DIRECTORY = 7
BRANCH_EXISTS_ON_INIT = 8
NO_SUCH_BRANCH = 9
REPOSITORY_NOT_INITIALIZED = 10
GIT_DIRECTORY_RELATIVE = 11
FINDING_HEAD = 12
BAD_TREE = 13
GIT_DIRECTORY_MISSING = 14
DIRECTORY_TO_BACKUP_MISSING = 15
|
c29f55196f97ef3fa70124628fd94c78b90162ea | python/getmonotime.py | python/getmonotime.py | import getopt, sys
if __name__ == '__main__':
sippy_path = None
try:
opts, args = getopt.getopt(sys.argv[1:], 's:S:i:o:b')
except getopt.GetoptError:
usage()
for o, a in opts:
if o == '-S':
sippy_path = a.strip()
continue
if sippy_path != None:
sys.path.insert(0, sippy_path)
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_MONOTONIC
print clock_getdtime(CLOCK_MONOTONIC)
| import getopt, sys
if __name__ == '__main__':
sippy_path = None
try:
opts, args = getopt.getopt(sys.argv[1:], 'rS:')
except getopt.GetoptError:
usage()
out_realtime = False
for o, a in opts:
if o == '-S':
sippy_path = a.strip()
continue
if o == '-r':
out_realtime = True
if sippy_path != None:
sys.path.insert(0, sippy_path)
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_MONOTONIC
if not out_realtime:
print(clock_getdtime(CLOCK_MONOTONIC))
else:
from sippy.Time.clock_dtime import CLOCK_REALTIME
print("%f %f" % (clock_getdtime(CLOCK_MONOTONIC), clock_getdtime(CLOCK_REALTIME)))
| Add an option to output both realtime and monotime. | Add an option to output both realtime and monotime.
| Python | bsd-2-clause | synety-jdebp/rtpproxy,dsanders11/rtpproxy,synety-jdebp/rtpproxy,jevonearth/rtpproxy,jevonearth/rtpproxy,synety-jdebp/rtpproxy,sippy/rtpproxy,dsanders11/rtpproxy,jevonearth/rtpproxy,jevonearth/rtpproxy,dsanders11/rtpproxy,sippy/rtpproxy,synety-jdebp/rtpproxy,sippy/rtpproxy | import getopt, sys
if __name__ == '__main__':
sippy_path = None
try:
opts, args = getopt.getopt(sys.argv[1:], 's:S:i:o:b')
except getopt.GetoptError:
usage()
for o, a in opts:
if o == '-S':
sippy_path = a.strip()
continue
if sippy_path != None:
sys.path.insert(0, sippy_path)
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_MONOTONIC
print clock_getdtime(CLOCK_MONOTONIC)
Add an option to output both realtime and monotime. | import getopt, sys
if __name__ == '__main__':
sippy_path = None
try:
opts, args = getopt.getopt(sys.argv[1:], 'rS:')
except getopt.GetoptError:
usage()
out_realtime = False
for o, a in opts:
if o == '-S':
sippy_path = a.strip()
continue
if o == '-r':
out_realtime = True
if sippy_path != None:
sys.path.insert(0, sippy_path)
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_MONOTONIC
if not out_realtime:
print(clock_getdtime(CLOCK_MONOTONIC))
else:
from sippy.Time.clock_dtime import CLOCK_REALTIME
print("%f %f" % (clock_getdtime(CLOCK_MONOTONIC), clock_getdtime(CLOCK_REALTIME)))
| <commit_before>import getopt, sys
if __name__ == '__main__':
sippy_path = None
try:
opts, args = getopt.getopt(sys.argv[1:], 's:S:i:o:b')
except getopt.GetoptError:
usage()
for o, a in opts:
if o == '-S':
sippy_path = a.strip()
continue
if sippy_path != None:
sys.path.insert(0, sippy_path)
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_MONOTONIC
print clock_getdtime(CLOCK_MONOTONIC)
<commit_msg>Add an option to output both realtime and monotime.<commit_after> | import getopt, sys
if __name__ == '__main__':
sippy_path = None
try:
opts, args = getopt.getopt(sys.argv[1:], 'rS:')
except getopt.GetoptError:
usage()
out_realtime = False
for o, a in opts:
if o == '-S':
sippy_path = a.strip()
continue
if o == '-r':
out_realtime = True
if sippy_path != None:
sys.path.insert(0, sippy_path)
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_MONOTONIC
if not out_realtime:
print(clock_getdtime(CLOCK_MONOTONIC))
else:
from sippy.Time.clock_dtime import CLOCK_REALTIME
print("%f %f" % (clock_getdtime(CLOCK_MONOTONIC), clock_getdtime(CLOCK_REALTIME)))
| import getopt, sys
if __name__ == '__main__':
sippy_path = None
try:
opts, args = getopt.getopt(sys.argv[1:], 's:S:i:o:b')
except getopt.GetoptError:
usage()
for o, a in opts:
if o == '-S':
sippy_path = a.strip()
continue
if sippy_path != None:
sys.path.insert(0, sippy_path)
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_MONOTONIC
print clock_getdtime(CLOCK_MONOTONIC)
Add an option to output both realtime and monotime.import getopt, sys
if __name__ == '__main__':
sippy_path = None
try:
opts, args = getopt.getopt(sys.argv[1:], 'rS:')
except getopt.GetoptError:
usage()
out_realtime = False
for o, a in opts:
if o == '-S':
sippy_path = a.strip()
continue
if o == '-r':
out_realtime = True
if sippy_path != None:
sys.path.insert(0, sippy_path)
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_MONOTONIC
if not out_realtime:
print(clock_getdtime(CLOCK_MONOTONIC))
else:
from sippy.Time.clock_dtime import CLOCK_REALTIME
print("%f %f" % (clock_getdtime(CLOCK_MONOTONIC), clock_getdtime(CLOCK_REALTIME)))
| <commit_before>import getopt, sys
if __name__ == '__main__':
sippy_path = None
try:
opts, args = getopt.getopt(sys.argv[1:], 's:S:i:o:b')
except getopt.GetoptError:
usage()
for o, a in opts:
if o == '-S':
sippy_path = a.strip()
continue
if sippy_path != None:
sys.path.insert(0, sippy_path)
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_MONOTONIC
print clock_getdtime(CLOCK_MONOTONIC)
<commit_msg>Add an option to output both realtime and monotime.<commit_after>import getopt, sys
if __name__ == '__main__':
sippy_path = None
try:
opts, args = getopt.getopt(sys.argv[1:], 'rS:')
except getopt.GetoptError:
usage()
out_realtime = False
for o, a in opts:
if o == '-S':
sippy_path = a.strip()
continue
if o == '-r':
out_realtime = True
if sippy_path != None:
sys.path.insert(0, sippy_path)
from sippy.Time.clock_dtime import clock_getdtime, CLOCK_MONOTONIC
if not out_realtime:
print(clock_getdtime(CLOCK_MONOTONIC))
else:
from sippy.Time.clock_dtime import CLOCK_REALTIME
print("%f %f" % (clock_getdtime(CLOCK_MONOTONIC), clock_getdtime(CLOCK_REALTIME)))
|
75dbd6cea16b6d8c59ae3f26691a22419f2a8269 | winthrop/books/views.py | winthrop/books/views.py | from dal import autocomplete
from .models import Publisher
class PublisherAutocomplete(autocomplete.Select2QuerySetView):
# basic publisher autocomplete lookup, based on
# django-autocomplete-light tutorial
# restricted to staff only in url config
def get_queryset(self):
return Publisher.objects.filter(name__contains=self.q)
| from dal import autocomplete
from .models import Publisher
class PublisherAutocomplete(autocomplete.Select2QuerySetView):
# basic publisher autocomplete lookup, based on
# django-autocomplete-light tutorial
# restricted to staff only in url config
def get_queryset(self):
return Publisher.objects.filter(name__icontains=self.q)
| Fix publisher autocomplete so searches is case-insensitive | Fix publisher autocomplete so searches is case-insensitive
| Python | apache-2.0 | Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django,Princeton-CDH/winthrop-django | from dal import autocomplete
from .models import Publisher
class PublisherAutocomplete(autocomplete.Select2QuerySetView):
# basic publisher autocomplete lookup, based on
# django-autocomplete-light tutorial
# restricted to staff only in url config
def get_queryset(self):
return Publisher.objects.filter(name__contains=self.q)
Fix publisher autocomplete so searches is case-insensitive | from dal import autocomplete
from .models import Publisher
class PublisherAutocomplete(autocomplete.Select2QuerySetView):
# basic publisher autocomplete lookup, based on
# django-autocomplete-light tutorial
# restricted to staff only in url config
def get_queryset(self):
return Publisher.objects.filter(name__icontains=self.q)
| <commit_before>from dal import autocomplete
from .models import Publisher
class PublisherAutocomplete(autocomplete.Select2QuerySetView):
# basic publisher autocomplete lookup, based on
# django-autocomplete-light tutorial
# restricted to staff only in url config
def get_queryset(self):
return Publisher.objects.filter(name__contains=self.q)
<commit_msg>Fix publisher autocomplete so searches is case-insensitive<commit_after> | from dal import autocomplete
from .models import Publisher
class PublisherAutocomplete(autocomplete.Select2QuerySetView):
# basic publisher autocomplete lookup, based on
# django-autocomplete-light tutorial
# restricted to staff only in url config
def get_queryset(self):
return Publisher.objects.filter(name__icontains=self.q)
| from dal import autocomplete
from .models import Publisher
class PublisherAutocomplete(autocomplete.Select2QuerySetView):
# basic publisher autocomplete lookup, based on
# django-autocomplete-light tutorial
# restricted to staff only in url config
def get_queryset(self):
return Publisher.objects.filter(name__contains=self.q)
Fix publisher autocomplete so searches is case-insensitivefrom dal import autocomplete
from .models import Publisher
class PublisherAutocomplete(autocomplete.Select2QuerySetView):
# basic publisher autocomplete lookup, based on
# django-autocomplete-light tutorial
# restricted to staff only in url config
def get_queryset(self):
return Publisher.objects.filter(name__icontains=self.q)
| <commit_before>from dal import autocomplete
from .models import Publisher
class PublisherAutocomplete(autocomplete.Select2QuerySetView):
# basic publisher autocomplete lookup, based on
# django-autocomplete-light tutorial
# restricted to staff only in url config
def get_queryset(self):
return Publisher.objects.filter(name__contains=self.q)
<commit_msg>Fix publisher autocomplete so searches is case-insensitive<commit_after>from dal import autocomplete
from .models import Publisher
class PublisherAutocomplete(autocomplete.Select2QuerySetView):
# basic publisher autocomplete lookup, based on
# django-autocomplete-light tutorial
# restricted to staff only in url config
def get_queryset(self):
return Publisher.objects.filter(name__icontains=self.q)
|
ca2789ad15cba31449e4946494122ab271a83c92 | inspirationforge/settings/production.py | inspirationforge/settings/production.py | from .base import *
DEBUG = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
# TODO: Add MEDIA_ROOT setting.
#MEDIA_ROOT = get_secret("MEDIA_ROOT")
# Security-related settings
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# Static asset configuration
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
| from .base import *
DEBUG = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Security-related settings
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# Static asset configuration
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
| Set MEDIA_ROOT setting for Production. | Set MEDIA_ROOT setting for Production.
| Python | mit | FarmCodeGary/InspirationForge,FarmCodeGary/InspirationForge,FarmCodeGary/InspirationForge | from .base import *
DEBUG = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
# TODO: Add MEDIA_ROOT setting.
#MEDIA_ROOT = get_secret("MEDIA_ROOT")
# Security-related settings
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# Static asset configuration
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
Set MEDIA_ROOT setting for Production. | from .base import *
DEBUG = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Security-related settings
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# Static asset configuration
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
| <commit_before>from .base import *
DEBUG = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
# TODO: Add MEDIA_ROOT setting.
#MEDIA_ROOT = get_secret("MEDIA_ROOT")
# Security-related settings
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# Static asset configuration
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
<commit_msg>Set MEDIA_ROOT setting for Production.<commit_after> | from .base import *
DEBUG = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Security-related settings
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# Static asset configuration
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
| from .base import *
DEBUG = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
# TODO: Add MEDIA_ROOT setting.
#MEDIA_ROOT = get_secret("MEDIA_ROOT")
# Security-related settings
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# Static asset configuration
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
Set MEDIA_ROOT setting for Production.from .base import *
DEBUG = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Security-related settings
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# Static asset configuration
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
| <commit_before>from .base import *
DEBUG = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
# TODO: Add MEDIA_ROOT setting.
#MEDIA_ROOT = get_secret("MEDIA_ROOT")
# Security-related settings
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# Static asset configuration
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
<commit_msg>Set MEDIA_ROOT setting for Production.<commit_after>from .base import *
DEBUG = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.8/howto/static-files/
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Security-related settings
ALLOWED_HOSTS = ["*"]
SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')
CSRF_COOKIE_HTTPONLY = True
X_FRAME_OPTIONS = 'DENY'
# Static asset configuration
STATIC_URL = '/static/'
STATIC_ROOT = 'staticfiles'
STATICFILES_DIRS = (os.path.join(BASE_DIR, 'static'),)
|
3f2ab5710fb07a78c5b7f67afb785e9aab5e7695 | evergreen/core/threadpool.py | evergreen/core/threadpool.py | #
# This file is part of Evergreen. See the NOTICE for more information.
#
import pyuv
import sys
from evergreen.event import Event
from evergreen.futures import Future
__all__ = ('ThreadPool')
"""Internal thread pool which uses the pyuv work queuing capability. This module
is for internal use of Evergreen.
"""
class _Work(object):
__slots__ = ('func', 'args', 'kwargs', 'result', 'exc')
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
self.result = None
self.exc = None
def __call__(self):
try:
self.result = self.func(*self.args, **self.kwargs)
except BaseException:
self.exc = sys.exc_info()
class ThreadPool(object):
def __init__(self, loop):
self.loop = loop
def spawn(self, func, *args, **kwargs):
fut = Future()
work = _Work(func, *args, **kwargs)
def after(error):
if error is not None:
assert error == pyuv.errno.UV_ECANCELLED
return
if work.exc is not None:
fut.set_exception(work.exc)
else:
fut.set_result(work.result)
fut.set_running_or_notify_cancel()
self.loop._loop.queue_work(work, after)
return fut
| #
# This file is part of Evergreen. See the NOTICE for more information.
#
import pyuv
import sys
from evergreen.event import Event
from evergreen.futures import Future
__all__ = ('ThreadPool')
"""Internal thread pool which uses the pyuv work queuing capability. This module
is for internal use of Evergreen.
"""
class _Work(object):
__slots__ = ('func', 'args', 'kwargs', 'result', 'exc')
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
self.result = None
self.exc = None
def __call__(self):
try:
self.result = self.func(*self.args, **self.kwargs)
except BaseException:
self.exc = sys.exc_info()
self = None
class ThreadPool(object):
def __init__(self, loop):
self.loop = loop
def spawn(self, func, *args, **kwargs):
fut = Future()
work = _Work(func, *args, **kwargs)
def after(error):
if error is not None:
assert error == pyuv.errno.UV_ECANCELLED
return
if work.exc is not None:
fut.set_exception(work.exc)
else:
fut.set_result(work.result)
fut.set_running_or_notify_cancel()
self.loop._loop.queue_work(work, after)
return fut
| Break potential cycle because we are storing the traceback | Break potential cycle because we are storing the traceback
| Python | mit | saghul/evergreen,saghul/evergreen | #
# This file is part of Evergreen. See the NOTICE for more information.
#
import pyuv
import sys
from evergreen.event import Event
from evergreen.futures import Future
__all__ = ('ThreadPool')
"""Internal thread pool which uses the pyuv work queuing capability. This module
is for internal use of Evergreen.
"""
class _Work(object):
__slots__ = ('func', 'args', 'kwargs', 'result', 'exc')
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
self.result = None
self.exc = None
def __call__(self):
try:
self.result = self.func(*self.args, **self.kwargs)
except BaseException:
self.exc = sys.exc_info()
class ThreadPool(object):
def __init__(self, loop):
self.loop = loop
def spawn(self, func, *args, **kwargs):
fut = Future()
work = _Work(func, *args, **kwargs)
def after(error):
if error is not None:
assert error == pyuv.errno.UV_ECANCELLED
return
if work.exc is not None:
fut.set_exception(work.exc)
else:
fut.set_result(work.result)
fut.set_running_or_notify_cancel()
self.loop._loop.queue_work(work, after)
return fut
Break potential cycle because we are storing the traceback | #
# This file is part of Evergreen. See the NOTICE for more information.
#
import pyuv
import sys
from evergreen.event import Event
from evergreen.futures import Future
__all__ = ('ThreadPool')
"""Internal thread pool which uses the pyuv work queuing capability. This module
is for internal use of Evergreen.
"""
class _Work(object):
__slots__ = ('func', 'args', 'kwargs', 'result', 'exc')
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
self.result = None
self.exc = None
def __call__(self):
try:
self.result = self.func(*self.args, **self.kwargs)
except BaseException:
self.exc = sys.exc_info()
self = None
class ThreadPool(object):
def __init__(self, loop):
self.loop = loop
def spawn(self, func, *args, **kwargs):
fut = Future()
work = _Work(func, *args, **kwargs)
def after(error):
if error is not None:
assert error == pyuv.errno.UV_ECANCELLED
return
if work.exc is not None:
fut.set_exception(work.exc)
else:
fut.set_result(work.result)
fut.set_running_or_notify_cancel()
self.loop._loop.queue_work(work, after)
return fut
| <commit_before>#
# This file is part of Evergreen. See the NOTICE for more information.
#
import pyuv
import sys
from evergreen.event import Event
from evergreen.futures import Future
__all__ = ('ThreadPool')
"""Internal thread pool which uses the pyuv work queuing capability. This module
is for internal use of Evergreen.
"""
class _Work(object):
__slots__ = ('func', 'args', 'kwargs', 'result', 'exc')
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
self.result = None
self.exc = None
def __call__(self):
try:
self.result = self.func(*self.args, **self.kwargs)
except BaseException:
self.exc = sys.exc_info()
class ThreadPool(object):
def __init__(self, loop):
self.loop = loop
def spawn(self, func, *args, **kwargs):
fut = Future()
work = _Work(func, *args, **kwargs)
def after(error):
if error is not None:
assert error == pyuv.errno.UV_ECANCELLED
return
if work.exc is not None:
fut.set_exception(work.exc)
else:
fut.set_result(work.result)
fut.set_running_or_notify_cancel()
self.loop._loop.queue_work(work, after)
return fut
<commit_msg>Break potential cycle because we are storing the traceback<commit_after> | #
# This file is part of Evergreen. See the NOTICE for more information.
#
import pyuv
import sys
from evergreen.event import Event
from evergreen.futures import Future
__all__ = ('ThreadPool')
"""Internal thread pool which uses the pyuv work queuing capability. This module
is for internal use of Evergreen.
"""
class _Work(object):
__slots__ = ('func', 'args', 'kwargs', 'result', 'exc')
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
self.result = None
self.exc = None
def __call__(self):
try:
self.result = self.func(*self.args, **self.kwargs)
except BaseException:
self.exc = sys.exc_info()
self = None
class ThreadPool(object):
def __init__(self, loop):
self.loop = loop
def spawn(self, func, *args, **kwargs):
fut = Future()
work = _Work(func, *args, **kwargs)
def after(error):
if error is not None:
assert error == pyuv.errno.UV_ECANCELLED
return
if work.exc is not None:
fut.set_exception(work.exc)
else:
fut.set_result(work.result)
fut.set_running_or_notify_cancel()
self.loop._loop.queue_work(work, after)
return fut
| #
# This file is part of Evergreen. See the NOTICE for more information.
#
import pyuv
import sys
from evergreen.event import Event
from evergreen.futures import Future
__all__ = ('ThreadPool')
"""Internal thread pool which uses the pyuv work queuing capability. This module
is for internal use of Evergreen.
"""
class _Work(object):
__slots__ = ('func', 'args', 'kwargs', 'result', 'exc')
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
self.result = None
self.exc = None
def __call__(self):
try:
self.result = self.func(*self.args, **self.kwargs)
except BaseException:
self.exc = sys.exc_info()
class ThreadPool(object):
def __init__(self, loop):
self.loop = loop
def spawn(self, func, *args, **kwargs):
fut = Future()
work = _Work(func, *args, **kwargs)
def after(error):
if error is not None:
assert error == pyuv.errno.UV_ECANCELLED
return
if work.exc is not None:
fut.set_exception(work.exc)
else:
fut.set_result(work.result)
fut.set_running_or_notify_cancel()
self.loop._loop.queue_work(work, after)
return fut
Break potential cycle because we are storing the traceback#
# This file is part of Evergreen. See the NOTICE for more information.
#
import pyuv
import sys
from evergreen.event import Event
from evergreen.futures import Future
__all__ = ('ThreadPool')
"""Internal thread pool which uses the pyuv work queuing capability. This module
is for internal use of Evergreen.
"""
class _Work(object):
__slots__ = ('func', 'args', 'kwargs', 'result', 'exc')
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
self.result = None
self.exc = None
def __call__(self):
try:
self.result = self.func(*self.args, **self.kwargs)
except BaseException:
self.exc = sys.exc_info()
self = None
class ThreadPool(object):
def __init__(self, loop):
self.loop = loop
def spawn(self, func, *args, **kwargs):
fut = Future()
work = _Work(func, *args, **kwargs)
def after(error):
if error is not None:
assert error == pyuv.errno.UV_ECANCELLED
return
if work.exc is not None:
fut.set_exception(work.exc)
else:
fut.set_result(work.result)
fut.set_running_or_notify_cancel()
self.loop._loop.queue_work(work, after)
return fut
| <commit_before>#
# This file is part of Evergreen. See the NOTICE for more information.
#
import pyuv
import sys
from evergreen.event import Event
from evergreen.futures import Future
__all__ = ('ThreadPool')
"""Internal thread pool which uses the pyuv work queuing capability. This module
is for internal use of Evergreen.
"""
class _Work(object):
__slots__ = ('func', 'args', 'kwargs', 'result', 'exc')
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
self.result = None
self.exc = None
def __call__(self):
try:
self.result = self.func(*self.args, **self.kwargs)
except BaseException:
self.exc = sys.exc_info()
class ThreadPool(object):
def __init__(self, loop):
self.loop = loop
def spawn(self, func, *args, **kwargs):
fut = Future()
work = _Work(func, *args, **kwargs)
def after(error):
if error is not None:
assert error == pyuv.errno.UV_ECANCELLED
return
if work.exc is not None:
fut.set_exception(work.exc)
else:
fut.set_result(work.result)
fut.set_running_or_notify_cancel()
self.loop._loop.queue_work(work, after)
return fut
<commit_msg>Break potential cycle because we are storing the traceback<commit_after>#
# This file is part of Evergreen. See the NOTICE for more information.
#
import pyuv
import sys
from evergreen.event import Event
from evergreen.futures import Future
__all__ = ('ThreadPool')
"""Internal thread pool which uses the pyuv work queuing capability. This module
is for internal use of Evergreen.
"""
class _Work(object):
__slots__ = ('func', 'args', 'kwargs', 'result', 'exc')
def __init__(self, func, *args, **kwargs):
self.func = func
self.args = args
self.kwargs = kwargs
self.result = None
self.exc = None
def __call__(self):
try:
self.result = self.func(*self.args, **self.kwargs)
except BaseException:
self.exc = sys.exc_info()
self = None
class ThreadPool(object):
def __init__(self, loop):
self.loop = loop
def spawn(self, func, *args, **kwargs):
fut = Future()
work = _Work(func, *args, **kwargs)
def after(error):
if error is not None:
assert error == pyuv.errno.UV_ECANCELLED
return
if work.exc is not None:
fut.set_exception(work.exc)
else:
fut.set_result(work.result)
fut.set_running_or_notify_cancel()
self.loop._loop.queue_work(work, after)
return fut
|
56cdcde184b613dabdcc3f999b90915f75e03726 | tests/backends/__init__.py | tests/backends/__init__.py | from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
playback.play()
self.assertEqual(playback.state, playback.PLAYING)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
| from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play_with_no_current_track(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
result = playback.play()
self.assertEqual(result, False)
self.assertEqual(playback.state, playback.STOPPED)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
| Update test to check basic case for playback without current track | Update test to check basic case for playback without current track
| Python | apache-2.0 | hkariti/mopidy,mopidy/mopidy,abarisain/mopidy,quartz55/mopidy,ZenithDK/mopidy,quartz55/mopidy,priestd09/mopidy,diandiankan/mopidy,dbrgn/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,adamcik/mopidy,rawdlite/mopidy,liamw9534/mopidy,pacificIT/mopidy,jcass77/mopidy,bencevans/mopidy,mokieyue/mopidy,swak/mopidy,bacontext/mopidy,mopidy/mopidy,quartz55/mopidy,hkariti/mopidy,ali/mopidy,rawdlite/mopidy,priestd09/mopidy,adamcik/mopidy,jodal/mopidy,kingosticks/mopidy,glogiotatidis/mopidy,ali/mopidy,jodal/mopidy,kingosticks/mopidy,mokieyue/mopidy,dbrgn/mopidy,tkem/mopidy,bacontext/mopidy,glogiotatidis/mopidy,glogiotatidis/mopidy,adamcik/mopidy,hkariti/mopidy,jcass77/mopidy,ali/mopidy,pacificIT/mopidy,vrs01/mopidy,swak/mopidy,jmarsik/mopidy,woutervanwijk/mopidy,jodal/mopidy,tkem/mopidy,kingosticks/mopidy,bacontext/mopidy,jmarsik/mopidy,dbrgn/mopidy,bencevans/mopidy,ZenithDK/mopidy,jcass77/mopidy,ali/mopidy,SuperStarPL/mopidy,hkariti/mopidy,vrs01/mopidy,bacontext/mopidy,vrs01/mopidy,tkem/mopidy,dbrgn/mopidy,rawdlite/mopidy,SuperStarPL/mopidy,pacificIT/mopidy,mokieyue/mopidy,diandiankan/mopidy,jmarsik/mopidy,quartz55/mopidy,vrs01/mopidy,liamw9534/mopidy,ZenithDK/mopidy,pacificIT/mopidy,jmarsik/mopidy,woutervanwijk/mopidy,bencevans/mopidy,bencevans/mopidy,tkem/mopidy,priestd09/mopidy,swak/mopidy,mopidy/mopidy,abarisain/mopidy,ZenithDK/mopidy,swak/mopidy,glogiotatidis/mopidy,mokieyue/mopidy,rawdlite/mopidy,diandiankan/mopidy,SuperStarPL/mopidy | from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
playback.play()
self.assertEqual(playback.state, playback.PLAYING)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
Update test to check basic case for playback without current track | from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play_with_no_current_track(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
result = playback.play()
self.assertEqual(result, False)
self.assertEqual(playback.state, playback.STOPPED)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
| <commit_before>from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
playback.play()
self.assertEqual(playback.state, playback.PLAYING)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
<commit_msg>Update test to check basic case for playback without current track<commit_after> | from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play_with_no_current_track(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
result = playback.play()
self.assertEqual(result, False)
self.assertEqual(playback.state, playback.STOPPED)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
| from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
playback.play()
self.assertEqual(playback.state, playback.PLAYING)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
Update test to check basic case for playback without current trackfrom mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play_with_no_current_track(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
result = playback.play()
self.assertEqual(result, False)
self.assertEqual(playback.state, playback.STOPPED)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
| <commit_before>from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
playback.play()
self.assertEqual(playback.state, playback.PLAYING)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
<commit_msg>Update test to check basic case for playback without current track<commit_after>from mopidy.models import Track
class BaseCurrentPlaylistControllerTest(object):
uris = []
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_add(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri)
self.assertEqual(uri, playlist.tracks[-1].uri)
def test_add_at_position(self):
playlist = self.backend.current_playlist
for uri in self.uris:
playlist.add(uri, 0)
self.assertEqual(uri, playlist.tracks[0].uri)
# FIXME test other placements
class BasePlaybackControllerTest(object):
backend_class = None
def setUp(self):
self.backend = self.backend_class()
def test_play_with_no_current_track(self):
playback = self.backend.playback
self.assertEqual(playback.state, playback.STOPPED)
result = playback.play()
self.assertEqual(result, False)
self.assertEqual(playback.state, playback.STOPPED)
def test_next(self):
playback = self.backend.playback
current_song = playback.playlist_position
playback.next()
self.assertEqual(playback.playlist_position, current_song+1)
|
7ebda7fca01372ae49a8c66812c958fc8200f4b0 | apps/events/filters.py | apps/events/filters.py | import django_filters
from django_filters.filters import Lookup
from apps.events.models import Event
class ListFilter(django_filters.Filter):
# https://github.com/carltongibson/django-filter/issues/137#issuecomment-37820702
def filter(self, qs, value):
value_list = value.split(u',')
return super(ListFilter, self).filter(qs, Lookup(value_list, 'in'))
class EventDateFilter(django_filters.FilterSet):
event_start__gte = django_filters.DateTimeFilter(name='event_start', lookup_expr='gte')
event_start__lte = django_filters.DateTimeFilter(name='event_start', lookup_expr='lte')
event_end__gte = django_filters.DateTimeFilter(name='event_end', lookup_expr='gte')
event_end__lte = django_filters.DateTimeFilter(name='event_end', lookup_expr='lte')
attendance_event__isnull = django_filters.BooleanFilter(name='attendance_event', lookup_expr='isnull')
event_type = ListFilter()
class Meta:
model = Event
fields = ('event_start', 'event_end', 'event_type')
| import django_filters
from django_filters.filters import Lookup
from apps.events.models import Event
class ListFilter(django_filters.Filter):
# https://github.com/carltongibson/django-filter/issues/137#issuecomment-37820702
def filter(self, qs, value):
value_list = value.split(u',')
return super(ListFilter, self).filter(qs, Lookup(value_list, 'in'))
class EventDateFilter(django_filters.FilterSet):
event_start__gte = django_filters.DateTimeFilter(field_name='event_start', lookup_expr='gte')
event_start__lte = django_filters.DateTimeFilter(field_name='event_start', lookup_expr='lte')
event_end__gte = django_filters.DateTimeFilter(field_name='event_end', lookup_expr='gte')
event_end__lte = django_filters.DateTimeFilter(field_name='event_end', lookup_expr='lte')
attendance_event__isnull = django_filters.BooleanFilter(field_name='attendance_event', lookup_expr='isnull')
event_type = ListFilter()
class Meta:
model = Event
fields = ('event_start', 'event_end', 'event_type')
| Change Django field filter kwarg from name to field_name for Django 2 support | Change Django field filter kwarg from name to field_name for Django 2 support
| Python | mit | dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4,dotKom/onlineweb4 | import django_filters
from django_filters.filters import Lookup
from apps.events.models import Event
class ListFilter(django_filters.Filter):
# https://github.com/carltongibson/django-filter/issues/137#issuecomment-37820702
def filter(self, qs, value):
value_list = value.split(u',')
return super(ListFilter, self).filter(qs, Lookup(value_list, 'in'))
class EventDateFilter(django_filters.FilterSet):
event_start__gte = django_filters.DateTimeFilter(name='event_start', lookup_expr='gte')
event_start__lte = django_filters.DateTimeFilter(name='event_start', lookup_expr='lte')
event_end__gte = django_filters.DateTimeFilter(name='event_end', lookup_expr='gte')
event_end__lte = django_filters.DateTimeFilter(name='event_end', lookup_expr='lte')
attendance_event__isnull = django_filters.BooleanFilter(name='attendance_event', lookup_expr='isnull')
event_type = ListFilter()
class Meta:
model = Event
fields = ('event_start', 'event_end', 'event_type')
Change Django field filter kwarg from name to field_name for Django 2 support | import django_filters
from django_filters.filters import Lookup
from apps.events.models import Event
class ListFilter(django_filters.Filter):
# https://github.com/carltongibson/django-filter/issues/137#issuecomment-37820702
def filter(self, qs, value):
value_list = value.split(u',')
return super(ListFilter, self).filter(qs, Lookup(value_list, 'in'))
class EventDateFilter(django_filters.FilterSet):
event_start__gte = django_filters.DateTimeFilter(field_name='event_start', lookup_expr='gte')
event_start__lte = django_filters.DateTimeFilter(field_name='event_start', lookup_expr='lte')
event_end__gte = django_filters.DateTimeFilter(field_name='event_end', lookup_expr='gte')
event_end__lte = django_filters.DateTimeFilter(field_name='event_end', lookup_expr='lte')
attendance_event__isnull = django_filters.BooleanFilter(field_name='attendance_event', lookup_expr='isnull')
event_type = ListFilter()
class Meta:
model = Event
fields = ('event_start', 'event_end', 'event_type')
| <commit_before>import django_filters
from django_filters.filters import Lookup
from apps.events.models import Event
class ListFilter(django_filters.Filter):
# https://github.com/carltongibson/django-filter/issues/137#issuecomment-37820702
def filter(self, qs, value):
value_list = value.split(u',')
return super(ListFilter, self).filter(qs, Lookup(value_list, 'in'))
class EventDateFilter(django_filters.FilterSet):
event_start__gte = django_filters.DateTimeFilter(name='event_start', lookup_expr='gte')
event_start__lte = django_filters.DateTimeFilter(name='event_start', lookup_expr='lte')
event_end__gte = django_filters.DateTimeFilter(name='event_end', lookup_expr='gte')
event_end__lte = django_filters.DateTimeFilter(name='event_end', lookup_expr='lte')
attendance_event__isnull = django_filters.BooleanFilter(name='attendance_event', lookup_expr='isnull')
event_type = ListFilter()
class Meta:
model = Event
fields = ('event_start', 'event_end', 'event_type')
<commit_msg>Change Django field filter kwarg from name to field_name for Django 2 support<commit_after> | import django_filters
from django_filters.filters import Lookup
from apps.events.models import Event
class ListFilter(django_filters.Filter):
# https://github.com/carltongibson/django-filter/issues/137#issuecomment-37820702
def filter(self, qs, value):
value_list = value.split(u',')
return super(ListFilter, self).filter(qs, Lookup(value_list, 'in'))
class EventDateFilter(django_filters.FilterSet):
event_start__gte = django_filters.DateTimeFilter(field_name='event_start', lookup_expr='gte')
event_start__lte = django_filters.DateTimeFilter(field_name='event_start', lookup_expr='lte')
event_end__gte = django_filters.DateTimeFilter(field_name='event_end', lookup_expr='gte')
event_end__lte = django_filters.DateTimeFilter(field_name='event_end', lookup_expr='lte')
attendance_event__isnull = django_filters.BooleanFilter(field_name='attendance_event', lookup_expr='isnull')
event_type = ListFilter()
class Meta:
model = Event
fields = ('event_start', 'event_end', 'event_type')
| import django_filters
from django_filters.filters import Lookup
from apps.events.models import Event
class ListFilter(django_filters.Filter):
# https://github.com/carltongibson/django-filter/issues/137#issuecomment-37820702
def filter(self, qs, value):
value_list = value.split(u',')
return super(ListFilter, self).filter(qs, Lookup(value_list, 'in'))
class EventDateFilter(django_filters.FilterSet):
event_start__gte = django_filters.DateTimeFilter(name='event_start', lookup_expr='gte')
event_start__lte = django_filters.DateTimeFilter(name='event_start', lookup_expr='lte')
event_end__gte = django_filters.DateTimeFilter(name='event_end', lookup_expr='gte')
event_end__lte = django_filters.DateTimeFilter(name='event_end', lookup_expr='lte')
attendance_event__isnull = django_filters.BooleanFilter(name='attendance_event', lookup_expr='isnull')
event_type = ListFilter()
class Meta:
model = Event
fields = ('event_start', 'event_end', 'event_type')
Change Django field filter kwarg from name to field_name for Django 2 supportimport django_filters
from django_filters.filters import Lookup
from apps.events.models import Event
class ListFilter(django_filters.Filter):
# https://github.com/carltongibson/django-filter/issues/137#issuecomment-37820702
def filter(self, qs, value):
value_list = value.split(u',')
return super(ListFilter, self).filter(qs, Lookup(value_list, 'in'))
class EventDateFilter(django_filters.FilterSet):
event_start__gte = django_filters.DateTimeFilter(field_name='event_start', lookup_expr='gte')
event_start__lte = django_filters.DateTimeFilter(field_name='event_start', lookup_expr='lte')
event_end__gte = django_filters.DateTimeFilter(field_name='event_end', lookup_expr='gte')
event_end__lte = django_filters.DateTimeFilter(field_name='event_end', lookup_expr='lte')
attendance_event__isnull = django_filters.BooleanFilter(field_name='attendance_event', lookup_expr='isnull')
event_type = ListFilter()
class Meta:
model = Event
fields = ('event_start', 'event_end', 'event_type')
| <commit_before>import django_filters
from django_filters.filters import Lookup
from apps.events.models import Event
class ListFilter(django_filters.Filter):
# https://github.com/carltongibson/django-filter/issues/137#issuecomment-37820702
def filter(self, qs, value):
value_list = value.split(u',')
return super(ListFilter, self).filter(qs, Lookup(value_list, 'in'))
class EventDateFilter(django_filters.FilterSet):
event_start__gte = django_filters.DateTimeFilter(name='event_start', lookup_expr='gte')
event_start__lte = django_filters.DateTimeFilter(name='event_start', lookup_expr='lte')
event_end__gte = django_filters.DateTimeFilter(name='event_end', lookup_expr='gte')
event_end__lte = django_filters.DateTimeFilter(name='event_end', lookup_expr='lte')
attendance_event__isnull = django_filters.BooleanFilter(name='attendance_event', lookup_expr='isnull')
event_type = ListFilter()
class Meta:
model = Event
fields = ('event_start', 'event_end', 'event_type')
<commit_msg>Change Django field filter kwarg from name to field_name for Django 2 support<commit_after>import django_filters
from django_filters.filters import Lookup
from apps.events.models import Event
class ListFilter(django_filters.Filter):
# https://github.com/carltongibson/django-filter/issues/137#issuecomment-37820702
def filter(self, qs, value):
value_list = value.split(u',')
return super(ListFilter, self).filter(qs, Lookup(value_list, 'in'))
class EventDateFilter(django_filters.FilterSet):
event_start__gte = django_filters.DateTimeFilter(field_name='event_start', lookup_expr='gte')
event_start__lte = django_filters.DateTimeFilter(field_name='event_start', lookup_expr='lte')
event_end__gte = django_filters.DateTimeFilter(field_name='event_end', lookup_expr='gte')
event_end__lte = django_filters.DateTimeFilter(field_name='event_end', lookup_expr='lte')
attendance_event__isnull = django_filters.BooleanFilter(field_name='attendance_event', lookup_expr='isnull')
event_type = ListFilter()
class Meta:
model = Event
fields = ('event_start', 'event_end', 'event_type')
|
d6a1e13ed6fc1db1d2087ba56fc9130f9ab641f9 | tests/__init__.py | tests/__init__.py | from __future__ import unicode_literals
import os
import sys
def path_to_data_dir(name):
if not isinstance(name, bytes):
name = name.encode(sys.getfilesystemencoding())
path = os.path.dirname(__file__)
path = os.path.join(path, b'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
| from __future__ import unicode_literals
import os
def path_to_data_dir(name):
if not isinstance(name, bytes):
name = name.encode('utf-8')
path = os.path.dirname(__file__)
path = os.path.join(path, b'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
| Use utf-8 when encoding our test data paths to bytes | tests: Use utf-8 when encoding our test data paths to bytes
| Python | apache-2.0 | rawdlite/mopidy,jcass77/mopidy,SuperStarPL/mopidy,jcass77/mopidy,bencevans/mopidy,jmarsik/mopidy,vrs01/mopidy,jcass77/mopidy,ali/mopidy,pacificIT/mopidy,tkem/mopidy,jodal/mopidy,diandiankan/mopidy,tkem/mopidy,adamcik/mopidy,vrs01/mopidy,mokieyue/mopidy,adamcik/mopidy,bencevans/mopidy,swak/mopidy,swak/mopidy,ali/mopidy,rawdlite/mopidy,ali/mopidy,jmarsik/mopidy,rawdlite/mopidy,jmarsik/mopidy,ZenithDK/mopidy,ZenithDK/mopidy,kingosticks/mopidy,diandiankan/mopidy,priestd09/mopidy,hkariti/mopidy,pacificIT/mopidy,bencevans/mopidy,diandiankan/mopidy,SuperStarPL/mopidy,quartz55/mopidy,tkem/mopidy,liamw9534/mopidy,ali/mopidy,dbrgn/mopidy,hkariti/mopidy,glogiotatidis/mopidy,swak/mopidy,mokieyue/mopidy,mopidy/mopidy,bacontext/mopidy,priestd09/mopidy,quartz55/mopidy,woutervanwijk/mopidy,glogiotatidis/mopidy,bacontext/mopidy,abarisain/mopidy,kingosticks/mopidy,jodal/mopidy,quartz55/mopidy,mokieyue/mopidy,dbrgn/mopidy,hkariti/mopidy,vrs01/mopidy,jmarsik/mopidy,diandiankan/mopidy,quartz55/mopidy,pacificIT/mopidy,vrs01/mopidy,dbrgn/mopidy,woutervanwijk/mopidy,glogiotatidis/mopidy,bacontext/mopidy,abarisain/mopidy,SuperStarPL/mopidy,tkem/mopidy,glogiotatidis/mopidy,SuperStarPL/mopidy,liamw9534/mopidy,pacificIT/mopidy,bencevans/mopidy,mopidy/mopidy,mopidy/mopidy,kingosticks/mopidy,dbrgn/mopidy,priestd09/mopidy,jodal/mopidy,ZenithDK/mopidy,mokieyue/mopidy,bacontext/mopidy,rawdlite/mopidy,adamcik/mopidy,swak/mopidy,ZenithDK/mopidy,hkariti/mopidy | from __future__ import unicode_literals
import os
import sys
def path_to_data_dir(name):
if not isinstance(name, bytes):
name = name.encode(sys.getfilesystemencoding())
path = os.path.dirname(__file__)
path = os.path.join(path, b'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
tests: Use utf-8 when encoding our test data paths to bytes | from __future__ import unicode_literals
import os
def path_to_data_dir(name):
if not isinstance(name, bytes):
name = name.encode('utf-8')
path = os.path.dirname(__file__)
path = os.path.join(path, b'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
| <commit_before>from __future__ import unicode_literals
import os
import sys
def path_to_data_dir(name):
if not isinstance(name, bytes):
name = name.encode(sys.getfilesystemencoding())
path = os.path.dirname(__file__)
path = os.path.join(path, b'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
<commit_msg>tests: Use utf-8 when encoding our test data paths to bytes<commit_after> | from __future__ import unicode_literals
import os
def path_to_data_dir(name):
if not isinstance(name, bytes):
name = name.encode('utf-8')
path = os.path.dirname(__file__)
path = os.path.join(path, b'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
| from __future__ import unicode_literals
import os
import sys
def path_to_data_dir(name):
if not isinstance(name, bytes):
name = name.encode(sys.getfilesystemencoding())
path = os.path.dirname(__file__)
path = os.path.join(path, b'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
tests: Use utf-8 when encoding our test data paths to bytesfrom __future__ import unicode_literals
import os
def path_to_data_dir(name):
if not isinstance(name, bytes):
name = name.encode('utf-8')
path = os.path.dirname(__file__)
path = os.path.join(path, b'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
| <commit_before>from __future__ import unicode_literals
import os
import sys
def path_to_data_dir(name):
if not isinstance(name, bytes):
name = name.encode(sys.getfilesystemencoding())
path = os.path.dirname(__file__)
path = os.path.join(path, b'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
<commit_msg>tests: Use utf-8 when encoding our test data paths to bytes<commit_after>from __future__ import unicode_literals
import os
def path_to_data_dir(name):
if not isinstance(name, bytes):
name = name.encode('utf-8')
path = os.path.dirname(__file__)
path = os.path.join(path, b'data')
path = os.path.abspath(path)
return os.path.join(path, name)
class IsA(object):
def __init__(self, klass):
self.klass = klass
def __eq__(self, rhs):
try:
return isinstance(rhs, self.klass)
except TypeError:
return type(rhs) == type(self.klass)
def __ne__(self, rhs):
return not self.__eq__(rhs)
def __repr__(self):
return str(self.klass)
any_int = IsA(int)
any_str = IsA(str)
any_unicode = IsA(unicode)
|
fbd99212c7af806137f996ac3c1d6c018f9402a7 | puffin/core/compose.py | puffin/core/compose.py | from .applications import get_application_domain, get_application_name
from .machine import get_env_vars
from .. import app
from subprocess import Popen, STDOUT, PIPE
from os import environ
from os.path import join
def init():
pass
def compose_start(machine, user, application, **environment):
compose_run(machine, user, application, "up", "-d", **environment)
def compose_stop(machine, user, application):
compose_run(machine, user, application, "down")
def compose_run(machine, user, application, *arguments, **environment):
name = get_application_name(user, application)
args = ["docker-compose", "-f", application.compose, "-p", name]
args += arguments
domain = get_application_domain(user, application)
env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain)
env.update(get_env_vars(machine))
env.update(**environment)
process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env)
process.wait()
out, err = process.communicate()
print(out)
#app.logger.info("Compose:", out)
| from .applications import get_application_domain, get_application_name
from .machine import get_env_vars
from .. import app
from subprocess import Popen, STDOUT, PIPE
from os import environ
from os.path import join
def init():
pass
def compose_start(machine, user, application, **environment):
compose_run(machine, user, application, "up", "-d", **environment)
def compose_stop(machine, user, application):
compose_run(machine, user, application, "down")
def compose_run(machine, user, application, *arguments, **environment):
name = get_application_name(user, application)
args = ["docker-compose", "-f", application.compose, "-p", name]
args += arguments
domain = get_application_domain(user, application)
env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain,
LETSENCRYPT_HOST=domain)
env.update(get_env_vars(machine))
env.update(**environment)
process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env)
process.wait()
out, err = process.communicate()
print(out)
#app.logger.info("Compose:", out)
| Add Let's Encrypt env var | Add Let's Encrypt env var
| Python | agpl-3.0 | puffinrocks/puffin,loomchild/puffin,loomchild/puffin,loomchild/puffin,puffinrocks/puffin,loomchild/jenca-puffin,loomchild/puffin,loomchild/puffin,loomchild/jenca-puffin | from .applications import get_application_domain, get_application_name
from .machine import get_env_vars
from .. import app
from subprocess import Popen, STDOUT, PIPE
from os import environ
from os.path import join
def init():
pass
def compose_start(machine, user, application, **environment):
compose_run(machine, user, application, "up", "-d", **environment)
def compose_stop(machine, user, application):
compose_run(machine, user, application, "down")
def compose_run(machine, user, application, *arguments, **environment):
name = get_application_name(user, application)
args = ["docker-compose", "-f", application.compose, "-p", name]
args += arguments
domain = get_application_domain(user, application)
env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain)
env.update(get_env_vars(machine))
env.update(**environment)
process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env)
process.wait()
out, err = process.communicate()
print(out)
#app.logger.info("Compose:", out)
Add Let's Encrypt env var | from .applications import get_application_domain, get_application_name
from .machine import get_env_vars
from .. import app
from subprocess import Popen, STDOUT, PIPE
from os import environ
from os.path import join
def init():
pass
def compose_start(machine, user, application, **environment):
compose_run(machine, user, application, "up", "-d", **environment)
def compose_stop(machine, user, application):
compose_run(machine, user, application, "down")
def compose_run(machine, user, application, *arguments, **environment):
name = get_application_name(user, application)
args = ["docker-compose", "-f", application.compose, "-p", name]
args += arguments
domain = get_application_domain(user, application)
env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain,
LETSENCRYPT_HOST=domain)
env.update(get_env_vars(machine))
env.update(**environment)
process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env)
process.wait()
out, err = process.communicate()
print(out)
#app.logger.info("Compose:", out)
| <commit_before>from .applications import get_application_domain, get_application_name
from .machine import get_env_vars
from .. import app
from subprocess import Popen, STDOUT, PIPE
from os import environ
from os.path import join
def init():
pass
def compose_start(machine, user, application, **environment):
compose_run(machine, user, application, "up", "-d", **environment)
def compose_stop(machine, user, application):
compose_run(machine, user, application, "down")
def compose_run(machine, user, application, *arguments, **environment):
name = get_application_name(user, application)
args = ["docker-compose", "-f", application.compose, "-p", name]
args += arguments
domain = get_application_domain(user, application)
env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain)
env.update(get_env_vars(machine))
env.update(**environment)
process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env)
process.wait()
out, err = process.communicate()
print(out)
#app.logger.info("Compose:", out)
<commit_msg>Add Let's Encrypt env var<commit_after> | from .applications import get_application_domain, get_application_name
from .machine import get_env_vars
from .. import app
from subprocess import Popen, STDOUT, PIPE
from os import environ
from os.path import join
def init():
pass
def compose_start(machine, user, application, **environment):
compose_run(machine, user, application, "up", "-d", **environment)
def compose_stop(machine, user, application):
compose_run(machine, user, application, "down")
def compose_run(machine, user, application, *arguments, **environment):
name = get_application_name(user, application)
args = ["docker-compose", "-f", application.compose, "-p", name]
args += arguments
domain = get_application_domain(user, application)
env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain,
LETSENCRYPT_HOST=domain)
env.update(get_env_vars(machine))
env.update(**environment)
process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env)
process.wait()
out, err = process.communicate()
print(out)
#app.logger.info("Compose:", out)
| from .applications import get_application_domain, get_application_name
from .machine import get_env_vars
from .. import app
from subprocess import Popen, STDOUT, PIPE
from os import environ
from os.path import join
def init():
pass
def compose_start(machine, user, application, **environment):
compose_run(machine, user, application, "up", "-d", **environment)
def compose_stop(machine, user, application):
compose_run(machine, user, application, "down")
def compose_run(machine, user, application, *arguments, **environment):
name = get_application_name(user, application)
args = ["docker-compose", "-f", application.compose, "-p", name]
args += arguments
domain = get_application_domain(user, application)
env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain)
env.update(get_env_vars(machine))
env.update(**environment)
process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env)
process.wait()
out, err = process.communicate()
print(out)
#app.logger.info("Compose:", out)
Add Let's Encrypt env varfrom .applications import get_application_domain, get_application_name
from .machine import get_env_vars
from .. import app
from subprocess import Popen, STDOUT, PIPE
from os import environ
from os.path import join
def init():
pass
def compose_start(machine, user, application, **environment):
compose_run(machine, user, application, "up", "-d", **environment)
def compose_stop(machine, user, application):
compose_run(machine, user, application, "down")
def compose_run(machine, user, application, *arguments, **environment):
name = get_application_name(user, application)
args = ["docker-compose", "-f", application.compose, "-p", name]
args += arguments
domain = get_application_domain(user, application)
env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain,
LETSENCRYPT_HOST=domain)
env.update(get_env_vars(machine))
env.update(**environment)
process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env)
process.wait()
out, err = process.communicate()
print(out)
#app.logger.info("Compose:", out)
| <commit_before>from .applications import get_application_domain, get_application_name
from .machine import get_env_vars
from .. import app
from subprocess import Popen, STDOUT, PIPE
from os import environ
from os.path import join
def init():
pass
def compose_start(machine, user, application, **environment):
compose_run(machine, user, application, "up", "-d", **environment)
def compose_stop(machine, user, application):
compose_run(machine, user, application, "down")
def compose_run(machine, user, application, *arguments, **environment):
name = get_application_name(user, application)
args = ["docker-compose", "-f", application.compose, "-p", name]
args += arguments
domain = get_application_domain(user, application)
env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain)
env.update(get_env_vars(machine))
env.update(**environment)
process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env)
process.wait()
out, err = process.communicate()
print(out)
#app.logger.info("Compose:", out)
<commit_msg>Add Let's Encrypt env var<commit_after>from .applications import get_application_domain, get_application_name
from .machine import get_env_vars
from .. import app
from subprocess import Popen, STDOUT, PIPE
from os import environ
from os.path import join
def init():
pass
def compose_start(machine, user, application, **environment):
compose_run(machine, user, application, "up", "-d", **environment)
def compose_stop(machine, user, application):
compose_run(machine, user, application, "down")
def compose_run(machine, user, application, *arguments, **environment):
name = get_application_name(user, application)
args = ["docker-compose", "-f", application.compose, "-p", name]
args += arguments
domain = get_application_domain(user, application)
env = dict(PATH=environ['PATH'], VIRTUAL_HOST=domain,
LETSENCRYPT_HOST=domain)
env.update(get_env_vars(machine))
env.update(**environment)
process = Popen(args, stderr=STDOUT, stdout=PIPE, universal_newlines=True, env=env)
process.wait()
out, err = process.communicate()
print(out)
#app.logger.info("Compose:", out)
|
abfc91a687b33dda2659025af254bc9f50c077b5 | publishers/migrations/0008_fix_name_indices.py | publishers/migrations/0008_fix_name_indices.py | # Generated by Django 2.1.7 on 2019-05-12 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('publishers', '0007_publisher_romeo_parent_id'),
]
operations = [
migrations.AlterField(
model_name='journal',
name='title',
field=models.CharField(max_length=256),
),
migrations.AlterField(
model_name='publisher',
name='name',
field=models.CharField(max_length=256),
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_journal_title_upper ON public.papers_journal USING btree (UPPER(title));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_journal_title_upper;
"""
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_publisher_name_upper ON public.papers_publisher USING btree (UPPER(name));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_publisher_name_upper;
"""
),
]
| # Generated by Django 2.1.7 on 2019-05-12 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
atomic = False
dependencies = [
('publishers', '0007_publisher_romeo_parent_id'),
]
operations = [
migrations.AlterField(
model_name='journal',
name='title',
field=models.CharField(max_length=256),
),
migrations.AlterField(
model_name='publisher',
name='name',
field=models.CharField(max_length=256),
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_journal_title_upper ON public.papers_journal USING btree (UPPER(title));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_journal_title_upper;
""",
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_publisher_name_upper ON public.papers_publisher USING btree (UPPER(name));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_publisher_name_upper;
"""
),
]
| Mark index migration as non atomic | Mark index migration as non atomic
| Python | agpl-3.0 | dissemin/dissemin,wetneb/dissemin,wetneb/dissemin,dissemin/dissemin,dissemin/dissemin,dissemin/dissemin,wetneb/dissemin,dissemin/dissemin,wetneb/dissemin | # Generated by Django 2.1.7 on 2019-05-12 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('publishers', '0007_publisher_romeo_parent_id'),
]
operations = [
migrations.AlterField(
model_name='journal',
name='title',
field=models.CharField(max_length=256),
),
migrations.AlterField(
model_name='publisher',
name='name',
field=models.CharField(max_length=256),
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_journal_title_upper ON public.papers_journal USING btree (UPPER(title));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_journal_title_upper;
"""
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_publisher_name_upper ON public.papers_publisher USING btree (UPPER(name));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_publisher_name_upper;
"""
),
]
Mark index migration as non atomic | # Generated by Django 2.1.7 on 2019-05-12 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
atomic = False
dependencies = [
('publishers', '0007_publisher_romeo_parent_id'),
]
operations = [
migrations.AlterField(
model_name='journal',
name='title',
field=models.CharField(max_length=256),
),
migrations.AlterField(
model_name='publisher',
name='name',
field=models.CharField(max_length=256),
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_journal_title_upper ON public.papers_journal USING btree (UPPER(title));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_journal_title_upper;
""",
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_publisher_name_upper ON public.papers_publisher USING btree (UPPER(name));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_publisher_name_upper;
"""
),
]
| <commit_before># Generated by Django 2.1.7 on 2019-05-12 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('publishers', '0007_publisher_romeo_parent_id'),
]
operations = [
migrations.AlterField(
model_name='journal',
name='title',
field=models.CharField(max_length=256),
),
migrations.AlterField(
model_name='publisher',
name='name',
field=models.CharField(max_length=256),
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_journal_title_upper ON public.papers_journal USING btree (UPPER(title));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_journal_title_upper;
"""
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_publisher_name_upper ON public.papers_publisher USING btree (UPPER(name));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_publisher_name_upper;
"""
),
]
<commit_msg>Mark index migration as non atomic<commit_after> | # Generated by Django 2.1.7 on 2019-05-12 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
atomic = False
dependencies = [
('publishers', '0007_publisher_romeo_parent_id'),
]
operations = [
migrations.AlterField(
model_name='journal',
name='title',
field=models.CharField(max_length=256),
),
migrations.AlterField(
model_name='publisher',
name='name',
field=models.CharField(max_length=256),
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_journal_title_upper ON public.papers_journal USING btree (UPPER(title));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_journal_title_upper;
""",
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_publisher_name_upper ON public.papers_publisher USING btree (UPPER(name));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_publisher_name_upper;
"""
),
]
| # Generated by Django 2.1.7 on 2019-05-12 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('publishers', '0007_publisher_romeo_parent_id'),
]
operations = [
migrations.AlterField(
model_name='journal',
name='title',
field=models.CharField(max_length=256),
),
migrations.AlterField(
model_name='publisher',
name='name',
field=models.CharField(max_length=256),
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_journal_title_upper ON public.papers_journal USING btree (UPPER(title));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_journal_title_upper;
"""
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_publisher_name_upper ON public.papers_publisher USING btree (UPPER(name));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_publisher_name_upper;
"""
),
]
Mark index migration as non atomic# Generated by Django 2.1.7 on 2019-05-12 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
atomic = False
dependencies = [
('publishers', '0007_publisher_romeo_parent_id'),
]
operations = [
migrations.AlterField(
model_name='journal',
name='title',
field=models.CharField(max_length=256),
),
migrations.AlterField(
model_name='publisher',
name='name',
field=models.CharField(max_length=256),
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_journal_title_upper ON public.papers_journal USING btree (UPPER(title));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_journal_title_upper;
""",
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_publisher_name_upper ON public.papers_publisher USING btree (UPPER(name));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_publisher_name_upper;
"""
),
]
| <commit_before># Generated by Django 2.1.7 on 2019-05-12 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('publishers', '0007_publisher_romeo_parent_id'),
]
operations = [
migrations.AlterField(
model_name='journal',
name='title',
field=models.CharField(max_length=256),
),
migrations.AlterField(
model_name='publisher',
name='name',
field=models.CharField(max_length=256),
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_journal_title_upper ON public.papers_journal USING btree (UPPER(title));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_journal_title_upper;
"""
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_publisher_name_upper ON public.papers_publisher USING btree (UPPER(name));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_publisher_name_upper;
"""
),
]
<commit_msg>Mark index migration as non atomic<commit_after># Generated by Django 2.1.7 on 2019-05-12 16:08
from django.db import migrations, models
class Migration(migrations.Migration):
atomic = False
dependencies = [
('publishers', '0007_publisher_romeo_parent_id'),
]
operations = [
migrations.AlterField(
model_name='journal',
name='title',
field=models.CharField(max_length=256),
),
migrations.AlterField(
model_name='publisher',
name='name',
field=models.CharField(max_length=256),
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_journal_title_upper ON public.papers_journal USING btree (UPPER(title));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_journal_title_upper;
""",
),
migrations.RunSQL(
sql="""
CREATE INDEX CONCURRENTLY papers_publisher_name_upper ON public.papers_publisher USING btree (UPPER(name));
""",
reverse_sql="""
DROP INDEX CONCURRENTLY papers_publisher_name_upper;
"""
),
]
|
95c7037a4a1e9c3921c3b4584046824ed469ae7f | osfclient/tests/test_session.py | osfclient/tests/test_session.py | from osfclient.models import OSFSession
def test_basic_auth():
session = OSFSession()
session.basic_auth('joe@example.com', 'secret_password')
assert session.auth == ('joe@example.com', 'secret_password')
assert 'Authorization' not in session.headers
def test_basic_build_url():
session = OSFSession()
url = session.build_url("some", "path")
assert url.startswith(session.base_url)
assert url.endswith("/some/path/")
| from unittest.mock import patch
from unittest.mock import MagicMock
import pytest
from osfclient.models import OSFSession
from osfclient.exceptions import UnauthorizedException
def test_basic_auth():
session = OSFSession()
session.basic_auth('joe@example.com', 'secret_password')
assert session.auth == ('joe@example.com', 'secret_password')
assert 'Authorization' not in session.headers
def test_basic_build_url():
session = OSFSession()
url = session.build_url("some", "path")
assert url.startswith(session.base_url)
assert url.endswith("/some/path/")
@patch('osfclient.models.session.requests.Session.put')
def test_unauthorized_put(mock_put):
mock_response = MagicMock()
mock_response.status_code = 401
mock_put.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
with pytest.raises(UnauthorizedException):
session.put(url)
mock_put.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.get')
def test_unauthorized_get(mock_get):
mock_response = MagicMock()
mock_response.status_code = 401
mock_get.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
with pytest.raises(UnauthorizedException):
session.get(url)
mock_get.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.put')
def test_put(mock_put):
mock_response = MagicMock()
mock_response.status_code = 200
mock_put.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
response = session.put(url)
assert response == mock_response
mock_put.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.get')
def test_get(mock_get):
mock_response = MagicMock()
mock_response.status_code = 200
mock_get.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
response = session.get(url)
assert response == mock_response
mock_get.assert_called_once_with(url)
| Add test for osfclient's session object | Add test for osfclient's session object
Check that exceptions are raised on unauthed HTTP put/get
| Python | bsd-3-clause | betatim/osf-cli,betatim/osf-cli | from osfclient.models import OSFSession
def test_basic_auth():
session = OSFSession()
session.basic_auth('joe@example.com', 'secret_password')
assert session.auth == ('joe@example.com', 'secret_password')
assert 'Authorization' not in session.headers
def test_basic_build_url():
session = OSFSession()
url = session.build_url("some", "path")
assert url.startswith(session.base_url)
assert url.endswith("/some/path/")
Add test for osfclient's session object
Check that exceptions are raised on unauthed HTTP put/get | from unittest.mock import patch
from unittest.mock import MagicMock
import pytest
from osfclient.models import OSFSession
from osfclient.exceptions import UnauthorizedException
def test_basic_auth():
session = OSFSession()
session.basic_auth('joe@example.com', 'secret_password')
assert session.auth == ('joe@example.com', 'secret_password')
assert 'Authorization' not in session.headers
def test_basic_build_url():
session = OSFSession()
url = session.build_url("some", "path")
assert url.startswith(session.base_url)
assert url.endswith("/some/path/")
@patch('osfclient.models.session.requests.Session.put')
def test_unauthorized_put(mock_put):
mock_response = MagicMock()
mock_response.status_code = 401
mock_put.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
with pytest.raises(UnauthorizedException):
session.put(url)
mock_put.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.get')
def test_unauthorized_get(mock_get):
mock_response = MagicMock()
mock_response.status_code = 401
mock_get.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
with pytest.raises(UnauthorizedException):
session.get(url)
mock_get.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.put')
def test_put(mock_put):
mock_response = MagicMock()
mock_response.status_code = 200
mock_put.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
response = session.put(url)
assert response == mock_response
mock_put.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.get')
def test_get(mock_get):
mock_response = MagicMock()
mock_response.status_code = 200
mock_get.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
response = session.get(url)
assert response == mock_response
mock_get.assert_called_once_with(url)
| <commit_before>from osfclient.models import OSFSession
def test_basic_auth():
session = OSFSession()
session.basic_auth('joe@example.com', 'secret_password')
assert session.auth == ('joe@example.com', 'secret_password')
assert 'Authorization' not in session.headers
def test_basic_build_url():
session = OSFSession()
url = session.build_url("some", "path")
assert url.startswith(session.base_url)
assert url.endswith("/some/path/")
<commit_msg>Add test for osfclient's session object
Check that exceptions are raised on unauthed HTTP put/get<commit_after> | from unittest.mock import patch
from unittest.mock import MagicMock
import pytest
from osfclient.models import OSFSession
from osfclient.exceptions import UnauthorizedException
def test_basic_auth():
session = OSFSession()
session.basic_auth('joe@example.com', 'secret_password')
assert session.auth == ('joe@example.com', 'secret_password')
assert 'Authorization' not in session.headers
def test_basic_build_url():
session = OSFSession()
url = session.build_url("some", "path")
assert url.startswith(session.base_url)
assert url.endswith("/some/path/")
@patch('osfclient.models.session.requests.Session.put')
def test_unauthorized_put(mock_put):
mock_response = MagicMock()
mock_response.status_code = 401
mock_put.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
with pytest.raises(UnauthorizedException):
session.put(url)
mock_put.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.get')
def test_unauthorized_get(mock_get):
mock_response = MagicMock()
mock_response.status_code = 401
mock_get.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
with pytest.raises(UnauthorizedException):
session.get(url)
mock_get.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.put')
def test_put(mock_put):
mock_response = MagicMock()
mock_response.status_code = 200
mock_put.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
response = session.put(url)
assert response == mock_response
mock_put.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.get')
def test_get(mock_get):
mock_response = MagicMock()
mock_response.status_code = 200
mock_get.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
response = session.get(url)
assert response == mock_response
mock_get.assert_called_once_with(url)
| from osfclient.models import OSFSession
def test_basic_auth():
session = OSFSession()
session.basic_auth('joe@example.com', 'secret_password')
assert session.auth == ('joe@example.com', 'secret_password')
assert 'Authorization' not in session.headers
def test_basic_build_url():
session = OSFSession()
url = session.build_url("some", "path")
assert url.startswith(session.base_url)
assert url.endswith("/some/path/")
Add test for osfclient's session object
Check that exceptions are raised on unauthed HTTP put/getfrom unittest.mock import patch
from unittest.mock import MagicMock
import pytest
from osfclient.models import OSFSession
from osfclient.exceptions import UnauthorizedException
def test_basic_auth():
session = OSFSession()
session.basic_auth('joe@example.com', 'secret_password')
assert session.auth == ('joe@example.com', 'secret_password')
assert 'Authorization' not in session.headers
def test_basic_build_url():
session = OSFSession()
url = session.build_url("some", "path")
assert url.startswith(session.base_url)
assert url.endswith("/some/path/")
@patch('osfclient.models.session.requests.Session.put')
def test_unauthorized_put(mock_put):
mock_response = MagicMock()
mock_response.status_code = 401
mock_put.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
with pytest.raises(UnauthorizedException):
session.put(url)
mock_put.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.get')
def test_unauthorized_get(mock_get):
mock_response = MagicMock()
mock_response.status_code = 401
mock_get.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
with pytest.raises(UnauthorizedException):
session.get(url)
mock_get.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.put')
def test_put(mock_put):
mock_response = MagicMock()
mock_response.status_code = 200
mock_put.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
response = session.put(url)
assert response == mock_response
mock_put.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.get')
def test_get(mock_get):
mock_response = MagicMock()
mock_response.status_code = 200
mock_get.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
response = session.get(url)
assert response == mock_response
mock_get.assert_called_once_with(url)
| <commit_before>from osfclient.models import OSFSession
def test_basic_auth():
session = OSFSession()
session.basic_auth('joe@example.com', 'secret_password')
assert session.auth == ('joe@example.com', 'secret_password')
assert 'Authorization' not in session.headers
def test_basic_build_url():
session = OSFSession()
url = session.build_url("some", "path")
assert url.startswith(session.base_url)
assert url.endswith("/some/path/")
<commit_msg>Add test for osfclient's session object
Check that exceptions are raised on unauthed HTTP put/get<commit_after>from unittest.mock import patch
from unittest.mock import MagicMock
import pytest
from osfclient.models import OSFSession
from osfclient.exceptions import UnauthorizedException
def test_basic_auth():
session = OSFSession()
session.basic_auth('joe@example.com', 'secret_password')
assert session.auth == ('joe@example.com', 'secret_password')
assert 'Authorization' not in session.headers
def test_basic_build_url():
session = OSFSession()
url = session.build_url("some", "path")
assert url.startswith(session.base_url)
assert url.endswith("/some/path/")
@patch('osfclient.models.session.requests.Session.put')
def test_unauthorized_put(mock_put):
mock_response = MagicMock()
mock_response.status_code = 401
mock_put.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
with pytest.raises(UnauthorizedException):
session.put(url)
mock_put.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.get')
def test_unauthorized_get(mock_get):
mock_response = MagicMock()
mock_response.status_code = 401
mock_get.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
with pytest.raises(UnauthorizedException):
session.get(url)
mock_get.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.put')
def test_put(mock_put):
mock_response = MagicMock()
mock_response.status_code = 200
mock_put.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
response = session.put(url)
assert response == mock_response
mock_put.assert_called_once_with(url)
@patch('osfclient.models.session.requests.Session.get')
def test_get(mock_get):
mock_response = MagicMock()
mock_response.status_code = 200
mock_get.return_value = mock_response
url = 'http://example.com/foo'
session = OSFSession()
response = session.get(url)
assert response == mock_response
mock_get.assert_called_once_with(url)
|
8febff1065c67db1599f6b9bccd27f843981dd95 | main/management/commands/poll_rss.py | main/management/commands/poll_rss.py |
from datetime import datetime
from time import mktime
from django.core.management.base import BaseCommand
from django.utils.timezone import get_default_timezone, make_aware
from feedparser import parse
from ...models import Link
class Command(BaseCommand):
def handle(self, *urls, **options):
for url in urls:
for entry in parse(url).entries:
link = self.entry_to_link_dict(entry)
try:
Link.objects.get(link=link["link"])
except Link.DoesNotExist:
Link.objects.create(**link)
def entry_to_link_dict(self, entry):
link = {"title": entry.title, "user_id": 1, "gen_description": False}
try:
link["link"] = entry.summary.split('href="')[2].split('"')[0]
except IndexError:
link["link"] = entry.link
try:
publish_date = entry.published_parsed
except AttributeError:
pass
else:
publish_date = datetime.fromtimestamp(mktime(publish_date))
publish_date = make_aware(publish_date, get_default_timezone())
link["publish_date"] = publish_date
return link
|
from datetime import datetime
from time import mktime
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.utils.timezone import get_default_timezone, make_aware
from feedparser import parse
from mezzanine.generic.models import Rating
from ...models import Link
class Command(BaseCommand):
def handle(self, *urls, **options):
try:
user_id = User.objects.filter(is_superuser=1)[0].id
except IndexError:
return
for url in urls:
for entry in parse(url).entries:
link = self.entry_to_link_dict(entry)
link["user_id"] = user_id
try:
obj = Link.objects.get(link=link["link"])
except Link.DoesNotExist:
obj = Link.objects.create(**link)
obj.rating.add(Rating(value=1, user_id=user_id))
def entry_to_link_dict(self, entry):
link = {"title": entry.title, "gen_description": False}
try:
link["link"] = entry.summary.split('href="')[2].split('"')[0]
except IndexError:
link["link"] = entry.link
try:
publish_date = entry.published_parsed
except AttributeError:
pass
else:
publish_date = datetime.fromtimestamp(mktime(publish_date))
publish_date = make_aware(publish_date, get_default_timezone())
link["publish_date"] = publish_date
return link
| Add an initial rating to new links in the rss seeder. | Add an initial rating to new links in the rss seeder.
| Python | bsd-2-clause | tsybulevskij/drum,j00bar/drum,j00bar/drum,yodermk/drum,skybluejamie/wikipeace,yodermk/drum,renyi/drum,sing1ee/drum,yodermk/drum,renyi/drum,tsybulevskij/drum,stephenmcd/drum,sing1ee/drum,j00bar/drum,stephenmcd/drum,abendig/drum,tsybulevskij/drum,renyi/drum,skybluejamie/wikipeace,abendig/drum,sing1ee/drum,abendig/drum |
from datetime import datetime
from time import mktime
from django.core.management.base import BaseCommand
from django.utils.timezone import get_default_timezone, make_aware
from feedparser import parse
from ...models import Link
class Command(BaseCommand):
def handle(self, *urls, **options):
for url in urls:
for entry in parse(url).entries:
link = self.entry_to_link_dict(entry)
try:
Link.objects.get(link=link["link"])
except Link.DoesNotExist:
Link.objects.create(**link)
def entry_to_link_dict(self, entry):
link = {"title": entry.title, "user_id": 1, "gen_description": False}
try:
link["link"] = entry.summary.split('href="')[2].split('"')[0]
except IndexError:
link["link"] = entry.link
try:
publish_date = entry.published_parsed
except AttributeError:
pass
else:
publish_date = datetime.fromtimestamp(mktime(publish_date))
publish_date = make_aware(publish_date, get_default_timezone())
link["publish_date"] = publish_date
return link
Add an initial rating to new links in the rss seeder. |
from datetime import datetime
from time import mktime
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.utils.timezone import get_default_timezone, make_aware
from feedparser import parse
from mezzanine.generic.models import Rating
from ...models import Link
class Command(BaseCommand):
def handle(self, *urls, **options):
try:
user_id = User.objects.filter(is_superuser=1)[0].id
except IndexError:
return
for url in urls:
for entry in parse(url).entries:
link = self.entry_to_link_dict(entry)
link["user_id"] = user_id
try:
obj = Link.objects.get(link=link["link"])
except Link.DoesNotExist:
obj = Link.objects.create(**link)
obj.rating.add(Rating(value=1, user_id=user_id))
def entry_to_link_dict(self, entry):
link = {"title": entry.title, "gen_description": False}
try:
link["link"] = entry.summary.split('href="')[2].split('"')[0]
except IndexError:
link["link"] = entry.link
try:
publish_date = entry.published_parsed
except AttributeError:
pass
else:
publish_date = datetime.fromtimestamp(mktime(publish_date))
publish_date = make_aware(publish_date, get_default_timezone())
link["publish_date"] = publish_date
return link
| <commit_before>
from datetime import datetime
from time import mktime
from django.core.management.base import BaseCommand
from django.utils.timezone import get_default_timezone, make_aware
from feedparser import parse
from ...models import Link
class Command(BaseCommand):
def handle(self, *urls, **options):
for url in urls:
for entry in parse(url).entries:
link = self.entry_to_link_dict(entry)
try:
Link.objects.get(link=link["link"])
except Link.DoesNotExist:
Link.objects.create(**link)
def entry_to_link_dict(self, entry):
link = {"title": entry.title, "user_id": 1, "gen_description": False}
try:
link["link"] = entry.summary.split('href="')[2].split('"')[0]
except IndexError:
link["link"] = entry.link
try:
publish_date = entry.published_parsed
except AttributeError:
pass
else:
publish_date = datetime.fromtimestamp(mktime(publish_date))
publish_date = make_aware(publish_date, get_default_timezone())
link["publish_date"] = publish_date
return link
<commit_msg>Add an initial rating to new links in the rss seeder.<commit_after> |
from datetime import datetime
from time import mktime
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.utils.timezone import get_default_timezone, make_aware
from feedparser import parse
from mezzanine.generic.models import Rating
from ...models import Link
class Command(BaseCommand):
def handle(self, *urls, **options):
try:
user_id = User.objects.filter(is_superuser=1)[0].id
except IndexError:
return
for url in urls:
for entry in parse(url).entries:
link = self.entry_to_link_dict(entry)
link["user_id"] = user_id
try:
obj = Link.objects.get(link=link["link"])
except Link.DoesNotExist:
obj = Link.objects.create(**link)
obj.rating.add(Rating(value=1, user_id=user_id))
def entry_to_link_dict(self, entry):
link = {"title": entry.title, "gen_description": False}
try:
link["link"] = entry.summary.split('href="')[2].split('"')[0]
except IndexError:
link["link"] = entry.link
try:
publish_date = entry.published_parsed
except AttributeError:
pass
else:
publish_date = datetime.fromtimestamp(mktime(publish_date))
publish_date = make_aware(publish_date, get_default_timezone())
link["publish_date"] = publish_date
return link
|
from datetime import datetime
from time import mktime
from django.core.management.base import BaseCommand
from django.utils.timezone import get_default_timezone, make_aware
from feedparser import parse
from ...models import Link
class Command(BaseCommand):
def handle(self, *urls, **options):
for url in urls:
for entry in parse(url).entries:
link = self.entry_to_link_dict(entry)
try:
Link.objects.get(link=link["link"])
except Link.DoesNotExist:
Link.objects.create(**link)
def entry_to_link_dict(self, entry):
link = {"title": entry.title, "user_id": 1, "gen_description": False}
try:
link["link"] = entry.summary.split('href="')[2].split('"')[0]
except IndexError:
link["link"] = entry.link
try:
publish_date = entry.published_parsed
except AttributeError:
pass
else:
publish_date = datetime.fromtimestamp(mktime(publish_date))
publish_date = make_aware(publish_date, get_default_timezone())
link["publish_date"] = publish_date
return link
Add an initial rating to new links in the rss seeder.
from datetime import datetime
from time import mktime
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.utils.timezone import get_default_timezone, make_aware
from feedparser import parse
from mezzanine.generic.models import Rating
from ...models import Link
class Command(BaseCommand):
def handle(self, *urls, **options):
try:
user_id = User.objects.filter(is_superuser=1)[0].id
except IndexError:
return
for url in urls:
for entry in parse(url).entries:
link = self.entry_to_link_dict(entry)
link["user_id"] = user_id
try:
obj = Link.objects.get(link=link["link"])
except Link.DoesNotExist:
obj = Link.objects.create(**link)
obj.rating.add(Rating(value=1, user_id=user_id))
def entry_to_link_dict(self, entry):
link = {"title": entry.title, "gen_description": False}
try:
link["link"] = entry.summary.split('href="')[2].split('"')[0]
except IndexError:
link["link"] = entry.link
try:
publish_date = entry.published_parsed
except AttributeError:
pass
else:
publish_date = datetime.fromtimestamp(mktime(publish_date))
publish_date = make_aware(publish_date, get_default_timezone())
link["publish_date"] = publish_date
return link
| <commit_before>
from datetime import datetime
from time import mktime
from django.core.management.base import BaseCommand
from django.utils.timezone import get_default_timezone, make_aware
from feedparser import parse
from ...models import Link
class Command(BaseCommand):
def handle(self, *urls, **options):
for url in urls:
for entry in parse(url).entries:
link = self.entry_to_link_dict(entry)
try:
Link.objects.get(link=link["link"])
except Link.DoesNotExist:
Link.objects.create(**link)
def entry_to_link_dict(self, entry):
link = {"title": entry.title, "user_id": 1, "gen_description": False}
try:
link["link"] = entry.summary.split('href="')[2].split('"')[0]
except IndexError:
link["link"] = entry.link
try:
publish_date = entry.published_parsed
except AttributeError:
pass
else:
publish_date = datetime.fromtimestamp(mktime(publish_date))
publish_date = make_aware(publish_date, get_default_timezone())
link["publish_date"] = publish_date
return link
<commit_msg>Add an initial rating to new links in the rss seeder.<commit_after>
from datetime import datetime
from time import mktime
from django.contrib.auth.models import User
from django.core.management.base import BaseCommand
from django.utils.timezone import get_default_timezone, make_aware
from feedparser import parse
from mezzanine.generic.models import Rating
from ...models import Link
class Command(BaseCommand):
def handle(self, *urls, **options):
try:
user_id = User.objects.filter(is_superuser=1)[0].id
except IndexError:
return
for url in urls:
for entry in parse(url).entries:
link = self.entry_to_link_dict(entry)
link["user_id"] = user_id
try:
obj = Link.objects.get(link=link["link"])
except Link.DoesNotExist:
obj = Link.objects.create(**link)
obj.rating.add(Rating(value=1, user_id=user_id))
def entry_to_link_dict(self, entry):
link = {"title": entry.title, "gen_description": False}
try:
link["link"] = entry.summary.split('href="')[2].split('"')[0]
except IndexError:
link["link"] = entry.link
try:
publish_date = entry.published_parsed
except AttributeError:
pass
else:
publish_date = datetime.fromtimestamp(mktime(publish_date))
publish_date = make_aware(publish_date, get_default_timezone())
link["publish_date"] = publish_date
return link
|
9499721aa6a3ae6c01b94594f6a9e595560c2c7e | geocoder/opencage_reverse.py | geocoder/opencage_reverse.py | #!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.opencage import OpenCageResult, OpenCageQuery
from geocoder.location import Location
class OpenCageReverseResult(OpenCageResult):
@property
def ok(self):
return bool(self.address)
class OpenCageReverse(OpenCageQuery):
"""
OpenCage Geocoding Services
===========================
OpenCage Geocoder simple, easy, and open geocoding for the entire world
Our API combines multiple geocoding systems in the background.
Each is optimized for different parts of the world and types of requests.
We aggregate the best results from open data sources and algorithms so you don't have to.
Each is optimized for different parts of the world and types of requests.
API Reference
-------------
https://geocoder.opencagedata.com/api
"""
provider = 'opencage'
method = 'reverse'
_URL = 'http://api.opencagedata.com/geocode/v1/json'
_RESULT_CLASS = OpenCageReverseResult
def _build_params(self, location, provider_key, **kwargs):
location = Location(location)
return {
'query': self.location,
'key': provider_key,
}
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = OpenCageReverse([45.4049053, -75.7077965])
g.debug()
| #!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.opencage import OpenCageResult, OpenCageQuery
from geocoder.location import Location
class OpenCageReverseResult(OpenCageResult):
@property
def ok(self):
return bool(self.address)
class OpenCageReverse(OpenCageQuery):
"""
OpenCage Geocoding Services
===========================
OpenCage Geocoder simple, easy, and open geocoding for the entire world
Our API combines multiple geocoding systems in the background.
Each is optimized for different parts of the world and types of requests.
We aggregate the best results from open data sources and algorithms so you don't have to.
Each is optimized for different parts of the world and types of requests.
API Reference
-------------
https://geocoder.opencagedata.com/api
"""
provider = 'opencage'
method = 'reverse'
_URL = 'http://api.opencagedata.com/geocode/v1/json'
_RESULT_CLASS = OpenCageReverseResult
def _build_params(self, location, provider_key, **kwargs):
location = Location(location)
return {
'query': location,
'key': provider_key,
}
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = OpenCageReverse([45.4049053, -75.7077965])
g.debug()
| Fix opencage reverse issue where it was using self.lcoation instead of just location | Fix opencage reverse issue where it was using self.lcoation instead of just location
| Python | mit | DenisCarriere/geocoder | #!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.opencage import OpenCageResult, OpenCageQuery
from geocoder.location import Location
class OpenCageReverseResult(OpenCageResult):
@property
def ok(self):
return bool(self.address)
class OpenCageReverse(OpenCageQuery):
"""
OpenCage Geocoding Services
===========================
OpenCage Geocoder simple, easy, and open geocoding for the entire world
Our API combines multiple geocoding systems in the background.
Each is optimized for different parts of the world and types of requests.
We aggregate the best results from open data sources and algorithms so you don't have to.
Each is optimized for different parts of the world and types of requests.
API Reference
-------------
https://geocoder.opencagedata.com/api
"""
provider = 'opencage'
method = 'reverse'
_URL = 'http://api.opencagedata.com/geocode/v1/json'
_RESULT_CLASS = OpenCageReverseResult
def _build_params(self, location, provider_key, **kwargs):
location = Location(location)
return {
'query': self.location,
'key': provider_key,
}
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = OpenCageReverse([45.4049053, -75.7077965])
g.debug()
Fix opencage reverse issue where it was using self.lcoation instead of just location | #!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.opencage import OpenCageResult, OpenCageQuery
from geocoder.location import Location
class OpenCageReverseResult(OpenCageResult):
@property
def ok(self):
return bool(self.address)
class OpenCageReverse(OpenCageQuery):
"""
OpenCage Geocoding Services
===========================
OpenCage Geocoder simple, easy, and open geocoding for the entire world
Our API combines multiple geocoding systems in the background.
Each is optimized for different parts of the world and types of requests.
We aggregate the best results from open data sources and algorithms so you don't have to.
Each is optimized for different parts of the world and types of requests.
API Reference
-------------
https://geocoder.opencagedata.com/api
"""
provider = 'opencage'
method = 'reverse'
_URL = 'http://api.opencagedata.com/geocode/v1/json'
_RESULT_CLASS = OpenCageReverseResult
def _build_params(self, location, provider_key, **kwargs):
location = Location(location)
return {
'query': location,
'key': provider_key,
}
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = OpenCageReverse([45.4049053, -75.7077965])
g.debug()
| <commit_before>#!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.opencage import OpenCageResult, OpenCageQuery
from geocoder.location import Location
class OpenCageReverseResult(OpenCageResult):
@property
def ok(self):
return bool(self.address)
class OpenCageReverse(OpenCageQuery):
"""
OpenCage Geocoding Services
===========================
OpenCage Geocoder simple, easy, and open geocoding for the entire world
Our API combines multiple geocoding systems in the background.
Each is optimized for different parts of the world and types of requests.
We aggregate the best results from open data sources and algorithms so you don't have to.
Each is optimized for different parts of the world and types of requests.
API Reference
-------------
https://geocoder.opencagedata.com/api
"""
provider = 'opencage'
method = 'reverse'
_URL = 'http://api.opencagedata.com/geocode/v1/json'
_RESULT_CLASS = OpenCageReverseResult
def _build_params(self, location, provider_key, **kwargs):
location = Location(location)
return {
'query': self.location,
'key': provider_key,
}
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = OpenCageReverse([45.4049053, -75.7077965])
g.debug()
<commit_msg>Fix opencage reverse issue where it was using self.lcoation instead of just location<commit_after> | #!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.opencage import OpenCageResult, OpenCageQuery
from geocoder.location import Location
class OpenCageReverseResult(OpenCageResult):
@property
def ok(self):
return bool(self.address)
class OpenCageReverse(OpenCageQuery):
"""
OpenCage Geocoding Services
===========================
OpenCage Geocoder simple, easy, and open geocoding for the entire world
Our API combines multiple geocoding systems in the background.
Each is optimized for different parts of the world and types of requests.
We aggregate the best results from open data sources and algorithms so you don't have to.
Each is optimized for different parts of the world and types of requests.
API Reference
-------------
https://geocoder.opencagedata.com/api
"""
provider = 'opencage'
method = 'reverse'
_URL = 'http://api.opencagedata.com/geocode/v1/json'
_RESULT_CLASS = OpenCageReverseResult
def _build_params(self, location, provider_key, **kwargs):
location = Location(location)
return {
'query': location,
'key': provider_key,
}
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = OpenCageReverse([45.4049053, -75.7077965])
g.debug()
| #!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.opencage import OpenCageResult, OpenCageQuery
from geocoder.location import Location
class OpenCageReverseResult(OpenCageResult):
@property
def ok(self):
return bool(self.address)
class OpenCageReverse(OpenCageQuery):
"""
OpenCage Geocoding Services
===========================
OpenCage Geocoder simple, easy, and open geocoding for the entire world
Our API combines multiple geocoding systems in the background.
Each is optimized for different parts of the world and types of requests.
We aggregate the best results from open data sources and algorithms so you don't have to.
Each is optimized for different parts of the world and types of requests.
API Reference
-------------
https://geocoder.opencagedata.com/api
"""
provider = 'opencage'
method = 'reverse'
_URL = 'http://api.opencagedata.com/geocode/v1/json'
_RESULT_CLASS = OpenCageReverseResult
def _build_params(self, location, provider_key, **kwargs):
location = Location(location)
return {
'query': self.location,
'key': provider_key,
}
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = OpenCageReverse([45.4049053, -75.7077965])
g.debug()
Fix opencage reverse issue where it was using self.lcoation instead of just location#!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.opencage import OpenCageResult, OpenCageQuery
from geocoder.location import Location
class OpenCageReverseResult(OpenCageResult):
@property
def ok(self):
return bool(self.address)
class OpenCageReverse(OpenCageQuery):
"""
OpenCage Geocoding Services
===========================
OpenCage Geocoder simple, easy, and open geocoding for the entire world
Our API combines multiple geocoding systems in the background.
Each is optimized for different parts of the world and types of requests.
We aggregate the best results from open data sources and algorithms so you don't have to.
Each is optimized for different parts of the world and types of requests.
API Reference
-------------
https://geocoder.opencagedata.com/api
"""
provider = 'opencage'
method = 'reverse'
_URL = 'http://api.opencagedata.com/geocode/v1/json'
_RESULT_CLASS = OpenCageReverseResult
def _build_params(self, location, provider_key, **kwargs):
location = Location(location)
return {
'query': location,
'key': provider_key,
}
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = OpenCageReverse([45.4049053, -75.7077965])
g.debug()
| <commit_before>#!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.opencage import OpenCageResult, OpenCageQuery
from geocoder.location import Location
class OpenCageReverseResult(OpenCageResult):
@property
def ok(self):
return bool(self.address)
class OpenCageReverse(OpenCageQuery):
"""
OpenCage Geocoding Services
===========================
OpenCage Geocoder simple, easy, and open geocoding for the entire world
Our API combines multiple geocoding systems in the background.
Each is optimized for different parts of the world and types of requests.
We aggregate the best results from open data sources and algorithms so you don't have to.
Each is optimized for different parts of the world and types of requests.
API Reference
-------------
https://geocoder.opencagedata.com/api
"""
provider = 'opencage'
method = 'reverse'
_URL = 'http://api.opencagedata.com/geocode/v1/json'
_RESULT_CLASS = OpenCageReverseResult
def _build_params(self, location, provider_key, **kwargs):
location = Location(location)
return {
'query': self.location,
'key': provider_key,
}
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = OpenCageReverse([45.4049053, -75.7077965])
g.debug()
<commit_msg>Fix opencage reverse issue where it was using self.lcoation instead of just location<commit_after>#!/usr/bin/python
# coding: utf8
from __future__ import absolute_import
import logging
from geocoder.opencage import OpenCageResult, OpenCageQuery
from geocoder.location import Location
class OpenCageReverseResult(OpenCageResult):
@property
def ok(self):
return bool(self.address)
class OpenCageReverse(OpenCageQuery):
"""
OpenCage Geocoding Services
===========================
OpenCage Geocoder simple, easy, and open geocoding for the entire world
Our API combines multiple geocoding systems in the background.
Each is optimized for different parts of the world and types of requests.
We aggregate the best results from open data sources and algorithms so you don't have to.
Each is optimized for different parts of the world and types of requests.
API Reference
-------------
https://geocoder.opencagedata.com/api
"""
provider = 'opencage'
method = 'reverse'
_URL = 'http://api.opencagedata.com/geocode/v1/json'
_RESULT_CLASS = OpenCageReverseResult
def _build_params(self, location, provider_key, **kwargs):
location = Location(location)
return {
'query': location,
'key': provider_key,
}
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
g = OpenCageReverse([45.4049053, -75.7077965])
g.debug()
|
a3d404a7f7352fd85a821b445ebeb8d7ca9b21c9 | sigma_core/serializers/group.py | sigma_core/serializers/group.py | from rest_framework import serializers
from sigma_core.models.group import Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
| from rest_framework import serializers
from sigma_core.models.group import Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
visibility = serializers.SerializerMethodField()
membership_policy = serializers.SerializerMethodField()
validation_policy = serializers.SerializerMethodField()
type = serializers.SerializerMethodField()
def get_visibility(self, obj):
return obj.get_visibility_display()
def get_membership_policy(self, obj):
return obj.get_membership_policy_display()
def get_validation_policy(self, obj):
return obj.get_validation_policy_display()
def get_type(self, obj):
return obj.get_type_display()
| Change GroupSerializer to display attributes as strings | Change GroupSerializer to display attributes as strings
| Python | agpl-3.0 | ProjetSigma/backend,ProjetSigma/backend | from rest_framework import serializers
from sigma_core.models.group import Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
Change GroupSerializer to display attributes as strings | from rest_framework import serializers
from sigma_core.models.group import Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
visibility = serializers.SerializerMethodField()
membership_policy = serializers.SerializerMethodField()
validation_policy = serializers.SerializerMethodField()
type = serializers.SerializerMethodField()
def get_visibility(self, obj):
return obj.get_visibility_display()
def get_membership_policy(self, obj):
return obj.get_membership_policy_display()
def get_validation_policy(self, obj):
return obj.get_validation_policy_display()
def get_type(self, obj):
return obj.get_type_display()
| <commit_before>from rest_framework import serializers
from sigma_core.models.group import Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
<commit_msg>Change GroupSerializer to display attributes as strings<commit_after> | from rest_framework import serializers
from sigma_core.models.group import Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
visibility = serializers.SerializerMethodField()
membership_policy = serializers.SerializerMethodField()
validation_policy = serializers.SerializerMethodField()
type = serializers.SerializerMethodField()
def get_visibility(self, obj):
return obj.get_visibility_display()
def get_membership_policy(self, obj):
return obj.get_membership_policy_display()
def get_validation_policy(self, obj):
return obj.get_validation_policy_display()
def get_type(self, obj):
return obj.get_type_display()
| from rest_framework import serializers
from sigma_core.models.group import Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
Change GroupSerializer to display attributes as stringsfrom rest_framework import serializers
from sigma_core.models.group import Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
visibility = serializers.SerializerMethodField()
membership_policy = serializers.SerializerMethodField()
validation_policy = serializers.SerializerMethodField()
type = serializers.SerializerMethodField()
def get_visibility(self, obj):
return obj.get_visibility_display()
def get_membership_policy(self, obj):
return obj.get_membership_policy_display()
def get_validation_policy(self, obj):
return obj.get_validation_policy_display()
def get_type(self, obj):
return obj.get_type_display()
| <commit_before>from rest_framework import serializers
from sigma_core.models.group import Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
<commit_msg>Change GroupSerializer to display attributes as strings<commit_after>from rest_framework import serializers
from sigma_core.models.group import Group
class GroupSerializer(serializers.ModelSerializer):
class Meta:
model = Group
visibility = serializers.SerializerMethodField()
membership_policy = serializers.SerializerMethodField()
validation_policy = serializers.SerializerMethodField()
type = serializers.SerializerMethodField()
def get_visibility(self, obj):
return obj.get_visibility_display()
def get_membership_policy(self, obj):
return obj.get_membership_policy_display()
def get_validation_policy(self, obj):
return obj.get_validation_policy_display()
def get_type(self, obj):
return obj.get_type_display()
|
ab25537b67b14a8574028280c1e16637faa8037c | gunicorn/workers/gtornado.py | gunicorn/workers/gtornado.py | # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import os
import sys
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from gunicorn.workers.base import Worker
from gunicorn import __version__ as gversion
def patch_request_handler():
web = sys.modules.pop("tornado.web")
old_clear = web.RequestHandler.clear
def clear(self):
old_clear(self)
self._headers["Server"] += " (Gunicorn/%s)" % gversion
web.RequestHandler.clear = clear
sys.modules["tornado.web"] = web
class TornadoWorker(Worker):
@classmethod
def setup(cls):
patch_request_handler()
def watchdog(self):
self.notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s" % self)
self.ioloop.stop()
def run(self):
self.socket.setblocking(0)
self.ioloop = IOLoop.instance()
PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
server = HTTPServer(self.app, io_loop=self.ioloop)
server._socket = self.socket
server.start(num_processes=1)
self.ioloop.start()
| # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import os
import sys
import tornado.web
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from gunicorn.workers.base import Worker
from gunicorn import __version__ as gversion
def patch_request_handler():
web = sys.modules.pop("tornado.web")
old_clear = web.RequestHandler.clear
def clear(self):
old_clear(self)
self._headers["Server"] += " (Gunicorn/%s)" % gversion
web.RequestHandler.clear = clear
sys.modules["tornado.web"] = web
class TornadoWorker(Worker):
@classmethod
def setup(cls):
patch_request_handler()
def watchdog(self):
self.notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s" % self)
self.ioloop.stop()
def run(self):
self.socket.setblocking(0)
self.ioloop = IOLoop.instance()
PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
server = HTTPServer(self.app, io_loop=self.ioloop)
server._socket = self.socket
server.start(num_processes=1)
self.ioloop.start()
| Fix assumption that tornado.web was imported. | Fix assumption that tornado.web was imported.
| Python | mit | mvaled/gunicorn,wong2/gunicorn,prezi/gunicorn,elelianghh/gunicorn,wong2/gunicorn,WSDC-NITWarangal/gunicorn,MrKiven/gunicorn,mvaled/gunicorn,alex/gunicorn,1stvamp/gunicorn,z-fork/gunicorn,1stvamp/gunicorn,ephes/gunicorn,prezi/gunicorn,gtrdotmcs/gunicorn,tejasmanohar/gunicorn,urbaniak/gunicorn,gtrdotmcs/gunicorn,prezi/gunicorn,harrisonfeng/gunicorn,zhoucen/gunicorn,alex/gunicorn,urbaniak/gunicorn,ccl0326/gunicorn,ccl0326/gunicorn,jamesblunt/gunicorn,alex/gunicorn,pschanely/gunicorn,pschanely/gunicorn,1stvamp/gunicorn,jamesblunt/gunicorn,jamesblunt/gunicorn,GitHublong/gunicorn,beni55/gunicorn,malept/gunicorn,pschanely/gunicorn,gtrdotmcs/gunicorn,ammaraskar/gunicorn,malept/gunicorn,zhoucen/gunicorn,tempbottle/gunicorn,zhoucen/gunicorn,malept/gunicorn,keakon/gunicorn,ccl0326/gunicorn,urbaniak/gunicorn,wong2/gunicorn,mvaled/gunicorn | # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import os
import sys
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from gunicorn.workers.base import Worker
from gunicorn import __version__ as gversion
def patch_request_handler():
web = sys.modules.pop("tornado.web")
old_clear = web.RequestHandler.clear
def clear(self):
old_clear(self)
self._headers["Server"] += " (Gunicorn/%s)" % gversion
web.RequestHandler.clear = clear
sys.modules["tornado.web"] = web
class TornadoWorker(Worker):
@classmethod
def setup(cls):
patch_request_handler()
def watchdog(self):
self.notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s" % self)
self.ioloop.stop()
def run(self):
self.socket.setblocking(0)
self.ioloop = IOLoop.instance()
PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
server = HTTPServer(self.app, io_loop=self.ioloop)
server._socket = self.socket
server.start(num_processes=1)
self.ioloop.start()
Fix assumption that tornado.web was imported. | # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import os
import sys
import tornado.web
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from gunicorn.workers.base import Worker
from gunicorn import __version__ as gversion
def patch_request_handler():
web = sys.modules.pop("tornado.web")
old_clear = web.RequestHandler.clear
def clear(self):
old_clear(self)
self._headers["Server"] += " (Gunicorn/%s)" % gversion
web.RequestHandler.clear = clear
sys.modules["tornado.web"] = web
class TornadoWorker(Worker):
@classmethod
def setup(cls):
patch_request_handler()
def watchdog(self):
self.notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s" % self)
self.ioloop.stop()
def run(self):
self.socket.setblocking(0)
self.ioloop = IOLoop.instance()
PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
server = HTTPServer(self.app, io_loop=self.ioloop)
server._socket = self.socket
server.start(num_processes=1)
self.ioloop.start()
| <commit_before># -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import os
import sys
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from gunicorn.workers.base import Worker
from gunicorn import __version__ as gversion
def patch_request_handler():
web = sys.modules.pop("tornado.web")
old_clear = web.RequestHandler.clear
def clear(self):
old_clear(self)
self._headers["Server"] += " (Gunicorn/%s)" % gversion
web.RequestHandler.clear = clear
sys.modules["tornado.web"] = web
class TornadoWorker(Worker):
@classmethod
def setup(cls):
patch_request_handler()
def watchdog(self):
self.notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s" % self)
self.ioloop.stop()
def run(self):
self.socket.setblocking(0)
self.ioloop = IOLoop.instance()
PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
server = HTTPServer(self.app, io_loop=self.ioloop)
server._socket = self.socket
server.start(num_processes=1)
self.ioloop.start()
<commit_msg>Fix assumption that tornado.web was imported.<commit_after> | # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import os
import sys
import tornado.web
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from gunicorn.workers.base import Worker
from gunicorn import __version__ as gversion
def patch_request_handler():
web = sys.modules.pop("tornado.web")
old_clear = web.RequestHandler.clear
def clear(self):
old_clear(self)
self._headers["Server"] += " (Gunicorn/%s)" % gversion
web.RequestHandler.clear = clear
sys.modules["tornado.web"] = web
class TornadoWorker(Worker):
@classmethod
def setup(cls):
patch_request_handler()
def watchdog(self):
self.notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s" % self)
self.ioloop.stop()
def run(self):
self.socket.setblocking(0)
self.ioloop = IOLoop.instance()
PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
server = HTTPServer(self.app, io_loop=self.ioloop)
server._socket = self.socket
server.start(num_processes=1)
self.ioloop.start()
| # -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import os
import sys
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from gunicorn.workers.base import Worker
from gunicorn import __version__ as gversion
def patch_request_handler():
web = sys.modules.pop("tornado.web")
old_clear = web.RequestHandler.clear
def clear(self):
old_clear(self)
self._headers["Server"] += " (Gunicorn/%s)" % gversion
web.RequestHandler.clear = clear
sys.modules["tornado.web"] = web
class TornadoWorker(Worker):
@classmethod
def setup(cls):
patch_request_handler()
def watchdog(self):
self.notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s" % self)
self.ioloop.stop()
def run(self):
self.socket.setblocking(0)
self.ioloop = IOLoop.instance()
PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
server = HTTPServer(self.app, io_loop=self.ioloop)
server._socket = self.socket
server.start(num_processes=1)
self.ioloop.start()
Fix assumption that tornado.web was imported.# -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import os
import sys
import tornado.web
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from gunicorn.workers.base import Worker
from gunicorn import __version__ as gversion
def patch_request_handler():
web = sys.modules.pop("tornado.web")
old_clear = web.RequestHandler.clear
def clear(self):
old_clear(self)
self._headers["Server"] += " (Gunicorn/%s)" % gversion
web.RequestHandler.clear = clear
sys.modules["tornado.web"] = web
class TornadoWorker(Worker):
@classmethod
def setup(cls):
patch_request_handler()
def watchdog(self):
self.notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s" % self)
self.ioloop.stop()
def run(self):
self.socket.setblocking(0)
self.ioloop = IOLoop.instance()
PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
server = HTTPServer(self.app, io_loop=self.ioloop)
server._socket = self.socket
server.start(num_processes=1)
self.ioloop.start()
| <commit_before># -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import os
import sys
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from gunicorn.workers.base import Worker
from gunicorn import __version__ as gversion
def patch_request_handler():
web = sys.modules.pop("tornado.web")
old_clear = web.RequestHandler.clear
def clear(self):
old_clear(self)
self._headers["Server"] += " (Gunicorn/%s)" % gversion
web.RequestHandler.clear = clear
sys.modules["tornado.web"] = web
class TornadoWorker(Worker):
@classmethod
def setup(cls):
patch_request_handler()
def watchdog(self):
self.notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s" % self)
self.ioloop.stop()
def run(self):
self.socket.setblocking(0)
self.ioloop = IOLoop.instance()
PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
server = HTTPServer(self.app, io_loop=self.ioloop)
server._socket = self.socket
server.start(num_processes=1)
self.ioloop.start()
<commit_msg>Fix assumption that tornado.web was imported.<commit_after># -*- coding: utf-8 -
#
# This file is part of gunicorn released under the MIT license.
# See the NOTICE for more information.
import os
import sys
import tornado.web
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop, PeriodicCallback
from gunicorn.workers.base import Worker
from gunicorn import __version__ as gversion
def patch_request_handler():
web = sys.modules.pop("tornado.web")
old_clear = web.RequestHandler.clear
def clear(self):
old_clear(self)
self._headers["Server"] += " (Gunicorn/%s)" % gversion
web.RequestHandler.clear = clear
sys.modules["tornado.web"] = web
class TornadoWorker(Worker):
@classmethod
def setup(cls):
patch_request_handler()
def watchdog(self):
self.notify()
if self.ppid != os.getppid():
self.log.info("Parent changed, shutting down: %s" % self)
self.ioloop.stop()
def run(self):
self.socket.setblocking(0)
self.ioloop = IOLoop.instance()
PeriodicCallback(self.watchdog, 1000, io_loop=self.ioloop).start()
server = HTTPServer(self.app, io_loop=self.ioloop)
server._socket = self.socket
server.start(num_processes=1)
self.ioloop.start()
|
dd5774c30f950c8a52b977a5529300e8edce4bc7 | migrations/versions/2c240cb3edd1_.py | migrations/versions/2c240cb3edd1_.py | """Add movie metadata (imdb rating, number of votes, metascore) and relevancy
Revision ID: 2c240cb3edd1
Revises: 588336e02ca
Create Date: 2014-02-09 13:46:18.630000
"""
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, default=0))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, default=0))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
| """Add movie metadata (imdb rating, number of votes, metascore) and relevancy
Revision ID: 2c240cb3edd1
Revises: 588336e02ca
Create Date: 2014-02-09 13:46:18.630000
"""
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, server_default='0'))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
| Fix proper default values for metadata migration | Fix proper default values for metadata migration
| Python | mit | streamr/marvin,streamr/marvin,streamr/marvin | """Add movie metadata (imdb rating, number of votes, metascore) and relevancy
Revision ID: 2c240cb3edd1
Revises: 588336e02ca
Create Date: 2014-02-09 13:46:18.630000
"""
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, default=0))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, default=0))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
Fix proper default values for metadata migration | """Add movie metadata (imdb rating, number of votes, metascore) and relevancy
Revision ID: 2c240cb3edd1
Revises: 588336e02ca
Create Date: 2014-02-09 13:46:18.630000
"""
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, server_default='0'))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
| <commit_before>"""Add movie metadata (imdb rating, number of votes, metascore) and relevancy
Revision ID: 2c240cb3edd1
Revises: 588336e02ca
Create Date: 2014-02-09 13:46:18.630000
"""
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, default=0))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, default=0))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
<commit_msg>Fix proper default values for metadata migration<commit_after> | """Add movie metadata (imdb rating, number of votes, metascore) and relevancy
Revision ID: 2c240cb3edd1
Revises: 588336e02ca
Create Date: 2014-02-09 13:46:18.630000
"""
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, server_default='0'))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
| """Add movie metadata (imdb rating, number of votes, metascore) and relevancy
Revision ID: 2c240cb3edd1
Revises: 588336e02ca
Create Date: 2014-02-09 13:46:18.630000
"""
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, default=0))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, default=0))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
Fix proper default values for metadata migration"""Add movie metadata (imdb rating, number of votes, metascore) and relevancy
Revision ID: 2c240cb3edd1
Revises: 588336e02ca
Create Date: 2014-02-09 13:46:18.630000
"""
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, server_default='0'))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
| <commit_before>"""Add movie metadata (imdb rating, number of votes, metascore) and relevancy
Revision ID: 2c240cb3edd1
Revises: 588336e02ca
Create Date: 2014-02-09 13:46:18.630000
"""
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, default=0))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, default=0))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, default=0))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
<commit_msg>Fix proper default values for metadata migration<commit_after>"""Add movie metadata (imdb rating, number of votes, metascore) and relevancy
Revision ID: 2c240cb3edd1
Revises: 588336e02ca
Create Date: 2014-02-09 13:46:18.630000
"""
# revision identifiers, used by Alembic.
revision = '2c240cb3edd1'
down_revision = '588336e02ca'
from alembic import op
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('movie', sa.Column('imdb_rating', sa.Float(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('metascore', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('number_of_imdb_votes', sa.Integer(), nullable=False, server_default='0'))
op.add_column('movie', sa.Column('relevancy', sa.Float(), nullable=False, server_default='0'))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('movie', 'relevancy')
op.drop_column('movie', 'number_of_imdb_votes')
op.drop_column('movie', 'metascore')
op.drop_column('movie', 'imdb_rating')
### end Alembic commands ###
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.